From db5a19e9cc9c86556b89741bbf268e8df1bde7da Mon Sep 17 00:00:00 2001 From: meninjar Date: Mon, 3 Nov 2025 05:56:41 +0000 Subject: [PATCH] Perbaikan Generate code --- README.md | 7 +- internal/config/config.go | 16 +- internal/database/database.go | 20 +- internal/handlers/pasien/pasien.go | 2149 ++++++++ internal/handlers/retribusi/retribusi.go | 74 +- internal/models/pasien/pasien.go | 397 ++ internal/routes/v1/routes.go | 15 + internal/utils/query/builder.go | 18 + .../{exemple.go.exemple => exemple.go.txt} | 0 internal/utils/query/exemple.txt | 918 ++++ tools/general/generate-handler.go | 4771 +++++++++++------ tools/general/services-config.yaml | 459 +- 12 files changed, 7018 insertions(+), 1826 deletions(-) create mode 100644 internal/handlers/pasien/pasien.go create mode 100644 internal/models/pasien/pasien.go rename internal/utils/query/{exemple.go.exemple => exemple.go.txt} (100%) create mode 100644 internal/utils/query/exemple.txt diff --git a/README.md b/README.md index 9c2a30d..402052a 100644 --- a/README.md +++ b/README.md @@ -340,11 +340,6 @@ BPJS_SECRETKEY=1bV36ASDQQ3512D **Generate Handler untuk Retribusi:** ```bash -# Generate handler dasar -go run tools/general/generate-handler.go retribusi get post put delete - -# Generate dengan fitur advanced -go run tools/general/generate-handler.go retribusi get post put delete dynamic search stats # Config go run tools/general/generate-handler.go --config tools/general/services-config.yaml --verbose @@ -352,7 +347,7 @@ go run tools/general/generate-handler.go --config tools/general/services-config. 
``` *** - +OBANESTHESI0003 ## 🚀 Deployment ### 🐳 Docker Deployment diff --git a/internal/config/config.go b/internal/config/config.go index 20ea90c..740e634 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -394,22 +394,22 @@ func loadKeycloakConfig() KeycloakConfig { func (c *Config) loadDatabaseConfigs() { // Load PostgreSQL configurations - c.addPostgreSQLConfigs() + // c.addPostgreSQLConfigs() - // Load MySQL configurations - c.addMySQLConfigs() + // // Load MySQL configurations + // c.addMySQLConfigs() - // Load MongoDB configurations - c.addMongoDBConfigs() + // // Load MongoDB configurations + // c.addMongoDBConfigs() - // Load SQLite configurations - c.addSQLiteConfigs() + // // Load SQLite configurations + // c.addSQLiteConfigs() // Load custom database configurations from environment variables c.loadCustomDatabaseConfigs() // Remove duplicate database configurations - c.removeDuplicateDatabases() + // c.removeDuplicateDatabases() } func (c *Config) removeDuplicateDatabases() { diff --git a/internal/database/database.go b/internal/database/database.go index f2a5db7..0f79ae0 100644 --- a/internal/database/database.go +++ b/internal/database/database.go @@ -127,16 +127,16 @@ func (s *service) addDatabase(name string, config config.DatabaseConfig) error { defer s.mu.Unlock() // Check for duplicate database connections - for existingName, existingConfig := range s.configs { - if existingName != name && - existingConfig.Host == config.Host && - existingConfig.Port == config.Port && - existingConfig.Database == config.Database && - existingConfig.Type == config.Type { - log.Printf("⚠️ Database %s appears to be a duplicate of %s (same host:port:database), skipping connection", name, existingName) - return nil - } - } + // for existingName, existingConfig := range s.configs { + // if existingName != name && + // existingConfig.Host == config.Host && + // existingConfig.Port == config.Port && + // existingConfig.Database == config.Database 
&& + // existingConfig.Type == config.Type { + // log.Printf("⚠️ Database %s appears to be a duplicate of %s (same host:port:database), skipping connection", name, existingName) + // return nil + // } + // } var db *sql.DB var err error diff --git a/internal/handlers/pasien/pasien.go b/internal/handlers/pasien/pasien.go new file mode 100644 index 0000000..22bfc75 --- /dev/null +++ b/internal/handlers/pasien/pasien.go @@ -0,0 +1,2149 @@ +package handlers + +import ( + "api-service/internal/config" + "api-service/internal/database" + models "api-service/internal/models" + pasienModels "api-service/internal/models/pasien" + queryUtils "api-service/internal/utils/query" + "api-service/internal/utils/validation" + "api-service/pkg/logger" + "context" + "database/sql" + "fmt" + "net/http" + "strconv" + "strings" + "sync" + "time" + + "github.com/gin-gonic/gin" + "github.com/go-playground/validator/v10" + "github.com/jmoiron/sqlx" + "github.com/lib/pq" +) + +// ============================================================================= +// GLOBAL INITIALIZATION & VALIDATION +// ============================================================================= + +var ( + db database.Service + once sync.Once + validate *validator.Validate +) + +// Initialize the database connection and validator once +func init() { + once.Do(func() { + db = database.New(config.LoadConfig()) + validate = validator.New() + validate.RegisterValidation("pasien_status", validatePasienStatus) + if db == nil { + logger.Fatal("Failed to initialize database connection") + } + }) +} + +// Custom validation for pasien status +func validatePasienStatus(fl validator.FieldLevel) bool { + return models.IsValidStatus(fl.Field().String()) +} + +// ============================================================================= +// CACHE IMPLEMENTATION +// ============================================================================= + +// CacheEntry represents an entry in the cache +type CacheEntry struct { + Data 
interface{} + ExpiresAt time.Time +} + +// IsExpired checks if the cache entry has expired +func (e *CacheEntry) IsExpired() bool { + return time.Now().After(e.ExpiresAt) +} + +// InMemoryCache implements a simple in-memory cache with TTL +type InMemoryCache struct { + items sync.Map + mu sync.RWMutex +} + +// NewInMemoryCache creates a new in-memory cache +func NewInMemoryCache() *InMemoryCache { + return &InMemoryCache{} +} + +// Get retrieves an item from the cache +func (c *InMemoryCache) Get(key string) (interface{}, bool) { + val, ok := c.items.Load(key) + if !ok { + return nil, false + } + + entry, ok := val.(*CacheEntry) + if !ok || entry.IsExpired() { + c.items.Delete(key) + return nil, false + } + + return entry.Data, true +} + +// Set stores an item in the cache with a TTL +func (c *InMemoryCache) Set(key string, value interface{}, ttl time.Duration) { + entry := &CacheEntry{ + Data: value, + ExpiresAt: time.Now().Add(ttl), + } + c.items.Store(key, entry) +} + +// Delete removes an item from the cache +func (c *InMemoryCache) Delete(key string) { + c.items.Delete(key) +} + +// DeleteByPrefix removes all items with a specific prefix +func (c *InMemoryCache) DeleteByPrefix(prefix string) { + c.items.Range(func(key, value interface{}) bool { + if keyStr, ok := key.(string); ok && strings.HasPrefix(keyStr, prefix) { + c.items.Delete(key) + } + return true + }) +} + +// ============================================================================= +// Pasien HANDLER STRUCT +// ============================================================================= + +// PasienHandler handles pasien services +type PasienHandler struct { + db database.Service + queryBuilder *queryUtils.QueryBuilder + validator *validation.DynamicValidator + cache *InMemoryCache +} + +// NewPasienHandler creates a new PasienHandler with a pre-configured QueryBuilder +func NewPasienHandler() *PasienHandler { + // Initialize QueryBuilder with allowed columns list for security. 
+ queryBuilder := queryUtils.NewQueryBuilder(queryUtils.DBTypePostgreSQL). + SetAllowedColumns([]string{ + "id", + "nomr", + "status", + "title", + "nama", + "tempat", + "tgllahir", + "jeniskelamin", + "alamat", + "kelurahan", + "kdkecamatan", + "kota", + "kdprovinsi", + "agama", + "no_kartu", + "noktp_baru", + "created_at", + "updated_at", + "idprovinsi", + "namaprovinsi", + "idkota", + "namakota", + "idkecamatan", + "namakecamatan", + "idkelurahan", + "namakelurahan", + }) + + return &PasienHandler{ + db: db, + queryBuilder: queryBuilder, + validator: validation.NewDynamicValidator(queryBuilder), + cache: NewInMemoryCache(), + } +} + +// ============================================================================= +// HANDLER ENDPOINTS +// ============================================================================= + +// GetPasienByAge godoc +// @Summary Get Pasien by Age Group +// @Description Get pasien statistics by age group +// @Tags Pasien +// @Accept json +// @Produce json +// @Param age_group query string false "Age group (child, teen, adult, senior)" +// @Success 200 {object} pasienModels.PasienAgeStatsResponse "Statistics data" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/pasien/by-age [get] +func (h *PasienHandler) GetPasienByAge(c *gin.Context) { + // Parse age group + ageGroup := c.Query("age_group") + validAgeGroups := map[string]bool{ + "child": true, // 0-12 years + "teen": true, // 13-17 years + "adult": true, // 18-59 years + "senior": true, // 60+ years + } + + if ageGroup == "" || !validAgeGroups[ageGroup] { + h.respondError(c, "Invalid age group", fmt.Errorf("age group must be one of: child, teen, adult, senior"), http.StatusBadRequest) + return + } + + // Use GetSQLXDB to get database connection + dbConn, err := h.db.GetSQLXDB("postgres_simrs") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, 
http.StatusInternalServerError) + return + } + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Build query + query := queryUtils.DynamicQuery{ + From: "m_pasien", + Fields: []queryUtils.SelectField{ + {Expression: "COUNT(*)", Alias: "count"}, + }, + Filters: []queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{ + {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}, + }, + LogicOp: "AND", + }}, + } + + // Execute query + var result struct { + Count int `db:"count"` + } + err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, query, &result) + if err != nil { + h.logAndRespondError(c, "Failed to get age statistics", err, http.StatusInternalServerError) + return + } + + // Build response + response := pasienModels.PasienAgeStatsResponse{ + Message: fmt.Sprintf("Age group '%s' statistics retrieved successfully", ageGroup), + Data: map[string]interface{}{ + "age_group": ageGroup, + "count": result.Count, + }, + } + c.JSON(http.StatusOK, response) +} + +// GetPasienDynamic godoc +// @Summary Get Pasien Dynamic +// @Description Get pasien with dynamic filtering +// @Tags Pasien +// @Accept json +// @Produce json +// @Param fields query string false "Fields to select (e.g., fields=*.*)" +// @Param filter[column][operator] query string false "Dynamic filters (e.g., filter[name][_eq]=value)" +// @Param sort query string false "Sort fields (e.g., sort=date_created,-name)" +// @Param limit query int false "Limit" default(10) +// @Param offset query int false "Offset" default(0) +// @Success 200 {object} pasienModels.PasienGetResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/pasien/dynamic [get] +func (h *PasienHandler) GetPasienDynamic(c *gin.Context) { + parser := queryUtils.NewQueryParser().SetLimits(10, 100) + dynamicQuery, err := 
parser.ParseQueryWithDefaultFields(c.Request.URL.Query(), "m_pasien", []string{ + "id", + "nomr", + "status", + "title", + "nama", + "tempat", + "tgllahir", + "jeniskelamin", + "alamat", + "kelurahan", + "kdkecamatan", + "kota", + "kdprovinsi", + "agama", + "no_kartu", + "noktp_baru", + "namakelurahan", + "namakecamatan", + "namakota", + "namaprovinsi", + }) + if err != nil { + h.respondError(c, "Invalid query parameters", err, http.StatusBadRequest) + return + } + + // Add joins for relationships using the correct structure + dynamicQuery.Joins = []queryUtils.Join{ + { + Type: "LEFT", + Table: "m_provinsi", + Alias: "m_provinsi", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kota", + Alias: "m_kota", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kecamatan", + Alias: "m_kecamatan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kelurahan", + Alias: "m_kelurahan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"}, + }, + }, + }, + } + + // Add default filter to exclude deleted records + dynamicQuery.Filters = append([]queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{{Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}}, + LogicOp: "AND", + }}, dynamicQuery.Filters...) 
+ + // Try to get from cache first + // Create cache key from query string + cacheKey := fmt.Sprintf("pasien:dynamic:%s", c.Request.URL.RawQuery) + if cachedData, found := h.cache.Get(cacheKey); found { + logger.Info("Cache hit for dynamic query", map[string]interface{}{"cache_key": cacheKey}) + + // Convert from interface{} to expected type + if pasiens, ok := cachedData.([]pasienModels.Pasien); ok { + meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, len(pasiens)) + response := pasienModels.PasienGetResponse{ + Message: "Data pasien berhasil diambil (dari cache)", + Data: pasiens, + Meta: meta, + } + c.JSON(http.StatusOK, response) + return + } + } + + // Use GetSQLXDB to get database connection + dbConn, err := h.db.GetSQLXDB("postgres_simrs") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + pasiens, total, err := h.fetchPasiensDynamic(ctx, dbConn, dynamicQuery) + if err != nil { + h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) + return + } + + // Save to cache + h.cache.Set(cacheKey, pasiens, 10*time.Minute) // Cache for 10 minutes + + meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, total) + response := pasienModels.PasienGetResponse{ + Message: "Data pasien berhasil diambil", + Data: pasiens, + Meta: meta, + } + c.JSON(http.StatusOK, response) +} + +// GetPasienByLocation godoc +// @Summary Get Pasien by Location +// @Description Get pasien by location (provinsi, kota, kecamatan, kelurahan) +// @Tags Pasien +// @Accept json +// @Produce json +// @Param kelurahan query int false "Filter by kelurahan ID" +// @Param kdkecamatan query int false "Filter by kdkecamatan ID" +// @Param kota query int false "Filter by kota ID" +// @Param kdprovinsi query int false "Filter by kdprovinsi ID" +// @Param limit query int false "Limit (max 
100)" default(10) +// @Param offset query int false "Offset" default(0) +// @Success 200 {object} pasienModels.PasienGetResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/pasien/by-location [get] +func (h *PasienHandler) GetPasienByLocation(c *gin.Context) { + // Parse location filters + var filters []queryUtils.DynamicFilter + + if kelurahan := c.Query("kelurahan"); kelurahan != "" { + if kelurahanID, err := strconv.Atoi(kelurahan); err == nil { + filters = append(filters, queryUtils.DynamicFilter{Column: "kelurahan", Operator: queryUtils.OpEqual, Value: kelurahanID}) + } + } + + if kdkecamatan := c.Query("kdkecamatan"); kdkecamatan != "" { + if kdkecamatanID, err := strconv.Atoi(kdkecamatan); err == nil { + filters = append(filters, queryUtils.DynamicFilter{Column: "kdkecamatan", Operator: queryUtils.OpEqual, Value: kdkecamatanID}) + } + } + + if kota := c.Query("kota"); kota != "" { + if kotaID, err := strconv.Atoi(kota); err == nil { + filters = append(filters, queryUtils.DynamicFilter{Column: "kota", Operator: queryUtils.OpEqual, Value: kotaID}) + } + } + + if kdprovinsi := c.Query("kdprovinsi"); kdprovinsi != "" { + if kdprovinsiID, err := strconv.Atoi(kdprovinsi); err == nil { + filters = append(filters, queryUtils.DynamicFilter{Column: "kdprovinsi", Operator: queryUtils.OpEqual, Value: kdprovinsiID}) + } + } + + // Parse pagination + limit, offset := 10, 0 + if limitStr := c.Query("limit"); limitStr != "" { + if l, err := strconv.Atoi(limitStr); err == nil && l > 0 && l <= 100 { + limit = l + } + } + if offsetStr := c.Query("offset"); offsetStr != "" { + if o, err := strconv.Atoi(offsetStr); err == nil && o >= 0 { + offset = o + } + } + + // Use GetSQLXDB to get database connection + dbConn, err := h.db.GetSQLXDB("postgres_simrs") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, 
http.StatusInternalServerError) + return + } + ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) + defer cancel() + + // Build query + query := queryUtils.DynamicQuery{ + From: "m_pasien", + Fields: []queryUtils.SelectField{ + {Expression: "id"}, + {Expression: "nomr"}, + {Expression: "status"}, + {Expression: "title"}, + {Expression: "nama"}, + {Expression: "tempat"}, + {Expression: "tgllahir"}, + {Expression: "jeniskelamin"}, + {Expression: "alamat"}, + {Expression: "kelurahan"}, + {Expression: "kdkecamatan"}, + {Expression: "kota"}, + {Expression: "kdprovinsi"}, + {Expression: "agama"}, + {Expression: "no_kartu"}, + {Expression: "noktp_baru"}, + {Expression: "namakelurahan"}, + {Expression: "namakecamatan"}, + {Expression: "namakota"}, + {Expression: "namaprovinsi"}, + }, + Sort: []queryUtils.SortField{{Column: "date_created", Order: "DESC"}}, + Limit: limit, + Offset: offset, + } + + // Add joins for relationships using the correct structure + query.Joins = []queryUtils.Join{ + { + Type: "LEFT", + Table: "m_provinsi", + Alias: "m_provinsi", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kota", + Alias: "m_kota", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kecamatan", + Alias: "m_kecamatan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kelurahan", + Alias: "m_kelurahan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: 
"m_kelurahan.idkelurahan"}, + }, + }, + }, + } + + // Add filters if any + if len(filters) > 0 { + query.Filters = append(query.Filters, queryUtils.FilterGroup{ + Filters: filters, + LogicOp: "AND", + }) + } + + // Execute query + pasiens, total, err := h.fetchPasiensDynamic(ctx, dbConn, query) + if err != nil { + h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) + return + } + + // Build response + meta := h.calculateMeta(limit, offset, total) + response := pasienModels.PasienGetResponse{ + Message: "Data pasien by location retrieved successfully", + Data: pasiens, + Meta: meta, + } + c.JSON(http.StatusOK, response) +} + +// CreatePasien godoc +// @Summary Create Pasien +// @Description Create a new pasien +// @Tags Pasien +// @Accept json +// @Produce json +// @Param request body pasienModels.PasienCreateRequest true "Pasien creation request" +// @Success 201 {object} pasienModels.PasienCreateResponse "Pasien created successfully" +// @Failure 400 {object} models.ErrorResponse "Bad request or validation error" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/pasien/ [post] +func (h *PasienHandler) CreatePasien(c *gin.Context) { + var req pasienModels.PasienCreateRequest + if err := c.ShouldBindJSON(&req); err != nil { + h.respondError(c, "Invalid request body", err, http.StatusBadRequest) + return + } + if err := validate.Struct(&req); err != nil { + h.respondError(c, "Validation failed", err, http.StatusBadRequest) + return + } + + // Use GetSQLXDB to get database connection + dbConn, err := h.db.GetSQLXDB("postgres_simrs") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + // Validate id must be unique + if req.ID != nil { + rule := validation.NewUniqueFieldRule( + "m_pasien", // Table name + "id", // Column that must be 
unique + queryUtils.DynamicFilter{ // Additional condition + Column: "status", + Operator: queryUtils.OpNotEqual, + Value: "deleted", + }, + ) + + // Prepare data from request for validation + dataToValidate := map[string]interface{}{ + "id": *req.ID, + } + + // Execute validation + isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate) + if err != nil { + h.logAndRespondError(c, "Failed to validate id", err, http.StatusInternalServerError) + return + } + + if isDuplicate { + h.respondError(c, "id already exists", fmt.Errorf("duplicate id: %d", *req.ID), http.StatusConflict) + return + } + } + + // Validate nomr must be unique + if req.Nomr != nil && *req.Nomr != "" { + rule := validation.NewUniqueFieldRule( + "m_pasien", // Table name + "nomr", // Column that must be unique + queryUtils.DynamicFilter{ // Additional condition + Column: "status", + Operator: queryUtils.OpNotEqual, + Value: "deleted", + }, + ) + + // Prepare data from request for validation + dataToValidate := map[string]interface{}{ + "nomr": *req.Nomr, + } + + // Execute validation + isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate) + if err != nil { + h.logAndRespondError(c, "Failed to validate nomr", err, http.StatusInternalServerError) + return + } + + if isDuplicate { + h.respondError(c, "nomr already exists", fmt.Errorf("duplicate nomr: %s", *req.Nomr), http.StatusConflict) + return + } + } + + // Validate no_kartu must be unique + if req.NoKartu != nil && *req.NoKartu != "" { + rule := validation.NewUniqueFieldRule( + "m_pasien", // Table name + "no_kartu", // Column that must be unique + queryUtils.DynamicFilter{ // Additional condition + Column: "status", + Operator: queryUtils.OpNotEqual, + Value: "deleted", + }, + ) + + // Prepare data from request for validation + dataToValidate := map[string]interface{}{ + "no_kartu": *req.NoKartu, + } + + // Execute validation + isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate) + 
if err != nil { + h.logAndRespondError(c, "Failed to validate no_kartu", err, http.StatusInternalServerError) + return + } + + if isDuplicate { + h.respondError(c, "no_kartu already exists", fmt.Errorf("duplicate no_kartu: %s", *req.NoKartu), http.StatusConflict) + return + } + } + + data := queryUtils.InsertData{ + Columns: []string{ + "status", + "date_created", "date_updated", + "id", + "nomr", + "status", + "title", + "nama", + "tempat", + "tgllahir", + "jeniskelamin", + "alamat", + "kelurahan", + "kdkecamatan", + "kota", + "kdprovinsi", + "agama", + "no_kartu", + "noktp_baru", + }, + Values: []interface{}{ + req.Status, + time.Now(), time.Now(), + req.ID, + req.Nomr, + req.Status, + req.Title, + req.Nama, + req.Tempat, + req.Tgllahir, + req.Jeniskelamin, + req.Alamat, + req.Kelurahan, + req.Kdkecamatan, + req.Kota, + req.Kdprovinsi, + req.Agama, + req.NoKartu, + req.NoktpBaru, + }, + } + returningCols := []string{ + "status", + "sort", "user_created", "date_created", "user_updated", "date_updated", + "id", + "nomr", + "status", + "title", + "nama", + "tempat", + "tgllahir", + "jeniskelamin", + "alamat", + "kelurahan", + "kdkecamatan", + "kota", + "kdprovinsi", + "agama", + "no_kartu", + "noktp_baru", + } + + sql, args, err := h.queryBuilder.BuildInsertQuery("m_pasien", data, returningCols...) + if err != nil { + h.logAndRespondError(c, "Failed to build insert query", err, http.StatusInternalServerError) + return + } + + var dataPasien pasienModels.Pasien + err = dbConn.GetContext(ctx, &dataPasien, sql, args...) 
+ if err != nil { + h.logAndRespondError(c, "Failed to create pasien", err, http.StatusInternalServerError) + return + } + + // Invalidate cache that might be affected + h.invalidateRelatedCache() + + response := pasienModels.PasienCreateResponse{Message: "Pasien berhasil dibuat", Data: &dataPasien} + c.JSON(http.StatusCreated, response) +} + +// GetPasien godoc +// @Summary Get Pasien List +// @Description Get list of pasien with pagination and filters +// @Tags Pasien +// @Accept json +// @Produce json +// @Param limit query int false "Limit (max 100)" default(10) +// @Param offset query int false "Offset" default(0) +// @Param status query string false "Filter by status" +// @Param search query string false "Search in multiple fields" +// @Success 200 {object} pasienModels.PasienGetResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/pasien/ [get] +func (h *PasienHandler) GetPasien(c *gin.Context) { + // Increase timeout for complex queries + ctx, cancel := context.WithTimeout(c.Request.Context(), 120*time.Second) + defer cancel() + + // Use the core fetchPasiensDynamic function for all data retrieval logic. 
+ query := queryUtils.DynamicQuery{ + From: "m_pasien", + Fields: []queryUtils.SelectField{ + {Expression: "id"}, + {Expression: "nomr"}, + {Expression: "status"}, + {Expression: "title"}, + {Expression: "nama"}, + {Expression: "tempat"}, + {Expression: "tgllahir"}, + {Expression: "jeniskelamin"}, + {Expression: "alamat"}, + {Expression: "kelurahan"}, + {Expression: "kdkecamatan"}, + {Expression: "kota"}, + {Expression: "kdprovinsi"}, + {Expression: "agama"}, + {Expression: "no_kartu"}, + {Expression: "noktp_baru"}, + {Expression: "namakelurahan"}, + {Expression: "namakecamatan"}, + {Expression: "namakota"}, + {Expression: "namaprovinsi"}, + }, + Sort: []queryUtils.SortField{{Column: "date_created", Order: "DESC"}}, + } + + // Add joins for relationships using the correct structure + query.Joins = []queryUtils.Join{ + { + Type: "LEFT", + Table: "m_provinsi", + Alias: "m_provinsi", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kota", + Alias: "m_kota", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kecamatan", + Alias: "m_kecamatan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kelurahan", + Alias: "m_kelurahan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"}, + }, + }, + }, + } + + // Parse pagination + if limit, err := strconv.Atoi(c.DefaultQuery("limit", "10")); err == nil && limit > 0 && limit <= 100 { + query.Limit = limit + 
} + if offset, err := strconv.Atoi(c.DefaultQuery("offset", "0")); err == nil && offset >= 0 { + query.Offset = offset + } + + // Use GetSQLXDB to get database connection + dbConn, err := h.db.GetSQLXDB("postgres_simrs") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + // Parse simple filters + var filters []queryUtils.DynamicFilter + if status := c.Query("status"); status != "" && models.IsValidStatus(status) { + filters = append(filters, queryUtils.DynamicFilter{Column: "status", Operator: queryUtils.OpEqual, Value: status}) + } + + // Optimize query search with caching + search := c.Query("search") + var searchFilters []queryUtils.DynamicFilter + var cacheKey string + var useCache bool + + // Initialize searchFilters before using it in the cache hit section + if search != "" { + // Limit search length to prevent slow queries + if len(search) > 50 { + search = search[:50] + } + + // Generate cache key for search + cacheKey = fmt.Sprintf("pasien:search:%s:%d:%d", search, query.Limit, query.Offset) + + // Initialize searchFilters here + searchFilters = []queryUtils.DynamicFilter{ + {Column: "nomr", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + {Column: "title", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + {Column: "nama", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + {Column: "no_kartu", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + } + + // Try to get from cache first + if cachedData, found := h.cache.Get(cacheKey); found { + logger.Info("Cache hit for search", map[string]interface{}{"search": search, "cache_key": cacheKey}) + + // Convert from interface{} to expected type + pasiens, ok := cachedData.([]pasienModels.Pasien) + if !ok { + logger.Error("Failed to convert cached data", map[string]interface{}{"cache_key": cacheKey}) + } else { + // If requested, get aggregation data + var aggregateData *models.AggregateData + if 
c.Query("include_summary") == "true" { + // Build full filter groups for aggregate data (including search filters) + fullFilterGroups := []queryUtils.FilterGroup{ + {Filters: searchFilters, LogicOp: "OR"}, + } + if len(filters) > 0 { + fullFilterGroups = append(fullFilterGroups, queryUtils.FilterGroup{Filters: filters, LogicOp: "AND"}) + } + aggregateData, err = h.getAggregateData(ctx, dbConn, fullFilterGroups) + if err != nil { + h.logAndRespondError(c, "Failed to get aggregate data", err, http.StatusInternalServerError) + return + } + } + + // Build response + meta := h.calculateMeta(query.Limit, query.Offset, len(pasiens)) + response := pasienModels.PasienGetResponse{ + Message: "Data pasien berhasil diambil (dari cache)", + Data: pasiens, + Meta: meta, + } + + if aggregateData != nil { + response.Summary = aggregateData + } + + c.JSON(http.StatusOK, response) + return + } + } + + // If not in cache, mark for saving after query + useCache = true + + // If there's search, create OR filter group + query.Filters = append(query.Filters, queryUtils.FilterGroup{Filters: searchFilters, LogicOp: "OR"}) + } + + // Add other filters (if any) as AND group + if len(filters) > 0 { + query.Filters = append(query.Filters, queryUtils.FilterGroup{Filters: filters, LogicOp: "AND"}) + } + + pasiens, total, err := h.fetchPasiensDynamic(ctx, dbConn, query) + if err != nil { + h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) + return + } + + // Save search results to cache if there's a search parameter + if useCache && len(pasiens) > 0 { + h.cache.Set(cacheKey, pasiens, 15*time.Minute) // Cache for 15 minutes + logger.Info("Cached search results", map[string]interface{}{"search": search, "cache_key": cacheKey, "count": len(pasiens)}) + } + + // If requested, get aggregation data + var aggregateData *models.AggregateData + if c.Query("include_summary") == "true" { + aggregateData, err = h.getAggregateData(ctx, dbConn, query.Filters) + if err != nil 
{ + h.logAndRespondError(c, "Failed to get aggregate data", err, http.StatusInternalServerError) + return + } + } + + // Build response + meta := h.calculateMeta(query.Limit, query.Offset, total) + response := pasienModels.PasienGetResponse{ + Message: "Data pasien berhasil diambil", + Data: pasiens, + Meta: meta, + } + + if aggregateData != nil { + response.Summary = aggregateData + } + + c.JSON(http.StatusOK, response) +} + +// GetPasienByID godoc +// @Summary Get Pasien by ID +// @Description Get pasien by ID +// @Tags Pasien +// @Accept json +// @Produce json +// @Param id path string true "Pasien ID" +// @Success 200 {object} pasienModels.PasienGetByIDResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Invalid ID format" +// @Failure 404 {object} models.ErrorResponse "Pasien not found" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/pasien/:id [get] +func (h *PasienHandler) GetPasienByID(c *gin.Context) { + id := c.Param("id") + if id == "" { + h.respondError(c, "Invalid ID format", fmt.Errorf("id cannot be empty"), http.StatusBadRequest) + return + } + + // Try to get from cache first + cacheKey := fmt.Sprintf("pasien:id:%s", id) + if cachedData, found := h.cache.Get(cacheKey); found { + logger.Info("Cache hit for id", map[string]interface{}{"id": id, "cache_key": cacheKey}) + + // Convert from interface{} to expected type + if cachedPasien, ok := cachedData.(pasienModels.Pasien); ok { + response := pasienModels.PasienGetByIDResponse{ + Message: "Pasien details retrieved successfully (dari cache)", + Data: &cachedPasien, + } + c.JSON(http.StatusOK, response) + return + } + } + + // Use GetSQLXDB to get database connection + dbConn, err := h.db.GetSQLXDB("postgres_simrs") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + 
dynamicQuery := queryUtils.DynamicQuery{ + From: "m_pasien", + Fields: []queryUtils.SelectField{ + {Expression: "id"}, + {Expression: "nomr"}, + {Expression: "status"}, + {Expression: "title"}, + {Expression: "nama"}, + {Expression: "tempat"}, + {Expression: "tgllahir"}, + {Expression: "jeniskelamin"}, + {Expression: "alamat"}, + {Expression: "kelurahan"}, + {Expression: "kdkecamatan"}, + {Expression: "kota"}, + {Expression: "kdprovinsi"}, + {Expression: "agama"}, + {Expression: "no_kartu"}, + {Expression: "noktp_baru"}, + {Expression: "namakelurahan"}, + {Expression: "namakecamatan"}, + {Expression: "namakota"}, + {Expression: "namaprovinsi"}, + }, + Filters: []queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{ + {Column: "id", Operator: queryUtils.OpEqual, Value: id}, + {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}, + }, + LogicOp: "AND", + }}, + Limit: 1, + } + + // Add joins for relationships using the correct structure + dynamicQuery.Joins = []queryUtils.Join{ + { + Type: "LEFT", + Table: "m_provinsi", + Alias: "m_provinsi", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kota", + Alias: "m_kota", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kecamatan", + Alias: "m_kecamatan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kelurahan", + Alias: "m_kelurahan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: 
"m_kelurahan.idkelurahan"}, + }, + }, + }, + } + + var dataPasien pasienModels.Pasien + err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, dynamicQuery, &dataPasien) + if err != nil { + if err == sql.ErrNoRows { + h.respondError(c, "Pasien not found", err, http.StatusNotFound) + } else { + h.logAndRespondError(c, "Failed to get pasien", err, http.StatusInternalServerError) + } + return + } + + // Save to cache + h.cache.Set(cacheKey, dataPasien, 30*time.Minute) // Cache for 30 minutes + + response := pasienModels.PasienGetByIDResponse{ + Message: "Pasien details retrieved successfully", + Data: &dataPasien, + } + c.JSON(http.StatusOK, response) +} + +// GetPasienByNomr godoc +// @Summary Get Pasien by Nomr +// @Description Get pasien by Nomr +// @Tags Pasien +// @Accept json +// @Produce json +// @Success 200 {object} pasienModels.PasienGetByNomrResponse "Success response" +// @Failure 400 {object} models.ErrorResponse "Bad request" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/pasien/nomr/:nomr [get] +func (h *PasienHandler) GetPasienByNomr(c *gin.Context) { + // Increase timeout for complex queries + ctx, cancel := context.WithTimeout(c.Request.Context(), 120*time.Second) + defer cancel() + + // Use the core fetchPasiensDynamic function for all data retrieval logic. 
+ query := queryUtils.DynamicQuery{ + From: "m_pasien", + Fields: []queryUtils.SelectField{ + {Expression: "id"}, + {Expression: "nomr"}, + {Expression: "status"}, + {Expression: "title"}, + {Expression: "nama"}, + {Expression: "tempat"}, + {Expression: "tgllahir"}, + {Expression: "jeniskelamin"}, + {Expression: "alamat"}, + {Expression: "kelurahan"}, + {Expression: "kdkecamatan"}, + {Expression: "kota"}, + {Expression: "kdprovinsi"}, + {Expression: "agama"}, + {Expression: "no_kartu"}, + {Expression: "noktp_baru"}, + {Expression: "namakelurahan"}, + {Expression: "namakecamatan"}, + {Expression: "namakota"}, + {Expression: "namaprovinsi"}, + }, + Sort: []queryUtils.SortField{{Column: "date_created", Order: "DESC"}}, + } + + // Add joins for relationships using the correct structure + query.Joins = []queryUtils.Join{ + { + Type: "LEFT", + Table: "m_provinsi", + Alias: "m_provinsi", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kota", + Alias: "m_kota", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kecamatan", + Alias: "m_kecamatan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kelurahan", + Alias: "m_kelurahan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"}, + }, + }, + }, + } + + // Parse pagination + if limit, err := strconv.Atoi(c.DefaultQuery("limit", "10")); err == nil && limit > 0 && limit <= 100 { + query.Limit = limit + 
} + if offset, err := strconv.Atoi(c.DefaultQuery("offset", "0")); err == nil && offset >= 0 { + query.Offset = offset + } + + // Use GetSQLXDB to get database connection + dbConn, err := h.db.GetSQLXDB("postgres_simrs") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + + // Parse simple filters + var filters []queryUtils.DynamicFilter + if status := c.Query("status"); status != "" && models.IsValidStatus(status) { + filters = append(filters, queryUtils.DynamicFilter{Column: "status", Operator: queryUtils.OpEqual, Value: status}) + } + + // Optimize query search with caching + search := c.Query("search") + var searchFilters []queryUtils.DynamicFilter + var cacheKey string + var useCache bool + + // Initialize searchFilters before using it in the cache hit section + if search != "" { + // Limit search length to prevent slow queries + if len(search) > 50 { + search = search[:50] + } + + // Generate cache key for search + cacheKey = fmt.Sprintf("pasien:search:%s:%d:%d", search, query.Limit, query.Offset) + + // Initialize searchFilters here + searchFilters = []queryUtils.DynamicFilter{ + {Column: "nomr", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + {Column: "title", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + {Column: "nama", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + {Column: "no_kartu", Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + } + + // Try to get from cache first + if cachedData, found := h.cache.Get(cacheKey); found { + logger.Info("Cache hit for search", map[string]interface{}{"search": search, "cache_key": cacheKey}) + + // Convert from interface{} to expected type + pasiens, ok := cachedData.([]pasienModels.Pasien) + if !ok { + logger.Error("Failed to convert cached data", map[string]interface{}{"cache_key": cacheKey}) + } else { + // If requested, get aggregation data + var aggregateData *models.AggregateData + if 
c.Query("include_summary") == "true" { + // Build full filter groups for aggregate data (including search filters) + fullFilterGroups := []queryUtils.FilterGroup{ + {Filters: searchFilters, LogicOp: "OR"}, + } + if len(filters) > 0 { + fullFilterGroups = append(fullFilterGroups, queryUtils.FilterGroup{Filters: filters, LogicOp: "AND"}) + } + aggregateData, err = h.getAggregateData(ctx, dbConn, fullFilterGroups) + if err != nil { + h.logAndRespondError(c, "Failed to get aggregate data", err, http.StatusInternalServerError) + return + } + } + + // Build response + meta := h.calculateMeta(query.Limit, query.Offset, len(pasiens)) + response := pasienModels.PasienGetByNomrResponse{ + Message: "Data pasien berhasil diambil (dari cache)", + Data: pasiens, + Meta: meta, + } + + if aggregateData != nil { + response.Summary = aggregateData + } + + c.JSON(http.StatusOK, response) + return + } + } + + // If not in cache, mark for saving after query + useCache = true + + // If there's search, create OR filter group + query.Filters = append(query.Filters, queryUtils.FilterGroup{Filters: searchFilters, LogicOp: "OR"}) + } + + // Add other filters (if any) as AND group + if len(filters) > 0 { + query.Filters = append(query.Filters, queryUtils.FilterGroup{Filters: filters, LogicOp: "AND"}) + } + + pasiens, total, err := h.fetchPasiensDynamic(ctx, dbConn, query) + if err != nil { + h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) + return + } + + // Save search results to cache if there's a search parameter + if useCache && len(pasiens) > 0 { + h.cache.Set(cacheKey, pasiens, 15*time.Minute) // Cache for 15 minutes + logger.Info("Cached search results", map[string]interface{}{"search": search, "cache_key": cacheKey, "count": len(pasiens)}) + } + + // If requested, get aggregation data + var aggregateData *models.AggregateData + if c.Query("include_summary") == "true" { + aggregateData, err = h.getAggregateData(ctx, dbConn, query.Filters) + if err 
!= nil { + h.logAndRespondError(c, "Failed to get aggregate data", err, http.StatusInternalServerError) + return + } + } + + // Build response + meta := h.calculateMeta(query.Limit, query.Offset, total) + response := pasienModels.PasienGetByNomrResponse{ + Message: "Data pasien berhasil diambil", + Data: pasiens, + Meta: meta, + } + + if aggregateData != nil { + response.Summary = aggregateData + } + + c.JSON(http.StatusOK, response) +} + +// UpdatePasien godoc +// @Summary Update Pasien +// @Description Update an existing pasien +// @Tags Pasien +// @Accept json +// @Produce json +// @Param id path string true "Pasien ID" +// @Param request body pasienModels.PasienUpdateRequest true "Pasien update request" +// @Success 200 {object} pasienModels.PasienUpdateResponse "Pasien updated successfully" +// @Failure 400 {object} models.ErrorResponse "Bad request or validation error" +// @Failure 404 {object} models.ErrorResponse "Pasien not found" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/pasien/:nomr [put] +func (h *PasienHandler) UpdatePasien(c *gin.Context) { + id := c.Param("id") + if id == "" { + h.respondError(c, "Invalid ID format", fmt.Errorf("id cannot be empty"), http.StatusBadRequest) + return + } + var req pasienModels.PasienUpdateRequest + if err := c.ShouldBindJSON(&req); err != nil { + h.respondError(c, "Invalid request body", err, http.StatusBadRequest) + return + } + // Set the ID from path parameter to request + idInt, err := strconv.Atoi(id) + if err != nil { + h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) + return + } + idInt32 := int32(idInt) + req.ID = &idInt32 + if err := validate.Struct(&req); err != nil { + h.respondError(c, "Validation failed", err, http.StatusBadRequest) + return + } + + // Try to get old data for cache invalidation + var oldData pasienModels.Pasien + dbConn, err := h.db.GetSQLXDB("postgres_simrs") + if err == nil { + ctx, cancel := 
context.WithTimeout(c.Request.Context(), 5*time.Second) + defer cancel() + + dynamicQuery := queryUtils.DynamicQuery{ + From: "m_pasien", + Fields: []queryUtils.SelectField{{Expression: "*"}}, + Filters: []queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{ + {Column: "id", Operator: queryUtils.OpEqual, Value: id}, + }, + LogicOp: "AND", + }}, + Limit: 1, + } + + // Add joins for relationships using the correct structure + dynamicQuery.Joins = []queryUtils.Join{ + { + Type: "LEFT", + Table: "m_provinsi", + Alias: "m_provinsi", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kota", + Alias: "m_kota", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kecamatan", + Alias: "m_kecamatan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kelurahan", + Alias: "m_kelurahan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"}, + }, + }, + }, + } + + err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, dynamicQuery, &oldData) + if err != nil { + logger.Error("Failed to fetch old data for cache invalidation", map[string]interface{}{"error": err.Error(), "id": id}) + } + } + + // Use GetSQLXDB to get database connection + dbConn, err = h.db.GetSQLXDB("postgres_simrs") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + ctx, cancel := 
context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + // Validate id must be unique, except for record with this id + if req.ID != nil { + rule := validation.ValidationRule{ + TableName: "m_pasien", + UniqueColumns: []string{"id"}, + Conditions: []queryUtils.DynamicFilter{ + {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}, + }, + ExcludeIDColumn: "id", // Exclude based on 'id' column + ExcludeIDValue: id, // ...with id value from parameter + } + + dataToValidate := map[string]interface{}{ + "id": *req.ID, + } + + isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate) + if err != nil { + h.logAndRespondError(c, "Failed to validate id", err, http.StatusInternalServerError) + return + } + + if isDuplicate { + h.respondError(c, "id already exists", fmt.Errorf("duplicate id: %d", *req.ID), http.StatusConflict) + return + } + } + + // Validate nomr must be unique, except for record with this id + if req.Nomr != nil && *req.Nomr != "" { + rule := validation.ValidationRule{ + TableName: "m_pasien", + UniqueColumns: []string{"nomr"}, + Conditions: []queryUtils.DynamicFilter{ + {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}, + }, + ExcludeIDColumn: "id", // Exclude based on 'id' column + ExcludeIDValue: id, // ...with id value from parameter + } + + dataToValidate := map[string]interface{}{ + "nomr": *req.Nomr, + } + + isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate) + if err != nil { + h.logAndRespondError(c, "Failed to validate nomr", err, http.StatusInternalServerError) + return + } + + if isDuplicate { + h.respondError(c, "nomr already exists", fmt.Errorf("duplicate nomr: %s", *req.Nomr), http.StatusConflict) + return + } + } + + // Validate no_kartu must be unique, except for record with this id + if req.NoKartu != nil && *req.NoKartu != "" { + rule := validation.ValidationRule{ + TableName: "m_pasien", + UniqueColumns: []string{"no_kartu"}, + Conditions: 
[]queryUtils.DynamicFilter{ + {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}, + }, + ExcludeIDColumn: "id", // Exclude based on 'id' column + ExcludeIDValue: id, // ...with id value from parameter + } + + dataToValidate := map[string]interface{}{ + "no_kartu": *req.NoKartu, + } + + isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate) + if err != nil { + h.logAndRespondError(c, "Failed to validate no_kartu", err, http.StatusInternalServerError) + return + } + + if isDuplicate { + h.respondError(c, "no_kartu already exists", fmt.Errorf("duplicate no_kartu: %s", *req.NoKartu), http.StatusConflict) + return + } + } + + updateData := queryUtils.UpdateData{ + Columns: []string{ + "status", + "date_updated", + "id", + "nomr", + "status", + "title", + "nama", + "tempat", + "tgllahir", + "jeniskelamin", + "alamat", + "kelurahan", + "kdkecamatan", + "kota", + "kdprovinsi", + "agama", + "no_kartu", + "noktp_baru", + }, + Values: []interface{}{ + req.Status, + time.Now(), + req.ID, + req.Nomr, + req.Status, + req.Title, + req.Nama, + req.Tempat, + req.Tgllahir, + req.Jeniskelamin, + req.Alamat, + req.Kelurahan, + req.Kdkecamatan, + req.Kota, + req.Kdprovinsi, + req.Agama, + req.NoKartu, + req.NoktpBaru, + }, + } + filters := []queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{ + {Column: "id", Operator: queryUtils.OpEqual, Value: req.ID}, + {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}, + }, + LogicOp: "AND", + }} + returningCols := []string{ + "status", + "sort", "user_created", "date_created", "user_updated", "date_updated", + "id", + "nomr", + "status", + "title", + "nama", + "tempat", + "tgllahir", + "jeniskelamin", + "alamat", + "kelurahan", + "kdkecamatan", + "kota", + "kdprovinsi", + "agama", + "no_kartu", + "noktp_baru", + } + + sql, args, err := h.queryBuilder.BuildUpdateQuery("m_pasien", updateData, filters, returningCols...) 
+ if err != nil { + h.logAndRespondError(c, "Failed to build update query", err, http.StatusInternalServerError) + return + } + + var dataPasien pasienModels.Pasien + err = dbConn.GetContext(ctx, &dataPasien, sql, args...) + if err != nil { + if err.Error() == "sql: no rows in result set" { + h.respondError(c, "Pasien not found", err, http.StatusNotFound) + } else { + h.logAndRespondError(c, "Failed to update pasien", err, http.StatusInternalServerError) + } + return + } + + // Invalidate cache that might be affected + // Invalidate cache for id that was updated + cacheKey := fmt.Sprintf("pasien:id:%s", id) + h.cache.Delete(cacheKey) + + // Invalidate cache for old and new data + if oldData.ID != 0 { + h.invalidateRelatedCache() + } + h.invalidateRelatedCache() + + response := pasienModels.PasienUpdateResponse{Message: "Pasien berhasil diperbarui", Data: &dataPasien} + c.JSON(http.StatusOK, response) +} + +// DeletePasien godoc +// @Summary Delete Pasien +// @Description Delete a pasien +// @Tags Pasien +// @Accept json +// @Produce json +// @Param id path string true "Pasien ID" +// @Success 200 {object} pasienModels.PasienDeleteResponse "Pasien deleted successfully" +// @Failure 400 {object} models.ErrorResponse "Invalid ID format" +// @Failure 404 {object} models.ErrorResponse "Pasien not found" +// @Failure 500 {object} models.ErrorResponse "Internal server error" +// @Router /api/v1/pasien/:nomr [delete] +func (h *PasienHandler) DeletePasien(c *gin.Context) { + id := c.Param("id") + if id == "" { + h.respondError(c, "Invalid ID format", fmt.Errorf("id cannot be empty"), http.StatusBadRequest) + return + } + + // Try to get data for cache invalidation + var dataToDelete pasienModels.Pasien + dbConn, err := h.db.GetSQLXDB("postgres_simrs") + if err == nil { + ctx, cancel := context.WithTimeout(c.Request.Context(), 5*time.Second) + defer cancel() + + dynamicQuery := queryUtils.DynamicQuery{ + From: "m_pasien", + Fields: []queryUtils.SelectField{{Expression: 
"*"}}, + Filters: []queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{ + {Column: "id", Operator: queryUtils.OpEqual, Value: id}, + }, + LogicOp: "AND", + }}, + Limit: 1, + } + + // Add joins for relationships using the correct structure + dynamicQuery.Joins = []queryUtils.Join{ + { + Type: "LEFT", + Table: "m_provinsi", + Alias: "m_provinsi", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdprovinsi", Operator: queryUtils.OpEqual, Value: "m_provinsi.idprovinsi"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kota", + Alias: "m_kota", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kota", Operator: queryUtils.OpEqual, Value: "m_kota.idkota"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kecamatan", + Alias: "m_kecamatan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kdkecamatan", Operator: queryUtils.OpEqual, Value: "m_kecamatan.idkecamatan"}, + }, + }, + }, + { + Type: "LEFT", + Table: "m_kelurahan", + Alias: "m_kelurahan", + OnConditions: queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "m_pasien.kelurahan", Operator: queryUtils.OpEqual, Value: "m_kelurahan.idkelurahan"}, + }, + }, + }, + } + + err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, dynamicQuery, &dataToDelete) + if err != nil { + logger.Error("Failed to fetch data for cache invalidation", map[string]interface{}{"error": err.Error(), "id": id}) + } + } + + // Use GetSQLXDB to get database connection + dbConn, err = h.db.GetSQLXDB("postgres_simrs") + if err != nil { + h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) + return + } + ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) + defer cancel() + + // Use ExecuteUpdate for soft delete by changing status + updateData := queryUtils.UpdateData{ + Columns: []string{"status", "date_updated"}, + 
Values: []interface{}{"deleted", time.Now()}, + } + filters := []queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{ + {Column: "id", Operator: queryUtils.OpEqual, Value: id}, + {Column: "status", Operator: queryUtils.OpNotEqual, Value: "deleted"}, + }, + LogicOp: "AND", + }} + + // Use ExecuteUpdate instead of ExecuteDelete + result, err := h.queryBuilder.ExecuteUpdate(ctx, dbConn, "m_pasien", updateData, filters) + if err != nil { + h.logAndRespondError(c, "Failed to delete pasien", err, http.StatusInternalServerError) + return + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + h.logAndRespondError(c, "Failed to get affected rows", err, http.StatusInternalServerError) + return + } + if rowsAffected == 0 { + h.respondError(c, "Pasien not found", sql.ErrNoRows, http.StatusNotFound) + return + } + + // Invalidate cache that might be affected + // Invalidate cache for id that was deleted + cacheKey := fmt.Sprintf("pasien:id:%s", id) + h.cache.Delete(cacheKey) + + // Invalidate cache for data that was deleted + if dataToDelete.ID != 0 { + h.invalidateRelatedCache() + } + + response := pasienModels.PasienDeleteResponse{Message: "Pasien berhasil dihapus", ID: id} + c.JSON(http.StatusOK, response) +} + +// ============================================================================= +// HELPER FUNCTIONS +// ============================================================================= + +// invalidateRelatedCache invalidates cache that might be affected by data changes +func (h *PasienHandler) invalidateRelatedCache() { + // Invalidate cache for search that might be affected + h.cache.DeleteByPrefix("pasien:search:") + h.cache.DeleteByPrefix("pasien:dynamic:") + h.cache.DeleteByPrefix("pasien:stats:") + h.cache.DeleteByPrefix("pasien:id:") +} + +// fetchPasiensDynamic executes dynamic query with timeout handling +func (h *PasienHandler) fetchPasiensDynamic(ctx context.Context, dbConn *sqlx.DB, query queryUtils.DynamicQuery) 
([]pasienModels.Pasien, int, error) { + logger.Info("Starting fetchPasiensDynamic", map[string]interface{}{ + "limit": query.Limit, + "offset": query.Offset, + "from": query.From, + }) + + var total int + var pasiens []pasienModels.Pasien + + // Check if query has search + hasSearch := false + for _, filterGroup := range query.Filters { + for _, filter := range filterGroup.Filters { + if filter.Operator == queryUtils.OpILike { + hasSearch = true + break + } + } + if hasSearch { + break + } + } + + logger.Info("Query analysis", map[string]interface{}{ + "hasSearch": hasSearch, + "totalFilters": len(query.Filters), + }) + + // Optimize to prevent timeout on search queries + // Use shorter context for search and count queries + queryCtx, queryCancel := context.WithTimeout(ctx, 30*time.Second) + defer queryCancel() + + // For search queries, limit maximum to prevent timeout + if hasSearch { + search := getSearchTerm(query) + logger.Info("Executing search query with timeout context", map[string]interface{}{"search_term": search}) + + // Limit maximum search limit to prevent timeout + maxSearchLimit := 50 + if query.Limit > maxSearchLimit { + query.Limit = maxSearchLimit + logger.Info("Reduced search limit to prevent timeout", map[string]interface{}{ + "original_limit": query.Limit, + "new_limit": maxSearchLimit, + }) + } + + // Execute search query + err := h.queryBuilder.ExecuteQuery(queryCtx, dbConn, query, &pasiens) + if err != nil { + // Check if it's a PostgreSQL statement timeout error + if pqErr, ok := err.(*pq.Error); ok && pqErr.Code == "57014" { + logger.Warn("Search query timed out, trying fallback strategy", map[string]interface{}{ + "search_term": search, + }) + + // Fallback: Search only in the most relevant column + // We need to rebuild the filters for the fallback + var fallbackFilters []queryUtils.FilterGroup + // Add other non-search filters back (e.g., status) + for _, fg := range query.Filters { + if fg.LogicOp == "AND" { + fallbackFilters = 
append(fallbackFilters, fg) + } + } + // Add the single, more specific search filter + searchableColumns := []string{ + "nomr", + "title", + "nama", + "no_kartu", + } + if len(searchableColumns) > 0 { + fallbackFilters = append([]queryUtils.FilterGroup{{ + Filters: []queryUtils.DynamicFilter{ + {Column: searchableColumns[0], Operator: queryUtils.OpILike, Value: "%" + search + "%"}, + }, + LogicOp: "AND", + }}, fallbackFilters...) + + fallbackQuery := query + fallbackQuery.Filters = fallbackFilters + + // Execute the fallback query with a shorter timeout + fallbackCtx, fallbackCancel := context.WithTimeout(ctx, 10*time.Second) + defer fallbackCancel() + + err = h.queryBuilder.ExecuteQuery(fallbackCtx, dbConn, fallbackQuery, &pasiens) + if err != nil { + logger.Error("Fallback search query also failed", map[string]interface{}{ + "error": err.Error(), + "query": fallbackQuery, + }) + // Return a more user-friendly error + return nil, 0, fmt.Errorf("search timed out. The search term '%s' is too general. 
Please try a more specific term", search) + } + logger.Info("Fallback search query successful", map[string]interface{}{ + "recordsFetched": len(pasiens), + }) + } + } else { + // It's a different error, handle it as before + logger.Error("Failed to execute search query", map[string]interface{}{ + "error": err.Error(), + "query": query, + }) + return nil, 0, fmt.Errorf("failed to execute search query: %w", err) + } + } + + // Estimate total for search query (don't count exact for performance) + total = len(pasiens) + if len(pasiens) == query.Limit { + // If reached limit, estimate there are more data + total = query.Offset + query.Limit + 100 + } else { + total = query.Offset + len(pasiens) + } + } else { + logger.Info("Executing regular query without search") + + // For queries without search, count total with shorter timeout + countCtx, countCancel := context.WithTimeout(ctx, 15*time.Second) + defer countCancel() + + count, err := h.queryBuilder.ExecuteCount(countCtx, dbConn, query) + if err != nil { + // If count failed, fallback to estimation or return error + logger.Warn("Failed to get exact count, using estimation", map[string]interface{}{"error": err.Error()}) + // For queries without search, we can estimate based on limit + total = query.Offset + query.Limit + 100 // Conservative estimation + } else { + total = int(count) + } + + logger.Info("Count query successful", map[string]interface{}{ + "count": total, + }) + + // Execute main data query + err = h.queryBuilder.ExecuteQuery(queryCtx, dbConn, query, &pasiens) + if err != nil { + logger.Error("Failed to execute main query", map[string]interface{}{ + "error": err.Error(), + "query": query, + }) + return nil, 0, fmt.Errorf("failed to execute main query: %w", err) + } + + logger.Info("Data query successful", map[string]interface{}{ + "recordsFetched": len(pasiens), + }) + } + + logger.Info("Query execution completed", map[string]interface{}{ + "totalRecords": total, + "returnedRecords": len(pasiens), + 
"hasSearch": hasSearch, + }) + + return pasiens, total, nil +} + +// getSearchTerm extracts the search term from a DynamicQuery object. +// It assumes the search is the first filter group with an "OR" logic operator. +func getSearchTerm(query queryUtils.DynamicQuery) string { + for _, filterGroup := range query.Filters { + if filterGroup.LogicOp == "OR" && len(filterGroup.Filters) > 0 { + if valueStr, ok := filterGroup.Filters[0].Value.(string); ok { + return strings.Trim(valueStr, "%") + } + } + } + return "" +} + +// getAggregateData gets comprehensive statistics about pasien data +func (h *PasienHandler) getAggregateData(ctx context.Context, dbConn *sqlx.DB, filterGroups []queryUtils.FilterGroup) (*models.AggregateData, error) { + aggregate := &models.AggregateData{ + ByStatus: make(map[string]int), + } + + var wg sync.WaitGroup + var mu sync.Mutex + errChan := make(chan error, 4) + + // 1. Count by status + wg.Add(1) + go func() { + defer wg.Done() + // Use context with shorter timeout + queryCtx, queryCancel := context.WithTimeout(ctx, 20*time.Second) + defer queryCancel() + + query := queryUtils.DynamicQuery{ + From: "m_pasien", + Fields: []queryUtils.SelectField{ + {Expression: "status"}, + {Expression: "COUNT(*)", Alias: "count"}, + }, + Filters: filterGroups, + GroupBy: []string{"status"}, + } + var results []struct { + Status string `db:"status"` + Count int `db:"count"` + } + err := h.queryBuilder.ExecuteQuery(queryCtx, dbConn, query, &results) + if err != nil { + errChan <- fmt.Errorf("status query failed: %w", err) + return + } + mu.Lock() + for _, result := range results { + aggregate.ByStatus[result.Status] = result.Count + switch result.Status { + case "active": + aggregate.TotalActive = result.Count + case "draft": + aggregate.TotalDraft = result.Count + case "inactive": + aggregate.TotalInactive = result.Count + } + } + mu.Unlock() + }() + + // 4. 
Get last updated and today's stats + wg.Add(1) + go func() { + defer wg.Done() + // Use context with shorter timeout + queryCtx, queryCancel := context.WithTimeout(ctx, 20*time.Second) + defer queryCancel() + + // Last updated + query1 := queryUtils.DynamicQuery{ + From: "m_pasien", + Fields: []queryUtils.SelectField{{Expression: "MAX(date_updated)"}}, + Filters: filterGroups, + } + var lastUpdated sql.NullTime + err := h.queryBuilder.ExecuteQueryRow(queryCtx, dbConn, query1, &lastUpdated) + if err != nil { + errChan <- fmt.Errorf("last updated query failed: %w", err) + return + } + + // Using QueryBuilder for today's statistics + today := time.Now().Format("2006-01-02") + + // Query for created_today + createdTodayQuery := queryUtils.DynamicQuery{ + From: "m_pasien", + Fields: []queryUtils.SelectField{ + {Expression: "COUNT(*)", Alias: "count"}, + }, + Filters: append(filterGroups, queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "DATE(date_created)", Operator: queryUtils.OpEqual, Value: today}, + }, + LogicOp: "AND", + }), + } + + var createdToday int + err = h.queryBuilder.ExecuteQueryRow(queryCtx, dbConn, createdTodayQuery, &createdToday) + if err != nil { + errChan <- fmt.Errorf("created today query failed: %w", err) + return + } + + // Query for updated_today (updated today but not created today) + updatedTodayQuery := queryUtils.DynamicQuery{ + From: "m_pasien", + Fields: []queryUtils.SelectField{ + {Expression: "COUNT(*)", Alias: "count"}, + }, + Filters: append(filterGroups, queryUtils.FilterGroup{ + Filters: []queryUtils.DynamicFilter{ + {Column: "DATE(date_updated)", Operator: queryUtils.OpEqual, Value: today}, + {Column: "DATE(date_created)", Operator: queryUtils.OpNotEqual, Value: today}, + }, + LogicOp: "AND", + }), + } + + var updatedToday int + err = h.queryBuilder.ExecuteQueryRow(queryCtx, dbConn, updatedTodayQuery, &updatedToday) + if err != nil { + errChan <- fmt.Errorf("updated today query failed: %w", err) + return + } 
+ + mu.Lock() + if lastUpdated.Valid { + aggregate.LastUpdated = &lastUpdated.Time + } + aggregate.CreatedToday = createdToday + aggregate.UpdatedToday = updatedToday + mu.Unlock() + }() + + wg.Wait() + close(errChan) + + for err := range errChan { + if err != nil { + return nil, err + } + } + + return aggregate, nil +} + +// logAndRespondError logs an error and sends a JSON response +func (h *PasienHandler) logAndRespondError(c *gin.Context, message string, err error, statusCode int) { + logger.Error(message, map[string]interface{}{"error": err.Error(), "status_code": statusCode}) + h.respondError(c, message, err, statusCode) +} + +// respondError sends a standardized JSON error response +func (h *PasienHandler) respondError(c *gin.Context, message string, err error, statusCode int) { + errorMessage := message + if gin.Mode() == gin.ReleaseMode { + errorMessage = "Internal server error" + } + c.JSON(statusCode, models.ErrorResponse{Error: errorMessage, Code: statusCode, Message: err.Error(), Timestamp: time.Now()}) +} + +// calculateMeta creates pagination metadata +func (h *PasienHandler) calculateMeta(limit, offset, total int) models.MetaResponse { + totalPages, currentPage := 0, 1 + if limit > 0 { + totalPages = (total + limit - 1) / limit + currentPage = (offset / limit) + 1 + } + return models.MetaResponse{ + Limit: limit, Offset: offset, Total: total, TotalPages: totalPages, + CurrentPage: currentPage, HasNext: offset+limit < total, HasPrev: offset > 0, + } +} diff --git a/internal/handlers/retribusi/retribusi.go b/internal/handlers/retribusi/retribusi.go index 667a5a2..db5fb1e 100644 --- a/internal/handlers/retribusi/retribusi.go +++ b/internal/handlers/retribusi/retribusi.go @@ -172,6 +172,7 @@ func NewRetribusiHandler() *RetribusiHandler { // @Failure 500 {object} models.ErrorResponse "Internal server error" // @Router /api/v1/retribusis [get] func (h *RetribusiHandler) GetRetribusi(c *gin.Context) { + logger.Info("Request received", 
map[string]interface{}{"method": c.Request.Method, "path": c.Request.URL.Path}) // CHANGE: Increase timeout for complex queries ctx, cancel := context.WithTimeout(c.Request.Context(), 120*time.Second) defer cancel() @@ -179,9 +180,31 @@ func (h *RetribusiHandler) GetRetribusi(c *gin.Context) { // CHANGE: Use the core fetchRetribusisDynamic function for all data retrieval logic. // We only need to build DynamicQuery from simple parameters. query := queryUtils.DynamicQuery{ - From: "data_retribusi", - Fields: []queryUtils.SelectField{{Expression: "*"}}, - Sort: []queryUtils.SortField{{Column: "date_created", Order: "DESC"}}, + From: "data_retribusi", + Fields: []queryUtils.SelectField{ + {Expression: "id"}, + {Expression: "status"}, + {Expression: "sort"}, + {Expression: "user_created"}, + {Expression: "date_created"}, + {Expression: "user_updated"}, + {Expression: "date_updated"}, + {Expression: "Jenis"}, + {Expression: "Pelayanan"}, + {Expression: "Dinas"}, + {Expression: "Kelompok_obyek"}, + {Expression: "Kode_tarif"}, + {Expression: "Tarif"}, + {Expression: "Satuan"}, + {Expression: "Tarif_overtime"}, + {Expression: "Satuan_overtime"}, + {Expression: "Rekening_pokok"}, + {Expression: "Rekening_denda"}, + {Expression: "Uraian_1"}, + {Expression: "Uraian_2"}, + {Expression: "Uraian_3"}, + }, + Sort: []queryUtils.SortField{{Column: "date_created", Order: "DESC"}}, } // Parse pagination @@ -343,6 +366,7 @@ func (h *RetribusiHandler) GetRetribusi(c *gin.Context) { // @Failure 500 {object} models.ErrorResponse "Internal server error" // @Router /api/v1/retribusi/{id} [get] func (h *RetribusiHandler) GetRetribusiByID(c *gin.Context) { + logger.Info("Request received", map[string]interface{}{"method": c.Request.Method, "path": c.Request.URL.Path}) id := c.Param("id") if _, err := uuid.Parse(id); err != nil { h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) @@ -375,8 +399,30 @@ func (h *RetribusiHandler) GetRetribusiByID(c *gin.Context) { defer cancel() 
dynamicQuery := queryUtils.DynamicQuery{ - From: "data_retribusi", - Fields: []queryUtils.SelectField{{Expression: "*"}}, + From: "data_retribusi", + Fields: []queryUtils.SelectField{ + {Expression: "id"}, + {Expression: "status"}, + {Expression: "sort"}, + {Expression: "user_created"}, + {Expression: "date_created"}, + {Expression: "user_updated"}, + {Expression: "date_updated"}, + {Expression: "Jenis"}, + {Expression: "Pelayanan"}, + {Expression: "Dinas"}, + {Expression: "Kelompok_obyek"}, + {Expression: "Kode_tarif"}, + {Expression: "Tarif"}, + {Expression: "Satuan"}, + {Expression: "Tarif_overtime"}, + {Expression: "Satuan_overtime"}, + {Expression: "Rekening_pokok"}, + {Expression: "Rekening_denda"}, + {Expression: "Uraian_1"}, + {Expression: "Uraian_2"}, + {Expression: "Uraian_3"}, + }, Filters: []queryUtils.FilterGroup{{ Filters: []queryUtils.DynamicFilter{ {Column: "id", Operator: queryUtils.OpEqual, Value: id}, @@ -424,6 +470,7 @@ func (h *RetribusiHandler) GetRetribusiByID(c *gin.Context) { // @Failure 500 {object} models.ErrorResponse "Internal server error" // @Router /api/v1/retribusis/dynamic [get] func (h *RetribusiHandler) GetRetribusiDynamic(c *gin.Context) { + logger.Info("Request received", map[string]interface{}{"method": c.Request.Method, "path": c.Request.URL.Path}) parser := queryUtils.NewQueryParser().SetLimits(10, 100) dynamicQuery, err := parser.ParseQuery(c.Request.URL.Query(), "data_retribusi") if err != nil { @@ -495,6 +542,7 @@ func (h *RetribusiHandler) GetRetribusiDynamic(c *gin.Context) { // @Failure 500 {object} models.ErrorResponse "Internal server error" // @Router /api/v1/retribusis [post] func (h *RetribusiHandler) CreateRetribusi(c *gin.Context) { + logger.Info("Request received", map[string]interface{}{"method": c.Request.Method, "path": c.Request.URL.Path}) var req retribusi.RetribusiCreateRequest if err := c.ShouldBindJSON(&req); err != nil { h.respondError(c, "Invalid request body", err, http.StatusBadRequest) @@ -599,6 
+647,7 @@ func (h *RetribusiHandler) CreateRetribusi(c *gin.Context) { // @Failure 500 {object} models.ErrorResponse "Internal server error" // @Router /api/v1/retribusi/{id} [put] func (h *RetribusiHandler) UpdateRetribusi(c *gin.Context) { + logger.Info("Request received", map[string]interface{}{"method": c.Request.Method, "path": c.Request.URL.Path}) id := c.Param("id") if _, err := uuid.Parse(id); err != nil { h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) @@ -748,6 +797,7 @@ func (h *RetribusiHandler) UpdateRetribusi(c *gin.Context) { // @Failure 500 {object} models.ErrorResponse "Internal server error" // @Router /api/v1/retribusi/{id} [delete] func (h *RetribusiHandler) DeleteRetribusi(c *gin.Context) { + logger.Info("Request received", map[string]interface{}{"method": c.Request.Method, "path": c.Request.URL.Path}) id := c.Param("id") if _, err := uuid.Parse(id); err != nil { h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) @@ -843,6 +893,7 @@ func (h *RetribusiHandler) DeleteRetribusi(c *gin.Context) { // @Failure 500 {object} models.ErrorResponse "Internal server error" // @Router /api/v1/retribusis/stats [get] func (h *RetribusiHandler) GetRetribusiStats(c *gin.Context) { + logger.Info("Request received", map[string]interface{}{"method": c.Request.Method, "path": c.Request.URL.Path}) // CHANGE: Try ambil dari cache terlebih dahulu cacheKey := fmt.Sprintf("retribusi:stats:%s", c.Query("status")) if cachedData, found := h.cache.Get(cacheKey); found { @@ -898,6 +949,19 @@ func (h *RetribusiHandler) GetRetribusiStats(c *gin.Context) { }) } +// GetWelcome godoc +// @Summary Get welcome message +// @Description Returns a welcome message and logs the request +// @Tags Retribusi +// @Accept json +// @Produce json +// @Success 200 {object} map[string]string "Welcome message" +// @Router /api/v1/retribusis/welcome [get] +func (h *RetribusiHandler) GetWelcome(c *gin.Context) { + logger.Info("Request received", 
map[string]interface{}{"method": c.Request.Method, "path": c.Request.URL.Path}) + c.JSON(http.StatusOK, gin.H{"message": "Welcome to the Retribusi API Service!"}) +} + // ============================================================================= // HELPER FUNCTIONS // ============================================================================= diff --git a/internal/models/pasien/pasien.go b/internal/models/pasien/pasien.go new file mode 100644 index 0000000..23021f7 --- /dev/null +++ b/internal/models/pasien/pasien.go @@ -0,0 +1,397 @@ +package pasien + +import ( + "api-service/internal/models" + "database/sql" + "encoding/json" + "time" +) + +// Pasien represents the data structure for the m_pasien table +// with proper null handling and optimized JSON marshaling +type Pasien struct { + ID int32 `json:"id" db:"id"` + Nomr sql.NullString `json:"nomr,omitempty" db:"nomr"` + Status sql.NullString `json:"status,omitempty" db:"status"` + Title sql.NullString `json:"title,omitempty" db:"title"` + Nama sql.NullString `json:"nama,omitempty" db:"nama"` + Tempat sql.NullString `json:"tempat,omitempty" db:"tempat"` + Tgllahir sql.NullTime `json:"tgllahir,omitempty" db:"tgllahir"` + Jeniskelamin sql.NullString `json:"jeniskelamin,omitempty" db:"jeniskelamin"` + Alamat sql.NullString `json:"alamat,omitempty" db:"alamat"` + Kelurahan sql.NullInt64 `json:"kelurahan,omitempty" db:"kelurahan"` + Kdkecamatan sql.NullInt32 `json:"kdkecamatan,omitempty" db:"kdkecamatan"` + Kota sql.NullInt32 `json:"kota,omitempty" db:"kota"` + Kdprovinsi sql.NullInt32 `json:"kdprovinsi,omitempty" db:"kdprovinsi"` + Agama sql.NullInt32 `json:"agama,omitempty" db:"agama"` + NoKartu sql.NullString `json:"noKartu,omitempty" db:"no_kartu"` + NoktpBaru sql.NullString `json:"noktpBaru,omitempty" db:"noktp_baru"` + CreatedAt sql.NullTime `json:"createdAt,omitempty" db:"created_at"` + UpdatedAt sql.NullTime `json:"updatedAt,omitempty" db:"updated_at"` + Idprovinsi int32 `json:"idprovinsi" db:"idprovinsi"` 
+ Namaprovinsi sql.NullString `json:"namaprovinsi,omitempty" db:"namaprovinsi"` + Idkota int32 `json:"idkota" db:"idkota"` + Namakota sql.NullString `json:"namakota,omitempty" db:"namakota"` + Idkecamatan int64 `json:"idkecamatan" db:"idkecamatan"` + Namakecamatan sql.NullString `json:"namakecamatan,omitempty" db:"namakecamatan"` + Idkelurahan int64 `json:"idkelurahan" db:"idkelurahan"` + Namakelurahan sql.NullString `json:"namakelurahan,omitempty" db:"namakelurahan"` +} + +// Custom JSON marshaling for Pasien so NULL values don't appear in response +func (r Pasien) MarshalJSON() ([]byte, error) { + type Alias Pasien + aux := &struct { + *Alias + Nomr *string `json:"nomr,omitempty"` + Status *string `json:"status,omitempty"` + Title *string `json:"title,omitempty"` + Nama *string `json:"nama,omitempty"` + Tempat *string `json:"tempat,omitempty"` + Tgllahir *time.Time `json:"tgllahir,omitempty"` + Jeniskelamin *string `json:"jeniskelamin,omitempty"` + Alamat *string `json:"alamat,omitempty"` + Kelurahan *int64 `json:"kelurahan,omitempty"` + Kdkecamatan *int32 `json:"kdkecamatan,omitempty"` + Kota *int32 `json:"kota,omitempty"` + Kdprovinsi *int32 `json:"kdprovinsi,omitempty"` + Agama *int32 `json:"agama,omitempty"` + NoKartu *string `json:"noKartu,omitempty"` + NoktpBaru *string `json:"noktpBaru,omitempty"` + CreatedAt *time.Time `json:"createdAt,omitempty"` + UpdatedAt *time.Time `json:"updatedAt,omitempty"` + Namaprovinsi *string `json:"namaprovinsi,omitempty"` + Namakota *string `json:"namakota,omitempty"` + Namakecamatan *string `json:"namakecamatan,omitempty"` + Namakelurahan *string `json:"namakelurahan,omitempty"` + }{ + Alias: (*Alias)(&r), + } + + if r.Nomr.Valid { + aux.Nomr = &r.Nomr.String + } + if r.Status.Valid { + aux.Status = &r.Status.String + } + if r.Title.Valid { + aux.Title = &r.Title.String + } + if r.Nama.Valid { + aux.Nama = &r.Nama.String + } + if r.Tempat.Valid { + aux.Tempat = &r.Tempat.String + } + if r.Tgllahir.Valid { + aux.Tgllahir = 
&r.Tgllahir.Time + } + if r.Jeniskelamin.Valid { + aux.Jeniskelamin = &r.Jeniskelamin.String + } + if r.Alamat.Valid { + aux.Alamat = &r.Alamat.String + } + if r.Kelurahan.Valid { + aux.Kelurahan = &r.Kelurahan.Int64 + } + if r.Kdkecamatan.Valid { + aux.Kdkecamatan = &r.Kdkecamatan.Int32 + } + if r.Kota.Valid { + aux.Kota = &r.Kota.Int32 + } + if r.Kdprovinsi.Valid { + aux.Kdprovinsi = &r.Kdprovinsi.Int32 + } + if r.Agama.Valid { + aux.Agama = &r.Agama.Int32 + } + if r.NoKartu.Valid { + aux.NoKartu = &r.NoKartu.String + } + if r.NoktpBaru.Valid { + aux.NoktpBaru = &r.NoktpBaru.String + } + if r.CreatedAt.Valid { + aux.CreatedAt = &r.CreatedAt.Time + } + if r.UpdatedAt.Valid { + aux.UpdatedAt = &r.UpdatedAt.Time + } + if r.Namaprovinsi.Valid { + aux.Namaprovinsi = &r.Namaprovinsi.String + } + if r.Namakota.Valid { + aux.Namakota = &r.Namakota.String + } + if r.Namakecamatan.Valid { + aux.Namakecamatan = &r.Namakecamatan.String + } + if r.Namakelurahan.Valid { + aux.Namakelurahan = &r.Namakelurahan.String + } + return json.Marshal(aux) +} + +// Helper method to safely get Nomr +func (r *Pasien) GetNomr() string { + if r.Nomr.Valid { + return r.Nomr.String + } + return "" +} + +// Helper method to safely get Status +func (r *Pasien) GetStatus() string { + if r.Status.Valid { + return r.Status.String + } + return "" +} + +// Helper method to safely get Title +func (r *Pasien) GetTitle() string { + if r.Title.Valid { + return r.Title.String + } + return "" +} + +// Helper method to safely get Nama +func (r *Pasien) GetNama() string { + if r.Nama.Valid { + return r.Nama.String + } + return "" +} + +// Helper method to safely get Tempat +func (r *Pasien) GetTempat() string { + if r.Tempat.Valid { + return r.Tempat.String + } + return "" +} + +// Helper method to safely get Tgllahir +func (r *Pasien) GetTgllahir() time.Time { + if r.Tgllahir.Valid { + return r.Tgllahir.Time + } + return time.Time{} +} + +// Helper method to safely get Jeniskelamin +func (r *Pasien) 
GetJeniskelamin() string { + if r.Jeniskelamin.Valid { + return r.Jeniskelamin.String + } + return "" +} + +// Helper method to safely get Alamat +func (r *Pasien) GetAlamat() string { + if r.Alamat.Valid { + return r.Alamat.String + } + return "" +} + +// Helper method to safely get Kelurahan +func (r *Pasien) GetKelurahan() int64 { + if r.Kelurahan.Valid { + return r.Kelurahan.Int64 + } + return 0 +} + +// Helper method to safely get Kdkecamatan +func (r *Pasien) GetKdkecamatan() int32 { + if r.Kdkecamatan.Valid { + return r.Kdkecamatan.Int32 + } + return 0 +} + +// Helper method to safely get Kota +func (r *Pasien) GetKota() int32 { + if r.Kota.Valid { + return r.Kota.Int32 + } + return 0 +} + +// Helper method to safely get Kdprovinsi +func (r *Pasien) GetKdprovinsi() int32 { + if r.Kdprovinsi.Valid { + return r.Kdprovinsi.Int32 + } + return 0 +} + +// Helper method to safely get Agama +func (r *Pasien) GetAgama() int32 { + if r.Agama.Valid { + return r.Agama.Int32 + } + return 0 +} + +// Helper method to safely get NoKartu +func (r *Pasien) GetNoKartu() string { + if r.NoKartu.Valid { + return r.NoKartu.String + } + return "" +} + +// Helper method to safely get NoktpBaru +func (r *Pasien) GetNoktpBaru() string { + if r.NoktpBaru.Valid { + return r.NoktpBaru.String + } + return "" +} + +// Helper method to safely get CreatedAt +func (r *Pasien) GetCreatedAt() time.Time { + if r.CreatedAt.Valid { + return r.CreatedAt.Time + } + return time.Time{} +} + +// Helper method to safely get UpdatedAt +func (r *Pasien) GetUpdatedAt() time.Time { + if r.UpdatedAt.Valid { + return r.UpdatedAt.Time + } + return time.Time{} +} + +// Helper method to safely get Namaprovinsi +func (r *Pasien) GetNamaprovinsi() string { + if r.Namaprovinsi.Valid { + return r.Namaprovinsi.String + } + return "" +} + +// Helper method to safely get Namakota +func (r *Pasien) GetNamakota() string { + if r.Namakota.Valid { + return r.Namakota.String + } + return "" +} + +// Helper method to safely 
get Namakecamatan +func (r *Pasien) GetNamakecamatan() string { + if r.Namakecamatan.Valid { + return r.Namakecamatan.String + } + return "" +} + +// Helper method to safely get Namakelurahan +func (r *Pasien) GetNamakelurahan() string { + if r.Namakelurahan.Valid { + return r.Namakelurahan.String + } + return "" +} + +// Response struct for delete +type PasienDeleteResponse struct { + Message string `json:"message"` + ID string `json:"id"` +} + +// Response struct for by age +type PasienAgeStatsResponse struct { + Message string `json:"message"` + Data map[string]interface{} `json:"data"` +} + +// Response struct for create +type PasienCreateResponse struct { + Message string `json:"message"` + Data *Pasien `json:"data"` +} + +// Request struct for create +type PasienCreateRequest struct { + Status *string `json:"status" validate:"required,oneof=draft active inactive"` + ID *int32 `json:"id"` + Nomr *string `json:"nomr"` + Title *string `json:"title" validate:"required,min=1,max=100"` + Nama *string `json:"nama" validate:"required,min=1,max=100"` + Tempat *string `json:"tempat"` + Tgllahir *time.Time `json:"tgllahir"` + Jeniskelamin *string `json:"jeniskelamin" validate:"oneof=L P"` + Alamat *string `json:"alamat"` + Kelurahan *int64 `json:"kelurahan"` + Kdkecamatan *int32 `json:"kdkecamatan"` + Kota *int32 `json:"kota"` + Kdprovinsi *int32 `json:"kdprovinsi"` + Agama *int32 `json:"agama"` + NoKartu *string `json:"noKartu"` + NoktpBaru *string `json:"noktpBaru"` +} + +// Response struct for GET list +type PasienGetResponse struct { + Message string `json:"message"` + Data []Pasien `json:"data"` + Meta models.MetaResponse `json:"meta"` + Summary *models.AggregateData `json:"summary,omitempty"` +} + +// Response struct for update +type PasienUpdateResponse struct { + Message string `json:"message"` + Data *Pasien `json:"data"` +} + +// Update request +type PasienUpdateRequest struct { + ID *int32 `json:"-" validate:"required"` + Status *string `json:"status" 
validate:"required,oneof=draft active inactive"` + Nomr *string `json:"nomr"` + Title *string `json:"title" validate:"omitempty,min=1,max=255"` + Nama *string `json:"nama" validate:"required,min=1,max=100"` + Tempat *string `json:"tempat"` + Tgllahir *time.Time `json:"tgllahir"` + Jeniskelamin *string `json:"jeniskelamin" validate:"oneof=L P"` + Alamat *string `json:"alamat"` + Kelurahan *int64 `json:"kelurahan"` + Kdkecamatan *int32 `json:"kdkecamatan"` + Kota *int32 `json:"kota"` + Kdprovinsi *int32 `json:"kdprovinsi"` + Agama *int32 `json:"agama"` + NoKartu *string `json:"noKartu"` + NoktpBaru *string `json:"noktpBaru"` +} + +// Response struct for get by ID +type PasienGetByIDResponse struct { + Message string `json:"message"` + Data *Pasien `json:"data"` +} + +// Response struct for get by nomr +type PasienGetByNomrResponse struct { + Message string `json:"message"` + Data []Pasien `json:"data"` + Meta models.MetaResponse `json:"meta"` + Summary *models.AggregateData `json:"summary,omitempty"` +} + +// Filter struct for query parameters +type PasienFilter struct { + Search *string `json:"search,omitempty" form:"search"` + DateFrom *time.Time `json:"date_from,omitempty" form:"date_from"` + DateTo *time.Time `json:"date_to,omitempty" form:"date_to"` + Status *string `json:"status,omitempty" form:"status"` +} diff --git a/internal/routes/v1/routes.go b/internal/routes/v1/routes.go index 9cc5abe..9dab660 100644 --- a/internal/routes/v1/routes.go +++ b/internal/routes/v1/routes.go @@ -5,6 +5,8 @@ import ( "api-service/internal/database" authHandlers "api-service/internal/handlers/auth" healthcheckHandlers "api-service/internal/handlers/healthcheck" + + pasienPasienHandlers "api-service/internal/handlers/pasien" retribusiHandlers "api-service/internal/handlers/retribusi" "api-service/internal/middleware" services "api-service/internal/services/auth" @@ -121,6 +123,19 @@ func RegisterRoutes(cfg *config.Config) *gin.Engine { retribusiHandler.DeleteRetribusi(c) }) } + 
// Pasien endpoints + pasienPasienHandler := pasienPasienHandlers.NewPasienHandler() + pasienPasienGroup := v1.Group("/pasien") + { + pasienPasienGroup.PUT("/:nomr", pasienPasienHandler.UpdatePasien) + pasienPasienGroup.POST("/", pasienPasienHandler.CreatePasien) + pasienPasienGroup.DELETE("/:nomr", pasienPasienHandler.DeletePasien) + pasienPasienGroup.GET("/dynamic", pasienPasienHandler.GetPasienDynamic) + pasienPasienGroup.GET("/", pasienPasienHandler.GetPasien) + pasienPasienGroup.GET("/by-age", pasienPasienHandler.GetPasienByAge) + pasienPasienGroup.GET("/:nomr", pasienPasienHandler.GetPasienByNomr) + pasienPasienGroup.GET("/by-location", pasienPasienHandler.GetPasienByLocation) + } // ============================================================================= // PROTECTED ROUTES (Authentication Required) diff --git a/internal/utils/query/builder.go b/internal/utils/query/builder.go index ca351b6..95360b2 100644 --- a/internal/utils/query/builder.go +++ b/internal/utils/query/builder.go @@ -1812,6 +1812,24 @@ func (qp *QueryParser) parseSorting(values url.Values) ([]SortField, error) { return sorts, nil } +// ParseQueryWithDefaultFields parses URL query parameters into a DynamicQuery struct with default fields. 
+func (qp *QueryParser) ParseQueryWithDefaultFields(values url.Values, defaultTable string, defaultFields []string) (DynamicQuery, error) { + query, err := qp.ParseQuery(values, defaultTable) + if err != nil { + return query, err + } + + // If no fields specified, use default fields + if len(query.Fields) == 0 || (len(query.Fields) == 1 && query.Fields[0].Expression == "*") { + query.Fields = make([]SelectField, len(defaultFields)) + for i, field := range defaultFields { + query.Fields[i] = SelectField{Expression: field} + } + } + + return query, nil +} + // ============================================================================= // MONGODB QUERY BUILDER // ============================================================================= diff --git a/internal/utils/query/exemple.go.exemple b/internal/utils/query/exemple.go.txt similarity index 100% rename from internal/utils/query/exemple.go.exemple rename to internal/utils/query/exemple.go.txt diff --git a/internal/utils/query/exemple.txt b/internal/utils/query/exemple.txt new file mode 100644 index 0000000..a36651a --- /dev/null +++ b/internal/utils/query/exemple.txt @@ -0,0 +1,918 @@ +package main + +import ( + "context" + "database/sql" + "fmt" + "log" + "time" + + "github.com/jmoiron/sqlx" + _ "github.com/lib/pq" // PostgreSQL driver + "yourpackage/utils" +) + +func main() { + // Inisialisasi koneksi database + db, err := sqlx.Connect("postgres", "user=postgres dbname=testdb sslmode=disable") + if err != nil { + log.Fatalf("Failed to connect to database: %v", err) + } + defer db.Close() + + // Inisialisasi QueryBuilder + qb := utils.NewQueryBuilder(utils.DBTypePostgreSQL) + + // Contoh penggunaan + simpleQueryExample(db, qb) + complexQueryExample(db, qb) + nestedJoinExample(db, qb) + multiJoinExample(db, qb) + commonQueriesExample(db, qb) + jsonQueryExample(db, qb) + windowFunctionExample(db, qb) + cteExample(db, qb) + unionExample(db, qb) + aggregateExample(db, qb) +} + +func simpleQueryExample(db *sqlx.DB, 
qb *utils.QueryBuilder) { + fmt.Println("\n=== Simple Query Example ===") + + // Query sederhana dengan filter + query := utils.DynamicQuery{ + From: "users", + Fields: []utils.SelectField{ + {Expression: "id", Alias: "user_id"}, + {Expression: "name", Alias: "user_name"}, + {Expression: "email"}, + }, + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "status", Operator: utils.OpEqual, Value: "active"}, + {Column: "created_at", Operator: utils.OpGreaterThanEqual, Value: time.Now().AddDate(0, -1, 0)}, + }, + LogicOp: "AND", + }, + }, + Sort: []utils.SortField{ + {Column: "name", Order: "ASC"}, + }, + Limit: 10, + } + + var results []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &results) + if err != nil { + log.Printf("Error executing simple query: %v", err) + return + } + + fmt.Printf("Found %d users\n", len(results)) + for _, user := range results { + fmt.Printf("User: %+v\n", user) + } +} + +func complexQueryExample(db *sqlx.DB, qb *utils.QueryBuilder) { + fmt.Println("\n=== Complex Query Example ===") + + // Query dengan nested filter dan berbagai operator + query := utils.DynamicQuery{ + From: "orders", + Fields: []utils.SelectField{ + {Expression: "id", Alias: "order_id"}, + {Expression: "customer_id"}, + {Expression: "total_amount"}, + {Expression: "order_date"}, + {Expression: "status"}, + }, + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "status", Operator: utils.OpIn, Value: []string{"completed", "processing"}}, + {Column: "total_amount", Operator: utils.OpGreaterThan, Value: 1000}, + }, + LogicOp: "AND", + }, + { + Filters: []utils.DynamicFilter{ + {Column: "order_date", Operator: utils.OpBetween, Value: []interface{}{time.Now().AddDate(0, -3, 0), time.Now()}}, + {Column: "customer_id", Operator: utils.OpNotIn, Value: []int{1, 2, 3}}, + }, + LogicOp: "OR", + }, + }, + Sort: []utils.SortField{ + {Column: "order_date", Order: "DESC"}, + {Column: 
"total_amount", Order: "DESC"}, + }, + Limit: 20, + Offset: 10, + } + + var results []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &results) + if err != nil { + log.Printf("Error executing complex query: %v", err) + return + } + + fmt.Printf("Found %d orders\n", len(results)) + for _, order := range results { + fmt.Printf("Order: %+v\n", order) + } +} + +func nestedJoinExample(db *sqlx.DB, qb *utils.QueryBuilder) { + fmt.Println("\n=== Nested Join Example ===") + + // Query dengan nested join + query := utils.DynamicQuery{ + From: "customers", + Fields: []utils.SelectField{ + {Expression: "customers.id", Alias: "customer_id"}, + {Expression: "customers.name", Alias: "customer_name"}, + {Expression: "orders.id", Alias: "order_id"}, + {Expression: "orders.total_amount"}, + {Expression: "order_items.product_id"}, + {Expression: "order_items.quantity"}, + {Expression: "products.name", Alias: "product_name"}, + }, + Joins: []utils.Join{ + { + Type: "LEFT", + Table: "orders", + Alias: "orders", + OnConditions: utils.FilterGroup{ + Filters: []utils.DynamicFilter{ + {Column: "customers.id", Operator: utils.OpEqual, Value: "orders.customer_id"}, + }, + }, + }, + { + Type: "LEFT", + Table: "order_items", + Alias: "order_items", + OnConditions: utils.FilterGroup{ + Filters: []utils.DynamicFilter{ + {Column: "orders.id", Operator: utils.OpEqual, Value: "order_items.order_id"}, + }, + }, + }, + { + Type: "LEFT", + Table: "products", + Alias: "products", + OnConditions: utils.FilterGroup{ + Filters: []utils.DynamicFilter{ + {Column: "order_items.product_id", Operator: utils.OpEqual, Value: "products.id"}, + }, + }, + }, + }, + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "customers.status", Operator: utils.OpEqual, Value: "active"}, + {Column: "orders.status", Operator: utils.OpEqual, Value: "completed"}, + }, + LogicOp: "AND", + }, + }, + Sort: []utils.SortField{ + {Column: "customers.name", Order: "ASC"}, + 
{Column: "orders.id", Order: "DESC"}, + }, + Limit: 50, + } + + var results []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &results) + if err != nil { + log.Printf("Error executing nested join query: %v", err) + return + } + + fmt.Printf("Found %d customer-order-product records\n", len(results)) + for _, record := range results { + fmt.Printf("Record: %+v\n", record) + } +} + +func multiJoinExample(db *sqlx.DB, qb *utils.QueryBuilder) { + fmt.Println("\n=== Multi Join Example ===") + + // Query dengan multiple join types + query := utils.DynamicQuery{ + From: "employees", + Fields: []utils.SelectField{ + {Expression: "employees.id", Alias: "employee_id"}, + {Expression: "employees.name", Alias: "employee_name"}, + {Expression: "departments.name", Alias: "department_name"}, + {Expression: "projects.name", Alias: "project_name"}, + {Expression: "tasks.title", Alias: "task_title"}, + {Expression: "task_assignments.assigned_date"}, + }, + Joins: []utils.Join{ + { + Type: "INNER", + Table: "departments", + Alias: "departments", + OnConditions: utils.FilterGroup{ + Filters: []utils.DynamicFilter{ + {Column: "employees.department_id", Operator: utils.OpEqual, Value: "departments.id"}, + }, + }, + }, + { + Type: "LEFT", + Table: "task_assignments", + Alias: "task_assignments", + OnConditions: utils.FilterGroup{ + Filters: []utils.DynamicFilter{ + {Column: "employees.id", Operator: utils.OpEqual, Value: "task_assignments.employee_id"}, + }, + }, + }, + { + Type: "LEFT", + Table: "tasks", + Alias: "tasks", + OnConditions: utils.FilterGroup{ + Filters: []utils.DynamicFilter{ + {Column: "task_assignments.task_id", Operator: utils.OpEqual, Value: "tasks.id"}, + }, + }, + }, + { + Type: "LEFT", + Table: "projects", + Alias: "projects", + OnConditions: utils.FilterGroup{ + Filters: []utils.DynamicFilter{ + {Column: "tasks.project_id", Operator: utils.OpEqual, Value: "projects.id"}, + }, + }, + }, + }, + Filters: []utils.FilterGroup{ + { + 
Filters: []utils.DynamicFilter{ + {Column: "employees.status", Operator: utils.OpEqual, Value: "active"}, + {Column: "departments.status", Operator: utils.OpEqual, Value: "active"}, + }, + LogicOp: "AND", + }, + }, + Sort: []utils.SortField{ + {Column: "departments.name", Order: "ASC"}, + {Column: "employees.name", Order: "ASC"}, + {Column: "task_assignments.assigned_date", Order: "DESC"}, + }, + Limit: 100, + } + + var results []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &results) + if err != nil { + log.Printf("Error executing multi join query: %v", err) + return + } + + fmt.Printf("Found %d employee-task-project records\n", len(results)) + for _, record := range results { + fmt.Printf("Record: %+v\n", record) + } +} + +func commonQueriesExample(db *sqlx.DB, qb *utils.QueryBuilder) { + fmt.Println("\n=== Common Queries Example ===") + + // 1. Query dengan LIKE/ILIKE + likeQuery := utils.DynamicQuery{ + From: "products", + Fields: []utils.SelectField{ + {Expression: "id"}, + {Expression: "name"}, + {Expression: "price"}, + {Expression: "category"}, + }, + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "name", Operator: utils.OpILike, Value: "%laptop%"}, + {Column: "category", Operator: utils.OpEqual, Value: "electronics"}, + }, + LogicOp: "AND", + }, + }, + Sort: []utils.SortField{ + {Column: "price", Order: "ASC"}, + }, + Limit: 10, + } + + var products []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, likeQuery, &products) + if err != nil { + log.Printf("Error executing LIKE query: %v", err) + } else { + fmt.Printf("Found %d products matching 'laptop'\n", len(products)) + } + + // 2. 
Query dengan pagination + page := 2 + pageSize := 20 + paginationQuery := utils.DynamicQuery{ + From: "orders", + Fields: []utils.SelectField{ + {Expression: "id"}, + {Expression: "customer_id"}, + {Expression: "total_amount"}, + {Expression: "order_date"}, + }, + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "status", Operator: utils.OpEqual, Value: "completed"}, + }, + }, + }, + Sort: []utils.SortField{ + {Column: "order_date", Order: "DESC"}, + }, + Limit: pageSize, + Offset: (page - 1) * pageSize, + } + + var orders []map[string]interface{} + err = qb.ExecuteQuery(context.Background(), db, paginationQuery, &orders) + if err != nil { + log.Printf("Error executing pagination query: %v", err) + } else { + fmt.Printf("Found %d orders on page %d\n", len(orders), page) + } + + // 3. Query dengan NULL/NOT NULL + nullQuery := utils.DynamicQuery{ + From: "customers", + Fields: []utils.SelectField{ + {Expression: "id"}, + {Expression: "name"}, + {Expression: "email"}, + {Expression: "phone"}, + }, + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "email", Operator: utils.OpNotNull}, + {Column: "phone", Operator: utils.OpNull}, + }, + LogicOp: "AND", + }, + }, + Limit: 10, + } + + var customers []map[string]interface{} + err = qb.ExecuteQuery(context.Background(), db, nullQuery, &customers) + if err != nil { + log.Printf("Error executing NULL query: %v", err) + } else { + fmt.Printf("Found %d customers with email but no phone\n", len(customers)) + } + + // 4. 
Query dengan BETWEEN + betweenQuery := utils.DynamicQuery{ + From: "transactions", + Fields: []utils.SelectField{ + {Expression: "id"}, + {Expression: "account_id"}, + {Expression: "amount"}, + {Expression: "transaction_date"}, + }, + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "amount", Operator: utils.OpBetween, Value: []interface{}{100, 1000}}, + {Column: "transaction_date", Operator: utils.OpBetween, Value: []interface{}{time.Now().AddDate(0, -1, 0), time.Now()}}, + }, + LogicOp: "AND", + }, + }, + Sort: []utils.SortField{ + {Column: "transaction_date", Order: "DESC"}, + }, + Limit: 20, + } + + var transactions []map[string]interface{} + err = qb.ExecuteQuery(context.Background(), db, betweenQuery, &transactions) + if err != nil { + log.Printf("Error executing BETWEEN query: %v", err) + } else { + fmt.Printf("Found %d transactions between $100 and $1000 in the last month\n", len(transactions)) + } +} + +func jsonQueryExample(db *sqlx.DB, qb *utils.QueryBuilder) { + fmt.Println("\n=== JSON Query Example ===") + + // Query dengan operasi JSON + query := utils.DynamicQuery{ + From: "products", + Fields: []utils.SelectField{ + {Expression: "id"}, + {Expression: "name"}, + {Expression: "price"}, + {Expression: "attributes"}, + }, + JsonOperations: []utils.JsonOperation{ + { + Type: "extract", + Column: "attributes", + Path: "$.color", + Alias: "color", + }, + { + Type: "extract", + Column: "attributes", + Path: "$.size", + Alias: "size", + }, + }, + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + { + Column: "attributes", + Operator: utils.OpJsonContains, + Value: map[string]interface{}{"category": "electronics"}, + Options: map[string]interface{}{"path": "$"}, + }, + }, + }, + }, + Sort: []utils.SortField{ + {Column: "name", Order: "ASC"}, + }, + Limit: 10, + } + + var results []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &results) + if err != nil { + log.Printf("Error 
executing JSON query: %v", err) + return + } + + fmt.Printf("Found %d products with JSON attributes\n", len(results)) + for _, product := range results { + fmt.Printf("Product: %+v\n", product) + } +} + +func windowFunctionExample(db *sqlx.DB, qb *utils.QueryBuilder) { + fmt.Println("\n=== Window Function Example ===") + + // Query dengan window functions + query := utils.DynamicQuery{ + From: "sales", + Fields: []utils.SelectField{ + {Expression: "id"}, + {Expression: "salesperson_id"}, + {Expression: "amount"}, + {Expression: "sale_date"}, + }, + WindowFunctions: []utils.WindowFunction{ + { + Function: "ROW_NUMBER", + Over: "salesperson_id", + OrderBy: "amount DESC", + Alias: "sales_rank", + }, + { + Function: "SUM", + Over: "salesperson_id", + OrderBy: "sale_date", + Frame: "ROWS UNBOUNDED PRECEDING", + Alias: "running_total", + }, + }, + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "sale_date", Operator: utils.OpGreaterThanEqual, Value: time.Now().AddDate(0, -6, 0)}, + }, + }, + }, + Sort: []utils.SortField{ + {Column: "salesperson_id", Order: "ASC"}, + {Column: "amount", Order: "DESC"}, + }, + Limit: 50, + } + + var results []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &results) + if err != nil { + log.Printf("Error executing window function query: %v", err) + return + } + + fmt.Printf("Found %d sales records with window functions\n", len(results)) + for _, sale := range results { + fmt.Printf("Sale: %+v\n", sale) + } +} + +func cteExample(db *sqlx.DB, qb *utils.QueryBuilder) { + fmt.Println("\n=== CTE Example ===") + + // Query dengan CTE + query := utils.DynamicQuery{ + CTEs: []utils.CTE{ + { + Name: "monthly_sales", + Query: utils.DynamicQuery{ + Fields: []utils.SelectField{ + {Expression: "salesperson_id"}, + {Expression: "EXTRACT(MONTH FROM sale_date) AS month"}, + {Expression: "SUM(amount) AS total"}, + }, + From: "sales", + Filters: []utils.FilterGroup{ + { + Filters: 
[]utils.DynamicFilter{ + {Column: "sale_date", Operator: utils.OpGreaterThanEqual, Value: time.Now().AddDate(-1, 0, 0)}, + }, + }, + }, + GroupBy: []string{"salesperson_id", "EXTRACT(MONTH FROM sale_date)"}, + }, + }, + { + Name: "top_salespeople", + Query: utils.DynamicQuery{ + Fields: []utils.SelectField{ + {Expression: "salesperson_id"}, + {Expression: "SUM(total) AS yearly_total"}, + }, + From: "monthly_sales", + GroupBy: []string{"salesperson_id"}, + Having: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "SUM(total)", Operator: utils.OpGreaterThan, Value: 10000}, + }, + }, + }, + }, + }, + }, + Fields: []utils.SelectField{ + {Expression: "salespeople.id"}, + {Expression: "salespeople.name"}, + {Expression: "top_salespeople.yearly_total"}, + }, + From: "salespeople", + Joins: []utils.Join{ + { + Type: "INNER", + Table: "top_salespeople", + Alias: "top_salespeople", + OnConditions: utils.FilterGroup{ + Filters: []utils.DynamicFilter{ + {Column: "salespeople.id", Operator: utils.OpEqual, Value: "top_salespeople.salesperson_id"}, + }, + }, + }, + }, + Sort: []utils.SortField{ + {Column: "top_salespeople.yearly_total", Order: "DESC"}, + }, + Limit: 10, + } + + var results []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &results) + if err != nil { + log.Printf("Error executing CTE query: %v", err) + return + } + + fmt.Printf("Found %d top salespeople\n", len(results)) + for _, salesperson := range results { + fmt.Printf("Salesperson: %+v\n", salesperson) + } +} + +func unionExample(db *sqlx.DB, qb *utils.QueryBuilder) { + fmt.Println("\n=== UNION Example ===") + + // Query dengan UNION + query := utils.DynamicQuery{ + Fields: []utils.SelectField{ + {Expression: "id"}, + {Expression: "name"}, + {Expression: "email"}, + {Expression: "'customer' AS user_type"}, + }, + From: "customers", + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "status", Operator: utils.OpEqual, Value: 
"active"}, + }, + }, + }, + Unions: []utils.Union{ + { + Type: "UNION ALL", + Query: utils.DynamicQuery{ + Fields: []utils.SelectField{ + {Expression: "id"}, + {Expression: "name"}, + {Expression: "email"}, + {Expression: "'employee' AS user_type"}, + }, + From: "employees", + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "status", Operator: utils.OpEqual, Value: "active"}, + }, + }, + }, + }, + }, + }, + Sort: []utils.SortField{ + {Column: "name", Order: "ASC"}, + }, + Limit: 20, + } + + var results []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &results) + if err != nil { + log.Printf("Error executing UNION query: %v", err) + return + } + + fmt.Printf("Found %d users (customers + employees)\n", len(results)) + for _, user := range results { + fmt.Printf("User: %+v\n", user) + } +} + +func aggregateExample(db *sqlx.DB, qb *utils.QueryBuilder) { + fmt.Println("\n=== Aggregate Example ===") + + // Query dengan fungsi agregasi + query := utils.DynamicQuery{ + Fields: []utils.SelectField{ + {Expression: "category"}, + {Expression: "COUNT(*) AS product_count"}, + {Expression: "AVG(price) AS avg_price"}, + {Expression: "MIN(price) AS min_price"}, + {Expression: "MAX(price) AS max_price"}, + {Expression: "SUM(stock_quantity) AS total_stock"}, + }, + From: "products", + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "status", Operator: utils.OpEqual, Value: "active"}, + }, + }, + }, + GroupBy: []string{"category"}, + Having: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "COUNT(*)", Operator: utils.OpGreaterThan, Value: 5}, + }, + }, + }, + Sort: []utils.SortField{ + {Column: "product_count", Order: "DESC"}, + }, + Limit: 10, + } + + var results []map[string]interface{} + err := qb.ExecuteQuery(context.Background(), db, query, &results) + if err != nil { + log.Printf("Error executing aggregate query: %v", err) + return + } + + fmt.Printf("Found %d 
product categories\n", len(results)) + for _, category := range results { + fmt.Printf("Category: %+v\n", category) + } +} + +func crudOperationsExample(db *sqlx.DB, qb *utils.QueryBuilder) { + fmt.Println("\n=== CRUD Operations Example ===") + + // INSERT + insertData := utils.InsertData{ + Columns: []string{"name", "email", "status", "created_at"}, + Values: []interface{}{"John Doe", "john@example.com", "active", time.Now()}, + JsonValues: map[string]interface{}{ + "preferences": map[string]interface{}{ + "theme": "dark", + "language": "en", + }, + }, + } + + result, err := qb.ExecuteInsert(context.Background(), db, "customers", insertData, "id") + if err != nil { + log.Printf("Error executing INSERT: %v", err) + return + } + + id, err := result.LastInsertId() + if err != nil { + log.Printf("Error getting inserted ID: %v", err) + return + } + + fmt.Printf("Inserted customer with ID: %d\n", id) + + // UPDATE + updateData := utils.UpdateData{ + Columns: []string{"name", "status"}, + Values: []interface{}{"John Smith", "inactive"}, + JsonUpdates: map[string]utils.JsonUpdate{ + "preferences": { + Path: "$.theme", + Value: "light", + }, + }, + } + + filters := []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "id", Operator: utils.OpEqual, Value: id}, + }, + }, + } + + result, err = qb.ExecuteUpdate(context.Background(), db, "customers", updateData, filters, "updated_at") + if err != nil { + log.Printf("Error executing UPDATE: %v", err) + return + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + log.Printf("Error getting rows affected: %v", err) + return + } + + fmt.Printf("Updated %d customer(s)\n", rowsAffected) + + // DELETE + result, err = qb.ExecuteDelete(context.Background(), db, "customers", filters) + if err != nil { + log.Printf("Error executing DELETE: %v", err) + return + } + + rowsAffected, err = result.RowsAffected() + if err != nil { + log.Printf("Error getting rows affected: %v", err) + return + } + + 
fmt.Printf("Deleted %d customer(s)\n", rowsAffected) +} + + +func mongoExample() { + fmt.Println("\n=== MongoDB Example ===") + + // Inisialisasi koneksi MongoDB + client, err := mongo.Connect(context.Background(), options.Client().ApplyURI("mongodb://localhost:27017")) + if err != nil { + log.Fatalf("Failed to connect to MongoDB: %v", err) + } + defer client.Disconnect(context.Background()) + + db := client.Database("testdb") + collection := db.Collection("users") + + // Inisialisasi MongoQueryBuilder + mqb := utils.NewMongoQueryBuilder() + + // Query sederhana + query := utils.DynamicQuery{ + Fields: []utils.SelectField{ + {Expression: "name"}, + {Expression: "email"}, + {Expression: "status"}, + }, + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "status", Operator: utils.OpEqual, Value: "active"}, + {Column: "age", Operator: utils.OpGreaterThan, Value: 18}, + }, + LogicOp: "AND", + }, + }, + Sort: []utils.SortField{ + {Column: "name", Order: "ASC"}, + }, + Limit: 10, + } + + var results []map[string]interface{} + err = mqb.ExecuteFind(context.Background(), collection, query, &results) + if err != nil { + log.Printf("Error executing MongoDB query: %v", err) + return + } + + fmt.Printf("Found %d users\n", len(results)) + for _, user := range results { + fmt.Printf("User: %+v\n", user) + } + + // Aggregation pipeline + aggQuery := utils.DynamicQuery{ + Fields: []utils.SelectField{ + {Expression: "department", Alias: "_id"}, + {Expression: "COUNT(*)", Alias: "employee_count"}, + {Expression: "AVG(salary)", Alias: "avg_salary"}, + }, + Filters: []utils.FilterGroup{ + { + Filters: []utils.DynamicFilter{ + {Column: "status", Operator: utils.OpEqual, Value: "active"}, + }, + }, + }, + GroupBy: []string{"department"}, + Sort: []utils.SortField{ + {Column: "employee_count", Order: "DESC"}, + }, + Limit: 10, + } + + var aggResults []map[string]interface{} + err = mqb.ExecuteAggregate(context.Background(), collection, aggQuery, &aggResults) 
+ if err != nil { + log.Printf("Error executing MongoDB aggregation: %v", err) + return + } + + fmt.Printf("Found %d departments\n", len(aggResults)) + for _, dept := range aggResults { + fmt.Printf("Department: %+v\n", dept) + } +} + +func queryParserExample(db *sqlx.DB, qb *utils.QueryBuilder) { + fmt.Println("\n=== Query Parser Example ===") + + // Inisialisasi QueryParser + qp := utils.NewQueryParser() + + // Parse URL query parameters + values := url.Values{} + values.Add("fields", "id,name,email,status") + values.Add("filter[status][_eq]", "active") + values.Add("filter[created_at][_gte]", "2023-01-01") + values.Add("filter[age][_between]", "18,65") + values.Add("sort", "+name,-created_at") + values.Add("limit", "20") + values.Add("offset", "10") + + // Parse query parameters into DynamicQuery + query, err := qp.ParseQuery(values, "users") + if err != nil { + log.Printf("Error parsing query: %v", err) + return + } + + // Execute the parsed query + var results []map[string]interface{} + err = qb.ExecuteQuery(context.Background(), db, query, &results) + if err != nil { + log.Printf("Error executing parsed query: %v", err) + return + } + + fmt.Printf("Found %d users using parsed query\n", len(results)) + for _, user := range results { + fmt.Printf("User: %+v\n", user) + } +} diff --git a/tools/general/generate-handler.go b/tools/general/generate-handler.go index da14b9d..3f09477 100644 --- a/tools/general/generate-handler.go +++ b/tools/general/generate-handler.go @@ -6,7 +6,6 @@ import ( "log" "os" "path/filepath" - "strconv" "strings" "time" @@ -22,7 +21,9 @@ type HandlerData struct { DirPath string // Path direktori lengkap ModuleName string TableName string - TableSchema []ColumnConfig // Untuk penyimpanan schema + TableSchema []ColumnConfig // Untuk penyimpanan schema + Relationships []RelationshipConfig // Untuk menyimpan relasi + FieldGroups map[string][]string // Untuk menyimpan field groups HasGet bool HasPost bool HasPut bool @@ -33,44 +34,44 @@ type 
HandlerData struct { HasFilter bool HasPagination bool Timestamp string + Endpoints map[string]EndpointConfig // Menyimpan semua endpoint } // Config represents the YAML configuration structure type Config struct { - Entities []Entity `yaml:"entities"` -} - -// Entity represents a single entity configuration in YAML -type Entity struct { - Name string `yaml:"name"` - Methods []string `yaml:"methods"` - Category string `yaml:"category,omitempty"` -} - -// ServicesConfig represents the new services-based YAML configuration structure -type ServicesConfig struct { Global GlobalConfig `yaml:"global"` Services map[string]ServiceConfig `yaml:"services"` } // GlobalConfig represents global configuration type GlobalConfig struct { - ModuleName string `yaml:"module_name"` - OutputDir string `yaml:"output_dir"` - EnableSwagger bool `yaml:"enable_swagger"` - EnableLogging bool `yaml:"enable_logging"` + ModuleName string `yaml:"module_name"` + OutputDir string `yaml:"output_dir"` + EnableSwagger bool `yaml:"enable_swagger"` + EnableLogging bool `yaml:"enable_logging"` + Database DatabaseConfig `yaml:"database"` +} + +// DatabaseConfig represents database configuration +type DatabaseConfig struct { + DefaultConnection string `yaml:"default_connection"` + TimeoutSeconds int `yaml:"timeout_seconds"` } // ServiceConfig represents a service configuration type ServiceConfig struct { - Name string `yaml:"name"` - Category string `yaml:"category"` - Package string `yaml:"package"` - Description string `yaml:"description"` - BaseURL string `yaml:"base_url"` - Timeout int `yaml:"timeout"` - RetryCount int `yaml:"retry_count"` - Endpoints map[string]EndpointConfig `yaml:"endpoints"` + Name string `yaml:"name"` + Category string `yaml:"category"` + Package string `yaml:"package"` + Description string `yaml:"description"` + BaseURL string `yaml:"base_url"` + Timeout int `yaml:"timeout"` + RetryCount int `yaml:"retry_count"` + TableName string `yaml:"table_name"` + Schema SchemaConfig 
`yaml:"schema"` + Relationships []RelationshipConfig `yaml:"relationships"` + FieldGroups map[string][]string `yaml:"field_groups"` + Endpoints map[string]EndpointConfig `yaml:"endpoints"` } // SchemaConfig represents a schema configuration @@ -80,51 +81,48 @@ type SchemaConfig struct { // ColumnConfig represents a column configuration type ColumnConfig struct { - Name string `yaml:"name"` - Type string `yaml:"type"` - Nullable bool `yaml:"nullable,omitempty"` - GoType string `yaml:"go_type,omitempty"` // Untuk override tipe Go secara manual - PrimaryKey bool `yaml:"primary_key,omitempty"` + Name string `yaml:"name"` + Type string `yaml:"type"` + Nullable bool `yaml:"nullable,omitempty"` + GoType string `yaml:"go_type,omitempty"` // Untuk override tipe Go secara manual + PrimaryKey bool `yaml:"primary_key,omitempty"` + Searchable bool `yaml:"searchable,omitempty"` // Menandai kolom yang dapat dicari + Unique bool `yaml:"unique,omitempty"` // Menandai kolom yang harus unik + SystemField bool `yaml:"system_field,omitempty"` // Menandai kolom sistem (created_at, dll) + Description string `yaml:"description,omitempty"` + Validation string `yaml:"validation,omitempty"` +} + +// RelationshipConfig represents a relationship configuration +type RelationshipConfig struct { + Name string `yaml:"name"` + Table string `yaml:"table"` + ForeignKey string `yaml:"foreign_key"` + LocalKey string `yaml:"local_key"` + Columns []ColumnConfig `yaml:"columns"` } // EndpointConfig represents an endpoint configuration type EndpointConfig struct { - Description string `yaml:"description"` - HandlerFolder string `yaml:"handler_folder"` - HandlerFile string `yaml:"handler_file"` - HandlerName string `yaml:"handler_name"` - TableName string `yaml:"table_name,omitempty"` - Schema SchemaConfig `yaml:"schema,omitempty"` - Functions map[string]FunctionConfig `yaml:"functions"` -} - -// FunctionConfig represents a function configuration -type FunctionConfig struct { - Methods []string 
`yaml:"methods"` - Path string `yaml:"path"` - GetRoutes string `yaml:"get_routes,omitempty"` - PostRoutes string `yaml:"post_routes,omitempty"` - PutRoutes string `yaml:"put_routes,omitempty"` - DeleteRoutes string `yaml:"delete_routes,omitempty"` - GetPath string `yaml:"get_path,omitempty"` - PostPath string `yaml:"post_path,omitempty"` - PutPath string `yaml:"put_path,omitempty"` - DeletePath string `yaml:"delete_path,omitempty"` - Model string `yaml:"model"` - ResponseModel string `yaml:"response_model"` - RequestModel string `yaml:"request_model,omitempty"` - Description string `yaml:"description"` - Summary string `yaml:"summary"` - Tags []string `yaml:"tags"` - RequireAuth bool `yaml:"require_auth"` - CacheEnabled bool `yaml:"cache_enabled"` - EnableDatabase bool `yaml:"enable_database"` - CacheTTL int `yaml:"cache_ttl"` - HasPagination bool `yaml:"has_pagination,omitempty"` - HasFilter bool `yaml:"has_filter,omitempty"` - HasSearch bool `yaml:"has_search,omitempty"` - HasStats bool `yaml:"has_stats,omitempty"` - HasDynamic bool `yaml:"has_dynamic,omitempty"` + HandlerFolder string `yaml:"handler_folder"` + HandlerFile string `yaml:"handler_file"` + Methods []string `yaml:"methods"` + Path string `yaml:"path"` + Description string `yaml:"description"` + Summary string `yaml:"summary"` + Tags []string `yaml:"tags"` + RequireAuth bool `yaml:"require_auth"` + CacheEnabled bool `yaml:"cache_enabled"` + CacheTTL int `yaml:"cache_ttl"` + HasPagination bool `yaml:"has_pagination,omitempty"` + HasFilter bool `yaml:"has_filter,omitempty"` + HasSearch bool `yaml:"has_search,omitempty"` + HasStats bool `yaml:"has_stats,omitempty"` + HasDynamic bool `yaml:"has_dynamic,omitempty"` + Fields string `yaml:"fields"` + ResponseModel string `yaml:"response_model"` + RequestModel string `yaml:"request_model,omitempty"` + SoftDelete bool `yaml:"soft_delete,omitempty"` } type PathInfo struct { @@ -144,6 +142,15 @@ var ( // Global file skip function var shouldSkipExistingFile 
func(filePath string, fileType string) bool +// Global excluded fields +var excludedFields = map[string]bool{ + "id": true, + "date_created": true, + "date_updated": true, + "user_created": true, + "user_updated": true, +} + // parseEntityPath - Enhanced logic parsing dengan validasi lebih baik func parseEntityPath(entityPath string) (*PathInfo, error) { if strings.TrimSpace(entityPath) == "" { @@ -330,165 +337,187 @@ func loadConfig(configPath string) (*Config, error) { return &config, nil } -func loadServicesConfig(configPath string) (*ServicesConfig, error) { - data, err := os.ReadFile(configPath) - if err != nil { - return nil, fmt.Errorf("failed to read services config file: %w", err) +// getDefaultTableSchema returns a basic table schema if none is provided +func getDefaultTableSchema() []ColumnConfig { + return []ColumnConfig{ + {Name: "id", Type: "uuid", Nullable: false, GoType: "string", PrimaryKey: true}, + {Name: "status", Type: "varchar", Nullable: false, GoType: "string"}, + {Name: "sort", Type: "int4", Nullable: true, GoType: "int32"}, + {Name: "user_created", Type: "varchar", Nullable: true, GoType: "string", SystemField: true}, + {Name: "date_created", Type: "timestamp", Nullable: true, GoType: "time.Time", SystemField: true}, + {Name: "user_updated", Type: "varchar", Nullable: true, GoType: "string", SystemField: true}, + {Name: "date_updated", Type: "timestamp", Nullable: true, GoType: "time.Time", SystemField: true}, + {Name: "name", Type: "varchar", Nullable: true, GoType: "string", Searchable: true}, } - - var config ServicesConfig - if err := yaml.Unmarshal(data, &config); err != nil { - return nil, fmt.Errorf("failed to parse YAML services config: %w", err) - } - - return &config, nil } // generateFromServicesConfig - RESTRUCTURED untuk agreggasi methods -func generateFromServicesConfig(config *ServicesConfig) { +func generateFromServicesConfig(config *Config) { for serviceName, service := range config.Services { if *verboseFlag { 
fmt.Printf("🔧 Processing service: %s\n", serviceName) } + // Parse entity path dari service name + pathInfo, err := parseEntityPath(serviceName) + if err != nil { + logError(fmt.Sprintf("Error parsing entity path '%s'", serviceName), err, *verboseFlag) + continue + } + + // Override category dari service config + if service.Category != "" { + pathInfo.Category = service.Category + } + + // Set directory path dari handler_folder jika specified + if len(service.Endpoints) > 0 { + for _, endpoint := range service.Endpoints { + if endpoint.HandlerFolder != "" { + pathInfo.DirPath = endpoint.HandlerFolder + break + } + } + } + + // AGGREGATE semua methods dari semua endpoints + var allMethods []string + var endpointConfigs = make(map[string]EndpointConfig) // Inisialisasi map + for endpointName, endpoint := range service.Endpoints { if *verboseFlag { fmt.Printf(" 📍 Processing endpoint: %s\n", endpointName) } - // Parse entity path dari endpoint name - pathInfo, err := parseEntityPath(endpointName) - if err != nil { - logError(fmt.Sprintf("Error parsing entity path '%s'", endpointName), err, *verboseFlag) - continue - } - - // Override category dari service config - if service.Category != "" { - pathInfo.Category = service.Category - } - - // Set directory path dari handler_folder jika specified - if endpoint.HandlerFolder != "" { - pathInfo.DirPath = endpoint.HandlerFolder - } - - // AGGREGATE semua methods dari semua functions - var allMethods []string - var functionConfigs []FunctionConfig - - for functionName, function := range endpoint.Functions { - if *verboseFlag { - fmt.Printf(" ⚙️ Processing function: %s\n", functionName) - } - - // Tambahkan methods dari function ini - allMethods = append(allMethods, function.Methods...) 
- functionConfigs = append(functionConfigs, function) - } - - // Remove duplicates dari methods - allMethods = removeDuplicateMethods(allMethods) - - // Jika tidak ada methods, gunakan default - if len(allMethods) == 0 { - allMethods = []string{"get", "post", "put", "delete", "dynamic", "search"} - } - - // Validate methods - if err := validateMethods(allMethods); err != nil { - logError(fmt.Sprintf("Invalid methods for endpoint '%s'", endpointName), err, *verboseFlag) - continue - } - - // Override table name jika specified - tableName := endpoint.TableName - if tableName == "" { - tableName = generateTableName(pathInfo) - } - - // Generate handler data dengan service-specific information - entityName := strings.Title(pathInfo.EntityName) - entityLower := strings.ToLower(pathInfo.EntityName) - entityPlural := entityLower + "s" - - data := HandlerData{ - Name: entityName, - NameLower: entityLower, - NamePlural: entityPlural, - Category: pathInfo.Category, - DirPath: pathInfo.DirPath, - ModuleName: config.Global.ModuleName, - TableName: tableName, - TableSchema: endpoint.Schema.Columns, - Timestamp: time.Now().Format("2006-01-02 15:04:05"), - } - - // Set methods berdasarkan aggregated methods - setMethods(&data, allMethods) - - // Set flags berdasarkan function configs - for _, function := range functionConfigs { - if function.HasPagination { - data.HasPagination = true - } - if function.HasFilter { - data.HasFilter = true - } - if function.HasSearch { - data.HasSearch = true - } - if function.HasStats { - data.HasStats = true - } - if function.HasDynamic { - data.HasDynamic = true - } - } - - // Create directories - handlerDir, modelDir, err := createDirectories(pathInfo) - if err != nil { - logError("Error creating directories", err, *verboseFlag) - continue - } - - // CHECK existing files sebelum generate - handlerPath := filepath.Join(handlerDir, entityLower+".go") - modelPath := filepath.Join(modelDir, entityLower+".go") - - if 
shouldSkipExistingFile(handlerPath, "handler") { - fmt.Printf("⚠️ Skipping handler generation: %s\n", handlerPath) - continue - } - - if len(data.TableSchema) > 0 { - if shouldSkipExistingFile(modelPath, "model") { - fmt.Printf("⚠️ Skipping model generation: %s\n", modelPath) - } else { - generateModelFile(data, modelDir) // Memanggil fungsi baru - } - } else { - fmt.Printf("⚠️ Skipping model generation for '%s' because no schema is defined in the config.\n", entityName) - } - - // Generate files (SEKALI SAJA per endpoint) - generateHandlerFile(data, handlerDir) - // generateModelFile(data, modelDir) - - // HANYA UPDATE ROUTES SEKALI PER ENDPOINT setelah semua fungsi di-aggregate - updateRoutesFile(data) - - // Success output - logSuccess(fmt.Sprintf("Successfully generated handler: %s", entityName), - fmt.Sprintf("Category: %s", pathInfo.Category), - fmt.Sprintf("Path: %s", pathInfo.DirPath), - fmt.Sprintf("Handler: %s", handlerPath), - fmt.Sprintf("Model: %s", modelPath), - fmt.Sprintf("Table: %s", tableName), - fmt.Sprintf("Methods: %s", strings.Join(allMethods, ", ")), - ) + // Tambahkan methods dari endpoint ini + allMethods = append(allMethods, endpoint.Methods...) 
+ endpointConfigs[endpointName] = endpoint // Simpan endpoint ke map } + + // Remove duplicates dari methods + allMethods = removeDuplicateMethods(allMethods) + + // Jika tidak ada methods, gunakan default + if len(allMethods) == 0 { + allMethods = []string{"get", "post", "put", "delete", "dynamic", "search"} + } + + // Validate methods + if err := validateMethods(allMethods); err != nil { + logError(fmt.Sprintf("Invalid methods for service '%s'", serviceName), err, *verboseFlag) + continue + } + + // Override table name jika specified + tableName := service.TableName + if tableName == "" { + tableName = generateTableName(pathInfo) + } + + // Get table schema from service + var tableSchema []ColumnConfig + if len(service.Schema.Columns) > 0 { + tableSchema = service.Schema.Columns + } else { + // Use default schema + tableSchema = getDefaultTableSchema() + } + + // Get relationships from service + var relationships []RelationshipConfig + if len(service.Relationships) > 0 { + relationships = service.Relationships + } + + // Get field groups from service + var fieldGroups map[string][]string + if len(service.FieldGroups) > 0 { + fieldGroups = service.FieldGroups + } + + // Generate handler data dengan service-specific information + entityName := strings.Title(pathInfo.EntityName) + entityLower := strings.ToLower(pathInfo.EntityName) + entityPlural := entityLower + "s" + + data := HandlerData{ + Name: entityName, + NameLower: entityLower, + NamePlural: entityPlural, + Category: pathInfo.Category, + DirPath: pathInfo.DirPath, + ModuleName: config.Global.ModuleName, + TableName: tableName, + TableSchema: tableSchema, + Relationships: relationships, + FieldGroups: fieldGroups, + Endpoints: endpointConfigs, // Gunakan map yang sudah diinisialisasi + Timestamp: time.Now().Format("2006-01-02 15:04:05"), + } + + // Set methods berdasarkan aggregated methods + setMethods(&data, allMethods) + + // Set flags berdasarkan endpoint configs + for _, endpoint := range 
endpointConfigs { + if endpoint.HasPagination { + data.HasPagination = true + } + if endpoint.HasFilter { + data.HasFilter = true + } + if endpoint.HasSearch { + data.HasSearch = true + } + if endpoint.HasStats { + data.HasStats = true + } + if endpoint.HasDynamic { + data.HasDynamic = true + } + } + + // Create directories + handlerDir, modelDir, err := createDirectories(pathInfo) + if err != nil { + logError("Error creating directories", err, *verboseFlag) + continue + } + + // CHECK existing files sebelum generate + handlerPath := filepath.Join(handlerDir, entityLower+".go") + modelPath := filepath.Join(modelDir, entityLower+".go") + + if shouldSkipExistingFile(handlerPath, "handler") { + fmt.Printf("⚠️ Skipping handler generation: %s\n", handlerPath) + continue + } + + if len(data.TableSchema) > 0 { + if shouldSkipExistingFile(modelPath, "model") { + fmt.Printf("⚠️ Skipping model generation: %s\n", modelPath) + } else { + generateModelFile(data, modelDir) // Memanggil fungsi baru + } + } else { + fmt.Printf("⚠️ Skipping model generation for '%s' because no schema is defined in the config.\n", entityName) + } + + // Generate files (SEKALI SAJA per service) + generateHandlerFile(data, handlerDir) + + // HANYA UPDATE ROUTES SEKALI PER SERVICE setelah semua endpoint di-aggregate + updateRoutesFile(data) + + // Success output + logSuccess(fmt.Sprintf("Successfully generated handler: %s", entityName), + fmt.Sprintf("Category: %s", pathInfo.Category), + fmt.Sprintf("Path: %s", pathInfo.DirPath), + fmt.Sprintf("Handler: %s", handlerPath), + fmt.Sprintf("Model: %s", modelPath), + fmt.Sprintf("Table: %s", tableName), + fmt.Sprintf("Methods: %s", strings.Join(allMethods, ", ")), + ) } } @@ -506,8 +535,7 @@ func main() { } // Check for services-config.yaml first (new format) - servicesConfig, err := - loadServicesConfig(configPath) + servicesConfig, err := loadConfig(configPath) if err == nil { // Use services config if *verboseFlag { @@ -517,49 +545,8 @@ func main() { 
return } - // Fallback to old config-handler.yml - oldConfigPath := "config-handler.yml" - if *configFlag == "" { - oldConfigPath = "config-handler.yml" - } - - config, err := loadConfig(oldConfigPath) - if err == nil { - // Generate from old config - if *verboseFlag { - fmt.Printf("📄 Using legacy configuration from %s\n", oldConfigPath) - } - for _, entity := range config.Entities { - pathInfo, err := parseEntityPath(entity.Name) - if err != nil { - logError(fmt.Sprintf("Error parsing entity path '%s'", entity.Name), err, *verboseFlag) - continue - } - - // Override category if specified in config - if entity.Category != "" { - pathInfo.Category = entity.Category - } - - // Use methods from config or default - methods := entity.Methods - if len(methods) == 0 { - methods = []string{"get", "post", "put", "delete", "dynamic", "search"} - } - - // Validate methods - if err := validateMethods(methods); err != nil { - logError(fmt.Sprintf("Invalid methods for entity '%s'", entity.Name), err, *verboseFlag) - continue - } - - generateForEntity(pathInfo, methods) - } - return - } - // No config files found, fallback to command line arguments - fmt.Printf("⚠️ No config files found (%s or %s), falling back to command line arguments\n", configPath, oldConfigPath) + fmt.Printf("⚠️ No config file found (%s), falling back to command line arguments\n", configPath) if len(os.Args) < 2 { fmt.Println("Usage: go run generate-handler.go [path/]entity [methods]") @@ -568,7 +555,7 @@ func main() { fmt.Println(" go run generate-handler.go retribusi/tarif get post put delete dynamic search") fmt.Println(" go run generate-handler.go product/category/subcategory/item get post") fmt.Println("\nSupported methods: get, post, put, delete, stats, dynamic, search") - fmt.Println("\nAlternatively, create a services-config.yaml or config-handler.yml file with configurations.") + fmt.Println("\nAlternatively, create a services-config.yaml file with configurations.") fmt.Println("\nFlags:") 
fmt.Println(" --force Force overwrite existing files") fmt.Println(" --verbose Enable verbose output") @@ -621,6 +608,7 @@ func generateForEntity(pathInfo *PathInfo, methods []string) { DirPath: pathInfo.DirPath, ModuleName: "api-service", TableName: tableName, + TableSchema: getDefaultTableSchema(), // Use default schema if not provided HasPagination: true, HasFilter: true, Timestamp: time.Now().Format("2006-01-02 15:04:05"), @@ -682,7 +670,7 @@ func generateHandlerFile(data HandlerData, handlerDir string) { // Conditional imports based on enabled methods if data.HasDynamic || data.HasSearch { - handlerContent.WriteString(` utils "` + data.ModuleName + `/internal/utils/filters"` + "\n") + handlerContent.WriteString(` queryUtils "` + data.ModuleName + `/internal/utils/query"` + "\n") } // Only import validation if POST is enabled (since validation is primarily for create operations) @@ -690,10 +678,10 @@ func generateHandlerFile(data HandlerData, handlerDir string) { handlerContent.WriteString(` "` + data.ModuleName + `/internal/utils/validation"` + "\n") } + handlerContent.WriteString(` "` + data.ModuleName + `/pkg/logger"` + "\n") handlerContent.WriteString(` "context"` + "\n") handlerContent.WriteString(` "database/sql"` + "\n") handlerContent.WriteString(` "fmt"` + "\n") - handlerContent.WriteString(` "log"` + "\n") handlerContent.WriteString(` "net/http"` + "\n") handlerContent.WriteString(` "strconv"` + "\n") handlerContent.WriteString(` "strings"` + "\n") @@ -701,35 +689,37 @@ func generateHandlerFile(data HandlerData, handlerDir string) { handlerContent.WriteString(` "time"` + "\n\n") handlerContent.WriteString(` "github.com/gin-gonic/gin"` + "\n") handlerContent.WriteString(` "github.com/go-playground/validator/v10"` + "\n") - handlerContent.WriteString(` "github.com/google/uuid"` + "\n") + handlerContent.WriteString(` "github.com/jmoiron/sqlx"` + "\n") + handlerContent.WriteString(` "github.com/lib/pq"` + "\n") handlerContent.WriteString(")\n\n") - // 
Vars + // Global initialization with caching + handlerContent.WriteString("// =============================================================================\n") + handlerContent.WriteString("// GLOBAL INITIALIZATION & VALIDATION\n") + handlerContent.WriteString("// =============================================================================\n\n") + handlerContent.WriteString("var (\n") - handlerContent.WriteString(" " + data.NameLower + "db database.Service\n") - handlerContent.WriteString(" " + data.NameLower + "once sync.Once\n") - handlerContent.WriteString(" " + data.NameLower + "validate *validator.Validate\n") + handlerContent.WriteString(" db database.Service\n") + handlerContent.WriteString(" once sync.Once\n") + handlerContent.WriteString(" validate *validator.Validate\n") handlerContent.WriteString(")\n\n") - // init func - handlerContent.WriteString("// Initialize the database connection and validator\n") + handlerContent.WriteString("// Initialize the database connection and validator once\n") handlerContent.WriteString("func init() {\n") - handlerContent.WriteString(" " + data.NameLower + "once.Do(func() {\n") - handlerContent.WriteString(" " + data.NameLower + "db = database.New(config.LoadConfig())\n") - handlerContent.WriteString(" " + data.NameLower + "validate = validator.New()\n") + handlerContent.WriteString(" once.Do(func() {\n") + handlerContent.WriteString(" db = database.New(config.LoadConfig())\n") + handlerContent.WriteString(" validate = validator.New()\n") - // Only register validation if POST is enabled if data.HasPost { - handlerContent.WriteString(" " + data.NameLower + "validate.RegisterValidation(\"" + data.NameLower + "_status\", validate" + data.Name + "Status)\n") + handlerContent.WriteString(" validate.RegisterValidation(\"" + data.NameLower + "_status\", validate" + data.Name + "Status)\n") } - handlerContent.WriteString(" if " + data.NameLower + "db == nil {\n") - handlerContent.WriteString(" log.Fatal(\"Failed to initialize 
database connection\")\n") + handlerContent.WriteString(" if db == nil {\n") + handlerContent.WriteString(" logger.Fatal(\"Failed to initialize database connection\")\n") handlerContent.WriteString(" }\n") handlerContent.WriteString(" })\n") handlerContent.WriteString("}\n\n") - // Custom validation - Only include if POST is enabled if data.HasPost { handlerContent.WriteString("// Custom validation for " + data.NameLower + " status\n") handlerContent.WriteString("func validate" + data.Name + "Status(fl validator.FieldLevel) bool {\n") @@ -737,1308 +727,2175 @@ func generateHandlerFile(data HandlerData, handlerDir string) { handlerContent.WriteString("}\n\n") } - // Handler struct - handlerContent.WriteString("// " + data.Name + "Handler handles " + data.NameLower + " services\n") - handlerContent.WriteString("type " + data.Name + "Handler struct {\n") - handlerContent.WriteString(" db database.Service\n") + // Cache implementation + handlerContent.WriteString("// =============================================================================\n") + handlerContent.WriteString("// CACHE IMPLEMENTATION\n") + handlerContent.WriteString("// =============================================================================\n\n") + + handlerContent.WriteString("// CacheEntry represents an entry in the cache\n") + handlerContent.WriteString("type CacheEntry struct {\n") + handlerContent.WriteString(" Data interface{}\n") + handlerContent.WriteString(" ExpiresAt time.Time\n") handlerContent.WriteString("}\n\n") - // Constructor - handlerContent.WriteString("// New" + data.Name + "Handler creates a new " + data.Name + "Handler\n") - handlerContent.WriteString("func New" + data.Name + "Handler() *" + data.Name + "Handler {\n") - handlerContent.WriteString(" return &" + data.Name + "Handler{\n") - handlerContent.WriteString(" db: " + data.NameLower + "db,\n") + handlerContent.WriteString("// IsExpired checks if the cache entry has expired\n") + handlerContent.WriteString("func (e 
*CacheEntry) IsExpired() bool {\n") + handlerContent.WriteString(" return time.Now().After(e.ExpiresAt)\n") + handlerContent.WriteString("}\n\n") + + handlerContent.WriteString("// InMemoryCache implements a simple in-memory cache with TTL\n") + handlerContent.WriteString("type InMemoryCache struct {\n") + handlerContent.WriteString(" items sync.Map\n") + handlerContent.WriteString(" mu sync.RWMutex\n") + handlerContent.WriteString("}\n\n") + + handlerContent.WriteString("// NewInMemoryCache creates a new in-memory cache\n") + handlerContent.WriteString("func NewInMemoryCache() *InMemoryCache {\n") + handlerContent.WriteString(" return &InMemoryCache{}\n") + handlerContent.WriteString("}\n\n") + + handlerContent.WriteString("// Get retrieves an item from the cache\n") + handlerContent.WriteString("func (c *InMemoryCache) Get(key string) (interface{}, bool) {\n") + handlerContent.WriteString(" val, ok := c.items.Load(key)\n") + handlerContent.WriteString(" if !ok {\n") + handlerContent.WriteString(" return nil, false\n") + handlerContent.WriteString(" }\n\n") + + handlerContent.WriteString(" entry, ok := val.(*CacheEntry)\n") + handlerContent.WriteString(" if !ok || entry.IsExpired() {\n") + handlerContent.WriteString(" c.items.Delete(key)\n") + handlerContent.WriteString(" return nil, false\n") + handlerContent.WriteString(" }\n\n") + + handlerContent.WriteString(" return entry.Data, true\n") + handlerContent.WriteString("}\n\n") + + handlerContent.WriteString("// Set stores an item in the cache with a TTL\n") + handlerContent.WriteString("func (c *InMemoryCache) Set(key string, value interface{}, ttl time.Duration) {\n") + handlerContent.WriteString(" entry := &CacheEntry{\n") + handlerContent.WriteString(" Data: value,\n") + handlerContent.WriteString(" ExpiresAt: time.Now().Add(ttl),\n") handlerContent.WriteString(" }\n") - handlerContent.WriteString("}\n") + handlerContent.WriteString(" c.items.Store(key, entry)\n") + handlerContent.WriteString("}\n\n") - // 
Add optional methods based on enabled flags - if data.HasGet { - handlerContent.WriteString(generateGetMethods(data)) - } - if data.HasDynamic { - handlerContent.WriteString(generateDynamicMethod(data)) - } - if data.HasSearch { - handlerContent.WriteString(generateSearchMethod(data)) - } - if data.HasPost { - handlerContent.WriteString(generateCreateMethod(data)) - } - if data.HasPut { - handlerContent.WriteString(generateUpdateMethod(data)) - } - if data.HasDelete { - handlerContent.WriteString(generateDeleteMethod(data)) - } - if data.HasStats { - handlerContent.WriteString(generateStatsMethod(data)) + handlerContent.WriteString("// Delete removes an item from the cache\n") + handlerContent.WriteString("func (c *InMemoryCache) Delete(key string) {\n") + handlerContent.WriteString(" c.items.Delete(key)\n") + handlerContent.WriteString("}\n\n") + + handlerContent.WriteString("// DeleteByPrefix removes all items with a specific prefix\n") + handlerContent.WriteString("func (c *InMemoryCache) DeleteByPrefix(prefix string) {\n") + handlerContent.WriteString(" c.items.Range(func(key, value interface{}) bool {\n") + handlerContent.WriteString(" if keyStr, ok := key.(string); ok && strings.HasPrefix(keyStr, prefix) {\n") + handlerContent.WriteString(" c.items.Delete(key)\n") + handlerContent.WriteString(" }\n") + handlerContent.WriteString(" return true\n") + handlerContent.WriteString(" })\n") + handlerContent.WriteString("}\n\n") + + // Handler struct + handlerContent.WriteString("// =============================================================================\n") + handlerContent.WriteString("// " + data.Name + " HANDLER STRUCT\n") + handlerContent.WriteString("// =============================================================================\n\n") + + handlerContent.WriteString("// " + data.Name + "Handler handles " + data.NameLower + " services\n") + handlerContent.WriteString("type " + data.Name + "Handler struct {\n") + handlerContent.WriteString(" db 
database.Service\n") + handlerContent.WriteString(" queryBuilder *queryUtils.QueryBuilder\n") + handlerContent.WriteString(" validator *validation.DynamicValidator\n") + handlerContent.WriteString(" cache *InMemoryCache\n") + handlerContent.WriteString("}\n\n") + + handlerContent.WriteString("// New" + data.Name + "Handler creates a new " + data.Name + "Handler with a pre-configured QueryBuilder\n") + handlerContent.WriteString("func New" + data.Name + "Handler() *" + data.Name + "Handler {\n") + + // Generate allowed columns from table schema + var allowedColumns []string + for _, col := range data.TableSchema { + allowedColumns = append(allowedColumns, col.Name) } - // Add helper methods - this function now handles conditional generation internally - handlerContent.WriteString(generateHelperMethods(data)) + // Add relationship columns to allowed columns + for _, rel := range data.Relationships { + for _, col := range rel.Columns { + allowedColumns = append(allowedColumns, col.Name) + } + } + + handlerContent.WriteString(" // Initialize QueryBuilder with allowed columns list for security.\n") + handlerContent.WriteString(" queryBuilder := queryUtils.NewQueryBuilder(queryUtils.DBTypePostgreSQL).\n") + handlerContent.WriteString(" SetAllowedColumns([]string{\n") + for i, col := range allowedColumns { + if i < len(allowedColumns)-1 { + handlerContent.WriteString(" \"" + col + "\",\n") + } else { + handlerContent.WriteString(" \"" + col + "\",\n") + } + } + handlerContent.WriteString(" })\n\n") + + handlerContent.WriteString(" return &" + data.Name + "Handler{\n") + handlerContent.WriteString(" db: db,\n") + handlerContent.WriteString(" queryBuilder: queryBuilder,\n") + handlerContent.WriteString(" validator: validation.NewDynamicValidator(queryBuilder),\n") + handlerContent.WriteString(" cache: NewInMemoryCache(),\n") + handlerContent.WriteString(" }\n") + handlerContent.WriteString("}\n\n") + + // Handler endpoints + handlerContent.WriteString("// 
=============================================================================\n") + handlerContent.WriteString("// HANDLER ENDPOINTS\n") + handlerContent.WriteString("// =============================================================================\n\n") + + // Generate all endpoints in one file + for endpointName, endpoint := range data.Endpoints { + if *verboseFlag { + fmt.Printf(" 📍 Generating endpoint: %s\n", endpointName) + } + + // Generate endpoint based on its methods + for _, method := range endpoint.Methods { + switch strings.ToLower(method) { + case "get": + primaryKey := findPrimaryKey(data.TableSchema) + if endpoint.Path == "/:"+primaryKey { + handlerContent.WriteString(generateGetByIDMethod(data, endpoint)) + } else if endpoint.Path == "/dynamic" { + handlerContent.WriteString(generateDynamicMethod(data, endpoint)) + } else if endpoint.Path == "/search" { + handlerContent.WriteString(generateSearchMethod(data, endpoint)) + } else if endpoint.Path == "/stats" { + handlerContent.WriteString(generateStatsMethod(data, endpoint)) + } else if endpoint.Path == "/by-location" { + handlerContent.WriteString(generateByLocationMethod(data, endpoint)) + } else if endpoint.Path == "/by-age" { + handlerContent.WriteString(generateByAgeMethod(data, endpoint)) + } else { + handlerContent.WriteString(generateGetMethod(data, endpoint)) + } + case "post": + handlerContent.WriteString(generateCreateMethod(data, endpoint)) + case "put": + handlerContent.WriteString(generateUpdateMethod(data, endpoint)) + case "delete": + handlerContent.WriteString(generateDeleteMethod(data, endpoint)) + } + } + } + + // Helper methods + handlerContent.WriteString("// =============================================================================\n") + handlerContent.WriteString("// HELPER FUNCTIONS\n") + handlerContent.WriteString("// =============================================================================\n\n") + + handlerContent.WriteString(generateHelperMethodsWithCache(data)) // 
Write into file writeFile(filepath.Join(handlerDir, data.NameLower+".go"), handlerContent.String()) } -func generateGetMethods(data HandlerData) string { - return ` +// generateGetMethod - Template untuk GET method dengan cache +func generateGetMethod(data HandlerData, endpoint EndpointConfig) string { + var methodContent strings.Builder -// Get` + data.Name + ` godoc -// @Summary Get ` + data.NameLower + ` with pagination and optional aggregation -// @Description Returns a paginated list of ` + data.NamePlural + ` with optional summary statistics -// @Tags ` + data.Name + ` -// @Accept json -// @Produce json -// @Param limit query int false "Limit (max 100)" default(10) -// @Param offset query int false "Offset" default(0) -// @Param include_summary query bool false "Include aggregation summary" default(false) -// @Param status query string false "Filter by status" -// @Param search query string false "Search in multiple fields" -// @Success 200 {object} ` + data.Category + `Models.` + data.Name + `GetResponse "Success response" -// @Failure 400 {object} models.ErrorResponse "Bad request" -// @Failure 500 {object} models.ErrorResponse "Internal server error" -// @Router /api/v1/` + data.NamePlural + ` [get] -func (h *` + data.Name + `Handler) Get` + data.Name + `(c *gin.Context) { - // Parse pagination parameters - limit, offset, err := h.parsePaginationParams(c) - if err != nil { - h.respondError(c, "Invalid pagination parameters", err, http.StatusBadRequest) - return - } + methodContent.WriteString("// Get" + data.Name + " godoc\n") + methodContent.WriteString("// @Summary " + endpoint.Summary + "\n") + methodContent.WriteString("// @Description " + endpoint.Description + "\n") + methodContent.WriteString("// @Tags " + strings.Join(endpoint.Tags, ", ") + "\n") + methodContent.WriteString("// @Accept json\n") + methodContent.WriteString("// @Produce json\n") - // Parse filter parameters - filter := h.parseFilterParams(c) - includeAggregation := 
c.Query("include_summary") == "true" - - // Get database connection - dbConn, err := h.db.GetDB("postgres_satudata") - if err != nil { - h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) - return - } - - // Create context with timeout - ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) - defer cancel() - - // Execute concurrent operations - var ( - items []` + data.Category + `Models.` + data.Name + ` - total int - aggregateData *models.AggregateData - wg sync.WaitGroup - errChan = make(chan error, 3) - mu sync.Mutex - ) - - // Fetch total count - wg.Add(1) - go func() { - defer wg.Done() - if err := h.getTotalCount(ctx, dbConn, filter, &total); err != nil { - mu.Lock() - errChan <- fmt.Errorf("failed to get total count: %w", err) - mu.Unlock() - } - }() - - // Fetch main data - wg.Add(1) - go func() { - defer wg.Done() - result, err := h.fetch` + data.Name + `s(ctx, dbConn, filter, limit, offset) - mu.Lock() - if err != nil { - errChan <- fmt.Errorf("failed to fetch data: %w", err) - } else { - items = result - } - mu.Unlock() - }() - - // Fetch aggregation data if requested - if includeAggregation { - wg.Add(1) - go func() { - defer wg.Done() - result, err := h.getAggregateData(ctx, dbConn, filter) - mu.Lock() - if err != nil { - errChan <- fmt.Errorf("failed to get aggregate data: %w", err) - } else { - aggregateData = result - } - mu.Unlock() - }() - } - - // Wait for all goroutines - wg.Wait() - close(errChan) - - // Check for errors - for err := range errChan { - if err != nil { - h.logAndRespondError(c, "Data processing failed", err, http.StatusInternalServerError) - return - } - } - - // Build response - meta := h.calculateMeta(limit, offset, total) - response := ` + data.Category + `Models.` + data.Name + `GetResponse{ - Message: "Data ` + data.Category + ` berhasil diambil", - Data: items, - Meta: meta, - } - - if includeAggregation && aggregateData != nil { - response.Summary = aggregateData 
- } - - c.JSON(http.StatusOK, response) -} - -// Get` + data.Name + `ByID godoc -// @Summary Get ` + data.Name + ` by ID -// @Description Returns a single ` + data.NameLower + ` by ID -// @Tags ` + data.Name + ` -// @Accept json -// @Produce json -// @Param id path string true "` + data.Name + ` ID (UUID)" -// @Success 200 {object} ` + data.Category + `Models.` + data.Name + `GetByIDResponse "Success response" -// @Failure 400 {object} models.ErrorResponse "Invalid ID format" -// @Failure 404 {object} models.ErrorResponse "` + data.Name + ` not found" -// @Failure 500 {object} models.ErrorResponse "Internal server error" -// @Router /api/v1/` + data.NameLower + `/{id} [get] -func (h *` + data.Name + `Handler) Get` + data.Name + `ByID(c *gin.Context) { - id := c.Param("id") - - // Validate UUID format - if _, err := uuid.Parse(id); err != nil { - h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) - return - } - - dbConn, err := h.db.GetDB("postgres_satudata") - if err != nil { - h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) - return - } - - ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) - defer cancel() - - item, err := h.get` + data.Name + `ByID(ctx, dbConn, id) - if err != nil { - if err == sql.ErrNoRows { - h.respondError(c, "` + data.Name + ` not found", err, http.StatusNotFound) - } else { - h.logAndRespondError(c, "Failed to get ` + data.NameLower + `", err, http.StatusInternalServerError) - } - return - } - - response := ` + data.Category + `Models.` + data.Name + `GetByIDResponse{ - Message: "` + data.Category + ` details retrieved successfully", - Data: item, - } - - c.JSON(http.StatusOK, response) -}` -} - -func generateDynamicMethod(data HandlerData) string { - return ` - -// Get` + data.Name + `Dynamic godoc -// @Summary Get ` + data.NameLower + ` with dynamic filtering -// @Description Returns ` + data.NamePlural + ` with advanced dynamic filtering like Directus -// 
@Tags ` + data.Name + ` -// @Accept json -// @Produce json -// @Param fields query string false "Fields to select (e.g., fields=*.*)" -// @Param filter[column][operator] query string false "Dynamic filters (e.g., filter[name][_eq]=value)" -// @Param sort query string false "Sort fields (e.g., sort=date_created,-name)" -// @Param limit query int false "Limit" default(10) -// @Param offset query int false "Offset" default(0) -// @Success 200 {object} ` + data.Category + `Models.` + data.Name + `GetResponse "Success response" -// @Failure 400 {object} models.ErrorResponse "Bad request" -// @Failure 500 {object} models.ErrorResponse "Internal server error" -// @Router /api/v1/` + data.NamePlural + `/dynamic [get] -func (h *` + data.Name + `Handler) Get` + data.Name + `Dynamic(c *gin.Context) { - // Parse query parameters - parser := utils.NewQueryParser().SetLimits(10, 100) - dynamicQuery, err := parser.ParseQuery(c.Request.URL.Query()) - if err != nil { - h.respondError(c, "Invalid query parameters", err, http.StatusBadRequest) - return - } - - // Get database connection - dbConn, err := h.db.GetDB("postgres_satudata") - if err != nil { - h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) - return - } - - // Create context with timeout - ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) - defer cancel() - - // Execute query with dynamic filtering - items, total, err := h.fetch` + data.Name + `sDynamic(ctx, dbConn, dynamicQuery) - if err != nil { - h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) - return - } - - // Build response - meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, total) - response := ` + data.Category + `Models.` + data.Name + `GetResponse{ - Message: "Data ` + data.Category + ` berhasil diambil", - Data: items, - Meta: meta, - } - - c.JSON(http.StatusOK, response) -}` -} - -func generateSearchMethod(data HandlerData) string { - return ` - 
-// Get` + data.Name + `Search godoc -// @Summary Get ` + data.NameLower + ` with Search filtering -// @Description Returns ` + data.NamePlural + ` with advanced dynamic filtering like Directus -// @Tags ` + data.Name + ` -// @Accept json -// @Produce json -// @Param fields query string false "Fields to select (e.g., fields=*.*)" -// @Param filter[column][operator] query string false "Search filters (e.g., filter[name][_eq]=value)" -// @Param sort query string false "Sort fields (e.g., sort=date_created,-name)" -// @Param limit query int false "Limit" default(10) -// @Param offset query int false "Offset" default(0) -// @Success 200 {object} ` + data.Category + `Models.` + data.Name + `GetResponse "Success response" -// @Failure 400 {object} models.ErrorResponse "Bad request" -// @Failure 500 {object} models.ErrorResponse "Internal server error" -// @Router /api/v1/` + data.NamePlural + `/search [get] -func (h *` + data.Name + `Handler) Search` + data.Name + `Advanced(c *gin.Context) { - // Parse complex search parameters - searchQuery := c.Query("q") - if searchQuery == "" { - // If no search query provided, return all records with default sorting - query := utils.DynamicQuery{ - Fields: []string{"*"}, - Filters: []utils.FilterGroup{}, // Empty filters - fetch` + data.Name + `sDynamic will add default deleted filter - Sort: []utils.SortField{{ - Column: "date_created", - Order: "DESC", - }}, - Limit: 20, - Offset: 0, - } - - // Parse pagination if provided - if limit := c.Query("limit"); limit != "" { - if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 { - query.Limit = l - } - } - - if offset := c.Query("offset"); offset != "" { - if o, err := strconv.Atoi(offset); err == nil && o >= 0 { - query.Offset = o - } - } - - // Get database connection - dbConn, err := h.db.GetDB("postgres_satudata") - if err != nil { - h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) - return - } - - ctx, cancel := 
context.WithTimeout(c.Request.Context(), 30*time.Second) - defer cancel() - - // Execute query to get all records - ` + data.NameLower + `s, total, err := h.fetch` + data.Name + `sDynamic(ctx, dbConn, query) - if err != nil { - h.logAndRespondError(c, "Failed to fetch data", err, http.StatusInternalServerError) - return - } - - // Build response - meta := h.calculateMeta(query.Limit, query.Offset, total) - response := ` + data.Category + `Models.` + data.Name + `GetResponse{ - Message: "All records retrieved (no search query provided)", - Data: ` + data.NameLower + `s, - Meta: meta, - } - - c.JSON(http.StatusOK, response) - return - } - - // Build dynamic query for search - query := utils.DynamicQuery{ - Fields: []string{"*"}, - Filters: []utils.FilterGroup{{ - Filters: []utils.DynamicFilter{ - { - Column: "name", - Operator: utils.OpContains, - Value: searchQuery, - LogicOp: "OR", - }, - }, - LogicOp: "AND", - }}, - Sort: []utils.SortField{{ - Column: "date_created", - Order: "DESC", - }}, - Limit: 20, - Offset: 0, - } - - // Parse pagination if provided - if limit := c.Query("limit"); limit != "" { - if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 { - query.Limit = l - } - } - - if offset := c.Query("offset"); offset != "" { - if o, err := strconv.Atoi(offset); err == nil && o >= 0 { - query.Offset = o - } - } - - // Get database connection - dbConn, err := h.db.GetDB("postgres_satudata") - if err != nil { - h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) - return - } - - ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second) - defer cancel() - - // Execute search - ` + data.NameLower + `s, total, err := h.fetch` + data.Name + `sDynamic(ctx, dbConn, query) - if err != nil { - h.logAndRespondError(c, "Search failed", err, http.StatusInternalServerError) - return - } - - // Build response - meta := h.calculateMeta(query.Limit, query.Offset, total) - response := ` + data.Category + 
`Models.` + data.Name + `GetResponse{ - Message: fmt.Sprintf("Search results for '%s'", searchQuery), - Data: ` + data.NameLower + `s, - Meta: meta, - } - - c.JSON(http.StatusOK, response) -} -// fetch` + data.Name + `sDynamic executes dynamic query -func (h *` + data.Name + `Handler) fetch` + data.Name + `sDynamic(ctx context.Context, dbConn *sql.DB, query utils.DynamicQuery) ([]` + data.Category + `Models.` + data.Name + `, int, error) { - // Setup query builders - countBuilder := utils.NewQueryBuilder("` + data.TableName + `"). - SetColumnMapping(map[string]string{ - // Add your column mappings here - }). - SetAllowedColumns([]string{ - "id", "status", "sort", "user_created", "date_created", - "user_updated", "date_updated", "name", - // Add other allowed columns here - }) - - mainBuilder := utils.NewQueryBuilder("` + data.TableName + `"). - SetColumnMapping(map[string]string{ - // Add your column mappings here - }). - SetAllowedColumns([]string{ - "id", "status", "sort", "user_created", "date_created", - "user_updated", "date_updated", "name", - // Add other allowed columns here - }) - - // Add default filter to exclude deleted records - if len(query.Filters) > 0 { - query.Filters = append([]utils.FilterGroup{{ - Filters: []utils.DynamicFilter{{ - Column: "status", - Operator: utils.OpNotEqual, - Value: "deleted", - }}, - LogicOp: "AND", - }}, query.Filters...) - } else { - query.Filters = []utils.FilterGroup{{ - Filters: []utils.DynamicFilter{{ - Column: "status", - Operator: utils.OpNotEqual, - Value: "deleted", - }}, - LogicOp: "AND", - }} + // Add parameters based on endpoint configuration + if endpoint.HasPagination { + methodContent.WriteString("// @Param limit query int false \"Limit (max 100)\" default(10)\n") + methodContent.WriteString("// @Param offset query int false \"Offset\" default(0)\n") } - // Execute queries sequentially - var total int - var items []` + data.Category + `Models.` + data.Name + ` - - // 1. 
Get total count - countQuery := query - countQuery.Limit = 0 - countQuery.Offset = 0 - - countSQL, countArgs, err := countBuilder.BuildCountQuery(countQuery) - if err != nil { - return nil, 0, fmt.Errorf("failed to build count query: %w", err) + if endpoint.HasFilter { + statusColumn := findStatusColumn(data.TableSchema) + methodContent.WriteString("// @Param " + statusColumn + " query string false \"Filter by status\"\n") } - if err := dbConn.QueryRowContext(ctx, countSQL, countArgs...).Scan(&total); err != nil { - return nil, 0, fmt.Errorf("failed to get total count: %w", err) + if endpoint.HasSearch { + methodContent.WriteString("// @Param search query string false \"Search in multiple fields\"\n") } - // 2. Get main data - mainSQL, mainArgs, err := mainBuilder.BuildQuery(query) - if err != nil { - return nil, 0, fmt.Errorf("failed to build main query: %w", err) + methodContent.WriteString("// @Success 200 {object} " + data.Category + "Models." + endpoint.ResponseModel + " \"Success response\"\n") + methodContent.WriteString("// @Failure 400 {object} models.ErrorResponse \"Bad request\"\n") + methodContent.WriteString("// @Failure 500 {object} models.ErrorResponse \"Internal server error\"\n") + methodContent.WriteString("// @Router /api/v1/" + strings.ToLower(data.Name) + endpoint.Path + " [get]\n") + + methodContent.WriteString("func (h *" + data.Name + "Handler) Get" + data.Name + "(c *gin.Context) {\n") + methodContent.WriteString(" // Increase timeout for complex queries\n") + methodContent.WriteString(" ctx, cancel := context.WithTimeout(c.Request.Context(), 120*time.Second)\n") + methodContent.WriteString(" defer cancel()\n\n") + + // Get the fields for this endpoint from the configuration + fields := getFieldsForEndpoint(data, endpoint.Fields) + + methodContent.WriteString(" // Use the core fetch" + data.Name + "sDynamic function for all data retrieval logic.\n") + methodContent.WriteString(" query := queryUtils.DynamicQuery{\n") + 
methodContent.WriteString(" From: \"" + data.TableName + "\",\n") + methodContent.WriteString(" Fields: []queryUtils.SelectField{\n") + + // Generate select fields based on the endpoint configuration + for _, field := range fields { + methodContent.WriteString(" {Expression: \"" + field + "\"},\n") } - rows, err := dbConn.QueryContext(ctx, mainSQL, mainArgs...) - if err != nil { - return nil, 0, fmt.Errorf("failed to execute main query: %w", err) - } - defer rows.Close() - - for rows.Next() { - item, err := h.scan` + data.Name + `(rows) - if err != nil { - return nil, 0, fmt.Errorf("failed to scan ` + data.NameLower + `: %w", err) - } - items = append(items, item) - } - - if err := rows.Err(); err != nil { - return nil, 0, fmt.Errorf("rows iteration error: %w", err) - } - - return items, total, nil -} -` -} - -func generateCreateMethod(data HandlerData) string { - return ` - -// Create` + data.Name + ` godoc -// @Summary Create ` + data.NameLower + ` -// @Description Creates a new ` + data.NameLower + ` record -// @Tags ` + data.Name + ` -// @Accept json -// @Produce json -// @Param request body ` + data.Category + `Models.` + data.Name + `CreateRequest true "` + data.Name + ` creation request" -// @Success 201 {object} ` + data.Category + `Models.` + data.Name + `CreateResponse "` + data.Name + ` created successfully" -// @Failure 400 {object} models.ErrorResponse "Bad request or validation error" -// @Failure 500 {object} models.ErrorResponse "Internal server error" -// @Router /api/v1/` + data.NamePlural + ` [post] -func (h *` + data.Name + `Handler) Create` + data.Name + `(c *gin.Context) { - var req ` + data.Category + `Models.` + data.Name + `CreateRequest - if err := c.ShouldBindJSON(&req); err != nil { - h.respondError(c, "Invalid request body", err, http.StatusBadRequest) - return - } - - // Validate request - if err := ` + data.NameLower + `validate.Struct(&req); err != nil { - h.respondError(c, "Validation failed", err, http.StatusBadRequest) - return - 
} - - dbConn, err := h.db.GetDB("postgres_satudata") - if err != nil { - h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) - return - } - - ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) - defer cancel() - - // Validate duplicate and daily submission - if err := h.validate` + data.Name + `Submission(ctx, dbConn, &req); err != nil { - h.respondError(c, "Validation failed", err, http.StatusBadRequest) - return - } - - item, err := h.create` + data.Name + `(ctx, dbConn, &req) - if err != nil { - h.logAndRespondError(c, "Failed to create ` + data.NameLower + `", err, http.StatusInternalServerError) - return - } - - response := ` + data.Category + `Models.` + data.Name + `CreateResponse{ - Message: "` + data.Name + ` berhasil dibuat", - Data: item, - } - - c.JSON(http.StatusCreated, response) -}` -} - -func generateUpdateMethod(data HandlerData) string { - return ` - -// Update` + data.Name + ` godoc -// @Summary Update ` + data.NameLower + ` -// @Description Updates an existing ` + data.NameLower + ` record -// @Tags ` + data.Name + ` -// @Accept json -// @Produce json -// @Param id path string true "` + data.Name + ` ID (UUID)" -// @Param request body ` + data.Category + `Models.` + data.Name + `UpdateRequest true "` + data.Name + ` update request" -// @Success 200 {object} ` + data.Category + `Models.` + data.Name + `UpdateResponse "` + data.Name + ` updated successfully" -// @Failure 400 {object} models.ErrorResponse "Bad request or validation error" -// @Failure 404 {object} models.ErrorResponse "` + data.Name + ` not found" -// @Failure 500 {object} models.ErrorResponse "Internal server error" -// @Router /api/v1/` + data.NameLower + `/{id} [put] -func (h *` + data.Name + `Handler) Update` + data.Name + `(c *gin.Context) { - id := c.Param("id") - - // Validate UUID format - if _, err := uuid.Parse(id); err != nil { - h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) - return - } - - 
var req ` + data.Category + `Models.` + data.Name + `UpdateRequest - if err := c.ShouldBindJSON(&req); err != nil { - h.respondError(c, "Invalid request body", err, http.StatusBadRequest) - return - } - - // Set ID from path parameter - req.ID = id - - // Validate request - if err := ` + data.NameLower + `validate.Struct(&req); err != nil { - h.respondError(c, "Validation failed", err, http.StatusBadRequest) - return - } - - dbConn, err := h.db.GetDB("postgres_satudata") - if err != nil { - h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) - return - } - - ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) - defer cancel() - - item, err := h.update` + data.Name + `(ctx, dbConn, &req) - if err != nil { - if err == sql.ErrNoRows { - h.respondError(c, "` + data.Name + ` not found", err, http.StatusNotFound) - } else { - h.logAndRespondError(c, "Failed to update ` + data.NameLower + `", err, http.StatusInternalServerError) - } - return - } - - response := ` + data.Category + `Models.` + data.Name + `UpdateResponse{ - Message: "` + data.Name + ` berhasil diperbarui", - Data: item, - } - - c.JSON(http.StatusOK, response) -}` -} - -func generateDeleteMethod(data HandlerData) string { - return ` - -// Delete` + data.Name + ` godoc -// @Summary Delete ` + data.NameLower + ` -// @Description Soft deletes a ` + data.NameLower + ` by setting status to 'deleted' -// @Tags ` + data.Name + ` -// @Accept json -// @Produce json -// @Param id path string true "` + data.Name + ` ID (UUID)" -// @Success 200 {object} ` + data.Category + `Models.` + data.Name + `DeleteResponse "` + data.Name + ` deleted successfully" -// @Failure 400 {object} models.ErrorResponse "Invalid ID format" -// @Failure 404 {object} models.ErrorResponse "` + data.Name + ` not found" -// @Failure 500 {object} models.ErrorResponse "Internal server error" -// @Router /api/v1/` + data.NameLower + `/{id} [delete] -func (h *` + data.Name + `Handler) 
Delete` + data.Name + `(c *gin.Context) { - id := c.Param("id") - - // Validate UUID format - if _, err := uuid.Parse(id); err != nil { - h.respondError(c, "Invalid ID format", err, http.StatusBadRequest) - return - } - - dbConn, err := h.db.GetDB("postgres_satudata") - if err != nil { - h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) - return - } - - ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) - defer cancel() - - err = h.delete` + data.Name + `(ctx, dbConn, id) - if err != nil { - if err == sql.ErrNoRows { - h.respondError(c, "` + data.Name + ` not found", err, http.StatusNotFound) - } else { - h.logAndRespondError(c, "Failed to delete ` + data.NameLower + `", err, http.StatusInternalServerError) - } - return - } - - response := ` + data.Category + `Models.` + data.Name + `DeleteResponse{ - Message: "` + data.Name + ` berhasil dihapus", - ID: id, - } - - c.JSON(http.StatusOK, response) -}` -} - -func generateStatsMethod(data HandlerData) string { - return ` - -// Get` + data.Name + `Stats godoc -// @Summary Get ` + data.NameLower + ` statistics -// @Description Returns comprehensive statistics about ` + data.NameLower + ` data -// @Tags ` + data.Name + ` -// @Accept json -// @Produce json -// @Param status query string false "Filter statistics by status" -// @Success 200 {object} models.AggregateData "Statistics data" -// @Failure 500 {object} models.ErrorResponse "Internal server error" -// @Router /api/v1/` + data.NamePlural + `/stats [get] -func (h *` + data.Name + `Handler) Get` + data.Name + `Stats(c *gin.Context) { - dbConn, err := h.db.GetDB("postgres_satudata") - if err != nil { - h.logAndRespondError(c, "Database connection failed", err, http.StatusInternalServerError) - return - } - - ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second) - defer cancel() - - filter := h.parseFilterParams(c) - aggregateData, err := h.getAggregateData(ctx, dbConn, filter) - if err != 
nil { - h.logAndRespondError(c, "Failed to get statistics", err, http.StatusInternalServerError) - return - } - - c.JSON(http.StatusOK, gin.H{ - "message": "Statistik ` + data.NameLower + ` berhasil diambil", - "data": aggregateData, - }) -}` -} - -func generateHelperMethods(data HandlerData) string { - var helperMethods strings.Builder - - // Helper methods yang selalu dibutuhkan untuk semua handlers - helperMethods.WriteString(` - -// Optimized scanning function - selalu dibutuhkan untuk semua operasi database -func (h *` + data.Name + `Handler) scan` + data.Name + `(rows *sql.Rows) (` + data.Category + `Models.` + data.Name + `, error) { - var item ` + data.Category + `Models.` + data.Name + ` - - // Scan into individual fields to handle nullable types properly - err := rows.Scan( - &item.ID, - &item.Status, - &item.Sort.Int32, &item.Sort.Valid, // models.NullableInt32 - &item.UserCreated.String, &item.UserCreated.Valid, // sql.NullString - &item.DateCreated.Time, &item.DateCreated.Valid, // sql.NullTime - &item.UserUpdated.String, &item.UserUpdated.Valid, // sql.NullString - &item.DateUpdated.Time, &item.DateUpdated.Valid, // sql.NullTime - &item.Name.String, &item.Name.Valid, // sql.NullString - ) - - return item, err -} - -// Enhanced error handling - selalu dibutuhkan untuk semua handlers -func (h *` + data.Name + `Handler) logAndRespondError(c *gin.Context, message string, err error, statusCode int) { - log.Printf("[ERROR] %s: %v", message, err) - h.respondError(c, message, err, statusCode) -} - -func (h *` + data.Name + `Handler) respondError(c *gin.Context, message string, err error, statusCode int) { - errorMessage := message - if gin.Mode() == gin.ReleaseMode { - errorMessage = "Internal server error" - } - - c.JSON(statusCode, models.ErrorResponse{ - Error: errorMessage, - Code: statusCode, - Message: err.Error(), - Timestamp: time.Now(), - }) -} -`) - - // Helper methods untuk GET operations - if data.HasGet { - helperMethods.WriteString(` - -// 
Database operations untuk GET by ID -func (h *` + data.Name + `Handler) get` + data.Name + `ByID(ctx context.Context, dbConn *sql.DB, id string) (*` + data.Category + `Models.` + data.Name + `, error) { - query := "SELECT id, status, sort, user_created, date_created, user_updated, date_updated, name FROM ` + data.TableName + ` WHERE id = $1 AND status != 'deleted'" - row := dbConn.QueryRowContext(ctx, query, id) - - var item ` + data.Category + `Models.` + data.Name + ` - err := row.Scan(&item.ID, &item.Status, &item.Sort, &item.UserCreated, &item.DateCreated, &item.UserUpdated, &item.DateUpdated, &item.Name) - if err != nil { - return nil, err - } - - return &item, nil -} -`) - - // Helper untuk fetch dengan pagination/filter - helperMethods.WriteString(` - -func (h *` + data.Name + `Handler) fetch` + data.Name + `s(ctx context.Context, dbConn *sql.DB, filter ` + data.Category + `Models.` + data.Name + `Filter, limit, offset int) ([]` + data.Category + `Models.` + data.Name + `, error) { - whereClause, args := h.buildWhereClause(filter) - query := fmt.Sprintf("SELECT id, status, sort, user_created, date_created, user_updated, date_updated, name FROM ` + data.TableName + ` WHERE %s ORDER BY date_created DESC NULLS LAST LIMIT $%d OFFSET $%d", whereClause, len(args)+1, len(args)+2) - args = append(args, limit, offset) - - rows, err := dbConn.QueryContext(ctx, query, args...) 
- if err != nil { - return nil, fmt.Errorf("fetch ` + data.NamePlural + ` query failed: %w", err) - } - defer rows.Close() - - items := make([]` + data.Category + `Models.` + data.Name + `, 0, limit) - for rows.Next() { - item, err := h.scan` + data.Name + `(rows) - if err != nil { - return nil, fmt.Errorf("scan ` + data.Name + ` failed: %w", err) - } - items = append(items, item) - } - - if err := rows.Err(); err != nil { - return nil, fmt.Errorf("rows iteration error: %w", err) - } - - if *verboseFlag { - log.Printf("Successfully fetched %d ` + data.NamePlural + ` with filters applied", len(items)) - } - return items, nil -} -`) - - // Helper untuk pagination - helperMethods.WriteString(` - -// Parse pagination parameters dengan validation yang lebih ketat -func (h *` + data.Name + `Handler) parsePaginationParams(c *gin.Context) (int, int, error) { - limit := 10 // Default limit - offset := 0 // Default offset - - if limitStr := c.Query("limit"); limitStr != "" { - parsedLimit, err := strconv.Atoi(limitStr) - if err != nil { - return 0, 0, fmt.Errorf("invalid limit parameter: %s", limitStr) - } - if parsedLimit <= 0 { - return 0, 0, fmt.Errorf("limit must be greater than 0") - } - if parsedLimit > 100 { - return 0, 0, fmt.Errorf("limit cannot exceed 100") - } - limit = parsedLimit - } - - if offsetStr := c.Query("offset"); offsetStr != "" { - parsedOffset, err := strconv.Atoi(offsetStr) - if err != nil { - return 0, 0, fmt.Errorf("invalid offset parameter: %s", offsetStr) - } - if parsedOffset < 0 { - return 0, 0, fmt.Errorf("offset cannot be negative") - } - offset = parsedOffset - } - - if *verboseFlag { - log.Printf("Pagination - Limit: %d, Offset: %d", limit, offset) - } - return limit, offset, nil -} -`) - - // Helper untuk filter (jika ada filter atau search) - if data.HasFilter || data.HasSearch { - helperMethods.WriteString(` - -func (h *` + data.Name + `Handler) parseFilterParams(c *gin.Context) ` + data.Category + `Models.` + data.Name + `Filter { - 
filter := ` + data.Category + `Models.` + data.Name + `Filter{} - - if status := c.Query("status"); status != "" { - if models.IsValidStatus(status) { - filter.Status = &status - } - } - - if search := c.Query("search"); search != "" { - filter.Search = &search - } - - // Parse date filters - if dateFromStr := c.Query("date_from"); dateFromStr != "" { - if dateFrom, err := time.Parse("2006-01-02", dateFromStr); err == nil { - filter.DateFrom = &dateFrom - } - } - - if dateToStr := c.Query("date_to"); dateToStr != "" { - if dateTo, err := time.Parse("2006-01-02", dateToStr); err == nil { - filter.DateTo = &dateTo - } - } - - return filter -} - -// Build WHERE clause dengan filter parameters -func (h *` + data.Name + `Handler) buildWhereClause(filter ` + data.Category + `Models.` + data.Name + `Filter) (string, []interface{}) { - conditions := []string{"status != 'deleted'"} - args := []interface{}{} - paramCount := 1 - - if filter.Status != nil { - conditions = append(conditions, fmt.Sprintf("status = $%d", paramCount)) - args = append(args, *filter.Status) - paramCount++ - } - - if filter.Search != nil { - searchCondition := fmt.Sprintf("name ILIKE $%d", paramCount) - conditions = append(conditions, searchCondition) - searchTerm := "%" + *filter.Search + "%" - args = append(args, searchTerm) - paramCount++ - } - - if filter.DateFrom != nil { - conditions = append(conditions, fmt.Sprintf("date_created >= $%d", paramCount)) - args = append(args, *filter.DateFrom) - paramCount++ - } - - if filter.DateTo != nil { - conditions = append(conditions, fmt.Sprintf("date_created <= $%d", paramCount)) - args = append(args, filter.DateTo.Add(24*time.Hour-time.Nanosecond)) - paramCount++ - } - - return strings.Join(conditions, " AND "), args -} -`) - } - - // Helper untuk pagination meta - helperMethods.WriteString(` - -func (h *` + data.Name + `Handler) calculateMeta(limit, offset, total int) models.MetaResponse { - totalPages := 0 - currentPage := 1 - if limit > 0 { - 
totalPages = (total + limit - 1) / limit // Ceiling division - currentPage = (offset / limit) + 1 - } - - return models.MetaResponse{ - Limit: limit, - Offset: offset, - Total: total, - TotalPages: totalPages, - CurrentPage: currentPage, - HasNext: offset+limit < total, - HasPrev: offset > 0, - } -} -`) - - // Helper untuk total count (dibutuhkan untuk pagination dan stats) - if data.HasPagination || data.HasStats { - helperMethods.WriteString(` - -func (h *` + data.Name + `Handler) getTotalCount(ctx context.Context, dbConn *sql.DB, filter ` + data.Category + `Models.` + data.Name + `Filter, total *int) error { - whereClause, args := h.buildWhereClause(filter) - countQuery := fmt.Sprintf("SELECT COUNT(*) FROM ` + data.TableName + ` WHERE %s", whereClause) - if err := dbConn.QueryRowContext(ctx, countQuery, args...).Scan(total); err != nil { - return fmt.Errorf("total count query failed: %w", err) - } - return nil -} -`) - } - - // Helper untuk aggregate data (stats) - if data.HasStats { - helperMethods.WriteString(` - -// Get comprehensive aggregate data dengan filter support -func (h *` + data.Name + `Handler) getAggregateData(ctx context.Context, dbConn *sql.DB, filter ` + data.Category + `Models.` + data.Name + `Filter) (*models.AggregateData, error) { - aggregate := &models.AggregateData{ - ByStatus: make(map[string]int), - } - - // Build where clause untuk filter - whereClause, args := h.buildWhereClause(filter) - - // Use concurrent execution untuk performance - var wg sync.WaitGroup - var mu sync.Mutex - errChan := make(chan error, 4) - - // 1. Count by status - wg.Add(1) - go func() { - defer wg.Done() - statusQuery := fmt.Sprintf("SELECT status, COUNT(*) FROM ` + data.TableName + ` WHERE %s GROUP BY status ORDER BY status", whereClause) - - rows, err := dbConn.QueryContext(ctx, statusQuery, args...) 
- if err != nil { - errChan <- fmt.Errorf("status query failed: %w", err) - return - } - defer rows.Close() - - mu.Lock() - for rows.Next() { - var status string - var count int - if err := rows.Scan(&status, &count); err != nil { - mu.Unlock() - errChan <- fmt.Errorf("status scan failed: %w", err) - return - } - aggregate.ByStatus[status] = count - switch status { - case "active": - aggregate.TotalActive = count - case "draft": - aggregate.TotalDraft = count - case "inactive": - aggregate.TotalInactive = count - } - } - mu.Unlock() - - if err := rows.Err(); err != nil { - errChan <- fmt.Errorf("status iteration error: %w", err) - } - }() - - // 2. Get last updated time dan today statistics - wg.Add(1) - go func() { - defer wg.Done() - - // Last updated - lastUpdatedQuery := fmt.Sprintf("SELECT MAX(date_updated) FROM ` + data.TableName + ` WHERE %s AND date_updated IS NOT NULL", whereClause) - var lastUpdated sql.NullTime - if err := dbConn.QueryRowContext(ctx, lastUpdatedQuery, args...).Scan(&lastUpdated); err != nil { - errChan <- fmt.Errorf("last updated query failed: %w", err) - return - } - - // Today statistics - today := time.Now().Format("2006-01-02") - todayStatsQuery := fmt.Sprintf(` + "`" + ` - SELECT - SUM(CASE WHEN DATE(date_created) = $%d THEN 1 ELSE 0 END) as created_today, - SUM(CASE WHEN DATE(date_updated) = $%d AND DATE(date_created) != $%d THEN 1 ELSE 0 END) as updated_today - FROM ` + data.TableName + ` - WHERE %s` + "`" + `, len(args)+1, len(args)+1, len(args)+1, whereClause) - - todayArgs := append(args, today) - var createdToday, updatedToday int - if err := dbConn.QueryRowContext(ctx, todayStatsQuery, todayArgs...).Scan(&createdToday, &updatedToday); err != nil { - errChan <- fmt.Errorf("today stats query failed: %w", err) - return - } - - mu.Lock() - if lastUpdated.Valid { - aggregate.LastUpdated = &lastUpdated.Time - } - aggregate.CreatedToday = createdToday - aggregate.UpdatedToday = updatedToday - mu.Unlock() - }() - - // Wait for all 
goroutines - wg.Wait() - close(errChan) - - // Check for errors - for err := range errChan { - if err != nil { - return nil, err - } - } - - return aggregate, nil -} -`) - } - } - - // Helper methods untuk POST operations - if data.HasPost { - helperMethods.WriteString(` - -// Database operations untuk CREATE -func (h *` + data.Name + `Handler) create` + data.Name + `(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `Models.` + data.Name + `CreateRequest) (*` + data.Category + `Models.` + data.Name + `, error) { - id := uuid.New().String() - now := time.Now() - - query := "INSERT INTO ` + data.TableName + ` (id, status, date_created, date_updated, name) VALUES ($1, $2, $3, $4, $5) RETURNING id, status, sort, user_created, date_created, user_updated, date_updated, name" - row := dbConn.QueryRowContext(ctx, query, id, req.Status, now, now, req.Name) - - var item ` + data.Category + `Models.` + data.Name + ` - err := row.Scan(&item.ID, &item.Status, &item.Sort, &item.UserCreated, &item.DateCreated, &item.UserUpdated, &item.DateUpdated, &item.Name) - if err != nil { - return nil, fmt.Errorf("failed to create ` + data.NameLower + `: %w", err) - } - - return &item, nil -} - -// validate` + data.Name + `Submission performs validation for duplicate entries and daily submission limits -func (h *` + data.Name + `Handler) validate` + data.Name + `Submission(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `Models.` + data.Name + `CreateRequest) error { - // Import the validation utility - validator := validation.NewDuplicateValidator(dbConn) - - // Use default configuration - config := validation.ValidationConfig{ - TableName: "` + data.TableName + `", - IDColumn: "id", - StatusColumn: "status", - DateColumn: "date_created", - ActiveStatuses: []string{"active", "draft"}, - } - - // Validate duplicate entries with active status for today - err := validator.ValidateDuplicate(ctx, config, "dummy_id") - if err != nil { - return fmt.Errorf("validation 
failed: %w", err) - } - - // Validate once per day submission - err = validator.ValidateOncePerDay(ctx, "` + data.TableName + `", "id", "date_created", "daily_limit") - if err != nil { - return fmt.Errorf("daily submission limit exceeded: %w", err) - } - - return nil -} - -// Example usage of the validation utility with custom configuration -func (h *` + data.Name + `Handler) validateWithCustomConfig(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `Models.` + data.Name + `CreateRequest) error { - // Create validator instance - validator := validation.NewDuplicateValidator(dbConn) - - // Use custom configuration - config := validation.ValidationConfig{ - TableName: "` + data.TableName + `", - IDColumn: "id", - StatusColumn: "status", - DateColumn: "date_created", - ActiveStatuses: []string{"active", "draft"}, - AdditionalFields: map[string]interface{}{ - "name": req.Name, - }, - } - - // Validate with custom fields - fields := map[string]interface{}{ - "name": *req.Name, - } - - err := validator.ValidateDuplicateWithCustomFields(ctx, config, fields) - if err != nil { - return fmt.Errorf("custom validation failed: %w", err) - } - - return nil -} - -// GetLastSubmissionTime example -func (h *` + data.Name + `Handler) getLastSubmissionTimeExample(ctx context.Context, dbConn *sql.DB, identifier string) (*time.Time, error) { - validator := validation.NewDuplicateValidator(dbConn) - return validator.GetLastSubmissionTime(ctx, "` + data.TableName + `", "id", "date_created", identifier) -} -`) - } - - // Helper methods untuk PUT operations - if data.HasPut { - helperMethods.WriteString(` - -// Database operations untuk UPDATE -func (h *` + data.Name + `Handler) update` + data.Name + `(ctx context.Context, dbConn *sql.DB, req *` + data.Category + `Models.` + data.Name + `UpdateRequest) (*` + data.Category + `Models.` + data.Name + `, error) { - now := time.Now() - - query := "UPDATE ` + data.TableName + ` SET status = $2, date_updated = $3, name = $4 WHERE id = 
$1 AND status != 'deleted' RETURNING id, status, sort, user_created, date_created, user_updated, date_updated, name" - row := dbConn.QueryRowContext(ctx, query, req.ID, req.Status, now, req.Name) - - var item ` + data.Category + `Models.` + data.Name + ` - err := row.Scan(&item.ID, &item.Status, &item.Sort, &item.UserCreated, &item.DateCreated, &item.UserUpdated, &item.DateUpdated, &item.Name) - if err != nil { - return nil, fmt.Errorf("failed to update ` + data.NameLower + `: %w", err) - } - - return &item, nil -} -`) - } - - // Helper methods untuk DELETE operations - if data.HasDelete { - helperMethods.WriteString(` - -// Database operations untuk DELETE -func (h *` + data.Name + `Handler) delete` + data.Name + `(ctx context.Context, dbConn *sql.DB, id string) error { - now := time.Now() - query := "UPDATE ` + data.TableName + ` SET status = 'deleted', date_updated = $2 WHERE id = $1 AND status != 'deleted'" - - result, err := dbConn.ExecContext(ctx, query, id, now) - if err != nil { - return fmt.Errorf("failed to delete ` + data.NameLower + `: %w", err) - } - - rowsAffected, err := result.RowsAffected() - if err != nil { - return fmt.Errorf("failed to get affected rows: %w", err) - } - - if rowsAffected == 0 { - return sql.ErrNoRows - } - - return nil -} -`) - } - - // Helper methods untuk DYNAMIC operations - if data.HasDynamic { - helperMethods.WriteString(` - // fetch` + data.Name + `sDynamic executes dynamic query - func (h *` + data.Name + `Handler) fetch` + data.Name + `sDynamic(ctx context.Context, dbConn *sql.DB, query utils.DynamicQuery) ([]` + data.Category + `Models.` + data.Name + `, int, error) { - // Setup query builders - countBuilder := utils.NewQueryBuilder("` + data.TableName + `"). - SetColumnMapping(map[string]string{ - // Add your column mappings here - }). 
- SetAllowedColumns([]string{ - "id", "status", "sort", "user_created", "date_created", - "user_updated", "date_updated", "name", - // Add other allowed columns here - }) - - mainBuilder := utils.NewQueryBuilder("` + data.TableName + `"). - SetColumnMapping(map[string]string{ - // Add your column mappings here - }). - SetAllowedColumns([]string{ - "id", "status", "sort", "user_created", "date_created", - "user_updated", "date_updated", "name", - // Add other allowed columns here - }) - - // Add default filter to exclude deleted records - if len(query.Filters) > 0 { - query.Filters = append([]utils.FilterGroup{{ - Filters: []utils.DynamicFilter{{ - Column: "status", - Operator: utils.OpNotEqual, - Value: "deleted", - }}, - LogicOp: "AND", - }}, query.Filters...) - } else { - query.Filters = []utils.FilterGroup{{ - Filters: []utils.DynamicFilter{{ - Column: "status", - Operator: utils.OpNotEqual, - Value: "deleted", - }}, - LogicOp: "AND", - }} - } - - // Execute queries sequentially - var total int - var items []` + data.Category + `Models.` + data.Name + ` - - // 1. Get total count - countQuery := query - countQuery.Limit = 0 - countQuery.Offset = 0 - - countSQL, countArgs, err := countBuilder.BuildCountQuery(countQuery) - if err != nil { - return nil, 0, fmt.Errorf("failed to build count query: %w", err) - } - - if err := dbConn.QueryRowContext(ctx, countSQL, countArgs...).Scan(&total); err != nil { - return nil, 0, fmt.Errorf("failed to get total count: %w", err) - } - - // 2. Get main data - mainSQL, mainArgs, err := mainBuilder.BuildQuery(query) - if err != nil { - return nil, 0, fmt.Errorf("failed to build main query: %w", err) - } - - rows, err := dbConn.QueryContext(ctx, mainSQL, mainArgs...) 
- if err != nil { - return nil, 0, fmt.Errorf("failed to execute main query: %w", err) - } - defer rows.Close() - - for rows.Next() { - item, err := h.scan` + data.Name + `(rows) - if err != nil { - return nil, 0, fmt.Errorf("failed to scan ` + data.NameLower + `: %w", err) + methodContent.WriteString(" },\n") + methodContent.WriteString(" Sort: []queryUtils.SortField{{Column: \"date_created\", Order: \"DESC\"}},\n") + methodContent.WriteString(" }\n\n") + + // Add joins if relationships exist and fields include relationship columns + if len(data.Relationships) > 0 && hasRelationshipFields(fields, data.Relationships) { + methodContent.WriteString(" // Add joins for relationships using the correct structure\n") + methodContent.WriteString(" query.Joins = []queryUtils.Join{\n") + + for _, rel := range data.Relationships { + // Check if any field from this relationship is included + if hasRelationshipField(fields, rel) { + methodContent.WriteString(" {\n") + methodContent.WriteString(" Type: \"LEFT\",\n") + methodContent.WriteString(" Table: \"" + rel.Table + "\",\n") + methodContent.WriteString(" Alias: \"" + rel.Table + "\",\n") + methodContent.WriteString(" OnConditions: queryUtils.FilterGroup{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{\n") + methodContent.WriteString(" {Column: \"" + data.TableName + "." + rel.ForeignKey + "\", Operator: queryUtils.OpEqual, Value: \"" + rel.Table + "." 
+ rel.LocalKey + "\"},\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") } - items = append(items, item) } - if err := rows.Err(); err != nil { - return nil, 0, fmt.Errorf("rows iteration error: %w", err) + methodContent.WriteString(" }\n\n") + } + + methodContent.WriteString(" // Parse pagination\n") + methodContent.WriteString(" if limit, err := strconv.Atoi(c.DefaultQuery(\"limit\", \"10\")); err == nil && limit > 0 && limit <= 100 {\n") + methodContent.WriteString(" query.Limit = limit\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" if offset, err := strconv.Atoi(c.DefaultQuery(\"offset\", \"0\")); err == nil && offset >= 0 {\n") + methodContent.WriteString(" query.Offset = offset\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Use GetSQLXDB to get database connection\n") + methodContent.WriteString(" dbConn, err := h.db.GetSQLXDB(\"postgres_satudata\")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Database connection failed\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Parse simple filters\n") + methodContent.WriteString(" var filters []queryUtils.DynamicFilter\n") + + // Add status filter if status column exists + statusColumn := findStatusColumn(data.TableSchema) + if statusColumn != "" { + methodContent.WriteString(" if " + statusColumn + " := c.Query(\"" + statusColumn + "\"); " + statusColumn + " != \"\" && models.IsValidStatus(" + statusColumn + ") {\n") + methodContent.WriteString(" filters = append(filters, queryUtils.DynamicFilter{Column: \"" + statusColumn + "\", Operator: queryUtils.OpEqual, Value: " + statusColumn + "})\n") + methodContent.WriteString(" }\n") + } + + methodContent.WriteString(" \n") + methodContent.WriteString(" // Optimize query search with 
caching\n") + methodContent.WriteString(" search := c.Query(\"search\")\n") + methodContent.WriteString(" var searchFilters []queryUtils.DynamicFilter\n") + methodContent.WriteString(" var cacheKey string\n") + methodContent.WriteString(" var useCache bool\n\n") + + methodContent.WriteString(" // Initialize searchFilters before using it in the cache hit section\n") + methodContent.WriteString(" if search != \"\" {\n") + methodContent.WriteString(" // Limit search length to prevent slow queries\n") + methodContent.WriteString(" if len(search) > 50 {\n") + methodContent.WriteString(" search = search[:50]\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Generate cache key for search\n") + methodContent.WriteString(" cacheKey = fmt.Sprintf(\"" + data.NameLower + ":search:%s:%d:%d\", search, query.Limit, query.Offset)\n\n") + + methodContent.WriteString(" // Initialize searchFilters here\n") + methodContent.WriteString(" searchFilters = []queryUtils.DynamicFilter{\n") + + // Add searchable columns based on table schema + searchableColumns := findSearchableColumns(data.TableSchema) + for _, col := range searchableColumns { + methodContent.WriteString(" {Column: \"" + col + "\", Operator: queryUtils.OpILike, Value: \"%\" + search + \"%\"},\n") + } + + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Try to get from cache first\n") + methodContent.WriteString(" if cachedData, found := h.cache.Get(cacheKey); found {\n") + methodContent.WriteString(" logger.Info(\"Cache hit for search\", map[string]interface{}{\"search\": search, \"cache_key\": cacheKey})\n\n") + + methodContent.WriteString(" // Convert from interface{} to expected type\n") + methodContent.WriteString(" " + data.NamePlural + ", ok := cachedData.([]" + data.Category + "Models." 
+ data.Name + ")\n") + methodContent.WriteString(" if !ok {\n") + methodContent.WriteString(" logger.Error(\"Failed to convert cached data\", map[string]interface{}{\"cache_key\": cacheKey})\n") + methodContent.WriteString(" } else {\n") + methodContent.WriteString(" // If requested, get aggregation data\n") + methodContent.WriteString(" var aggregateData *models.AggregateData\n") + methodContent.WriteString(" if c.Query(\"include_summary\") == \"true\" {\n") + methodContent.WriteString(" // Build full filter groups for aggregate data (including search filters)\n") + methodContent.WriteString(" fullFilterGroups := []queryUtils.FilterGroup{\n") + methodContent.WriteString(" {Filters: searchFilters, LogicOp: \"OR\"},\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" if len(filters) > 0 {\n") + methodContent.WriteString(" fullFilterGroups = append(fullFilterGroups, queryUtils.FilterGroup{Filters: filters, LogicOp: \"AND\"})\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" aggregateData, err = h.getAggregateData(ctx, dbConn, fullFilterGroups)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to get aggregate data\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Build response\n") + methodContent.WriteString(" meta := h.calculateMeta(query.Limit, query.Offset, len(" + data.NamePlural + "))\n") + methodContent.WriteString(" response := " + data.Category + "Models." 
+ endpoint.ResponseModel + "{\n") + methodContent.WriteString(" Message: \"Data " + data.NameLower + " berhasil diambil (dari cache)\",\n") + methodContent.WriteString(" Data: " + data.NamePlural + ",\n") + methodContent.WriteString(" Meta: meta,\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" if aggregateData != nil {\n") + methodContent.WriteString(" response.Summary = aggregateData\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" c.JSON(http.StatusOK, response)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // If not in cache, mark for saving after query\n") + methodContent.WriteString(" useCache = true\n\n") + + methodContent.WriteString(" // If there's search, create OR filter group\n") + methodContent.WriteString(" query.Filters = append(query.Filters, queryUtils.FilterGroup{Filters: searchFilters, LogicOp: \"OR\"})\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Add other filters (if any) as AND group\n") + methodContent.WriteString(" if len(filters) > 0 {\n") + methodContent.WriteString(" query.Filters = append(query.Filters, queryUtils.FilterGroup{Filters: filters, LogicOp: \"AND\"})\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" " + data.NamePlural + ", total, err := h.fetch" + data.Name + "sDynamic(ctx, dbConn, query)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to fetch data\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Save search results to cache if there's a search parameter\n") + methodContent.WriteString(" if useCache && len(" + data.NamePlural + ") > 0 {\n") + methodContent.WriteString(" h.cache.Set(cacheKey, " + data.NamePlural + ", 15*time.Minute) // Cache 
for 15 minutes\n") + methodContent.WriteString(" logger.Info(\"Cached search results\", map[string]interface{}{\"search\": search, \"cache_key\": cacheKey, \"count\": len(" + data.NamePlural + ")})\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // If requested, get aggregation data\n") + methodContent.WriteString(" var aggregateData *models.AggregateData\n") + methodContent.WriteString(" if c.Query(\"include_summary\") == \"true\" {\n") + methodContent.WriteString(" aggregateData, err = h.getAggregateData(ctx, dbConn, query.Filters)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to get aggregate data\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Build response\n") + methodContent.WriteString(" meta := h.calculateMeta(query.Limit, query.Offset, total)\n") + methodContent.WriteString(" response := " + data.Category + "Models." 
+ endpoint.ResponseModel + "{\n") + methodContent.WriteString(" Message: \"Data " + data.NameLower + " berhasil diambil\",\n") + methodContent.WriteString(" Data: " + data.NamePlural + ",\n") + methodContent.WriteString(" Meta: meta,\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" if aggregateData != nil {\n") + methodContent.WriteString(" response.Summary = aggregateData\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" c.JSON(http.StatusOK, response)\n") + methodContent.WriteString("}\n\n") + + return methodContent.String() +} + +// generateGetByIDMethod - Template untuk GET by ID method dengan cache +func generateGetByIDMethod(data HandlerData, endpoint EndpointConfig) string { + var methodContent strings.Builder + primaryKey := findPrimaryKey(data.TableSchema) + if primaryKey == "" { + primaryKey = "id" // Default fallback + } + + methodContent.WriteString("// Get" + data.Name + "By" + snakeToPascal(primaryKey) + " godoc\n") + methodContent.WriteString("// @Summary " + endpoint.Summary + "\n") + methodContent.WriteString("// @Description " + endpoint.Description + "\n") + methodContent.WriteString("// @Tags " + strings.Join(endpoint.Tags, ", ") + "\n") + methodContent.WriteString("// @Accept json\n") + methodContent.WriteString("// @Produce json\n") + methodContent.WriteString("// @Param " + primaryKey + " path string true \"" + data.Name + " " + strings.ToUpper(primaryKey) + "\"\n") + methodContent.WriteString("// @Success 200 {object} " + data.Category + "Models." 
+ endpoint.ResponseModel + " \"Success response\"\n") + methodContent.WriteString("// @Failure 400 {object} models.ErrorResponse \"Invalid ID format\"\n") + methodContent.WriteString("// @Failure 404 {object} models.ErrorResponse \"" + data.Name + " not found\"\n") + methodContent.WriteString("// @Failure 500 {object} models.ErrorResponse \"Internal server error\"\n") + methodContent.WriteString("// @Router /api/v1/" + strings.ToLower(data.Name) + endpoint.Path + " [get]\n") + + methodContent.WriteString("func (h *" + data.Name + "Handler) Get" + data.Name + "By" + snakeToPascal(primaryKey) + "(c *gin.Context) {\n") + methodContent.WriteString(" " + primaryKey + " := c.Param(\"" + primaryKey + "\")\n") + methodContent.WriteString(" if " + primaryKey + " == \"\" {\n") + methodContent.WriteString(" h.respondError(c, \"Invalid " + strings.ToUpper(primaryKey) + " format\", fmt.Errorf(\"" + primaryKey + " cannot be empty\"), http.StatusBadRequest)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Try to get from cache first\n") + methodContent.WriteString(" cacheKey := fmt.Sprintf(\"" + data.NameLower + ":" + primaryKey + ":%s\", " + primaryKey + ")\n") + methodContent.WriteString(" if cachedData, found := h.cache.Get(cacheKey); found {\n") + methodContent.WriteString(" logger.Info(\"Cache hit for " + primaryKey + "\", map[string]interface{}{\"" + primaryKey + "\": " + primaryKey + ", \"cache_key\": cacheKey})\n\n") + + methodContent.WriteString(" // Convert from interface{} to expected type\n") + methodContent.WriteString(" if cached" + data.Name + ", ok := cachedData.(" + data.Category + "Models." + data.Name + "); ok {\n") + methodContent.WriteString(" response := " + data.Category + "Models." 
+ endpoint.ResponseModel + "{\n") + methodContent.WriteString(" Message: \"" + data.Name + " details retrieved successfully (dari cache)\",\n") + methodContent.WriteString(" Data: &cached" + data.Name + ",\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" c.JSON(http.StatusOK, response)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Use GetSQLXDB to get database connection\n") + methodContent.WriteString(" dbConn, err := h.db.GetSQLXDB(\"postgres_satudata\")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Database connection failed\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)\n") + methodContent.WriteString(" defer cancel()\n\n") + + // Get the fields for this endpoint from the configuration + fields := getFieldsForEndpoint(data, endpoint.Fields) + + methodContent.WriteString(" dynamicQuery := queryUtils.DynamicQuery{\n") + methodContent.WriteString(" From: \"" + data.TableName + "\",\n") + methodContent.WriteString(" Fields: []queryUtils.SelectField{\n") + + // Generate select fields based on the endpoint configuration + for _, field := range fields { + methodContent.WriteString(" {Expression: \"" + field + "\"},\n") + } + + methodContent.WriteString(" },\n") + methodContent.WriteString(" Filters: []queryUtils.FilterGroup{{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{\n") + methodContent.WriteString(" {Column: \"" + primaryKey + "\", Operator: queryUtils.OpEqual, Value: " + primaryKey + "},\n") + + // Add status filter if status column exists + statusColumn := findStatusColumn(data.TableSchema) + if statusColumn != "" { + methodContent.WriteString(" {Column: \"" + statusColumn + 
"\", Operator: queryUtils.OpNotEqual, Value: \"deleted\"},\n") + } + + methodContent.WriteString(" },\n") + methodContent.WriteString(" LogicOp: \"AND\",\n") + methodContent.WriteString(" }},\n") + methodContent.WriteString(" Limit: 1,\n") + methodContent.WriteString(" }\n\n") + + // Add joins if relationships exist and fields include relationship columns + if len(data.Relationships) > 0 && hasRelationshipFields(fields, data.Relationships) { + methodContent.WriteString(" // Add joins for relationships using the correct structure\n") + methodContent.WriteString(" dynamicQuery.Joins = []queryUtils.Join{\n") + + for _, rel := range data.Relationships { + // Check if any field from this relationship is included + if hasRelationshipField(fields, rel) { + methodContent.WriteString(" {\n") + methodContent.WriteString(" Type: \"LEFT\",\n") + methodContent.WriteString(" Table: \"" + rel.Table + "\",\n") + methodContent.WriteString(" Alias: \"" + rel.Table + "\",\n") + methodContent.WriteString(" OnConditions: queryUtils.FilterGroup{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{\n") + methodContent.WriteString(" {Column: \"" + data.TableName + "." + rel.ForeignKey + "\", Operator: queryUtils.OpEqual, Value: \"" + rel.Table + "." + rel.LocalKey + "\"},\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + } } - return items, total, nil + methodContent.WriteString(" }\n\n") } - `) + + methodContent.WriteString(" var data" + data.Name + " " + data.Category + "Models." 
+ data.Name + "\n") + methodContent.WriteString(" err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, dynamicQuery, &data" + data.Name + ")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" if err == sql.ErrNoRows {\n") + methodContent.WriteString(" h.respondError(c, \"" + data.Name + " not found\", err, http.StatusNotFound)\n") + methodContent.WriteString(" } else {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to get " + data.NameLower + "\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Save to cache\n") + methodContent.WriteString(" h.cache.Set(cacheKey, data" + data.Name + ", 30*time.Minute) // Cache for 30 minutes\n\n") + + methodContent.WriteString(" response := " + data.Category + "Models." + endpoint.ResponseModel + "{\n") + methodContent.WriteString(" Message: \"" + data.Name + " details retrieved successfully\",\n") + methodContent.WriteString(" Data: &data" + data.Name + ",\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" c.JSON(http.StatusOK, response)\n") + methodContent.WriteString("}\n\n") + + return methodContent.String() +} + +// generateDynamicMethod - Template untuk dynamic method dengan cache +func generateDynamicMethod(data HandlerData, endpoint EndpointConfig) string { + var methodContent strings.Builder + + methodContent.WriteString("// Get" + data.Name + "Dynamic godoc\n") + methodContent.WriteString("// @Summary " + endpoint.Summary + "\n") + methodContent.WriteString("// @Description " + endpoint.Description + "\n") + methodContent.WriteString("// @Tags " + strings.Join(endpoint.Tags, ", ") + "\n") + methodContent.WriteString("// @Accept json\n") + methodContent.WriteString("// @Produce json\n") + methodContent.WriteString("// @Param fields query string false \"Fields to select (e.g., fields=*.*)\"\n") + 
methodContent.WriteString("// @Param filter[column][operator] query string false \"Dynamic filters (e.g., filter[name][_eq]=value)\"\n") + methodContent.WriteString("// @Param sort query string false \"Sort fields (e.g., sort=date_created,-name)\"\n") + methodContent.WriteString("// @Param limit query int false \"Limit\" default(10)\n") + methodContent.WriteString("// @Param offset query int false \"Offset\" default(0)\n") + methodContent.WriteString("// @Success 200 {object} " + data.Category + "Models." + endpoint.ResponseModel + " \"Success response\"\n") + methodContent.WriteString("// @Failure 400 {object} models.ErrorResponse \"Bad request\"\n") + methodContent.WriteString("// @Failure 500 {object} models.ErrorResponse \"Internal server error\"\n") + methodContent.WriteString("// @Router /api/v1/" + strings.ToLower(data.Name) + endpoint.Path + " [get]\n") + + methodContent.WriteString("func (h *" + data.Name + "Handler) Get" + data.Name + "Dynamic(c *gin.Context) {\n") + methodContent.WriteString(" parser := queryUtils.NewQueryParser().SetLimits(10, 100)\n") + + // Get the default fields for this endpoint from the configuration + defaultFields := getFieldsForEndpoint(data, endpoint.Fields) + + methodContent.WriteString(" dynamicQuery, err := parser.ParseQueryWithDefaultFields(c.Request.URL.Query(), \"" + data.TableName + "\", []string{\n") + for _, field := range defaultFields { + methodContent.WriteString(" \"" + field + "\",\n") } + methodContent.WriteString(" })\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.respondError(c, \"Invalid query parameters\", err, http.StatusBadRequest)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + // Add joins if relationships exist and fields include relationship columns + if len(data.Relationships) > 0 && hasRelationshipFields(defaultFields, data.Relationships) { + methodContent.WriteString(" // Add joins for relationships using the correct 
structure\n") + methodContent.WriteString(" dynamicQuery.Joins = []queryUtils.Join{\n") + + for _, rel := range data.Relationships { + // Check if any field from this relationship is included + if hasRelationshipField(defaultFields, rel) { + methodContent.WriteString(" {\n") + methodContent.WriteString(" Type: \"LEFT\",\n") + methodContent.WriteString(" Table: \"" + rel.Table + "\",\n") + methodContent.WriteString(" Alias: \"" + rel.Table + "\",\n") + methodContent.WriteString(" OnConditions: queryUtils.FilterGroup{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{\n") + methodContent.WriteString(" {Column: \"" + data.TableName + "." + rel.ForeignKey + "\", Operator: queryUtils.OpEqual, Value: \"" + rel.Table + "." + rel.LocalKey + "\"},\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + } + } + + methodContent.WriteString(" }\n\n") + } + + methodContent.WriteString(" // Add default filter to exclude deleted records\n") + statusColumn := findStatusColumn(data.TableSchema) + if statusColumn != "" { + methodContent.WriteString(" dynamicQuery.Filters = append([]queryUtils.FilterGroup{{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{{Column: \"" + statusColumn + "\", Operator: queryUtils.OpNotEqual, Value: \"deleted\"}},\n") + methodContent.WriteString(" LogicOp: \"AND\",\n") + methodContent.WriteString(" }}, dynamicQuery.Filters...)\n\n") + } + + methodContent.WriteString(" // Try to get from cache first\n") + methodContent.WriteString(" // Create cache key from query string\n") + methodContent.WriteString(" cacheKey := fmt.Sprintf(\"" + data.NameLower + ":dynamic:%s\", c.Request.URL.RawQuery)\n") + methodContent.WriteString(" if cachedData, found := h.cache.Get(cacheKey); found {\n") + methodContent.WriteString(" logger.Info(\"Cache hit for dynamic query\", map[string]interface{}{\"cache_key\": cacheKey})\n\n") + + methodContent.WriteString(" // Convert from 
interface{} to expected type\n") + methodContent.WriteString(" if " + data.NamePlural + ", ok := cachedData.([]" + data.Category + "Models." + data.Name + "); ok {\n") + methodContent.WriteString(" meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, len(" + data.NamePlural + "))\n") + methodContent.WriteString(" response := " + data.Category + "Models." + endpoint.ResponseModel + "{\n") + methodContent.WriteString(" Message: \"Data " + data.NameLower + " berhasil diambil (dari cache)\",\n") + methodContent.WriteString(" Data: " + data.NamePlural + ",\n") + methodContent.WriteString(" Meta: meta,\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" c.JSON(http.StatusOK, response)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Use GetSQLXDB to get database connection\n") + methodContent.WriteString(" dbConn, err := h.db.GetSQLXDB(\"postgres_satudata\")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Database connection failed\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)\n") + methodContent.WriteString(" defer cancel()\n\n") + + methodContent.WriteString(" " + data.NamePlural + ", total, err := h.fetch" + data.Name + "sDynamic(ctx, dbConn, dynamicQuery)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to fetch data\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Save to cache\n") + methodContent.WriteString(" h.cache.Set(cacheKey, " + data.NamePlural + ", 10*time.Minute) // Cache for 10 minutes\n\n") + + 
methodContent.WriteString(" meta := h.calculateMeta(dynamicQuery.Limit, dynamicQuery.Offset, total)\n") + methodContent.WriteString(" response := " + data.Category + "Models." + endpoint.ResponseModel + "{\n") + methodContent.WriteString(" Message: \"Data " + data.NameLower + " berhasil diambil\",\n") + methodContent.WriteString(" Data: " + data.NamePlural + ",\n") + methodContent.WriteString(" Meta: meta,\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" c.JSON(http.StatusOK, response)\n") + methodContent.WriteString("}\n\n") + + return methodContent.String() +} + +// generateSearchMethod - Template untuk search method dengan cache +func generateSearchMethod(data HandlerData, endpoint EndpointConfig) string { + var methodContent strings.Builder + + methodContent.WriteString("// Search" + data.Name + " godoc\n") + methodContent.WriteString("// @Summary " + endpoint.Summary + "\n") + methodContent.WriteString("// @Description " + endpoint.Description + "\n") + methodContent.WriteString("// @Tags " + strings.Join(endpoint.Tags, ", ") + "\n") + methodContent.WriteString("// @Accept json\n") + methodContent.WriteString("// @Produce json\n") + methodContent.WriteString("// @Param q query string false \"Search query\"\n") + methodContent.WriteString("// @Param limit query int false \"Limit\" default(20)\n") + methodContent.WriteString("// @Param offset query int false \"Offset\" default(0)\n") + methodContent.WriteString("// @Success 200 {object} " + data.Category + "Models." 
+ endpoint.ResponseModel + " \"Success response\"\n") + methodContent.WriteString("// @Failure 400 {object} models.ErrorResponse \"Bad request\"\n") + methodContent.WriteString("// @Failure 500 {object} models.ErrorResponse \"Internal server error\"\n") + methodContent.WriteString("// @Router /api/v1/" + strings.ToLower(data.Name) + endpoint.Path + " [get]\n") + + methodContent.WriteString("func (h *" + data.Name + "Handler) Search" + data.Name + "(c *gin.Context) {\n") + methodContent.WriteString(" // Parse complex search parameters\n") + methodContent.WriteString(" searchQuery := c.Query(\"q\")\n") + methodContent.WriteString(" if searchQuery == \"\" {\n") + methodContent.WriteString(" // If no search query provided, return all records with default sorting\n") + + // Get the fields for this endpoint from the configuration + fields := getFieldsForEndpoint(data, endpoint.Fields) + + methodContent.WriteString(" query := queryUtils.DynamicQuery{\n") + methodContent.WriteString(" Fields: []queryUtils.SelectField{\n") + + // Generate select fields based on the endpoint configuration + for _, field := range fields { + methodContent.WriteString(" {Expression: \"" + field + "\"},\n") + } + + methodContent.WriteString(" },\n") + methodContent.WriteString(" Filters: []queryUtils.FilterGroup{}, // Empty filters - fetch" + data.Name + "sDynamic will add default deleted filter\n") + methodContent.WriteString(" Sort: []queryUtils.SortField{{\n") + methodContent.WriteString(" Column: \"date_created\",\n") + methodContent.WriteString(" Order: \"DESC\",\n") + methodContent.WriteString(" }},\n") + methodContent.WriteString(" Limit: 20,\n") + methodContent.WriteString(" Offset: 0,\n") + methodContent.WriteString(" }\n\n") + + // Add joins if relationships exist and fields include relationship columns + if len(data.Relationships) > 0 && hasRelationshipFields(fields, data.Relationships) { + methodContent.WriteString(" // Add joins for relationships using the correct structure\n") + 
methodContent.WriteString(" query.Joins = []queryUtils.Join{\n") + + for _, rel := range data.Relationships { + // Check if any field from this relationship is included + if hasRelationshipField(fields, rel) { + methodContent.WriteString(" {\n") + methodContent.WriteString(" Type: \"LEFT\",\n") + methodContent.WriteString(" Table: \"" + rel.Table + "\",\n") + methodContent.WriteString(" Alias: \"" + rel.Table + "\",\n") + methodContent.WriteString(" OnConditions: queryUtils.FilterGroup{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{\n") + methodContent.WriteString(" {Column: \"" + data.TableName + "." + rel.ForeignKey + "\", Operator: queryUtils.OpEqual, Value: \"" + rel.Table + "." + rel.LocalKey + "\"},\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + } + } + + methodContent.WriteString(" }\n\n") + } + + methodContent.WriteString(" // Parse pagination if provided\n") + methodContent.WriteString(" if limit := c.Query(\"limit\"); limit != \"\" {\n") + methodContent.WriteString(" if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 {\n") + methodContent.WriteString(" query.Limit = l\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" if offset := c.Query(\"offset\"); offset != \"\" {\n") + methodContent.WriteString(" if o, err := strconv.Atoi(offset); err == nil && o >= 0 {\n") + methodContent.WriteString(" query.Offset = o\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Get database connection\n") + methodContent.WriteString(" dbConn, err := h.db.GetSQLXDB(\"postgres_satudata\")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Database connection failed\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + 
methodContent.WriteString(" ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)\n") + methodContent.WriteString(" defer cancel()\n\n") + + methodContent.WriteString(" // Execute query to get all records\n") + methodContent.WriteString(" " + data.NamePlural + ", total, err := h.fetch" + data.Name + "sDynamic(ctx, dbConn, query)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to fetch data\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Build response\n") + methodContent.WriteString(" meta := h.calculateMeta(query.Limit, query.Offset, total)\n") + methodContent.WriteString(" response := " + data.Category + "Models." + endpoint.ResponseModel + "{\n") + methodContent.WriteString(" Message: \"All records retrieved (no search query provided)\",\n") + methodContent.WriteString(" Data: " + data.NamePlural + ",\n") + methodContent.WriteString(" Meta: meta,\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" c.JSON(http.StatusOK, response)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Build dynamic query for search\n") + methodContent.WriteString(" query := queryUtils.DynamicQuery{\n") + methodContent.WriteString(" Fields: []queryUtils.SelectField{\n") + + // Generate select fields based on the endpoint configuration + for _, field := range fields { + methodContent.WriteString(" {Expression: \"" + field + "\"},\n") + } + + methodContent.WriteString(" },\n") + methodContent.WriteString(" Filters: []queryUtils.FilterGroup{{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{\n") + + // Add searchable columns based on table schema + searchableColumns := findSearchableColumns(data.TableSchema) + for _, col := range searchableColumns { + methodContent.WriteString(" {\n") + 
methodContent.WriteString(" Column: \"" + col + "\",\n") + methodContent.WriteString(" Operator: queryUtils.OpContains,\n") + methodContent.WriteString(" Value: searchQuery,\n") + methodContent.WriteString(" },\n") + } + + methodContent.WriteString(" },\n") + methodContent.WriteString(" LogicOp: \"OR\",\n") + methodContent.WriteString(" }},\n") + methodContent.WriteString(" Sort: []queryUtils.SortField{{\n") + methodContent.WriteString(" Column: \"date_created\",\n") + methodContent.WriteString(" Order: \"DESC\",\n") + methodContent.WriteString(" }},\n") + methodContent.WriteString(" Limit: 20,\n") + methodContent.WriteString(" Offset: 0,\n") + methodContent.WriteString(" }\n\n") + + // Add joins if relationships exist and fields include relationship columns + if len(data.Relationships) > 0 && hasRelationshipFields(fields, data.Relationships) { + methodContent.WriteString(" // Add joins for relationships using the correct structure\n") + methodContent.WriteString(" query.Joins = []queryUtils.Join{\n") + + for _, rel := range data.Relationships { + // Check if any field from this relationship is included + if hasRelationshipField(fields, rel) { + methodContent.WriteString(" {\n") + methodContent.WriteString(" Type: \"LEFT\",\n") + methodContent.WriteString(" Table: \"" + rel.Table + "\",\n") + methodContent.WriteString(" Alias: \"" + rel.Table + "\",\n") + methodContent.WriteString(" OnConditions: queryUtils.FilterGroup{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{\n") + methodContent.WriteString(" {Column: \"" + data.TableName + "." + rel.ForeignKey + "\", Operator: queryUtils.OpEqual, Value: \"" + rel.Table + "." 
+ rel.LocalKey + "\"},\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + } + } + + methodContent.WriteString(" }\n\n") + } + + methodContent.WriteString(" // Parse pagination if provided\n") + methodContent.WriteString(" if limit := c.Query(\"limit\"); limit != \"\" {\n") + methodContent.WriteString(" if l, err := strconv.Atoi(limit); err == nil && l > 0 && l <= 100 {\n") + methodContent.WriteString(" query.Limit = l\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" if offset := c.Query(\"offset\"); offset != \"\" {\n") + methodContent.WriteString(" if o, err := strconv.Atoi(offset); err == nil && o >= 0 {\n") + methodContent.WriteString(" query.Offset = o\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Try to get from cache first\n") + methodContent.WriteString(" cacheKey := fmt.Sprintf(\"" + data.NameLower + ":search:%s:%d:%d\", searchQuery, query.Limit, query.Offset)\n") + methodContent.WriteString(" if cachedData, found := h.cache.Get(cacheKey); found {\n") + methodContent.WriteString(" logger.Info(\"Cache hit for search\", map[string]interface{}{\"search\": searchQuery, \"cache_key\": cacheKey})\n\n") + + methodContent.WriteString(" // Convert from interface{} to expected type\n") + methodContent.WriteString(" if " + data.NamePlural + ", ok := cachedData.([]" + data.Category + "Models." + data.Name + "); ok {\n") + methodContent.WriteString(" meta := h.calculateMeta(query.Limit, query.Offset, len(" + data.NamePlural + "))\n") + methodContent.WriteString(" response := " + data.Category + "Models." 
+ endpoint.ResponseModel + "{\n") + methodContent.WriteString(" Message: fmt.Sprintf(\"Search results for '%s' (dari cache)\", searchQuery),\n") + methodContent.WriteString(" Data: " + data.NamePlural + ",\n") + methodContent.WriteString(" Meta: meta,\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" c.JSON(http.StatusOK, response)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Get database connection\n") + methodContent.WriteString(" dbConn, err := h.db.GetSQLXDB(\"postgres_satudata\")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Database connection failed\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)\n") + methodContent.WriteString(" defer cancel()\n\n") + + methodContent.WriteString(" // Execute search\n") + methodContent.WriteString(" " + data.NamePlural + ", total, err := h.fetch" + data.Name + "sDynamic(ctx, dbConn, query)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Search failed\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Save to cache\n") + methodContent.WriteString(" h.cache.Set(cacheKey, " + data.NamePlural + ", 15*time.Minute) // Cache for 15 minutes\n\n") + + methodContent.WriteString(" // Build response\n") + methodContent.WriteString(" meta := h.calculateMeta(query.Limit, query.Offset, total)\n") + methodContent.WriteString(" response := " + data.Category + "Models." 
+ endpoint.ResponseModel + "{\n") + methodContent.WriteString(" Message: fmt.Sprintf(\"Search results for '%s'\", searchQuery),\n") + methodContent.WriteString(" Data: " + data.NamePlural + ",\n") + methodContent.WriteString(" Meta: meta,\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" c.JSON(http.StatusOK, response)\n") + methodContent.WriteString("}\n\n") + + return methodContent.String() +} + +// generateStatsMethod - Template untuk stats method dengan cache +func generateStatsMethod(data HandlerData, endpoint EndpointConfig) string { + var methodContent strings.Builder + + methodContent.WriteString("// Get" + data.Name + "Stats godoc\n") + methodContent.WriteString("// @Summary " + endpoint.Summary + "\n") + methodContent.WriteString("// @Description " + endpoint.Description + "\n") + methodContent.WriteString("// @Tags " + strings.Join(endpoint.Tags, ", ") + "\n") + methodContent.WriteString("// @Accept json\n") + methodContent.WriteString("// @Produce json\n") + + statusColumn := findStatusColumn(data.TableSchema) + if statusColumn != "" { + methodContent.WriteString("// @Param " + statusColumn + " query string false \"Filter statistics by status\"\n") + } + + methodContent.WriteString("// @Success 200 {object} " + data.Category + "Models." 
+ endpoint.ResponseModel + " \"Statistics data\"\n") + methodContent.WriteString("// @Failure 500 {object} models.ErrorResponse \"Internal server error\"\n") + methodContent.WriteString("// @Router /api/v1/" + strings.ToLower(data.Name) + endpoint.Path + " [get]\n") + + methodContent.WriteString("func (h *" + data.Name + "Handler) Get" + data.Name + "Stats(c *gin.Context) {\n") + methodContent.WriteString(" // Try to get from cache first\n") + methodContent.WriteString(" cacheKey := fmt.Sprintf(\"" + data.NameLower + ":stats:%s\", c.Query(\"" + statusColumn + "\"))\n") + methodContent.WriteString(" if cachedData, found := h.cache.Get(cacheKey); found {\n") + methodContent.WriteString(" logger.Info(\"Cache hit for stats\", map[string]interface{}{\"cache_key\": cacheKey})\n\n") + + methodContent.WriteString(" // Convert from interface{} to expected type\n") + methodContent.WriteString(" if aggregateData, ok := cachedData.(*models.AggregateData); ok {\n") + methodContent.WriteString(" c.JSON(http.StatusOK, gin.H{\n") + methodContent.WriteString(" \"message\": \"Statistik " + data.NameLower + " berhasil diambil (dari cache)\",\n") + methodContent.WriteString(" \"data\": aggregateData,\n") + methodContent.WriteString(" })\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Use GetSQLXDB to get database connection\n") + methodContent.WriteString(" dbConn, err := h.db.GetSQLXDB(\"postgres_satudata\")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Database connection failed\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)\n") + methodContent.WriteString(" defer cancel()\n\n") + + methodContent.WriteString(" // Build filter groups\n") + 
methodContent.WriteString(" filterGroups := []queryUtils.FilterGroup{{\n") + if statusColumn != "" { + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{{Column: \"" + statusColumn + "\", Operator: queryUtils.OpNotEqual, Value: \"deleted\"}},\n") + } + methodContent.WriteString(" LogicOp: \"AND\",\n") + methodContent.WriteString(" }}\n\n") + + if statusColumn != "" { + methodContent.WriteString(" // Add status filter if provided\n") + methodContent.WriteString(" if " + statusColumn + " := c.Query(\"" + statusColumn + "\"); " + statusColumn + " != \"\" && models.IsValidStatus(" + statusColumn + ") {\n") + methodContent.WriteString(" filterGroups = append(filterGroups, queryUtils.FilterGroup{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{{Column: \"" + statusColumn + "\", Operator: queryUtils.OpEqual, Value: " + statusColumn + "}},\n") + methodContent.WriteString(" LogicOp: \"AND\",\n") + methodContent.WriteString(" })\n") + methodContent.WriteString(" }\n\n") + } + + methodContent.WriteString(" aggregateData, err := h.getAggregateData(ctx, dbConn, filterGroups)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to get statistics\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Save to cache\n") + methodContent.WriteString(" h.cache.Set(cacheKey, aggregateData, 5*time.Minute) // Cache stats for 5 minutes\n\n") + + methodContent.WriteString(" c.JSON(http.StatusOK, gin.H{\n") + methodContent.WriteString(" \"message\": \"Statistik " + data.NameLower + " berhasil diambil\",\n") + methodContent.WriteString(" \"data\": aggregateData,\n") + methodContent.WriteString(" })\n") + methodContent.WriteString("}\n\n") + + return methodContent.String() +} + +// generateByLocationMethod - Template untuk by location method +func generateByLocationMethod(data HandlerData, 
endpoint EndpointConfig) string { + var methodContent strings.Builder + + methodContent.WriteString("// Get" + data.Name + "ByLocation godoc\n") + methodContent.WriteString("// @Summary " + endpoint.Summary + "\n") + methodContent.WriteString("// @Description " + endpoint.Description + "\n") + methodContent.WriteString("// @Tags " + strings.Join(endpoint.Tags, ", ") + "\n") + methodContent.WriteString("// @Accept json\n") + methodContent.WriteString("// @Produce json\n") + + // Find location-related columns + locationColumns := findLocationColumns(data.TableSchema) + for _, col := range locationColumns { + methodContent.WriteString("// @Param " + col + " query int false \"Filter by " + col + " ID\"\n") + } + + methodContent.WriteString("// @Param limit query int false \"Limit (max 100)\" default(10)\n") + methodContent.WriteString("// @Param offset query int false \"Offset\" default(0)\n") + methodContent.WriteString("// @Success 200 {object} " + data.Category + "Models." + endpoint.ResponseModel + " \"Success response\"\n") + methodContent.WriteString("// @Failure 400 {object} models.ErrorResponse \"Bad request\"\n") + methodContent.WriteString("// @Failure 500 {object} models.ErrorResponse \"Internal server error\"\n") + methodContent.WriteString("// @Router /api/v1/" + strings.ToLower(data.Name) + endpoint.Path + " [get]\n") + + methodContent.WriteString("func (h *" + data.Name + "Handler) Get" + data.Name + "ByLocation(c *gin.Context) {\n") + methodContent.WriteString(" // Parse location filters\n") + methodContent.WriteString(" var filters []queryUtils.DynamicFilter\n\n") + + // Add location filters + for _, col := range locationColumns { + methodContent.WriteString(" if " + col + " := c.Query(\"" + col + "\"); " + col + " != \"\" {\n") + methodContent.WriteString(" if " + col + "ID, err := strconv.Atoi(" + col + "); err == nil {\n") + methodContent.WriteString(" filters = append(filters, queryUtils.DynamicFilter{Column: \"" + col + "\", Operator: 
queryUtils.OpEqual, Value: " + col + "ID})\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + } + + methodContent.WriteString(" // Parse pagination\n") + methodContent.WriteString(" limit, offset := 10, 0\n") + methodContent.WriteString(" if limitStr := c.Query(\"limit\"); limitStr != \"\" {\n") + methodContent.WriteString(" if l, err := strconv.Atoi(limitStr); err == nil && l > 0 && l <= 100 {\n") + methodContent.WriteString(" limit = l\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" if offsetStr := c.Query(\"offset\"); offsetStr != \"\" {\n") + methodContent.WriteString(" if o, err := strconv.Atoi(offsetStr); err == nil && o >= 0 {\n") + methodContent.WriteString(" offset = o\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Use GetSQLXDB to get database connection\n") + methodContent.WriteString(" dbConn, err := h.db.GetSQLXDB(\"postgres_satudata\")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Database connection failed\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)\n") + methodContent.WriteString(" defer cancel()\n\n") + + // Get the fields for this endpoint from the configuration + fields := getFieldsForEndpoint(data, endpoint.Fields) + + methodContent.WriteString(" // Build query\n") + methodContent.WriteString(" query := queryUtils.DynamicQuery{\n") + methodContent.WriteString(" From: \"" + data.TableName + "\",\n") + methodContent.WriteString(" Fields: []queryUtils.SelectField{\n") + + // Generate select fields based on the endpoint configuration + for _, field := range fields { + methodContent.WriteString(" {Expression: \"" + field + "\"},\n") + } + + 
methodContent.WriteString(" },\n") + methodContent.WriteString(" Sort: []queryUtils.SortField{{Column: \"date_created\", Order: \"DESC\"}},\n") + methodContent.WriteString(" Limit: limit,\n") + methodContent.WriteString(" Offset: offset,\n") + methodContent.WriteString(" }\n\n") + + // Add joins if relationships exist and fields include relationship columns + if len(data.Relationships) > 0 && hasRelationshipFields(fields, data.Relationships) { + methodContent.WriteString(" // Add joins for relationships using the correct structure\n") + methodContent.WriteString(" query.Joins = []queryUtils.Join{\n") + + for _, rel := range data.Relationships { + // Check if any field from this relationship is included + if hasRelationshipField(fields, rel) { + methodContent.WriteString(" {\n") + methodContent.WriteString(" Type: \"LEFT\",\n") + methodContent.WriteString(" Table: \"" + rel.Table + "\",\n") + methodContent.WriteString(" Alias: \"" + rel.Table + "\",\n") + methodContent.WriteString(" OnConditions: queryUtils.FilterGroup{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{\n") + methodContent.WriteString(" {Column: \"" + data.TableName + "." + rel.ForeignKey + "\", Operator: queryUtils.OpEqual, Value: \"" + rel.Table + "." 
+ rel.LocalKey + "\"},\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + } + } + + methodContent.WriteString(" }\n\n") + } + + methodContent.WriteString(" // Add filters if any\n") + methodContent.WriteString(" if len(filters) > 0 {\n") + methodContent.WriteString(" query.Filters = append(query.Filters, queryUtils.FilterGroup{\n") + methodContent.WriteString(" Filters: filters,\n") + methodContent.WriteString(" LogicOp: \"AND\",\n") + methodContent.WriteString(" })\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Execute query\n") + methodContent.WriteString(" " + data.NamePlural + ", total, err := h.fetch" + data.Name + "sDynamic(ctx, dbConn, query)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to fetch data\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Build response\n") + methodContent.WriteString(" meta := h.calculateMeta(limit, offset, total)\n") + methodContent.WriteString(" response := " + data.Category + "Models." 
+ endpoint.ResponseModel + "{\n") + methodContent.WriteString(" Message: \"Data " + data.NameLower + " by location retrieved successfully\",\n") + methodContent.WriteString(" Data: " + data.NamePlural + ",\n") + methodContent.WriteString(" Meta: meta,\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" c.JSON(http.StatusOK, response)\n") + methodContent.WriteString("}\n\n") + + return methodContent.String() +} + +// generateByAgeMethod - Template untuk by age method +func generateByAgeMethod(data HandlerData, endpoint EndpointConfig) string { + var methodContent strings.Builder + + methodContent.WriteString("// Get" + data.Name + "ByAge godoc\n") + methodContent.WriteString("// @Summary " + endpoint.Summary + "\n") + methodContent.WriteString("// @Description " + endpoint.Description + "\n") + methodContent.WriteString("// @Tags " + strings.Join(endpoint.Tags, ", ") + "\n") + methodContent.WriteString("// @Accept json\n") + methodContent.WriteString("// @Produce json\n") + methodContent.WriteString("// @Param age_group query string false \"Age group (child, teen, adult, senior)\"\n") + methodContent.WriteString("// @Success 200 {object} " + data.Category + "Models." 
+ endpoint.ResponseModel + " \"Statistics data\"\n") + methodContent.WriteString("// @Failure 400 {object} models.ErrorResponse \"Bad request\"\n") + methodContent.WriteString("// @Failure 500 {object} models.ErrorResponse \"Internal server error\"\n") + methodContent.WriteString("// @Router /api/v1/" + strings.ToLower(data.Name) + endpoint.Path + " [get]\n") + + methodContent.WriteString("func (h *" + data.Name + "Handler) Get" + data.Name + "ByAge(c *gin.Context) {\n") + methodContent.WriteString(" // Parse age group\n") + methodContent.WriteString(" ageGroup := c.Query(\"age_group\")\n") + methodContent.WriteString(" validAgeGroups := map[string]bool{\n") + methodContent.WriteString(" \"child\": true, // 0-12 years\n") + methodContent.WriteString(" \"teen\": true, // 13-17 years\n") + methodContent.WriteString(" \"adult\": true, // 18-59 years\n") + methodContent.WriteString(" \"senior\": true, // 60+ years\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" if ageGroup == \"\" || !validAgeGroups[ageGroup] {\n") + methodContent.WriteString(" h.respondError(c, \"Invalid age group\", fmt.Errorf(\"age group must be one of: child, teen, adult, senior\"), http.StatusBadRequest)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Use GetSQLXDB to get database connection\n") + methodContent.WriteString(" dbConn, err := h.db.GetSQLXDB(\"postgres_satudata\")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Database connection failed\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)\n") + methodContent.WriteString(" defer cancel()\n\n") + + // Find date of birth column + birthDateColumn := findBirthDateColumn(data.TableSchema) + + // Only declare these 
variables if we have a birth date column
+	if birthDateColumn != "" {
+		methodContent.WriteString("	// Calculate age range based on group\n")
+		methodContent.WriteString("	var minAge, maxAge int\n")
+		methodContent.WriteString("	now := time.Now()\n")
+		methodContent.WriteString("	switch ageGroup {\n")
+		methodContent.WriteString("	case \"child\":\n")
+		methodContent.WriteString("		maxAge = 12\n")
+		methodContent.WriteString("	case \"teen\":\n")
+		methodContent.WriteString("		minAge = 13\n")
+		methodContent.WriteString("		maxAge = 17\n")
+		methodContent.WriteString("	case \"adult\":\n")
+		methodContent.WriteString("		minAge = 18\n")
+		methodContent.WriteString("		maxAge = 59\n")
+		methodContent.WriteString("	case \"senior\":\n")
+		methodContent.WriteString("		minAge = 60\n")
+		methodContent.WriteString("	}\n\n")
+	}
+
+	methodContent.WriteString("	// Build query\n")
+	methodContent.WriteString("	query := queryUtils.DynamicQuery{\n")
+	methodContent.WriteString("		From: \"" + data.TableName + "\",\n")
+	methodContent.WriteString("		Fields: []queryUtils.SelectField{\n")
+	methodContent.WriteString("			{Expression: \"COUNT(*)\", Alias: \"count\"},\n")
+	methodContent.WriteString("		},\n")
+	methodContent.WriteString("		Filters: []queryUtils.FilterGroup{{\n")
+
+	statusColumn := findStatusColumn(data.TableSchema)
+	if statusColumn != "" {
+		methodContent.WriteString("			Filters: []queryUtils.DynamicFilter{\n")
+		methodContent.WriteString("				{Column: \"" + statusColumn + "\", Operator: queryUtils.OpNotEqual, Value: \"deleted\"},\n")
+		methodContent.WriteString("			},\n")
+	}
+
+	methodContent.WriteString("			LogicOp: \"AND\",\n")
+	methodContent.WriteString("		}},\n")
+	methodContent.WriteString("	}\n\n")
+
+	if birthDateColumn != "" {
+		methodContent.WriteString("	// Add age filter if applicable\n")
+		methodContent.WriteString("	if maxAge > 0 {\n")
+		methodContent.WriteString("		minBirthDate := now.AddDate(-maxAge-1, 0, 0)\n")
+		methodContent.WriteString("		query.Filters[0].Filters = append(query.Filters[0].Filters, \n")
+		methodContent.WriteString("			queryUtils.DynamicFilter{Column: \"" + birthDateColumn + "\", Operator: queryUtils.OpGreaterThanEqual, Value: minBirthDate})\n")
+		methodContent.WriteString("	}\n\n")
+
+		methodContent.WriteString("	if minAge > 0 {\n")
+		methodContent.WriteString("		maxBirthDate := now.AddDate(-minAge, 0, 0)\n")
+		methodContent.WriteString("		query.Filters[0].Filters = append(query.Filters[0].Filters, \n")
+		methodContent.WriteString("			queryUtils.DynamicFilter{Column: \"" + birthDateColumn + "\", Operator: queryUtils.OpLessThan, Value: maxBirthDate})\n")
+		methodContent.WriteString("	}\n\n")
+	}
+
+	methodContent.WriteString("	// Execute query\n")
+	methodContent.WriteString("	var result struct {\n")
+	methodContent.WriteString("		Count int `db:\"count\"`\n")
+	methodContent.WriteString("	}\n")
+	methodContent.WriteString("	err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, query, &result)\n")
+	methodContent.WriteString("	if err != nil {\n")
+	methodContent.WriteString("		h.logAndRespondError(c, \"Failed to get age statistics\", err, http.StatusInternalServerError)\n")
+	methodContent.WriteString("		return\n")
+	methodContent.WriteString("	}\n\n")
+
+	methodContent.WriteString("	// Build response\n")
+	methodContent.WriteString("	response := " + data.Category + "Models." 
+ endpoint.ResponseModel + "{\n") + methodContent.WriteString(" Message: fmt.Sprintf(\"Age group '%s' statistics retrieved successfully\", ageGroup),\n") + methodContent.WriteString(" Data: map[string]interface{}{\n") + methodContent.WriteString(" \"age_group\": ageGroup,\n") + methodContent.WriteString(" \"count\": result.Count,\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" c.JSON(http.StatusOK, response)\n") + methodContent.WriteString("}\n\n") + + return methodContent.String() +} + +// generateCreateMethod - Template untuk create method dengan validation +func generateCreateMethod(data HandlerData, endpoint EndpointConfig) string { + var methodContent strings.Builder + + methodContent.WriteString("// Create" + data.Name + " godoc\n") + methodContent.WriteString("// @Summary " + endpoint.Summary + "\n") + methodContent.WriteString("// @Description " + endpoint.Description + "\n") + methodContent.WriteString("// @Tags " + strings.Join(endpoint.Tags, ", ") + "\n") + methodContent.WriteString("// @Accept json\n") + methodContent.WriteString("// @Produce json\n") + methodContent.WriteString("// @Param request body " + data.Category + "Models." + endpoint.RequestModel + " true \"" + data.Name + " creation request\"\n") + methodContent.WriteString("// @Success 201 {object} " + data.Category + "Models." + endpoint.ResponseModel + " \"" + data.Name + " created successfully\"\n") + methodContent.WriteString("// @Failure 400 {object} models.ErrorResponse \"Bad request or validation error\"\n") + methodContent.WriteString("// @Failure 500 {object} models.ErrorResponse \"Internal server error\"\n") + methodContent.WriteString("// @Router /api/v1/" + strings.ToLower(data.Name) + endpoint.Path + " [post]\n") + + methodContent.WriteString("func (h *" + data.Name + "Handler) Create" + data.Name + "(c *gin.Context) {\n") + methodContent.WriteString(" var req " + data.Category + "Models." 
+ endpoint.RequestModel + "\n") + methodContent.WriteString(" if err := c.ShouldBindJSON(&req); err != nil {\n") + methodContent.WriteString(" h.respondError(c, \"Invalid request body\", err, http.StatusBadRequest)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" if err := validate.Struct(&req); err != nil {\n") + methodContent.WriteString(" h.respondError(c, \"Validation failed\", err, http.StatusBadRequest)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Use GetSQLXDB to get database connection\n") + methodContent.WriteString(" dbConn, err := h.db.GetSQLXDB(\"postgres_satudata\")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Database connection failed\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)\n") + methodContent.WriteString(" defer cancel()\n\n") + + // Add validation for unique fields if they exist + uniqueColumns := findUniqueColumns(data.TableSchema) + for _, col := range uniqueColumns { + methodContent.WriteString(" // Validate " + col + " must be unique\n") + methodContent.WriteString(" if req." 
+ snakeToPascal(col) + " != nil {\n")
+		methodContent.WriteString("	rule := validation.NewUniqueFieldRule(\n")
+		methodContent.WriteString("		\"" + data.TableName + "\", // Table name\n")
+		methodContent.WriteString("		\"" + col + "\", // Column that must be unique\n")
+		methodContent.WriteString("		queryUtils.DynamicFilter{ // Additional condition\n")
+		statusColumn := findStatusColumn(data.TableSchema)
+		if statusColumn != "" {
+			methodContent.WriteString("			Column:   \"" + statusColumn + "\",\n")
+			methodContent.WriteString("			Operator: queryUtils.OpNotEqual,\n")
+			methodContent.WriteString("			Value:    \"deleted\",\n")
+		} else {
+			methodContent.WriteString("			Column:   \"id\",\n")
+			methodContent.WriteString("			Operator: queryUtils.OpNotEqual,\n")
+			methodContent.WriteString("			Value:    \"0\",\n")
+		}
+		methodContent.WriteString("		},\n")
+		methodContent.WriteString("	)\n\n")
+
+		methodContent.WriteString("	// Prepare data from request for validation\n")
+		methodContent.WriteString("	dataToValidate := map[string]interface{}{\n")
+		methodContent.WriteString("		\"" + col + "\": *req." + snakeToPascal(col) + ",\n")
+		methodContent.WriteString("	}\n\n")
+
+		methodContent.WriteString("	// Execute validation\n")
+		methodContent.WriteString("	isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate)\n")
+		methodContent.WriteString("	if err != nil {\n")
+		methodContent.WriteString("		h.logAndRespondError(c, \"Failed to validate " + col + "\", err, http.StatusInternalServerError)\n")
+		methodContent.WriteString("		return\n")
+		methodContent.WriteString("	}\n\n")
+
+		methodContent.WriteString("	if isDuplicate {\n")
+		methodContent.WriteString("		h.respondError(c, \"" + col + " already exists\", fmt.Errorf(\"duplicate " + col + ": %v\", *req." 
+ snakeToPascal(col) + "), http.StatusConflict)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + } + + methodContent.WriteString(" data := queryUtils.InsertData{\n") + methodContent.WriteString(" Columns: []string{\n") + + // Add primary key if it's not auto-generated + primaryKey := findPrimaryKey(data.TableSchema) + if !isAutoGeneratedPrimaryKey(data.TableSchema) { + methodContent.WriteString(" \"" + primaryKey + "\",\n") + } + + // Add status column if it exists + statusColumn := findStatusColumn(data.TableSchema) + if statusColumn != "" { + methodContent.WriteString(" \"" + statusColumn + "\",\n") + } + + methodContent.WriteString(" \"date_created\", \"date_updated\",\n") + + // Add all non-system columns + systemFields := findSystemFields(data.TableSchema) + for _, col := range data.TableSchema { + if isSystemField(col.Name, systemFields) { + continue + } + methodContent.WriteString(" \"" + col.Name + "\",\n") + } + + methodContent.WriteString(" },\n") + methodContent.WriteString(" Values: []interface{}{\n") + + // Add primary key value if it's not auto-generated + if !isAutoGeneratedPrimaryKey(data.TableSchema) { + methodContent.WriteString(" req." + snakeToPascal(primaryKey) + ",\n") + } + + // Add status value if it exists + if statusColumn != "" { + methodContent.WriteString(" req." + snakeToPascal(statusColumn) + ",\n") + } + + methodContent.WriteString(" time.Now(), time.Now(),\n") + + // Add all non-system column values + for _, col := range data.TableSchema { + if isSystemField(col.Name, systemFields) { + continue + } + methodContent.WriteString(" req." 
+ snakeToPascal(col.Name) + ",\n") + } + + methodContent.WriteString(" },\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" returningCols := []string{\n") + + // Add primary key if it's not auto-generated + if !isAutoGeneratedPrimaryKey(data.TableSchema) { + methodContent.WriteString(" \"" + primaryKey + "\",\n") + } + + // Add status column if it exists + if statusColumn != "" { + methodContent.WriteString(" \"" + statusColumn + "\",\n") + } + + methodContent.WriteString(" \"sort\", \"user_created\", \"date_created\", \"user_updated\", \"date_updated\",\n") + + // Add all non-system columns for returning + for _, col := range data.TableSchema { + if isSystemField(col.Name, systemFields) { + continue + } + methodContent.WriteString(" \"" + col.Name + "\",\n") + } + + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" sql, args, err := h.queryBuilder.BuildInsertQuery(\"" + data.TableName + "\", data, returningCols...)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to build insert query\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" var data" + data.Name + " " + data.Category + "Models." + data.Name + "\n") + methodContent.WriteString(" err = dbConn.GetContext(ctx, &data" + data.Name + ", sql, args...)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to create " + data.NameLower + "\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Invalidate cache that might be affected\n") + methodContent.WriteString(" h.invalidateRelatedCache()\n\n") + + methodContent.WriteString(" response := " + data.Category + "Models." 
+ endpoint.ResponseModel + "{Message: \"" + data.Name + " berhasil dibuat\", Data: &data" + data.Name + "}\n") + methodContent.WriteString(" c.JSON(http.StatusCreated, response)\n") + methodContent.WriteString("}\n\n") + + return methodContent.String() +} + +// generateUpdateMethod - Template untuk update method dengan cache yang diperbaiki +func generateUpdateMethod(data HandlerData, endpoint EndpointConfig) string { + var methodContent strings.Builder + primaryKey := findPrimaryKey(data.TableSchema) + if primaryKey == "" { + primaryKey = "id" // Default fallback + } + + // Define statusColumn di awal fungsi + statusColumn := findStatusColumn(data.TableSchema) + + methodContent.WriteString("// Update" + data.Name + " godoc\n") + methodContent.WriteString("// @Summary " + endpoint.Summary + "\n") + methodContent.WriteString("// @Description " + endpoint.Description + "\n") + methodContent.WriteString("// @Tags " + strings.Join(endpoint.Tags, ", ") + "\n") + methodContent.WriteString("// @Accept json\n") + methodContent.WriteString("// @Produce json\n") + methodContent.WriteString("// @Param " + primaryKey + " path string true \"" + data.Name + " " + strings.ToUpper(primaryKey) + "\"\n") + methodContent.WriteString("// @Param request body " + data.Category + "Models." + endpoint.RequestModel + " true \"" + data.Name + " update request\"\n") + methodContent.WriteString("// @Success 200 {object} " + data.Category + "Models." 
+ endpoint.ResponseModel + " \"" + data.Name + " updated successfully\"\n") + methodContent.WriteString("// @Failure 400 {object} models.ErrorResponse \"Bad request or validation error\"\n") + methodContent.WriteString("// @Failure 404 {object} models.ErrorResponse \"" + data.Name + " not found\"\n") + methodContent.WriteString("// @Failure 500 {object} models.ErrorResponse \"Internal server error\"\n") + methodContent.WriteString("// @Router /api/v1/" + strings.ToLower(data.Name) + endpoint.Path + " [put]\n") + + methodContent.WriteString("func (h *" + data.Name + "Handler) Update" + data.Name + "(c *gin.Context) {\n") + methodContent.WriteString(" " + primaryKey + " := c.Param(\"" + primaryKey + "\")\n") + methodContent.WriteString(" if " + primaryKey + " == \"\" {\n") + methodContent.WriteString(" h.respondError(c, \"Invalid " + strings.ToUpper(primaryKey) + " format\", fmt.Errorf(\"" + primaryKey + " cannot be empty\"), http.StatusBadRequest)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" var req " + data.Category + "Models." 
+ endpoint.RequestModel + "\n") + methodContent.WriteString(" if err := c.ShouldBindJSON(&req); err != nil {\n") + methodContent.WriteString(" h.respondError(c, \"Invalid request body\", err, http.StatusBadRequest)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + + // Get the type of the primary key + _, pkBaseType, _ := mapSQLTypeToGo(getColumnType(data.TableSchema, primaryKey), isColumnNullable(data.TableSchema, primaryKey), "") + + // Generate ID conversion based on primary key type + if pkBaseType == "int32" { + methodContent.WriteString(" idInt, err := strconv.Atoi(" + primaryKey + ")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.respondError(c, \"Invalid ID format\", err, http.StatusBadRequest)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" idInt32 := int32(idInt)\n") + methodContent.WriteString(" req." + snakeToPascal(primaryKey) + " = &idInt32\n") + } else if pkBaseType == "int64" { + methodContent.WriteString(" idInt, err := strconv.ParseInt(" + primaryKey + ", 10, 64)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.respondError(c, \"Invalid ID format\", err, http.StatusBadRequest)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" req." + snakeToPascal(primaryKey) + " = &idInt\n") + } else if pkBaseType == "string" { + methodContent.WriteString(" req." + snakeToPascal(primaryKey) + " = &" + primaryKey + "\n") + } + + methodContent.WriteString(" if err := validate.Struct(&req); err != nil {\n") + methodContent.WriteString(" h.respondError(c, \"Validation failed\", err, http.StatusBadRequest)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Try to get old data for cache invalidation\n") + methodContent.WriteString(" var oldData " + data.Category + "Models." 
+ data.Name + "\n") + methodContent.WriteString(" dbConn, err := h.db.GetSQLXDB(\"postgres_satudata\")\n") + methodContent.WriteString(" if err == nil {\n") + methodContent.WriteString(" ctx, cancel := context.WithTimeout(c.Request.Context(), 5*time.Second)\n") + methodContent.WriteString(" defer cancel()\n\n") + + methodContent.WriteString(" dynamicQuery := queryUtils.DynamicQuery{\n") + methodContent.WriteString(" From: \"" + data.TableName + "\",\n") + methodContent.WriteString(" Fields: []queryUtils.SelectField{{Expression: \"*\"}},\n") + methodContent.WriteString(" Filters: []queryUtils.FilterGroup{{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{\n") + methodContent.WriteString(" {Column: \"" + primaryKey + "\", Operator: queryUtils.OpEqual, Value: " + primaryKey + "},\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" LogicOp: \"AND\",\n") + methodContent.WriteString(" }},\n") + methodContent.WriteString(" Limit: 1,\n") + methodContent.WriteString(" }\n\n") + + // Add joins if relationships exist + if len(data.Relationships) > 0 { + methodContent.WriteString(" // Add joins for relationships using the correct structure\n") + methodContent.WriteString(" dynamicQuery.Joins = []queryUtils.Join{\n") + + for _, rel := range data.Relationships { + methodContent.WriteString(" {\n") + methodContent.WriteString(" Type: \"LEFT\",\n") + methodContent.WriteString(" Table: \"" + rel.Table + "\",\n") + methodContent.WriteString(" Alias: \"" + rel.Table + "\",\n") + methodContent.WriteString(" OnConditions: queryUtils.FilterGroup{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{\n") + methodContent.WriteString(" {Column: \"" + data.TableName + "." + rel.ForeignKey + "\", Operator: queryUtils.OpEqual, Value: \"" + rel.Table + "." 
+ rel.LocalKey + "\"},\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + methodContent.WriteString(" },\n") + } + + methodContent.WriteString(" }\n\n") + } + + methodContent.WriteString(" err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, dynamicQuery, &oldData)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" logger.Error(\"Failed to fetch old data for cache invalidation\", map[string]interface{}{\"error\": err.Error(), \"" + primaryKey + "\": " + primaryKey + "})\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Use GetSQLXDB to get database connection\n") + methodContent.WriteString(" dbConn, err = h.db.GetSQLXDB(\"postgres_satudata\")\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Database connection failed\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)\n") + methodContent.WriteString(" defer cancel()\n\n") + + // Add validation for unique fields if they exist + uniqueColumns := findUniqueColumns(data.TableSchema) + for _, col := range uniqueColumns { + methodContent.WriteString(" // Validate " + col + " must be unique, except for record with this " + primaryKey + "\n") + methodContent.WriteString(" if req." 
+ snakeToPascal(col) + " != nil {\n")
+		methodContent.WriteString("	rule := validation.ValidationRule{\n")
+		methodContent.WriteString("		TableName:     \"" + data.TableName + "\",\n")
+		methodContent.WriteString("		UniqueColumns: []string{\"" + col + "\"},\n")
+		methodContent.WriteString("		Conditions: []queryUtils.DynamicFilter{\n")
+		if statusColumn != "" {
+			methodContent.WriteString("			{Column: \"" + statusColumn + "\", Operator: queryUtils.OpNotEqual, Value: \"deleted\"},\n")
+		}
+		methodContent.WriteString("		},\n")
+		methodContent.WriteString("		ExcludeIDColumn: \"" + primaryKey + "\", // Exclude based on '" + primaryKey + "' column\n")
+		methodContent.WriteString("		ExcludeIDValue:  " + primaryKey + ", // ...with " + primaryKey + " value from parameter\n")
+		methodContent.WriteString("	}\n\n")
+
+		methodContent.WriteString("	dataToValidate := map[string]interface{}{\n")
+		methodContent.WriteString("		\"" + col + "\": *req." + snakeToPascal(col) + ",\n")
+		methodContent.WriteString("	}\n\n")
+
+		methodContent.WriteString("	isDuplicate, err := h.validator.Validate(ctx, dbConn, rule, dataToValidate)\n")
+		methodContent.WriteString("	if err != nil {\n")
+		methodContent.WriteString("		h.logAndRespondError(c, \"Failed to validate " + col + "\", err, http.StatusInternalServerError)\n")
+		methodContent.WriteString("		return\n")
+		methodContent.WriteString("	}\n\n")
+
+		methodContent.WriteString("	if isDuplicate {\n")
+		methodContent.WriteString("		h.respondError(c, \"" + col + " already exists\", fmt.Errorf(\"duplicate " + col + ": %v\", *req." 
+ snakeToPascal(col) + "), http.StatusConflict)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" }\n\n") + } + + methodContent.WriteString(" updateData := queryUtils.UpdateData{\n") + methodContent.WriteString(" Columns: []string{\n") + + // Add status column if it exists + if statusColumn != "" { + methodContent.WriteString(" \"" + statusColumn + "\",\n") + } + + methodContent.WriteString(" \"date_updated\",\n") + + // Add all non-system columns + systemFields := findSystemFields(data.TableSchema) + for _, col := range data.TableSchema { + if isSystemField(col.Name, systemFields) { + continue + } + methodContent.WriteString(" \"" + col.Name + "\",\n") + } + + methodContent.WriteString(" },\n") + methodContent.WriteString(" Values: []interface{}{\n") + + // Add status value if it exists + if statusColumn != "" { + methodContent.WriteString(" req." + snakeToPascal(statusColumn) + ",\n") + } + + methodContent.WriteString(" time.Now(),\n") + + // Add all non-system column values + for _, col := range data.TableSchema { + if isSystemField(col.Name, systemFields) { + continue + } + methodContent.WriteString(" req." + snakeToPascal(col.Name) + ",\n") + } + + methodContent.WriteString(" },\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" filters := []queryUtils.FilterGroup{{\n") + methodContent.WriteString(" Filters: []queryUtils.DynamicFilter{\n") + methodContent.WriteString(" {Column: \"" + primaryKey + "\", Operator: queryUtils.OpEqual, Value: req." 
+ snakeToPascal(primaryKey) + "},\n") + if statusColumn != "" { + methodContent.WriteString(" {Column: \"" + statusColumn + "\", Operator: queryUtils.OpNotEqual, Value: \"deleted\"},\n") + } + methodContent.WriteString(" },\n") + methodContent.WriteString(" LogicOp: \"AND\",\n") + methodContent.WriteString(" }}\n") + methodContent.WriteString(" returningCols := []string{\n") + + // Add primary key if it's not auto-generated + if !isAutoGeneratedPrimaryKey(data.TableSchema) { + methodContent.WriteString(" \"" + primaryKey + "\",\n") + } + + // Add status column if it exists + if statusColumn != "" { + methodContent.WriteString(" \"" + statusColumn + "\",\n") + } + + methodContent.WriteString(" \"sort\", \"user_created\", \"date_created\", \"user_updated\", \"date_updated\",\n") + + // Add all non-system columns for returning + for _, col := range data.TableSchema { + if isSystemField(col.Name, systemFields) { + continue + } + methodContent.WriteString(" \"" + col.Name + "\",\n") + } + + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" sql, args, err := h.queryBuilder.BuildUpdateQuery(\"" + data.TableName + "\", updateData, filters, returningCols...)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to build update query\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" var data" + data.Name + " " + data.Category + "Models." 
+ data.Name + "\n") + methodContent.WriteString(" err = dbConn.GetContext(ctx, &data" + data.Name + ", sql, args...)\n") + methodContent.WriteString(" if err != nil {\n") + methodContent.WriteString(" if err.Error() == \"sql: no rows in result set\" {\n") + methodContent.WriteString(" h.respondError(c, \"" + data.Name + " not found\", err, http.StatusNotFound)\n") + methodContent.WriteString(" } else {\n") + methodContent.WriteString(" h.logAndRespondError(c, \"Failed to update " + data.NameLower + "\", err, http.StatusInternalServerError)\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" return\n") + methodContent.WriteString(" }\n\n") + + methodContent.WriteString(" // Invalidate cache that might be affected\n") + methodContent.WriteString(" // Invalidate cache for " + primaryKey + " that was updated\n") + methodContent.WriteString(" cacheKey := fmt.Sprintf(\"" + data.NameLower + ":" + primaryKey + ":%s\", " + primaryKey + ")\n") + methodContent.WriteString(" h.cache.Delete(cacheKey)\n\n") + + methodContent.WriteString(" // Invalidate cache for old and new data\n") + methodContent.WriteString(" if oldData." + snakeToPascal(primaryKey) + " != 0 {\n") + methodContent.WriteString(" h.invalidateRelatedCache()\n") + methodContent.WriteString(" }\n") + methodContent.WriteString(" h.invalidateRelatedCache()\n\n") + + methodContent.WriteString(" response := " + data.Category + "Models." 
+ endpoint.ResponseModel + "{Message: \"" + data.Name + " berhasil diperbarui\", Data: &data" + data.Name + "}\n")
	methodContent.WriteString("	c.JSON(http.StatusOK, response)\n")
	methodContent.WriteString("}\n\n")

	return methodContent.String()
}

// generateDeleteMethod renders the Delete<Name> Gin handler for a service,
// including swagger annotations, a pre-delete fetch for cache invalidation,
// and either a soft delete (status column update) or a hard delete.
//
// Fix: when endpoint.SoftDelete is requested but the table has no status
// column, fall back to a hard delete instead of emitting invalid SQL with an
// empty column name (consistent with the `statusColumn != ""` guards used by
// the other method generators in this file).
func generateDeleteMethod(data HandlerData, endpoint EndpointConfig) string {
	var methodContent strings.Builder
	primaryKey := findPrimaryKey(data.TableSchema)
	if primaryKey == "" {
		primaryKey = "id" // Default fallback
	}

	// Swagger doc block for the generated handler.
	methodContent.WriteString("// Delete" + data.Name + " godoc\n")
	methodContent.WriteString("// @Summary " + endpoint.Summary + "\n")
	methodContent.WriteString("// @Description " + endpoint.Description + "\n")
	methodContent.WriteString("// @Tags " + strings.Join(endpoint.Tags, ", ") + "\n")
	methodContent.WriteString("// @Accept json\n")
	methodContent.WriteString("// @Produce json\n")
	methodContent.WriteString("// @Param " + primaryKey + " path string true \"" + data.Name + " " + strings.ToUpper(primaryKey) + "\"\n")
	methodContent.WriteString("// @Success 200 {object} " + data.Category + "Models." + endpoint.ResponseModel + " \"" + data.Name + " deleted successfully\"\n")
	methodContent.WriteString("// @Failure 400 {object} models.ErrorResponse \"Invalid ID format\"\n")
	methodContent.WriteString("// @Failure 404 {object} models.ErrorResponse \"" + data.Name + " not found\"\n")
	methodContent.WriteString("// @Failure 500 {object} models.ErrorResponse \"Internal server error\"\n")
	methodContent.WriteString("// @Router /api/v1/" + strings.ToLower(data.Name) + endpoint.Path + " [delete]\n")

	methodContent.WriteString("func (h *" + data.Name + "Handler) Delete" + data.Name + "(c *gin.Context) {\n")
	methodContent.WriteString("	" + primaryKey + " := c.Param(\"" + primaryKey + "\")\n")
	methodContent.WriteString("	if " + primaryKey + " == \"\" {\n")
	methodContent.WriteString("		h.respondError(c, \"Invalid " + strings.ToUpper(primaryKey) + " format\", fmt.Errorf(\"" + primaryKey + " cannot be empty\"), http.StatusBadRequest)\n")
	methodContent.WriteString("		return\n")
	methodContent.WriteString("	}\n\n")

	methodContent.WriteString("	// Try to get data for cache invalidation\n")
	methodContent.WriteString("	var dataToDelete " + data.Category + "Models." + data.Name + "\n")
	methodContent.WriteString("	dbConn, err := h.db.GetSQLXDB(\"postgres_satudata\")\n")
	methodContent.WriteString("	if err == nil {\n")
	methodContent.WriteString("		ctx, cancel := context.WithTimeout(c.Request.Context(), 5*time.Second)\n")
	methodContent.WriteString("		defer cancel()\n\n")

	methodContent.WriteString("		dynamicQuery := queryUtils.DynamicQuery{\n")
	methodContent.WriteString("			From: \"" + data.TableName + "\",\n")
	methodContent.WriteString("			Fields: []queryUtils.SelectField{{Expression: \"*\"}},\n")
	methodContent.WriteString("			Filters: []queryUtils.FilterGroup{{\n")
	methodContent.WriteString("				Filters: []queryUtils.DynamicFilter{\n")
	methodContent.WriteString("					{Column: \"" + primaryKey + "\", Operator: queryUtils.OpEqual, Value: " + primaryKey + "},\n")
	methodContent.WriteString("				},\n")
	methodContent.WriteString("				LogicOp: \"AND\",\n")
	methodContent.WriteString("			}},\n")
	methodContent.WriteString("			Limit: 1,\n")
	methodContent.WriteString("		}\n\n")

	// Add joins if relationships exist
	if len(data.Relationships) > 0 {
		methodContent.WriteString("		// Add joins for relationships using the correct structure\n")
		methodContent.WriteString("		dynamicQuery.Joins = []queryUtils.Join{\n")

		for _, rel := range data.Relationships {
			methodContent.WriteString("			{\n")
			methodContent.WriteString("				Type: \"LEFT\",\n")
			methodContent.WriteString("				Table: \"" + rel.Table + "\",\n")
			methodContent.WriteString("				Alias: \"" + rel.Table + "\",\n")
			methodContent.WriteString("				OnConditions: queryUtils.FilterGroup{\n")
			methodContent.WriteString("					Filters: []queryUtils.DynamicFilter{\n")
			methodContent.WriteString("						{Column: \"" + data.TableName + "." + rel.ForeignKey + "\", Operator: queryUtils.OpEqual, Value: \"" + rel.Table + "." + rel.LocalKey + "\"},\n")
			methodContent.WriteString("					},\n")
			methodContent.WriteString("				},\n")
			methodContent.WriteString("			},\n")
		}

		methodContent.WriteString("		}\n\n")
	}

	methodContent.WriteString("		err = h.queryBuilder.ExecuteQueryRow(ctx, dbConn, dynamicQuery, &dataToDelete)\n")
	methodContent.WriteString("		if err != nil {\n")
	methodContent.WriteString("			logger.Error(\"Failed to fetch data for cache invalidation\", map[string]interface{}{\"error\": err.Error(), \"" + primaryKey + "\": " + primaryKey + "})\n")
	methodContent.WriteString("		}\n")
	methodContent.WriteString("	}\n\n")

	methodContent.WriteString("	// Use GetSQLXDB to get database connection\n")
	methodContent.WriteString("	dbConn, err = h.db.GetSQLXDB(\"postgres_satudata\")\n")
	methodContent.WriteString("	if err != nil {\n")
	methodContent.WriteString("		h.logAndRespondError(c, \"Database connection failed\", err, http.StatusInternalServerError)\n")
	methodContent.WriteString("		return\n")
	methodContent.WriteString("	}\n")
	methodContent.WriteString("	ctx, cancel := context.WithTimeout(c.Request.Context(), 15*time.Second)\n")
	methodContent.WriteString("	defer cancel()\n\n")

	// Soft delete is only possible when the table actually has a status
	// column; otherwise fall back to a hard delete rather than generating a
	// query with an empty column name.
	statusColumn := findStatusColumn(data.TableSchema)
	if endpoint.SoftDelete && statusColumn != "" {
		methodContent.WriteString("	// Use ExecuteUpdate for soft delete by changing status\n")
		methodContent.WriteString("	updateData := queryUtils.UpdateData{\n")
		methodContent.WriteString("		Columns: []string{\"" + statusColumn + "\", \"date_updated\"},\n")
		methodContent.WriteString("		Values: []interface{}{\"deleted\", time.Now()},\n")
		methodContent.WriteString("	}\n")
		methodContent.WriteString("	filters := []queryUtils.FilterGroup{{\n")
		methodContent.WriteString("		Filters: []queryUtils.DynamicFilter{\n")
		methodContent.WriteString("			{Column: \"" + primaryKey + "\", Operator: queryUtils.OpEqual, Value: " + primaryKey + "},\n")
		methodContent.WriteString("			{Column: \"" + statusColumn + "\", Operator: queryUtils.OpNotEqual, Value: \"deleted\"},\n")
		methodContent.WriteString("		},\n")
		methodContent.WriteString("		LogicOp: \"AND\",\n")
		methodContent.WriteString("	}}\n\n")

		methodContent.WriteString("	// Use ExecuteUpdate instead of ExecuteDelete\n")
		methodContent.WriteString("	result, err := h.queryBuilder.ExecuteUpdate(ctx, dbConn, \"" + data.TableName + "\", updateData, filters)\n")
	} else {
		methodContent.WriteString("	// Use ExecuteDelete for hard delete\n")
		methodContent.WriteString("	filters := []queryUtils.FilterGroup{{\n")
		methodContent.WriteString("		Filters: []queryUtils.DynamicFilter{\n")
		methodContent.WriteString("			{Column: \"" + primaryKey + "\", Operator: queryUtils.OpEqual, Value: " + primaryKey + "},\n")
		methodContent.WriteString("		},\n")
		methodContent.WriteString("		LogicOp: \"AND\",\n")
		methodContent.WriteString("	}}\n\n")

		methodContent.WriteString("	result, err := h.queryBuilder.ExecuteDelete(ctx, dbConn, \"" + data.TableName + "\", filters)\n")
	}

	methodContent.WriteString("	if err != nil {\n")
	methodContent.WriteString("		h.logAndRespondError(c, \"Failed to delete " + data.NameLower + "\", err, http.StatusInternalServerError)\n")
	methodContent.WriteString("		return\n")
	methodContent.WriteString("	}\n\n")

	methodContent.WriteString("	rowsAffected, err := result.RowsAffected()\n")
	methodContent.WriteString("	if err != nil {\n")
	methodContent.WriteString("		h.logAndRespondError(c, \"Failed to get affected rows\", err, http.StatusInternalServerError)\n")
	methodContent.WriteString("		return\n")
	methodContent.WriteString("	}\n")
	methodContent.WriteString("	if rowsAffected == 0 {\n")
	methodContent.WriteString("		h.respondError(c, \"" + data.Name + " not found\", sql.ErrNoRows, http.StatusNotFound)\n")
	methodContent.WriteString("		return\n")
	methodContent.WriteString("	}\n\n")

	methodContent.WriteString("	// Invalidate cache that might be affected\n")
	methodContent.WriteString("	// Invalidate cache for " + primaryKey + " that was deleted\n")
	methodContent.WriteString("	cacheKey := fmt.Sprintf(\"" + data.NameLower + ":" + primaryKey + ":%s\", " + primaryKey + ")\n")
	methodContent.WriteString("	h.cache.Delete(cacheKey)\n\n")

	// NOTE(review): the generated `!= 0` comparison assumes the model's
	// primary-key field is numeric; tables with string PKs will not compile.
	// TODO confirm against the model generator.
	methodContent.WriteString("	// Invalidate cache for data that was deleted\n")
	methodContent.WriteString("	if dataToDelete." + snakeToPascal(primaryKey) + " != 0 {\n")
	methodContent.WriteString("		h.invalidateRelatedCache()\n")
	methodContent.WriteString("	}\n\n")

	methodContent.WriteString("	response := " + data.Category + "Models." + endpoint.ResponseModel + "{Message: \"" + data.Name + " berhasil dihapus\", " + snakeToPascal(primaryKey) + ": " + primaryKey + "}\n")
	methodContent.WriteString("	c.JSON(http.StatusOK, response)\n")
	methodContent.WriteString("}\n\n")

	return methodContent.String()
}

// generateHelperMethodsWithCache renders the shared helper methods for a
// generated handler: cache invalidation, the dynamic fetch helper with
// search-timeout fallback, the getSearchTerm/getAggregateData helpers, and
// the error/pagination utilities.
//
// Fixes: the generated code now logs the ORIGINAL query.Limit before it is
// clamped to maxSearchLimit (previously it was overwritten first, so
// "original_limit" always equalled "new_limit"), and the dead identical
// if/else branches in the searchable-column loop are collapsed.
func generateHelperMethodsWithCache(data HandlerData) string {
	var helperMethods strings.Builder
	primaryKey := findPrimaryKey(data.TableSchema)
	if primaryKey == "" {
		primaryKey = "id" // Default fallback
	}

	// Invalidate cache method
	helperMethods.WriteString("// invalidateRelatedCache invalidates cache that might be affected by data changes\n")
	helperMethods.WriteString("func (h *" + data.Name + "Handler) invalidateRelatedCache() {\n")
	helperMethods.WriteString("	// Invalidate cache for search that might be affected\n")
	helperMethods.WriteString("	h.cache.DeleteByPrefix(\"" + data.NameLower + ":search:\")\n")
	helperMethods.WriteString("	h.cache.DeleteByPrefix(\"" + data.NameLower + ":dynamic:\")\n")
	helperMethods.WriteString("	h.cache.DeleteByPrefix(\"" + data.NameLower + ":stats:\")\n")
	helperMethods.WriteString("	h.cache.DeleteByPrefix(\"" + data.NameLower + ":" + primaryKey + ":\")\n")
	helperMethods.WriteString("}\n\n")

	// Fetch dynamic method
	helperMethods.WriteString("// fetch" + data.Name + "sDynamic executes dynamic query with timeout handling\n")
	helperMethods.WriteString("func (h *" + data.Name + "Handler) fetch" + data.Name + "sDynamic(ctx context.Context, dbConn *sqlx.DB, query queryUtils.DynamicQuery) ([]" + data.Category + "Models." + data.Name + ", int, error) {\n")
	helperMethods.WriteString("	logger.Info(\"Starting fetch" + data.Name + "sDynamic\", map[string]interface{}{\n")
	helperMethods.WriteString("		\"limit\": query.Limit,\n")
	helperMethods.WriteString("		\"offset\": query.Offset,\n")
	helperMethods.WriteString("		\"from\": query.From,\n")
	helperMethods.WriteString("	})\n\n")

	helperMethods.WriteString("	var total int\n")
	helperMethods.WriteString("	var " + data.NamePlural + " []" + data.Category + "Models." + data.Name + "\n\n")

	helperMethods.WriteString("	// Check if query has search\n")
	helperMethods.WriteString("	hasSearch := false\n")
	helperMethods.WriteString("	for _, filterGroup := range query.Filters {\n")
	helperMethods.WriteString("		for _, filter := range filterGroup.Filters {\n")
	helperMethods.WriteString("			if filter.Operator == queryUtils.OpILike {\n")
	helperMethods.WriteString("				hasSearch = true\n")
	helperMethods.WriteString("				break\n")
	helperMethods.WriteString("			}\n")
	helperMethods.WriteString("		}\n")
	helperMethods.WriteString("		if hasSearch {\n")
	helperMethods.WriteString("			break\n")
	helperMethods.WriteString("		}\n")
	helperMethods.WriteString("	}\n\n")

	helperMethods.WriteString("	logger.Info(\"Query analysis\", map[string]interface{}{\n")
	helperMethods.WriteString("		\"hasSearch\": hasSearch,\n")
	helperMethods.WriteString("		\"totalFilters\": len(query.Filters),\n")
	helperMethods.WriteString("	})\n\n")

	helperMethods.WriteString("	// Optimize to prevent timeout on search queries\n")
	helperMethods.WriteString("	// Use shorter context for search and count queries\n")
	helperMethods.WriteString("	queryCtx, queryCancel := context.WithTimeout(ctx, 30*time.Second)\n")
	helperMethods.WriteString("	defer queryCancel()\n\n")

	helperMethods.WriteString("	// For search queries, limit maximum to prevent timeout\n")
	helperMethods.WriteString("	if hasSearch {\n")
	helperMethods.WriteString("		search := getSearchTerm(query)\n")
	helperMethods.WriteString("		logger.Info(\"Executing search query with timeout context\", map[string]interface{}{\"search_term\": search})\n\n")

	// Fix: log the original limit BEFORE clamping; previously the assignment
	// came first, so "original_limit" always logged the reduced value.
	helperMethods.WriteString("		// Limit maximum search limit to prevent timeout\n")
	helperMethods.WriteString("		maxSearchLimit := 50\n")
	helperMethods.WriteString("		if query.Limit > maxSearchLimit {\n")
	helperMethods.WriteString("			logger.Info(\"Reduced search limit to prevent timeout\", map[string]interface{}{\n")
	helperMethods.WriteString("				\"original_limit\": query.Limit,\n")
	helperMethods.WriteString("				\"new_limit\": maxSearchLimit,\n")
	helperMethods.WriteString("			})\n")
	helperMethods.WriteString("			query.Limit = maxSearchLimit\n")
	helperMethods.WriteString("		}\n\n")

	helperMethods.WriteString("		// Execute search query\n")
	helperMethods.WriteString("		err := h.queryBuilder.ExecuteQuery(queryCtx, dbConn, query, &" + data.NamePlural + ")\n")
	helperMethods.WriteString("		if err != nil {\n")
	helperMethods.WriteString("			// Check if it's a PostgreSQL statement timeout error\n")
	helperMethods.WriteString("			if pqErr, ok := err.(*pq.Error); ok && pqErr.Code == \"57014\" {\n")
	helperMethods.WriteString("				logger.Warn(\"Search query timed out, trying fallback strategy\", map[string]interface{}{\n")
	helperMethods.WriteString("					\"search_term\": search,\n")
	helperMethods.WriteString("				})\n\n")

	helperMethods.WriteString("				// Fallback: Search only in the most relevant column\n")
	helperMethods.WriteString("				// We need to rebuild the filters for the fallback\n")
	helperMethods.WriteString("				var fallbackFilters []queryUtils.FilterGroup\n")
	helperMethods.WriteString("				// Add other non-search filters back (e.g., status)\n")
	helperMethods.WriteString("				for _, fg := range query.Filters {\n")
	helperMethods.WriteString("					if fg.LogicOp == \"AND\" {\n")
	helperMethods.WriteString("						fallbackFilters = append(fallbackFilters, fg)\n")
	helperMethods.WriteString("					}\n")
	helperMethods.WriteString("				}\n")
	helperMethods.WriteString("				// Add the single, more specific search filter\n")
	// Fix: use data.TableSchema directly instead of h.tableSchema.
	helperMethods.WriteString("				searchableColumns := []string{\n")

	// Generate searchable columns from table schema. The old if/else here had
	// two byte-identical branches, so the index was unused — collapsed.
	searchableColumns := findSearchableColumns(data.TableSchema)
	for _, col := range searchableColumns {
		helperMethods.WriteString("					\"" + col + "\",\n")
	}

	helperMethods.WriteString("				}\n")
	helperMethods.WriteString("				if len(searchableColumns) > 0 {\n")
	helperMethods.WriteString("					fallbackFilters = append([]queryUtils.FilterGroup{{\n")
	helperMethods.WriteString("						Filters: []queryUtils.DynamicFilter{\n")
	helperMethods.WriteString("							{Column: searchableColumns[0], Operator: queryUtils.OpILike, Value: \"%\" + search + \"%\"},\n")
	helperMethods.WriteString("						},\n")
	helperMethods.WriteString("						LogicOp: \"AND\",\n")
	helperMethods.WriteString("					}}, fallbackFilters...)\n\n")

	helperMethods.WriteString("					fallbackQuery := query\n")
	helperMethods.WriteString("					fallbackQuery.Filters = fallbackFilters\n\n")

	helperMethods.WriteString("					// Execute the fallback query with a shorter timeout\n")
	helperMethods.WriteString("					fallbackCtx, fallbackCancel := context.WithTimeout(ctx, 10*time.Second)\n")
	helperMethods.WriteString("					defer fallbackCancel()\n\n")

	helperMethods.WriteString("					err = h.queryBuilder.ExecuteQuery(fallbackCtx, dbConn, fallbackQuery, &" + data.NamePlural + ")\n")
	helperMethods.WriteString("					if err != nil {\n")
	helperMethods.WriteString("						logger.Error(\"Fallback search query also failed\", map[string]interface{}{\n")
	helperMethods.WriteString("							\"error\": err.Error(),\n")
	helperMethods.WriteString("							\"query\": fallbackQuery,\n")
	helperMethods.WriteString("						})\n")
	helperMethods.WriteString("						// Return a more user-friendly error\n")
	helperMethods.WriteString("						return nil, 0, fmt.Errorf(\"search timed out. The search term '%s' is too general. Please try a more specific term\", search)\n")
	helperMethods.WriteString("					}\n")
	helperMethods.WriteString("					logger.Info(\"Fallback search query successful\", map[string]interface{}{\n")
	helperMethods.WriteString("						\"recordsFetched\": len(" + data.NamePlural + "),\n")
	helperMethods.WriteString("					})\n")
	helperMethods.WriteString("				}\n")
	helperMethods.WriteString("			} else {\n")
	helperMethods.WriteString("				// It's a different error, handle it as before\n")
	helperMethods.WriteString("				logger.Error(\"Failed to execute search query\", map[string]interface{}{\n")
	helperMethods.WriteString("					\"error\": err.Error(),\n")
	helperMethods.WriteString("					\"query\": query,\n")
	helperMethods.WriteString("				})\n")
	helperMethods.WriteString("				return nil, 0, fmt.Errorf(\"failed to execute search query: %w\", err)\n")
	helperMethods.WriteString("			}\n")
	helperMethods.WriteString("		}\n\n")

	helperMethods.WriteString("		// Estimate total for search query (don't count exact for performance)\n")
	helperMethods.WriteString("		total = len(" + data.NamePlural + ")\n")
	helperMethods.WriteString("		if len(" + data.NamePlural + ") == query.Limit {\n")
	helperMethods.WriteString("			// If reached limit, estimate there are more data\n")
	helperMethods.WriteString("			total = query.Offset + query.Limit + 100\n")
	helperMethods.WriteString("		} else {\n")
	helperMethods.WriteString("			total = query.Offset + len(" + data.NamePlural + ")\n")
	helperMethods.WriteString("		}\n")
	helperMethods.WriteString("	} else {\n")
	helperMethods.WriteString("		logger.Info(\"Executing regular query without search\")\n\n")

	helperMethods.WriteString("		// For queries without search, count total with shorter timeout\n")
	helperMethods.WriteString("		countCtx, countCancel := context.WithTimeout(ctx, 15*time.Second)\n")
	helperMethods.WriteString("		defer countCancel()\n\n")

	helperMethods.WriteString("		count, err := h.queryBuilder.ExecuteCount(countCtx, dbConn, query)\n")
	helperMethods.WriteString("		if err != nil {\n")
	helperMethods.WriteString("			// If count failed, fallback to estimation or return error\n")
	helperMethods.WriteString("			logger.Warn(\"Failed to get exact count, using estimation\", map[string]interface{}{\"error\": err.Error()})\n")
	helperMethods.WriteString("			// For queries without search, we can estimate based on limit\n")
	helperMethods.WriteString("			total = query.Offset + query.Limit + 100 // Conservative estimation\n")
	helperMethods.WriteString("		} else {\n")
	helperMethods.WriteString("			total = int(count)\n")
	helperMethods.WriteString("		}\n\n")

	helperMethods.WriteString("		logger.Info(\"Count query successful\", map[string]interface{}{\n")
	helperMethods.WriteString("			\"count\": total,\n")
	helperMethods.WriteString("		})\n\n")

	helperMethods.WriteString("		// Execute main data query\n")
	helperMethods.WriteString("		err = h.queryBuilder.ExecuteQuery(queryCtx, dbConn, query, &" + data.NamePlural + ")\n")
	helperMethods.WriteString("		if err != nil {\n")
	helperMethods.WriteString("			logger.Error(\"Failed to execute main query\", map[string]interface{}{\n")
	helperMethods.WriteString("				\"error\": err.Error(),\n")
	helperMethods.WriteString("				\"query\": query,\n")
	helperMethods.WriteString("			})\n")
	helperMethods.WriteString("			return nil, 0, fmt.Errorf(\"failed to execute main query: %w\", err)\n")
	helperMethods.WriteString("		}\n\n")

	helperMethods.WriteString("		logger.Info(\"Data query successful\", map[string]interface{}{\n")
	helperMethods.WriteString("			\"recordsFetched\": len(" + data.NamePlural + "),\n")
	helperMethods.WriteString("		})\n")
	helperMethods.WriteString("	}\n\n")

	helperMethods.WriteString("	logger.Info(\"Query execution completed\", map[string]interface{}{\n")
	helperMethods.WriteString("		\"totalRecords\": total,\n")
	helperMethods.WriteString("		\"returnedRecords\": len(" + data.NamePlural + "),\n")
	helperMethods.WriteString("		\"hasSearch\": hasSearch,\n")
	helperMethods.WriteString("	})\n\n")

	helperMethods.WriteString("	return " + data.NamePlural + ", total, nil\n")
	helperMethods.WriteString("}\n\n")

	// getSearchTerm helper
	helperMethods.WriteString("// getSearchTerm extracts the search term from a DynamicQuery object.\n")
	helperMethods.WriteString("// It assumes the search is the first filter group with an \"OR\" logic operator.\n")
	helperMethods.WriteString("func getSearchTerm(query queryUtils.DynamicQuery) string {\n")
	helperMethods.WriteString("	for _, filterGroup := range query.Filters {\n")
	helperMethods.WriteString("		if filterGroup.LogicOp == \"OR\" && len(filterGroup.Filters) > 0 {\n")
	helperMethods.WriteString("			if valueStr, ok := filterGroup.Filters[0].Value.(string); ok {\n")
	helperMethods.WriteString("				return strings.Trim(valueStr, \"%\")\n")
	helperMethods.WriteString("			}\n")
	helperMethods.WriteString("		}\n")
	helperMethods.WriteString("	}\n")
	helperMethods.WriteString("	return \"\"\n")
	helperMethods.WriteString("}\n\n")

	// getAggregateData helper
	helperMethods.WriteString("// getAggregateData gets comprehensive statistics about " + data.NameLower + " data\n")
	helperMethods.WriteString("func (h *" + data.Name + "Handler) getAggregateData(ctx context.Context, dbConn *sqlx.DB, filterGroups []queryUtils.FilterGroup) (*models.AggregateData, error) {\n")
	helperMethods.WriteString("	aggregate := &models.AggregateData{\n")
	helperMethods.WriteString("		ByStatus: make(map[string]int),\n")
	helperMethods.WriteString("	}\n\n")

	helperMethods.WriteString("	var wg sync.WaitGroup\n")
	helperMethods.WriteString("	var mu sync.Mutex\n")
	helperMethods.WriteString("	errChan := make(chan error, 4)\n\n")

	statusColumn := findStatusColumn(data.TableSchema)
	if statusColumn != "" {
		// Count by status
		helperMethods.WriteString("	// 1. Count by status\n")
		helperMethods.WriteString("	wg.Add(1)\n")
		helperMethods.WriteString("	go func() {\n")
		helperMethods.WriteString("		defer wg.Done()\n")
		helperMethods.WriteString("		// Use context with shorter timeout\n")
		helperMethods.WriteString("		queryCtx, queryCancel := context.WithTimeout(ctx, 20*time.Second)\n")
		helperMethods.WriteString("		defer queryCancel()\n\n")

		helperMethods.WriteString("		query := queryUtils.DynamicQuery{\n")
		helperMethods.WriteString("			From: \"" + data.TableName + "\",\n")
		helperMethods.WriteString("			Fields: []queryUtils.SelectField{\n")
		helperMethods.WriteString("				{Expression: \"" + statusColumn + "\"},\n")
		helperMethods.WriteString("				{Expression: \"COUNT(*)\", Alias: \"count\"},\n")
		helperMethods.WriteString("			},\n")
		helperMethods.WriteString("			Filters: filterGroups,\n")
		helperMethods.WriteString("			GroupBy: []string{\"" + statusColumn + "\"},\n")
		helperMethods.WriteString("		}\n")
		helperMethods.WriteString("		var results []struct {\n")
		helperMethods.WriteString("			Status string `db:\"" + statusColumn + "\"`\n")
		helperMethods.WriteString("			Count int `db:\"count\"`\n")
		helperMethods.WriteString("		}\n")
		helperMethods.WriteString("		err := h.queryBuilder.ExecuteQuery(queryCtx, dbConn, query, &results)\n")
		helperMethods.WriteString("		if err != nil {\n")
		helperMethods.WriteString("			errChan <- fmt.Errorf(\"status query failed: %w\", err)\n")
		helperMethods.WriteString("			return\n")
		helperMethods.WriteString("		}\n")
		helperMethods.WriteString("		mu.Lock()\n")
		helperMethods.WriteString("		for _, result := range results {\n")
		helperMethods.WriteString("			aggregate.ByStatus[result.Status] = result.Count\n")
		helperMethods.WriteString("			switch result.Status {\n")
		helperMethods.WriteString("			case \"active\":\n")
		helperMethods.WriteString("				aggregate.TotalActive = result.Count\n")
		helperMethods.WriteString("			case \"draft\":\n")
		helperMethods.WriteString("				aggregate.TotalDraft = result.Count\n")
		helperMethods.WriteString("			case \"inactive\":\n")
		helperMethods.WriteString("				aggregate.TotalInactive = result.Count\n")
		helperMethods.WriteString("			}\n")
		helperMethods.WriteString("		}\n")
		helperMethods.WriteString("		mu.Unlock()\n")
		helperMethods.WriteString("	}()\n\n")
	}

	// Get last updated and today's stats
	helperMethods.WriteString("	// 4. Get last updated and today's stats\n")
	helperMethods.WriteString("	wg.Add(1)\n")
	helperMethods.WriteString("	go func() {\n")
	helperMethods.WriteString("		defer wg.Done()\n")
	helperMethods.WriteString("		// Use context with shorter timeout\n")
	helperMethods.WriteString("		queryCtx, queryCancel := context.WithTimeout(ctx, 20*time.Second)\n")
	helperMethods.WriteString("		defer queryCancel()\n\n")

	helperMethods.WriteString("		// Last updated\n")
	helperMethods.WriteString("		query1 := queryUtils.DynamicQuery{\n")
	helperMethods.WriteString("			From: \"" + data.TableName + "\",\n")
	helperMethods.WriteString("			Fields: []queryUtils.SelectField{{Expression: \"MAX(date_updated)\"}},\n")
	helperMethods.WriteString("			Filters: filterGroups,\n")
	helperMethods.WriteString("		}\n")
	helperMethods.WriteString("		var lastUpdated sql.NullTime\n")
	helperMethods.WriteString("		err := h.queryBuilder.ExecuteQueryRow(queryCtx, dbConn, query1, &lastUpdated)\n")
	helperMethods.WriteString("		if err != nil {\n")
	helperMethods.WriteString("			errChan <- fmt.Errorf(\"last updated query failed: %w\", err)\n")
	helperMethods.WriteString("			return\n")
	helperMethods.WriteString("		}\n\n")

	helperMethods.WriteString("		// Using QueryBuilder for today's statistics\n")
	helperMethods.WriteString("		today := time.Now().Format(\"2006-01-02\")\n\n")

	helperMethods.WriteString("		// Query for created_today\n")
	helperMethods.WriteString("		createdTodayQuery := queryUtils.DynamicQuery{\n")
	helperMethods.WriteString("			From: \"" + data.TableName + "\",\n")
	helperMethods.WriteString("			Fields: []queryUtils.SelectField{\n")
	helperMethods.WriteString("				{Expression: \"COUNT(*)\", Alias: \"count\"},\n")
	helperMethods.WriteString("			},\n")
	helperMethods.WriteString("			Filters: append(filterGroups, queryUtils.FilterGroup{\n")
	helperMethods.WriteString("				Filters: []queryUtils.DynamicFilter{\n")
	helperMethods.WriteString("					{Column: \"DATE(date_created)\", Operator: queryUtils.OpEqual, Value: today},\n")
	helperMethods.WriteString("				},\n")
	helperMethods.WriteString("				LogicOp: \"AND\",\n")
	helperMethods.WriteString("			}),\n")
	helperMethods.WriteString("		}\n\n")

	helperMethods.WriteString("		var createdToday int\n")
	helperMethods.WriteString("		err = h.queryBuilder.ExecuteQueryRow(queryCtx, dbConn, createdTodayQuery, &createdToday)\n")
	helperMethods.WriteString("		if err != nil {\n")
	helperMethods.WriteString("			errChan <- fmt.Errorf(\"created today query failed: %w\", err)\n")
	helperMethods.WriteString("			return\n")
	helperMethods.WriteString("		}\n\n")

	helperMethods.WriteString("		// Query for updated_today (updated today but not created today)\n")
	helperMethods.WriteString("		updatedTodayQuery := queryUtils.DynamicQuery{\n")
	helperMethods.WriteString("			From: \"" + data.TableName + "\",\n")
	helperMethods.WriteString("			Fields: []queryUtils.SelectField{\n")
	helperMethods.WriteString("				{Expression: \"COUNT(*)\", Alias: \"count\"},\n")
	helperMethods.WriteString("			},\n")
	helperMethods.WriteString("			Filters: append(filterGroups, queryUtils.FilterGroup{\n")
	helperMethods.WriteString("				Filters: []queryUtils.DynamicFilter{\n")
	helperMethods.WriteString("					{Column: \"DATE(date_updated)\", Operator: queryUtils.OpEqual, Value: today},\n")
	helperMethods.WriteString("					{Column: \"DATE(date_created)\", Operator: queryUtils.OpNotEqual, Value: today},\n")
	helperMethods.WriteString("				},\n")
	helperMethods.WriteString("				LogicOp: \"AND\",\n")
	helperMethods.WriteString("			}),\n")
	helperMethods.WriteString("		}\n\n")

	helperMethods.WriteString("		var updatedToday int\n")
	helperMethods.WriteString("		err = h.queryBuilder.ExecuteQueryRow(queryCtx, dbConn, updatedTodayQuery, &updatedToday)\n")
	helperMethods.WriteString("		if err != nil {\n")
	helperMethods.WriteString("			errChan <- fmt.Errorf(\"updated today query failed: %w\", err)\n")
	helperMethods.WriteString("			return\n")
	helperMethods.WriteString("		}\n\n")

	helperMethods.WriteString("		mu.Lock()\n")
	helperMethods.WriteString("		if lastUpdated.Valid {\n")
	helperMethods.WriteString("			aggregate.LastUpdated = &lastUpdated.Time\n")
	helperMethods.WriteString("		}\n")
	helperMethods.WriteString("		aggregate.CreatedToday = createdToday\n")
	helperMethods.WriteString("		aggregate.UpdatedToday = updatedToday\n")
	helperMethods.WriteString("		mu.Unlock()\n")
	helperMethods.WriteString("	}()\n\n")

	helperMethods.WriteString("	wg.Wait()\n")
	helperMethods.WriteString("	close(errChan)\n\n")

	helperMethods.WriteString("	for err := range errChan {\n")
	helperMethods.WriteString("		if err != nil {\n")
	helperMethods.WriteString("			return nil, err\n")
	helperMethods.WriteString("		}\n")
	helperMethods.WriteString("	}\n\n")

	helperMethods.WriteString("	return aggregate, nil\n")
	helperMethods.WriteString("}\n\n")

	// Error handling methods
	helperMethods.WriteString("// logAndRespondError logs an error and sends a JSON response\n")
	helperMethods.WriteString("func (h *" + data.Name + "Handler) logAndRespondError(c *gin.Context, message string, err error, statusCode int) {\n")
	helperMethods.WriteString("	logger.Error(message, map[string]interface{}{\"error\": err.Error(), \"status_code\": statusCode})\n")
	helperMethods.WriteString("	h.respondError(c, message, err, statusCode)\n")
	helperMethods.WriteString("}\n\n")

	helperMethods.WriteString("// respondError sends a standardized JSON error response\n")
	helperMethods.WriteString("func (h *" + data.Name + "Handler) respondError(c *gin.Context, message string, err error, statusCode int) {\n")
	helperMethods.WriteString("	errorMessage := message\n")
	helperMethods.WriteString("	if gin.Mode() == gin.ReleaseMode {\n")
	helperMethods.WriteString("		errorMessage = \"Internal server error\"\n")
	helperMethods.WriteString("	}\n")
	helperMethods.WriteString("	c.JSON(statusCode, models.ErrorResponse{Error: errorMessage, Code: statusCode, Message: err.Error(), Timestamp: time.Now()})\n")
	helperMethods.WriteString("}\n\n")

	// calculateMeta method
	helperMethods.WriteString("// calculateMeta creates pagination metadata\n")
	helperMethods.WriteString("func (h *" + data.Name + "Handler) calculateMeta(limit, offset, total int) models.MetaResponse {\n")
	helperMethods.WriteString("	totalPages, currentPage := 0, 1\n")
	helperMethods.WriteString("	if limit > 0 {\n")
	helperMethods.WriteString("		totalPages = (total + limit - 1) / limit\n")
	helperMethods.WriteString("		currentPage = (offset / limit) + 1\n")
	helperMethods.WriteString("	}\n")
	helperMethods.WriteString("	return models.MetaResponse{\n")
	helperMethods.WriteString("		Limit: limit, Offset: offset, Total: total, TotalPages: totalPages,\n")
	helperMethods.WriteString("		CurrentPage: currentPage, HasNext: offset+limit < total, HasPrev: offset > 0,\n")
	helperMethods.WriteString("	}\n")
	helperMethods.WriteString("}\n")

	return helperMethods.String()
}

@@ -2048,20 +2905,20 @@ func generateModelFile(data HandlerData, modelDir string) {
 	modelFileName := data.NameLower + ".go"
 	modelFilePath := filepath.Join(modelDir, modelFileName)
 
-	var importBlock, nullablePrefix string
+	var importBlock string
 	if data.Category == "models" {
 		importBlock = `import (
-	"database/sql"
+	"api-service/internal/models"
 	"encoding/json"
+
"database/sql" "time" ) ` } else { - nullablePrefix = "models." importBlock = `import ( - "` + data.ModuleName + `/internal/models" - "database/sql" + "api-service/internal/models" "encoding/json" + "database/sql" "time" ) ` @@ -2071,12 +2928,21 @@ func generateModelFile(data HandlerData, modelDir string) { modelContent.WriteString(fmt.Sprintf("package %s\n\n", data.Category)) modelContent.WriteString(importBlock) - // Generate main struct + // Generate main struct dengan nullable types modelContent.WriteString(fmt.Sprintf("// %s represents the data structure for the %s table\n", data.Name, data.TableName)) modelContent.WriteString("// with proper null handling and optimized JSON marshaling\n") modelContent.WriteString(fmt.Sprintf("type %s struct {\n", data.Name)) + // Track added fields to avoid duplicates + addedFields := make(map[string]bool) + + // Add main table columns for _, col := range data.TableSchema { + if addedFields[col.Name] { + continue // Skip if already added + } + addedFields[col.Name] = true + fieldName := snakeToPascal(col.Name) goType, _, _ := mapSQLTypeToGo(col.Type, col.Nullable, col.GoType) jsonTag := snakeToCamel(col.Name) @@ -2087,6 +2953,29 @@ func generateModelFile(data HandlerData, modelDir string) { } modelContent.WriteString(fmt.Sprintf(" %s %s `json:\"%s\" db:\"%s\"`\n", fieldName, goType, jsonTagValue, dbTag)) } + + // Add relationship columns if relationships exist + if len(data.Relationships) > 0 { + for _, rel := range data.Relationships { + for _, col := range rel.Columns { + if addedFields[col.Name] { + continue // Skip if already added + } + addedFields[col.Name] = true + + fieldName := snakeToPascal(col.Name) + goType, _, _ := mapSQLTypeToGo(col.Type, col.Nullable, col.GoType) + jsonTag := snakeToCamel(col.Name) + dbTag := col.Name // Gunakan nama kolom langsung + jsonTagValue := jsonTag + if col.Nullable { + jsonTagValue += ",omitempty" + } + modelContent.WriteString(fmt.Sprintf(" %s %s `json:\"%s\" db:\"%s\"`\n", 
fieldName, goType, jsonTagValue, dbTag)) + } + } + } + modelContent.WriteString("}\n\n") // Generate MarshalJSON method @@ -2094,6 +2983,8 @@ func generateModelFile(data HandlerData, modelDir string) { modelContent.WriteString(fmt.Sprintf("func (r %s) MarshalJSON() ([]byte, error) {\n", data.Name)) modelContent.WriteString(fmt.Sprintf(" type Alias %s\n", data.Name)) modelContent.WriteString(" aux := &struct {\n *Alias\n") + + // Add main table columns for _, col := range data.TableSchema { if !col.Nullable { continue @@ -2104,7 +2995,26 @@ func generateModelFile(data HandlerData, modelDir string) { jsonTag := snakeToCamel(col.Name) + ",omitempty" modelContent.WriteString(fmt.Sprintf(" %s %s `json:\"%s\"`\n", fieldName, auxType, jsonTag)) } + + // Add relationship columns + if len(data.Relationships) > 0 { + for _, rel := range data.Relationships { + for _, col := range rel.Columns { + if !col.Nullable { + continue + } + fieldName := snakeToPascal(col.Name) + _, baseType, _ := mapSQLTypeToGo(col.Type, col.Nullable, col.GoType) + auxType := "*" + baseType + jsonTag := snakeToCamel(col.Name) + ",omitempty" + modelContent.WriteString(fmt.Sprintf(" %s %s `json:\"%s\"`\n", fieldName, auxType, jsonTag)) + } + } + } + modelContent.WriteString(" }{\n Alias: (*Alias)(&r),\n }\n\n") + + // Add main table columns for _, col := range data.TableSchema { if !col.Nullable { continue @@ -2113,9 +3023,24 @@ func generateModelFile(data HandlerData, modelDir string) { _, _, valueType := mapSQLTypeToGo(col.Type, col.Nullable, col.GoType) modelContent.WriteString(fmt.Sprintf(" if r.%s.Valid {\n aux.%s = &r.%s.%s\n }\n", fieldName, fieldName, fieldName, valueType)) } + + // Add relationship columns + if len(data.Relationships) > 0 { + for _, rel := range data.Relationships { + for _, col := range rel.Columns { + if !col.Nullable { + continue + } + fieldName := snakeToPascal(col.Name) + _, _, valueType := mapSQLTypeToGo(col.Type, col.Nullable, col.GoType) + 
modelContent.WriteString(fmt.Sprintf(" if r.%s.Valid {\n aux.%s = &r.%s.%s\n }\n", fieldName, fieldName, fieldName, valueType)) + } + } + } + modelContent.WriteString(" return json.Marshal(aux)\n}\n\n") - // Generate helper methods + // Generate helper methods for main table columns for _, col := range data.TableSchema { if !col.Nullable { continue @@ -2140,11 +3065,40 @@ func generateModelFile(data HandlerData, modelDir string) { modelContent.WriteString(fmt.Sprintf(" if r.%s.Valid {\n return r.%s.%s\n }\n return %s\n}\n\n", fieldName, fieldName, valueType, zeroValue)) } + // Generate helper methods for relationship columns + if len(data.Relationships) > 0 { + for _, rel := range data.Relationships { + for _, col := range rel.Columns { + if !col.Nullable { + continue + } + fieldName := snakeToPascal(col.Name) + _, baseType, valueType := mapSQLTypeToGo(col.Type, col.Nullable, col.GoType) + var zeroValue string + switch baseType { + case "string": + zeroValue = `""` + case "int32", "int64", "float64": + zeroValue = "0" + case "bool": + zeroValue = "false" + case "time.Time": + zeroValue = "time.Time{}" + default: + zeroValue = "nil" + } + modelContent.WriteString(fmt.Sprintf("// Helper method to safely get %s\n", fieldName)) + modelContent.WriteString(fmt.Sprintf("func (r *%s) Get%s() %s {\n", data.Name, fieldName, baseType)) + modelContent.WriteString(fmt.Sprintf(" if r.%s.Valid {\n return r.%s.%s\n }\n return %s\n}\n\n", fieldName, fieldName, valueType, zeroValue)) + } + } + } + // Generate request/response structs - excludedFields := map[string]bool{"id": true, "date_created": true, "date_updated": true, "user_created": true, "user_updated": true} + systemFields := findSystemFields(data.TableSchema) var createFields, updateFields []ColumnConfig for _, col := range data.TableSchema { - if excludedFields[strings.ToLower(col.Name)] { + if isSystemField(col.Name, systemFields) { continue } createFields = append(createFields, col) @@ -2152,93 +3106,191 @@ func 
generateModelFile(data HandlerData, modelDir string) { updateFields = append(updateFields, updateCol) } - if data.HasGet { - modelContent.WriteString(fmt.Sprintf(`// Response struct for GET by ID -type %sGetByIDResponse struct { - Message string `+"`json:\"message\"`"+` - Data *%s `+"`json:\"data\"`"+` -} - -// Enhanced GET response with pagination and aggregation -type %sGetResponse struct { - Message string `+"`json:\"message\"`"+` - Data []%s `+"`json:\"data\"`"+` - Meta %sMetaResponse `+"`json:\"meta\"`"+` - Summary *%sAggregateData `+"`json:\"summary,omitempty\"`"+` -} -`, data.Name, data.Name, data.Name, data.Name, nullablePrefix, nullablePrefix)) - } - if data.HasPost { - modelContent.WriteString(fmt.Sprintf("\n// Request struct for create\ntype %sCreateRequest struct {\n", data.Name)) - for _, col := range createFields { - fieldName := snakeToPascal(col.Name) - _, baseType, _ := mapSQLTypeToGo(col.Type, col.Nullable, col.GoType) - jsonTag := snakeToCamel(col.Name) - var requestType string - if col.Nullable { - requestType = "*" + baseType - } else { - requestType = baseType + // Generate all response structs based on endpoints + for endpointName, endpoint := range data.Endpoints { + if endpoint.ResponseModel != "" { + switch endpointName { + case "list": + modelContent.WriteString(fmt.Sprintf(`// Response struct for GET list + type %sGetResponse struct { + Message string `+"`json:\"message\"`"+` + Data []%s `+"`json:\"data\"`"+` + Meta models.MetaResponse `+"`json:\"meta\"`"+` + Summary *models.AggregateData `+"`json:\"summary,omitempty\"`"+` + } + `, data.Name, data.Name)) + case "get": + primaryKey := findPrimaryKey(data.TableSchema) + if primaryKey == "" { + primaryKey = "id" + } + modelContent.WriteString(fmt.Sprintf(`// Response struct for GET by %s + type %sGetBy%sResponse struct { + Message string `+"`json:\"message\"`"+` + Data *%s `+"`json:\"data\"`"+` + } + `, primaryKey, data.Name, snakeToPascal(primaryKey), data.Name)) + case "create": + 
modelContent.WriteString(fmt.Sprintf(`// Response struct for create + type %sCreateResponse struct { + Message string `+"`json:\"message\"`"+` + Data *%s `+"`json:\"data\"`"+` + } + `, data.Name, data.Name)) + case "update": + modelContent.WriteString(fmt.Sprintf(`// Response struct for update + type %sUpdateResponse struct { + Message string `+"`json:\"message\"`"+` + Data *%s `+"`json:\"data\"`"+` + } + `, data.Name, data.Name)) + case "delete": + primaryKey := findPrimaryKey(data.TableSchema) + if primaryKey == "" { + primaryKey = "id" + } + modelContent.WriteString(fmt.Sprintf(`// Response struct for delete + type %sDeleteResponse struct { + Message string `+"`json:\"message\"`"+` + %s string `+"`json:\"%s\"`"+` + } + `, data.Name, snakeToPascal(primaryKey), snakeToCamel(primaryKey))) + case "stats": + // Stats uses AggregateData directly, no need to generate struct + case "by_age": + modelContent.WriteString(fmt.Sprintf(`// Response struct for by age + type %sAgeStatsResponse struct { + Message string `+"`json:\"message\"`"+` + Data map[string]interface{} `+"`json:\"data\"`"+` + } + `, data.Name)) } - modelContent.WriteString(fmt.Sprintf(" %s %s `json:\"%s\"`\n", fieldName, requestType, jsonTag)) } - modelContent.WriteString("}\n\n") - modelContent.WriteString(fmt.Sprintf(`// Response struct for create -type %sCreateResponse struct { - Message string `+"`json:\"message\"`"+` - Data *%s `+"`json:\"data\"`"+` -} -`, data.Name, data.Name)) - } - if data.HasPut { - // 1. Bangun string tag terlebih dahulu - idTagContent := `json:"-" validate:"required,uuid4"` - // 2. Gunakan strconv.Quote untuk membuatnya menjadi literal yang valid - // Hasilnya akan menjadi: "`json:\"-\" validate:\"required,uuid4\"`" - quotedIdTag := strconv.Quote(idTagContent) + if endpoint.RequestModel != "" { + switch endpointName { + case "create": + modelContent.WriteString(fmt.Sprintf("\n// Request struct for create\ntype %sCreateRequest struct {\n", data.Name)) - // 3. 
Gunakan tag yang sudah di-"quote" dalam fmt.Sprintf - modelContent.WriteString(fmt.Sprintf("\n// Update request\ntype %sUpdateRequest struct {\n ID string %s\n", data.Name, quotedIdTag)) + // Add status field first if it exists + statusColumn := findStatusColumn(data.TableSchema) + if statusColumn != "" { + fieldName := snakeToPascal(statusColumn) + _, baseType, _ := mapSQLTypeToGo(getColumnType(data.TableSchema, statusColumn), isColumnNullable(data.TableSchema, statusColumn), "") + jsonTag := snakeToCamel(statusColumn) + var requestType string + if isColumnNullable(data.TableSchema, statusColumn) { + requestType = "*" + baseType + } else { + requestType = baseType + } + modelContent.WriteString(fmt.Sprintf(" %s %s `json:\"%s\" validate:\"required,oneof=draft active inactive\"`\n", fieldName, requestType, jsonTag)) + } - for _, col := range updateFields { - fieldName := snakeToPascal(col.Name) - _, baseType, _ := mapSQLTypeToGo(col.Type, col.Nullable, col.GoType) - jsonTag := snakeToCamel(col.Name) - var requestType string - if col.Nullable { - requestType = "*" + baseType - } else { - requestType = baseType + for _, col := range createFields { + if statusColumn != "" && col.Name == statusColumn { + continue // Skip status as it's already added + } + fieldName := snakeToPascal(col.Name) + _, baseType, _ := mapSQLTypeToGo(col.Type, col.Nullable, col.GoType) + jsonTag := snakeToCamel(col.Name) + var requestType string + if col.Nullable { + requestType = "*" + baseType + } else { + requestType = baseType + } + + // Add validation rules based on column type and validation from config + validationTag := "" + if col.Validation != "" { + validationTag = " validate:\"" + col.Validation + "\"" + } else if strings.Contains(strings.ToLower(col.Name), "nama") || strings.Contains(strings.ToLower(col.Name), "title") { + validationTag = " validate:\"required,min=1,max=100\"" + } else if strings.Contains(strings.ToLower(col.Name), "email") { + validationTag = " 
validate:\"omitempty,email\"" + } else if strings.Contains(strings.ToLower(col.Name), "code") || strings.Contains(strings.ToLower(col.Name), "kode") { + validationTag = " validate:\"omitempty,min=1,max=50\"" + } + + modelContent.WriteString(fmt.Sprintf(" %s %s `json:\"%s\"%s`\n", fieldName, requestType, jsonTag, validationTag)) + } + modelContent.WriteString("}\n\n") + case "update": + primaryKey := findPrimaryKey(data.TableSchema) + if primaryKey == "" { + primaryKey = "id" + } + + // Get the type of the primary key + _, pkBaseType, _ := mapSQLTypeToGo(getColumnType(data.TableSchema, primaryKey), isColumnNullable(data.TableSchema, primaryKey), "") + + modelContent.WriteString(fmt.Sprintf("\n// Update request\ntype %sUpdateRequest struct {\n", data.Name)) + + // Add primary key field with correct type and tag + modelContent.WriteString(fmt.Sprintf(" %s %s `json:\"-\" validate:\"required\"`\n", snakeToPascal(primaryKey), pkBaseType)) + + // Add status field first if it exists + statusColumn := findStatusColumn(data.TableSchema) + if statusColumn != "" { + fieldName := snakeToPascal(statusColumn) + _, baseType, _ := mapSQLTypeToGo(getColumnType(data.TableSchema, statusColumn), isColumnNullable(data.TableSchema, statusColumn), "") + jsonTag := snakeToCamel(statusColumn) + var requestType string + if isColumnNullable(data.TableSchema, statusColumn) { + requestType = "*" + baseType + } else { + requestType = baseType + } + modelContent.WriteString(fmt.Sprintf(" %s %s `json:\"%s\" validate:\"required,oneof=draft active inactive\"`\n", fieldName, requestType, jsonTag)) + } + + for _, col := range updateFields { + if statusColumn != "" && col.Name == statusColumn { + continue // Skip status as it's already added + } + // Skip primary key as it's already added above + if col.Name == primaryKey { + continue + } + + fieldName := snakeToPascal(col.Name) + _, baseType, _ := mapSQLTypeToGo(col.Type, col.Nullable, col.GoType) + jsonTag := snakeToCamel(col.Name) + var requestType 
string + if col.Nullable { + requestType = "*" + baseType + } else { + requestType = baseType + } + + // Add validation rules based on column type and validation from config + validationTag := "" + if col.Validation != "" { + validationTag = " validate:\"" + col.Validation + "\"" + } else if strings.Contains(strings.ToLower(col.Name), "nama") || strings.Contains(strings.ToLower(col.Name), "title") { + validationTag = " validate:\"omitempty,min=1,max=255\"" + } else if strings.Contains(strings.ToLower(col.Name), "email") { + validationTag = " validate:\"omitempty,email\"" + } else if strings.Contains(strings.ToLower(col.Name), "code") || strings.Contains(strings.ToLower(col.Name), "kode") { + validationTag = " validate:\"omitempty,min=1,max=50\"" + } + + modelContent.WriteString(fmt.Sprintf(" %s %s `json:\"%s\"%s`\n", fieldName, requestType, jsonTag, validationTag)) + } + modelContent.WriteString("}\n\n") } - modelContent.WriteString(fmt.Sprintf(" %s %s `json:\"%s\"`\n", fieldName, requestType, jsonTag)) } - modelContent.WriteString("}\n\n") - modelContent.WriteString(fmt.Sprintf(`// Response struct for update - type %sUpdateResponse struct { - Message string `+"`json:\"message\"`"+` - Data *%s `+"`json:\"data\"`"+` - } - `, data.Name, data.Name)) - } - if data.HasDelete { - modelContent.WriteString(fmt.Sprintf(`// Response struct for delete -type %sDeleteResponse struct { - Message string `+"`json:\"message\"`"+` - ID string `+"`json:\"id\"`"+` -} -`, data.Name)) } + if data.HasFilter { modelContent.WriteString(fmt.Sprintf("\n// Filter struct for query parameters\ntype %sFilter struct {\n", data.Name)) modelContent.WriteString(" Search *string `json:\"search,omitempty\" form:\"search\"`\n") modelContent.WriteString(" DateFrom *time.Time `json:\"date_from,omitempty\" form:\"date_from\"`\n") modelContent.WriteString(" DateTo *time.Time `json:\"date_to,omitempty\" form:\"date_to\"`\n") - for _, col := range data.TableSchema { - lowerName := strings.ToLower(col.Name) - 
if strings.Contains(lowerName, "status") { - modelContent.WriteString(fmt.Sprintf(" Status *string `json:\"status,omitempty\" form:\"%s\"`\n", col.Name)) - } + statusColumn := findStatusColumn(data.TableSchema) + if statusColumn != "" { + modelContent.WriteString(fmt.Sprintf(" Status *string `json:\"status,omitempty\" form:\"%s\"`\n", statusColumn)) } modelContent.WriteString("}\n") } @@ -2318,6 +3370,7 @@ func mapSQLTypeToGo(sqlType string, nullable bool, explicitGoType string) (goTyp return "string", "string", "" } } + func snakeToPascal(s string) string { if s == "" { return "" @@ -2546,6 +3599,10 @@ func generateProtectedRouteBlock(data HandlerData) string { var sb strings.Builder handlerName := getHandlerName(data) groupPath := getGroupPath(data) + primaryKey := findPrimaryKey(data.TableSchema) + if primaryKey == "" { + primaryKey = "id" // Default fallback + } // Komentar dan deklarasi handler & grup sb.WriteString("// ") @@ -2561,80 +3618,114 @@ func generateProtectedRouteBlock(data HandlerData) string { sb.WriteString(handlerName) sb.WriteString("Group := v1.Group(\"/") sb.WriteString(groupPath) - sb.WriteString("\")\n {\n ") - sb.WriteString(handlerName) - sb.WriteString("Group.GET(\"\", ") - sb.WriteString(handlerName) - sb.WriteString("Handler.Get") - sb.WriteString(data.Name) - sb.WriteString(")\n") + sb.WriteString("\")\n {\n") - if data.HasDynamic { - sb.WriteString(" ") - sb.WriteString(handlerName) - sb.WriteString("Group.GET(\"/dynamic\", ") - sb.WriteString(handlerName) - sb.WriteString("Handler.Get") - sb.WriteString(data.Name) - sb.WriteString("Dynamic) // Route baru\n") + // Generate routes for all endpoints + for _, endpoint := range data.Endpoints { + for _, method := range endpoint.Methods { + switch strings.ToLower(method) { + case "get": + if endpoint.Path == "/:"+primaryKey { + sb.WriteString(" ") + sb.WriteString(handlerName) + sb.WriteString("Group.GET(\"/:") + sb.WriteString(primaryKey) + sb.WriteString("\", ") + 
sb.WriteString(handlerName) + sb.WriteString("Handler.Get") + sb.WriteString(data.Name) + sb.WriteString("By") + sb.WriteString(snakeToPascal(primaryKey)) + sb.WriteString(")\n") + } else if endpoint.Path == "/dynamic" { + sb.WriteString(" ") + sb.WriteString(handlerName) + sb.WriteString("Group.GET(\"/dynamic\", ") + sb.WriteString(handlerName) + sb.WriteString("Handler.Get") + sb.WriteString(data.Name) + sb.WriteString("Dynamic)\n") + } else if endpoint.Path == "/search" { + sb.WriteString(" ") + sb.WriteString(handlerName) + sb.WriteString("Group.GET(\"/search\", ") + sb.WriteString(handlerName) + sb.WriteString("Handler.Search") + sb.WriteString(data.Name) + sb.WriteString(")\n") + } else if endpoint.Path == "/stats" { + sb.WriteString(" ") + sb.WriteString(handlerName) + sb.WriteString("Group.GET(\"/stats\", ") + sb.WriteString(handlerName) + sb.WriteString("Handler.Get") + sb.WriteString(data.Name) + sb.WriteString("Stats)\n") + } else if endpoint.Path == "/by-location" { + sb.WriteString(" ") + sb.WriteString(handlerName) + sb.WriteString("Group.GET(\"/by-location\", ") + sb.WriteString(handlerName) + sb.WriteString("Handler.Get") + sb.WriteString(data.Name) + sb.WriteString("ByLocation)\n") + } else if endpoint.Path == "/by-age" { + sb.WriteString(" ") + sb.WriteString(handlerName) + sb.WriteString("Group.GET(\"/by-age\", ") + sb.WriteString(handlerName) + sb.WriteString("Handler.Get") + sb.WriteString(data.Name) + sb.WriteString("ByAge)\n") + } else { + // Default GET handler + sb.WriteString(" ") + sb.WriteString(handlerName) + sb.WriteString("Group.GET(\"") + sb.WriteString(endpoint.Path) + sb.WriteString("\", ") + sb.WriteString(handlerName) + sb.WriteString("Handler.Get") + sb.WriteString(data.Name) + sb.WriteString(")\n") + } + case "post": + sb.WriteString(" ") + sb.WriteString(handlerName) + sb.WriteString("Group.POST(\"") + sb.WriteString(endpoint.Path) + sb.WriteString("\", ") + sb.WriteString(handlerName) + sb.WriteString("Handler.Create") + 
sb.WriteString(data.Name) + sb.WriteString(")\n") + case "put": + sb.WriteString(" ") + sb.WriteString(handlerName) + sb.WriteString("Group.PUT(\"/:") + sb.WriteString(primaryKey) + sb.WriteString("\", ") + sb.WriteString(handlerName) + sb.WriteString("Handler.Update") + sb.WriteString(data.Name) + sb.WriteString(")\n") + case "delete": + sb.WriteString(" ") + sb.WriteString(handlerName) + sb.WriteString("Group.DELETE(\"/:") + sb.WriteString(primaryKey) + sb.WriteString("\", ") + sb.WriteString(handlerName) + sb.WriteString("Handler.Delete") + sb.WriteString(data.Name) + sb.WriteString(")\n") + } + } } - if data.HasSearch { - sb.WriteString(" ") - sb.WriteString(handlerName) - sb.WriteString("Group.GET(\"/search\", ") - sb.WriteString(handlerName) - sb.WriteString("Handler.Search") - sb.WriteString(data.Name) - sb.WriteString("Advanced) // Route pencarian\n") - } - sb.WriteString(" ") - sb.WriteString(handlerName) - sb.WriteString("Group.GET(\"/:id\", ") - sb.WriteString(handlerName) - sb.WriteString("Handler.Get") - sb.WriteString(data.Name) - sb.WriteString("ByID)\n") - if data.HasPost { - sb.WriteString(" ") - sb.WriteString(handlerName) - sb.WriteString("Group.POST(\"\", ") - sb.WriteString(handlerName) - sb.WriteString("Handler.Create") - sb.WriteString(data.Name) - sb.WriteString(")\n") - } - if data.HasPut { - sb.WriteString(" ") - sb.WriteString(handlerName) - sb.WriteString("Group.PUT(\"/:id\", ") - sb.WriteString(handlerName) - sb.WriteString("Handler.Update") - sb.WriteString(data.Name) - sb.WriteString(")\n") - } - if data.HasDelete { - sb.WriteString(" ") - sb.WriteString(handlerName) - sb.WriteString("Group.DELETE(\"/:id\", ") - sb.WriteString(handlerName) - sb.WriteString("Handler.Delete") - sb.WriteString(data.Name) - sb.WriteString(")\n") - } - if data.HasStats { - sb.WriteString(" ") - sb.WriteString(handlerName) - sb.WriteString("Group.GET(\"/stats\", ") - sb.WriteString(handlerName) - sb.WriteString("Handler.Get") - sb.WriteString(data.Name) - 
sb.WriteString("Stats)\n") - } sb.WriteString(" }\n") return sb.String() } - func cleanupDuplicateRoutes(content string, data HandlerData) string { // Implement getGroupPath logic directly var groupPath string @@ -2765,6 +3856,7 @@ func cleanupDuplicateRoutes(content string, data HandlerData) string { return strings.Join(cleanedLines, "\n") } + func printRoutesSample(data HandlerData) { fmt.Print(generateProtectedRouteBlock(data)) fmt.Println() @@ -2798,3 +3890,286 @@ func logSuccess(message string, details ...string) { } fmt.Println() } + +// ================= HELPER FUNCTIONS ===================== +// findPrimaryKey finds the primary key column from schema +func findPrimaryKey(columns []ColumnConfig) string { + for _, col := range columns { + if col.PrimaryKey { + return col.Name + } + } + return "id" // Default fallback +} + +// findSearchableColumns finds columns that can be used for searching +func findSearchableColumns(columns []ColumnConfig) []string { + var searchable []string + for _, col := range columns { + if col.Searchable { + searchable = append(searchable, col.Name) + } else { + // Auto-detect searchable columns based on name + colNameLower := strings.ToLower(col.Name) + if strings.Contains(colNameLower, "nama") || + strings.Contains(colNameLower, "name") || + strings.Contains(colNameLower, "title") || + strings.Contains(colNameLower, "nomr") || + strings.Contains(colNameLower, "code") || + strings.Contains(colNameLower, "kode") { + searchable = append(searchable, col.Name) + } + } + } + return searchable +} + +// findUniqueColumns finds columns that must be unique +func findUniqueColumns(columns []ColumnConfig) []string { + var unique []string + for _, col := range columns { + if col.Unique { + unique = append(unique, col.Name) + } else { + // Auto-detect unique columns based on name + colNameLower := strings.ToLower(col.Name) + if strings.Contains(colNameLower, "nomr") || + strings.Contains(colNameLower, "no_kartu") || + 
strings.Contains(colNameLower, "kode") || + strings.Contains(colNameLower, "code") { + unique = append(unique, col.Name) + } + } + } + return unique +} + +// findStatusColumn finds the status column from schema +func findStatusColumn(columns []ColumnConfig) string { + for _, col := range columns { + colNameLower := strings.ToLower(col.Name) + if strings.Contains(colNameLower, "status") { + return col.Name + } + } + return "status" // Default fallback +} + +// findSystemFields finds system fields that should be excluded from user input +func findSystemFields(columns []ColumnConfig) []string { + var systemFields []string + for _, col := range columns { + if col.SystemField { + systemFields = append(systemFields, col.Name) + } else { + // Auto-detect system fields based on name + colNameLower := strings.ToLower(col.Name) + if strings.Contains(colNameLower, "date_created") || + strings.Contains(colNameLower, "date_updated") || + strings.Contains(colNameLower, "user_created") || + strings.Contains(colNameLower, "user_updated") || + strings.Contains(colNameLower, "created_at") || + strings.Contains(colNameLower, "updated_at") { + systemFields = append(systemFields, col.Name) + } + } + } + return systemFields +} + +// findLocationColumns finds columns related to location +func findLocationColumns(columns []ColumnConfig) []string { + var locationColumns []string + for _, col := range columns { + colNameLower := strings.ToLower(col.Name) + if strings.Contains(colNameLower, "provinsi") || + strings.Contains(colNameLower, "kota") || + strings.Contains(colNameLower, "kecamatan") || + strings.Contains(colNameLower, "kelurahan") { + locationColumns = append(locationColumns, col.Name) + } + } + return locationColumns +} + +// findBirthDateColumn finds the birth date column +func findBirthDateColumn(columns []ColumnConfig) string { + for _, col := range columns { + colNameLower := strings.ToLower(col.Name) + if strings.Contains(colNameLower, "tgl_lahir") || + 
strings.Contains(colNameLower, "birth_date") || + strings.Contains(colNameLower, "tanggal_lahir") { + return col.Name + } + } + return "" // Return empty if not found +} + +// isSystemField checks if a column is a system field +func isSystemField(columnName string, systemFields []string) bool { + for _, field := range systemFields { + if field == columnName { + return true + } + } + return false +} + +// isAutoGeneratedPrimaryKey checks if the primary key is auto-generated +func isAutoGeneratedPrimaryKey(columns []ColumnConfig) bool { + for _, col := range columns { + if col.PrimaryKey { + return strings.Contains(strings.ToLower(col.Type), "serial") || + strings.Contains(strings.ToLower(col.Type), "uuid") + } + } + return false +} + +// getColumnType returns the type of a column +func getColumnType(columns []ColumnConfig, columnName string) string { + for _, col := range columns { + if col.Name == columnName { + return col.Type + } + } + return "" +} + +// isColumnNullable checks if a column is nullable +func isColumnNullable(columns []ColumnConfig, columnName string) bool { + for _, col := range columns { + if col.Name == columnName { + return col.Nullable + } + } + return false +} + +// getFieldsForEndpoint returns the list of fields to select for an endpoint +func getFieldsForEndpoint(data HandlerData, fieldsGroup string) []string { + // Check if the fields group is defined in the configuration + if fieldsGroup != "" { + // For now, we'll handle the predefined field groups + switch fieldsGroup { + case "base_fields": + return getBaseFields(data.TableSchema) + case "location_fields": + return getLocationFields(data.TableSchema) + case "identity_fields": + return getIdentityFields(data.TableSchema) + case "all_fields": + return getAllFields(data.TableSchema) + case "with_location_names": + return getFieldsWithLocationNames(data) + default: + // If it's not a predefined group, treat it as a comma-separated list of fields + return strings.Split(fieldsGroup, ",") + } 
+ } + + // Default to all fields if no specific group is defined + return getAllFields(data.TableSchema) +} + +// Helper functions to get specific field groups +func getBaseFields(columns []ColumnConfig) []string { + // Implementation to extract base fields from columns + var fields []string + for _, col := range columns { + if !isSystemField(col.Name, findSystemFields(columns)) { + fields = append(fields, col.Name) + } + } + return fields +} + +func getLocationFields(columns []ColumnConfig) []string { + // Implementation to extract location fields from columns + var fields []string + locationFields := []string{"alamat", "kelurahan", "kdkecamatan", "kota", "kdprovinsi"} + for _, col := range columns { + for _, locField := range locationFields { + if col.Name == locField { + fields = append(fields, col.Name) + break + } + } + } + return fields +} + +func getIdentityFields(columns []ColumnConfig) []string { + // Implementation to extract identity fields from columns + var fields []string + identityFields := []string{"agama", "no_kartu", "noktp_baru"} + for _, col := range columns { + for _, idField := range identityFields { + if col.Name == idField { + fields = append(fields, col.Name) + break + } + } + } + return fields +} + +func getAllFields(columns []ColumnConfig) []string { + // Implementation to extract all non-system fields from columns + var fields []string + systemFields := findSystemFields(columns) + for _, col := range columns { + if !isSystemField(col.Name, systemFields) { + fields = append(fields, col.Name) + } + } + return fields +} + +func getFieldsWithLocationNames(data HandlerData) []string { + // Implementation to extract fields with location names + // This would include both the location IDs and their corresponding names + var fields []string + systemFields := findSystemFields(data.TableSchema) + + // Add all non-system fields + for _, col := range data.TableSchema { + if !isSystemField(col.Name, systemFields) { + fields = append(fields, col.Name) 
+ } + } + + // Add location name fields (these would be joined from other tables) + locationNameFields := []string{"namakelurahan", "namakecamatan", "namakota", "namaprovinsi"} + for _, nameField := range locationNameFields { + fields = append(fields, nameField) + } + + return fields +} + +// hasRelationshipFields checks if any field from relationships is included in the fields list +func hasRelationshipFields(fields []string, relationships []RelationshipConfig) bool { + for _, field := range fields { + for _, rel := range relationships { + for _, col := range rel.Columns { + if field == col.Name { + return true + } + } + } + } + return false +} + +// hasRelationshipField checks if a specific field from a relationship is included in the fields list +func hasRelationshipField(fields []string, relationship RelationshipConfig) bool { + for _, field := range fields { + for _, col := range relationship.Columns { + if field == col.Name { + return true + } + } + } + return false +} diff --git a/tools/general/services-config.yaml b/tools/general/services-config.yaml index 658695e..b7ad69f 100644 --- a/tools/general/services-config.yaml +++ b/tools/general/services-config.yaml @@ -19,79 +19,109 @@ services: table_name: "m_pasien" # Define all columns once for reuse - columns: - - name: "nomr" - type: "varchar" - nullable: true - go_type: "string" - description: "Nomor Rekam Medis" - - name: "title" - type: "varchar" - nullable: true - go_type: "string" - description: "Gelar pasien (Tn, Ny, Sdr, dll)" - - name: "nama" - type: "varchar" - nullable: true - go_type: "string" - validation: "required,min=1,max=100" - description: "Nama lengkap pasien" - - name: "tempat" - type: "varchar" - nullable: true - go_type: "string" - description: "Tempat lahir pasien" - - name: "tgllahir" - type: "date" - nullable: true - go_type: "time.Time" - description: "Tanggal lahir pasien" - - name: "jeniskelamin" - type: "varchar" - nullable: true - go_type: "string" - validation: "oneof=L P" - 
description: "Jenis kelamin (L/P)" - - name: "alamat" - type: "varchar" - nullable: true - go_type: "string" - description: "Alamat lengkap pasien" - - name: "kelurahan" - type: "int8" - nullable: true - go_type: "int64" - description: "ID Kelurahan" - - name: "kdkecamatan" - type: "int4" - nullable: true - go_type: "int32" - description: "ID Kecamatan" - - name: "kota" - type: "int4" - nullable: true - go_type: "int32" - description: "ID Kota" - - name: "kdprovinsi" - type: "int4" - nullable: true - go_type: "int32" - description: "ID Provinsi" - - name: "agama" - type: "int4" - nullable: true - go_type: "int32" - description: "ID Agama" - - name: "no_kartu" - type: "varchar" - nullable: true - go_type: "string" - description: "Nomor kartu identitas" - - name: "noktp_baru" - type: "varchar" - nullable: true - go_type: "string" - description: "Nomor KTP baru" + schema: + columns: + - name: "id" + type: "serial4" + nullable: false + go_type: "int32" + primary_key: true + unique: true + description: "Primary key for pasien" + - name: "nomr" + type: "varchar" + nullable: true + go_type: "string" + searchable: true + unique: true + description: "Nomor Rekam Medis" + - name: "status" + type: "varchar" + nullable: true + go_type: "string" + description: "Status pasien (A = Aktif, I = Inaktif)" + - name: "title" + type: "varchar" + nullable: true + go_type: "string" + description: "Gelar pasien (Tn, Ny, Sdr, dll)" + - name: "nama" + type: "varchar" + nullable: true + go_type: "string" + validation: "required,min=1,max=100" + searchable: true + description: "Nama lengkap pasien" + - name: "tempat" + type: "varchar" + nullable: true + go_type: "string" + description: "Tempat lahir pasien" + - name: "tgllahir" + type: "date" + nullable: true + go_type: "time.Time" + description: "Tanggal lahir pasien" + - name: "jeniskelamin" + type: "varchar" + nullable: true + go_type: "string" + validation: "oneof=L P" + description: "Jenis kelamin (L/P)" + - name: "alamat" + type: 
"varchar" + nullable: true + go_type: "string" + description: "Alamat lengkap pasien" + - name: "kelurahan" + type: "int8" + nullable: true + go_type: "int64" + description: "ID Kelurahan" + - name: "kdkecamatan" + type: "int4" + nullable: true + go_type: "int32" + description: "ID Kecamatan" + - name: "kota" + type: "int4" + nullable: true + go_type: "int32" + description: "ID Kota" + - name: "kdprovinsi" + type: "int4" + nullable: true + go_type: "int32" + description: "ID Provinsi" + - name: "agama" + type: "int4" + nullable: true + go_type: "int32" + description: "ID Agama" + - name: "no_kartu" + type: "varchar" + nullable: true + go_type: "string" + searchable: true + unique: true + description: "Nomor kartu identitas" + - name: "noktp_baru" + type: "varchar" + nullable: true + go_type: "string" + description: "Nomor KTP baru" + - name: "created_at" + type: "timestamp" + nullable: true + go_type: "time.Time" + system_field: true + description: "Tanggal pembuatan record" + - name: "updated_at" + type: "timestamp" + nullable: true + go_type: "time.Time" + system_field: true + description: "Tanggal update record" # Define relationships with other tables relationships: @@ -167,6 +197,8 @@ services: # Define endpoints with reusable configurations endpoints: list: + handler_folder: "pasien" + handler_file: "pasien.go" methods: ["GET"] path: "/" description: "Get list of pasien with pagination and filters" @@ -182,19 +214,37 @@ services: fields: "with_location_names" response_model: "PasienGetResponse" - get_by_nomr: + get_by_id: + handler_folder: "pasien" + handler_file: "pasien.go" methods: ["GET"] - path: "/:nomr" - description: "Get pasien by NOMR" - summary: "Get Pasien by NOMR" + path: "/:id" + description: "Get pasien by ID" + summary: "Get Pasien by ID" tags: ["Pasien"] require_auth: true cache_enabled: true cache_ttl: 300 fields: "with_location_names" - response_model: "PasienGetByNOMRResponse" + response_model: "PasienGetByIDResponse" + + get_by_nomr: + 
handler_folder: "pasien" + handler_file: "pasien.go" + methods: ["GET"] + path: "/nomr/:nomr" + description: "Get pasien by Nomr" + summary: "Get Pasien by Nomr" + tags: ["Pasien"] + require_auth: true + cache_enabled: true + cache_ttl: 300 + fields: "with_location_names" + response_model: "PasienGetByNomrResponse" create: + handler_folder: "pasien" + handler_file: "pasien.go" methods: ["POST"] path: "/" description: "Create a new pasien" @@ -206,6 +256,8 @@ services: response_model: "PasienCreateResponse" update: + handler_folder: "pasien" + handler_file: "pasien.go" methods: ["PUT"] path: "/:nomr" description: "Update an existing pasien" @@ -217,16 +269,20 @@ services: response_model: "PasienUpdateResponse" delete: + handler_folder: "pasien" + handler_file: "pasien.go" methods: ["DELETE"] path: "/:nomr" description: "Delete a pasien" summary: "Delete Pasien" tags: ["Pasien"] require_auth: true - soft_delete: false + soft_delete: true response_model: "PasienDeleteResponse" dynamic: + handler_folder: "pasien" + handler_file: "pasien.go" methods: ["GET"] path: "/dynamic" description: "Get pasien with dynamic filtering" @@ -237,28 +293,34 @@ services: fields: "with_location_names" response_model: "PasienGetResponse" - search: - methods: ["GET"] - path: "/search" - description: "Search pasien by name or NOMR" - summary: "Search Pasien" - tags: ["Pasien"] - require_auth: true - has_search: true - fields: "with_location_names" - response_model: "PasienGetResponse" + # search: + # handler_folder: "pasien" + # handler_file: "pasien.go" + # methods: ["GET"] + # path: "/search" + # description: "Search pasien by name or NOMR" + # summary: "Search Pasien" + # tags: ["Pasien"] + # require_auth: true + # has_search: true + # fields: "with_location_names" + # response_model: "PasienGetResponse" - stats: - methods: ["GET"] - path: "/stats" - description: "Get pasien statistics" - summary: "Get Pasien Stats" - tags: ["Pasien"] - require_auth: true - has_stats: true - 
response_model: "AggregateData" + # stats: + # handler_folder: "pasien" + # handler_file: "pasien.go" + # methods: ["GET"] + # path: "/stats" + # description: "Get pasien statistics" + # summary: "Get Pasien Stats" + # tags: ["Pasien"] + # require_auth: true + # has_stats: true + # response_model: "AggregateData" by_location: + handler_folder: "pasien" + handler_file: "pasien.go" methods: ["GET"] path: "/by-location" description: "Get pasien by location (provinsi, kota, kecamatan, kelurahan)" @@ -270,6 +332,8 @@ services: response_model: "PasienGetResponse" by_age: + handler_folder: "pasien" + handler_file: "pasien.go" methods: ["GET"] path: "/by-age" description: "Get pasien statistics by age group" @@ -277,4 +341,201 @@ services: tags: ["Pasien"] require_auth: true has_stats: true - response_model: "PasienAgeStatsResponse" \ No newline at end of file + response_model: "PasienAgeStatsResponse" + + # schedule: + # name: "Jadwal Dokter" + # category: "schedule" + # package: "schedule" + # description: "Jadwal Dokter management" + # base_url: "" + # timeout: 30 + # retry_count: 3 + # table_name: "daftar_jadwal_dokter" + + # # Define all columns once for reuse + # schema: + # columns: + # - name: "id" + # type: "serial4" + # nullable: false + # go_type: "int32" + # primary_key: true + # description: "Primary key for schedule" + # - name: "hari" + # type: "int4" + # nullable: true + # go_type: "int32" + # description: "Day of week (1-7)" + # - name: "nama_hari" + # type: "varchar" + # nullable: true + # go_type: "string" + # searchable: true + # description: "Name of day" + # - name: "waktu" + # type: "varchar" + # nullable: true + # go_type: "string" + # searchable: true + # description: "Time schedule" + # - name: "dokter" + # type: "uuid" + # nullable: true + # go_type: "string" + # searchable: true + # description: "Doctor ID" + # - name: "spesialis" + # type: "int4" + # nullable: true + # go_type: "int32" + # description: "Specialization ID" + # - name: 
"sub_spesialis" + # type: "int4" + # nullable: true + # go_type: "int32" + # description: "Sub-specialization ID" + # - name: "status" + # type: "int4" + # nullable: true + # go_type: "int32" + # description: "Status (1=active, 0=inactive)" + # - name: "date_created" + # type: "timestamp" + # nullable: true + # go_type: "time.Time" + # system_field: true + # description: "Tanggal pembuatan record" + # - name: "date_updated" + # type: "timestamp" + # nullable: true + # go_type: "time.Time" + # system_field: true + # description: "Tanggal update record" + # - name: "user_created" + # type: "varchar" + # nullable: true + # go_type: "string" + # system_field: true + # description: "User yang membuat record" + # - name: "user_updated" + # type: "varchar" + # nullable: true + # go_type: "string" + # system_field: true + # description: "User yang mengupdate record" + + # # Define reusable field groups + # field_groups: + # base_fields: ["id", "hari", "nama_hari", "waktu", "dokter"] + # all_fields: ["id", "hari", "nama_hari", "waktu", "dokter", "spesialis", "sub_spesialis", "status"] + + # # Define endpoints with reusable configurations + # endpoints: + # list: + # handler_folder: "schedule" + # handler_file: "schedule.go" + # methods: ["GET"] + # path: "/" + # description: "Get list of schedule with pagination and filters" + # summary: "Get Schedule List" + # tags: ["Schedule"] + # require_auth: true + # cache_enabled: true + # cache_ttl: 300 + # has_pagination: true + # has_filter: true + # has_search: true + # has_stats: true + # fields: "all_fields" + # response_model: "ScheduleGetResponse" + + # get_by_id: + # handler_folder: "schedule" + # handler_file: "schedule.go" + # methods: ["GET"] + # path: "/:id" + # description: "Get schedule by ID" + # summary: "Get Schedule by ID" + # tags: ["Schedule"] + # require_auth: true + # cache_enabled: true + # cache_ttl: 300 + # fields: "all_fields" + # response_model: "ScheduleGetByIDResponse" + + # create: + # handler_folder: 
"schedule" + # handler_file: "schedule.go" + # methods: ["POST"] + # path: "/" + # description: "Create a new schedule" + # summary: "Create Schedule" + # tags: ["Schedule"] + # require_auth: true + # fields: "all_fields" + # request_model: "ScheduleCreateRequest" + # response_model: "ScheduleCreateResponse" + + # update: + # handler_folder: "schedule" + # handler_file: "schedule.go" + # methods: ["PUT"] + # path: "/:id" + # description: "Update an existing schedule" + # summary: "Update Schedule" + # tags: ["Schedule"] + # require_auth: true + # fields: "all_fields" + # request_model: "ScheduleUpdateRequest" + # response_model: "ScheduleUpdateResponse" + + # delete: + # handler_folder: "schedule" + # handler_file: "schedule.go" + # methods: ["DELETE"] + # path: "/:id" + # description: "Delete a schedule" + # summary: "Delete Schedule" + # tags: ["Schedule"] + # require_auth: true + # soft_delete: true + # response_model: "ScheduleDeleteResponse" + + # dynamic: + # handler_folder: "schedule" + # handler_file: "schedule.go" + # methods: ["GET"] + # path: "/dynamic" + # description: "Get schedule with dynamic filtering" + # summary: "Get Schedule Dynamic" + # tags: ["Schedule"] + # require_auth: true + # has_dynamic: true + # fields: "all_fields" + # response_model: "ScheduleGetResponse" + + # search: + # handler_folder: "schedule" + # handler_file: "schedule.go" + # methods: ["GET"] + # path: "/search" + # description: "Search schedule by name or doctor" + # summary: "Search Schedule" + # tags: ["Schedule"] + # require_auth: true + # has_search: true + # fields: "all_fields" + # response_model: "ScheduleGetResponse" + + # stats: + # handler_folder: "schedule" + # handler_file: "schedule.go" + # methods: ["GET"] + # path: "/stats" + # description: "Get schedule statistics" + # summary: "Get Schedule Stats" + # tags: ["Schedule"] + # require_auth: true + # has_stats: true + # response_model: "AggregateData" \ No newline at end of file