// Package handler provides HTTP handlers for various endpoints.
package handler

import (
	"context"
	"fmt"
	"maps"
	"slices"
	"strings"

	"github.com/gin-gonic/gin"
	"github.com/gofrs/uuid"

	"modelRT/common/errcode"
	"modelRT/constants"
	"modelRT/database"
	"modelRT/diagram"
	"modelRT/logger"
	"modelRT/orm"
)

// ComponentAttributeQueryHandler handles the circuit diagram component
// attribute value query API. It resolves each requested attribute from the
// Redis cache first, falls back to PostgreSQL for cache misses, and
// asynchronously backfills Redis with the database results.
func ComponentAttributeQueryHandler(c *gin.Context) {
	pgClient := database.GetPostgresDBClient()

	tokens := c.Param("tokens")
	if tokens == "" {
		err := fmt.Errorf("tokens is missing from the path")
		logger.Error(c, "query tokens from path failed", "error", err, "url", c.Request.RequestURI)
		renderFailure(c, constants.RespCodeInvalidParams, err.Error(), nil)
		return
	}
	tokenSlice := strings.Split(tokens, ",")

	queryResults := make(map[string]queryResult)
	cacheQueryMap := make(map[string][]cacheQueryItem)

	// Parse each dot-separated token; segments 4, 5 and 6 carry the component
	// tag, extend type and attribute name. Tokens sharing the same
	// "<compTag>_<extendType>" pair are grouped under one Redis hash key.
	for _, token := range tokenSlice {
		parts := strings.Split(token, ".")
		if len(parts) < 7 {
			queryResults[token] = queryResult{err: errcode.ErrInvalidToken}
			continue
		}
		hSetKey := fmt.Sprintf("%s_%s", parts[4], parts[5])
		cacheQueryMap[hSetKey] = append(cacheQueryMap[hSetKey], cacheQueryItem{
			token:               token,
			attributeCompTag:    parts[4],
			attributeExtendType: parts[5],
			attributeName:       parts[6],
		})
	}

	// First pass: look up each attribute in its Redis hash. Misses (or a
	// failed cache read) fall through to the database query below.
	dbQueryMap := make(map[string][]cacheQueryItem)
	var secondaryQueryCount int
	for hSetKey, items := range cacheQueryMap {
		hset := diagram.NewRedisHash(c, hSetKey, 5000, false)
		cacheData, err := hset.HGetAll()
		if err != nil {
			logger.Warn(c, "redis hgetall failed", "key", hSetKey, "error", err)
		}
		for _, item := range items {
			if val, ok := cacheData[item.attributeName]; ok {
				queryResults[item.token] = queryResult{value: val}
			} else {
				dbQueryMap[item.attributeCompTag] = append(dbQueryMap[item.attributeCompTag], item)
				secondaryQueryCount++
			}
		}
	}
	if secondaryQueryCount == 0 {
		renderQuerySuccess(c, queryResults, tokenSlice)
		return
	}

	// Start a transaction for the secondary database queries.
	tx := pgClient.WithContext(c).Begin()
	if tx.Error != nil {
		logger.Error(c, "begin postgres transaction failed", "error", tx.Error)
		renderFailure(c, constants.RespCodeServerError, "begin postgres database transaction failed", nil)
		return
	}
	// Release the transaction on any early return; harmless after a successful commit.
	defer tx.Rollback()

	// Batch retrieve component metadata for every component tag that had a cache miss.
	allCompTags := slices.Collect(maps.Keys(dbQueryMap))
	compModelMap, err := database.QueryComponentByCompTags(c, tx, allCompTags)
	if err != nil {
		logger.Error(c, "query component info from postgres database failed", "error", err)
		renderFailure(c, constants.RespCodeServerError, "query component meta failed", nil)
		return
	}

	// Attach the component metadata to each item and build the identifiers
	// used for the batch table-name lookup.
	identifiers := make([]orm.ProjectIdentifier, 0, secondaryQueryCount)
	for tag, items := range dbQueryMap {
		comp, ok := compModelMap[tag]
		if !ok {
			for _, it := range items {
				queryResults[it.token] = queryResult{err: errcode.ErrFoundTargetFailed}
			}
			continue
		}
		for i := range items {
			items[i].attributeModelName = comp.ModelName
			items[i].globalUUID = comp.GlobalUUID
			identifiers = append(identifiers, orm.ProjectIdentifier{
				Token:     items[i].token,
				Tag:       comp.ModelName,
				MetaModel: items[i].attributeExtendType,
			})
		}
	}

	tableNameMap, err := database.BatchGetProjectNames(tx, identifiers)
	if err != nil {
		logger.Error(c, "batch get table names from postgres database failed", "error", err)
		renderFailure(c, constants.RespCodeServerError, "batch get table names from postgres database failed", nil)
		return
	}

	// Second pass: read each remaining attribute from its project table and
	// collect the resolved values for asynchronous Redis backfill.
	redisSyncMap := make(map[string][]cacheQueryItem)
	for _, items := range dbQueryMap {
		for _, item := range items {
			if _, exists := queryResults[item.token]; exists {
				continue
			}
			tbl, ok := tableNameMap[orm.ProjectIdentifier{Tag: item.attributeModelName, MetaModel: item.attributeExtendType}]
			if !ok {
				queryResults[item.token] = queryResult{err: errcode.ErrFoundTargetFailed}
				continue
			}
			var dbVal string
			res := tx.Table(tbl).Select(item.attributeName).Where("global_uuid = ?", item.globalUUID).Scan(&dbVal)
			if res.Error != nil || res.RowsAffected == 0 {
				queryResults[item.token] = queryResult{err: errcode.ErrDBQueryFailed}
				continue
			}
			queryResults[item.token] = queryResult{value: dbVal}

			item.attributeVal = dbVal
			hKey := fmt.Sprintf("%s_%s", item.attributeCompTag, item.attributeExtendType)
			redisSyncMap[hKey] = append(redisSyncMap[hKey], item)
		}
	}

	if err := tx.Commit().Error; err != nil {
		logger.Error(c, "postgres database transaction commit failed", "error", err)
		renderFailure(c, constants.RespCodeServerError, "postgres database transaction commit failed", nil)
		return
	}

	// Backfill Redis asynchronously; gin requires a copy of the context when
	// it is used inside a goroutine.
	for hKey, items := range redisSyncMap {
		go backfillRedis(c.Copy(), hKey, items)
	}
	renderQuerySuccess(c, queryResults, tokenSlice)
}

// backfillRedis writes database-resolved attribute values back into the Redis
// hash so that subsequent queries can be served from the cache.
func backfillRedis(ctx context.Context, hSetKey string, items []cacheQueryItem) {
	hset := diagram.NewRedisHash(ctx, hSetKey, 5000, false)
	fields := make(map[string]any, len(items))
	for _, item := range items {
		// Only backfill items that actually have a value.
		if item.attributeVal != "" {
			fields[item.attributeName] = item.attributeVal
		}
	}
	if len(fields) == 0 {
		return
	}
	if err := hset.SetRedisHashByMap(fields); err != nil {
		logger.Error(ctx, "async backfill redis failed", "hash_key", hSetKey, "error", err)
		return
	}
	logger.Info(ctx, "async backfill redis success", "hash_key", hSetKey, "count", len(fields))
}

// cacheQueryItem carries the parsed pieces of a single attribute token
// through the cache and database lookup stages.
type cacheQueryItem struct {
	globalUUID          uuid.UUID
	token               string
	attributeCompTag    string
	attributeModelName  string
	attributeExtendType string
	attributeName       string
	attributeVal        string
}

// queryResult holds either the resolved attribute value or the lookup error
// for a single token.
type queryResult struct {
	err   *errcode.AppError
	value string
}
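// Hypothetical usage sketch (the route path and server setup below are
// illustrative assumptions, not defined in this package): the handler expects
// a "tokens" path parameter carrying a comma-separated list of attribute
// tokens, so it would typically be mounted on a gin router along these lines:
//
//	r := gin.Default()
//	r.GET("/component/attributes/:tokens", ComponentAttributeQueryHandler)
//	_ = r.Run(":8080")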