modelRT/handler/component_attribute_query.go

// Package handler provides HTTP handlers for various endpoints.
package handler

import (
	"context"
	"fmt"
	"maps"
	"slices"
	"strings"

	"github.com/gin-gonic/gin"
	"github.com/gofrs/uuid"

	"modelRT/common/errcode"
	"modelRT/constants"
	"modelRT/database"
	"modelRT/diagram"
	"modelRT/logger"
	"modelRT/orm"
)

// ComponentAttributeQueryHandler handles the circuit diagram component attribute
// value query API: it resolves each requested token from the Redis cache first and
// falls back to the Postgres database for cache misses.
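//
// Illustrative route registration (an assumption -- the real wiring lives outside
// this file, and the path shown here is hypothetical):
//
//	r.GET("/component/attributes/:tokens", ComponentAttributeQueryHandler)
//
// where tokens is a comma-separated list of attribute tokens.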
func ComponentAttributeQueryHandler(c *gin.Context) {
	pgClient := database.GetPostgresDBClient()

	tokens := c.Param("tokens")
	if tokens == "" {
		err := fmt.Errorf("tokens is missing from the path")
		logger.Error(c, "query tokens from path failed", "error", err, "url", c.Request.RequestURI)
		renderRespFailure(c, constants.RespCodeInvalidParams, err.Error(), nil)
		return
	}

	tokenSlice := strings.Split(tokens, ",")
	queryResults := make(map[string]queryResult)
	cacheQueryMap := make(map[string][]cacheQueryItem)
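
	// Group tokens by their Redis hash key. Each token is expected to be a
	// dot-separated path with at least seven segments; only the segments at
	// index 4, 5 and 6 (component tag, extend type, attribute name) are used
	// here -- the meaning of the leading segments is assumed, not documented
	// in this file.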
	for _, token := range tokenSlice {
		parts := strings.Split(token, ".")
		if len(parts) < 7 {
			queryResults[token] = queryResult{err: errcode.ErrInvalidToken}
			continue
		}
		hSetKey := fmt.Sprintf("%s_%s", parts[4], parts[5])
		cacheQueryMap[hSetKey] = append(cacheQueryMap[hSetKey], cacheQueryItem{
			token:               token,
			attributeCompTag:    parts[4],
			attributeExtendType: parts[5],
			attributeName:       parts[6],
		})
	}
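
	// Cache pass: read each Redis hash once per component/extend-type pair and
	// serve whatever attribute values are already cached; anything missing is
	// queued for the database pass below.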
	dbQueryMap := make(map[string][]cacheQueryItem)
	var secondaryQueryCount int
	for hSetKey, items := range cacheQueryMap {
		hset := diagram.NewRedisHash(c, hSetKey, 5000, false)
		cacheData, err := hset.HGetAll()
		if err != nil {
			logger.Warn(c, "redis hgetall failed", "key", hSetKey, "err", err)
		}
		for _, item := range items {
			if val, ok := cacheData[item.attributeName]; ok {
				queryResults[item.token] = queryResult{err: errcode.ErrProcessSuccess, value: val}
			} else {
				dbQueryMap[item.attributeCompTag] = append(dbQueryMap[item.attributeCompTag], item)
				secondaryQueryCount++
			}
		}
	}
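
	// Every requested attribute was served from the cache; respond without
	// touching Postgres.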
	if secondaryQueryCount == 0 {
		payload := genQueryRespPayload(queryResults, tokenSlice)
		renderRespSuccess(c, constants.RespCodeSuccess, "query dynamic parameter values success", payload)
		return
	}
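
	// Database pass: resolve the remaining tokens through a single Postgres
	// transaction.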
	tx := pgClient.WithContext(c).Begin()
	if tx.Error != nil {
		logger.Error(c, "begin postgres transaction failed", "error", tx.Error)
		fillRemainingErrors(queryResults, tokenSlice, errcode.ErrBeginTxFailed)
		payload := genQueryRespPayload(queryResults, tokenSlice)
		renderRespFailure(c, constants.RespCodeServerError, "begin postgres database transaction failed", payload)
		return
	}
	defer tx.Rollback()

	allCompTags := slices.Collect(maps.Keys(dbQueryMap))
	compModelMap, err := database.QueryComponentByCompTags(c, tx, allCompTags)
	if err != nil {
		logger.Error(c, "query component info from postgres database failed", "error", err)
		fillRemainingErrors(queryResults, tokenSlice, errcode.ErrDBQueryFailed)
		payload := genQueryRespPayload(queryResults, tokenSlice)
		renderRespFailure(c, constants.RespCodeServerError, "query component meta failed", payload)
		return
	}
	// Attach the component metadata to each pending item and build the identifier
	// list for the batch table-name lookup.
	identifiers := make([]orm.ProjectIdentifier, 0, secondaryQueryCount)
	for tag, items := range dbQueryMap {
		comp, ok := compModelMap[tag]
		if !ok {
			for _, it := range items {
				queryResults[it.token] = queryResult{err: errcode.ErrFoundTargetFailed}
			}
			continue
		}
		for i := range items {
			items[i].attributeModelName = comp.ModelName
			items[i].globalUUID = comp.GlobalUUID
			identifiers = append(identifiers, orm.ProjectIdentifier{
				Token:     items[i].token,
				Tag:       comp.ModelName,
				MetaModel: items[i].attributeExtendType,
			})
		}
	}
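
	// Resolve the physical table name for every (model name, extend type) pair in
	// one batch query.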
	tableNameMap, err := database.BatchGetProjectNames(tx, identifiers)
	if err != nil {
		logger.Error(c, "batch get table names from postgres database failed", "error", err)
		fillRemainingErrors(queryResults, tokenSlice, errcode.ErrRetrieveFailed)
		payload := genQueryRespPayload(queryResults, tokenSlice)
		renderRespFailure(c, constants.RespCodeServerError, "batch get table names from postgres database failed", payload)
		return
	}
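
	// Scan each missing attribute value from its resolved table and remember the
	// successful reads so they can be written back to Redis after the commit.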
	redisSyncMap := make(map[string][]cacheQueryItem)
	for _, items := range dbQueryMap {
		for _, item := range items {
			if _, exists := queryResults[item.token]; exists {
				continue
			}
			tbl, ok := tableNameMap[orm.ProjectIdentifier{Tag: item.attributeModelName, MetaModel: item.attributeExtendType}]
			if !ok {
				queryResults[item.token] = queryResult{err: errcode.ErrFoundTargetFailed}
				continue
			}
			var dbVal string
			res := tx.Table(tbl).Select(item.attributeName).Where("global_uuid = ?", item.globalUUID).Scan(&dbVal)
			if res.Error != nil || res.RowsAffected == 0 {
				queryResults[item.token] = queryResult{err: errcode.ErrDBQueryFailed}
				continue
			}
			queryResults[item.token] = queryResult{err: errcode.ErrProcessSuccess, value: dbVal}

			item.attributeVal = dbVal
			hKey := fmt.Sprintf("%s_%s", item.attributeCompTag, item.attributeExtendType)
			redisSyncMap[hKey] = append(redisSyncMap[hKey], item)
		}
	}
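
	// A failed commit is logged but not treated as fatal: the values were already
	// read, so they are still returned to the caller; the Redis backfill is only
	// started when the commit succeeds.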
	if err := tx.Commit().Error; err != nil {
		logger.Warn(c, "postgres transaction commit failed, but returning scanned data", "error", err)
	} else {
		for hKey, items := range redisSyncMap {
			go backfillRedis(c.Copy(), hKey, items)
		}
	}

	payload := genQueryRespPayload(queryResults, tokenSlice)
	renderRespSuccess(c, constants.RespCodeSuccess, "process completed", payload)
}

// fillRemainingErrors assigns err to every requested token that does not yet have
// a query result.
func fillRemainingErrors(results map[string]queryResult, tokens []string, err *errcode.AppError) {
	for _, token := range tokens {
		if _, exists := results[token]; !exists {
			results[token] = queryResult{err: err}
		}
	}
}
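
// backfillRedis writes the freshly read attribute values back into the Redis hash
// so later queries can be served from the cache. It runs in its own goroutine with
// a copied gin context.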
func backfillRedis(ctx context.Context, hSetKey string, items []cacheQueryItem) {
	hset := diagram.NewRedisHash(ctx, hSetKey, 5000, false)
	fields := make(map[string]any, len(items))
	for _, item := range items {
		if item.attributeVal != "" {
			fields[item.attributeName] = item.attributeVal
		}
	}
	if len(fields) == 0 {
		return
	}
	if err := hset.SetRedisHashByMap(fields); err != nil {
		logger.Error(ctx, "async backfill redis failed", "hash_key", hSetKey, "error", err)
		return
	}
	logger.Info(ctx, "async backfill redis success", "hash_key", hSetKey, "count", len(fields))
}
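
// genQueryRespPayload assembles the response payload in the order of the requested
// tokens; tokens without a recorded result fall back to ErrCacheQueryFailed.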
func genQueryRespPayload(queryResults map[string]queryResult, requestTokens []string) map[string]any {
	attributes := make([]attributeQueryResult, 0, len(requestTokens))
	for _, token := range requestTokens {
		if result, exists := queryResults[token]; exists {
			attributes = append(attributes, attributeQueryResult{
				Token: token,
				Code:  result.err.Code(),
				Msg:   result.err.Msg(),
				Value: result.value,
			})
		} else {
			err := errcode.ErrCacheQueryFailed
			attributes = append(attributes, attributeQueryResult{
				Token: token,
				Code:  err.Code(),
				Msg:   err.Msg(),
				Value: "",
			})
		}
	}
	return map[string]any{
		"attributes": attributes,
	}
}

// cacheQueryItem carries the per-token query state through the cache pass, the
// database pass and the Redis backfill.
type cacheQueryItem struct {
	globalUUID          uuid.UUID
	token               string
	attributeCompTag    string
	attributeModelName  string
	attributeExtendType string
	attributeName       string
	attributeVal        string
}

// attributeQueryResult is the per-token entry returned in the response payload.
type attributeQueryResult struct {
	Token string `json:"token"`
	Msg   string `json:"msg"`
	Value string `json:"value"`
	Code  int    `json:"code"`
}

// queryResult holds the resolution outcome for a single token.
type queryResult struct {
	err   *errcode.AppError
	value string
}