modelRT/handler/component_attribute_query.go

// Package handler provides HTTP handlers for various endpoints.
package handler

import (
	"fmt"
	"maps"
	"slices"
	"strings"

	"github.com/gin-gonic/gin"
	"github.com/gofrs/uuid"

	"modelRT/common/errcode"
	"modelRT/constants"
	"modelRT/database"
	"modelRT/diagram"
	"modelRT/logger"
	"modelRT/orm"
)

// ComponentAttributeQueryHandler defines the circuit diagram component attribute value query API.
func ComponentAttributeQueryHandler(c *gin.Context) {
	pgClient := database.GetPostgresDBClient()

	tokens := c.Param("tokens")
	if tokens == "" {
		err := fmt.Errorf("tokens is missing from the path")
		logger.Error(c, "query tokens from path failed", "error", err, "url", c.Request.RequestURI)
		renderFailure(c, constants.RespCodeInvalidParams, err.Error(), nil)
		return
	}
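
	// Each token is a dot-separated path: positions 4, 5 and 6 carry the component
	// tag, the extend type and the attribute name, and "<compTag>_<extendType>" is
	// used as the Redis hash key for the cache lookup.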
	tokenSlice := strings.Split(tokens, ",")
	queryResults := make(map[string]queryResult)
	// TODO: optimize away attriQueryConfs and attributeComponentTag.
	cacheQueryMap := make(map[string][]cacheQueryItem, len(tokenSlice))
	for _, token := range tokenSlice {
		parts := strings.Split(token, ".")
		if len(parts) < 7 {
			queryResults[token] = queryResult{err: errcode.ErrInvalidToken}
			continue
		}
		hSetKey := fmt.Sprintf("%s_%s", parts[4], parts[5])
		cacheQueryMap[hSetKey] = append(cacheQueryMap[hSetKey],
			cacheQueryItem{
				token:               token,
				attributeCompTag:    parts[4],
				attributeExtendType: parts[5],
				attributeName:       parts[6],
			},
		)
	}
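
	// First pass: look up every requested attribute in its Redis hash; cache misses
	// are queued in dbQueryMap, grouped by component tag, for a secondary query
	// against PostgreSQL.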
	var secondaryQueryCount int
	dbQueryMap := make(map[string][]cacheQueryItem)
	for hSetKey, queryItems := range cacheQueryMap {
		hset := diagram.NewRedisHash(c, hSetKey, 5000, false)
		cacheItems, err := hset.HGetAll()
		if err != nil {
			logger.Error(c, "query redis cached data failed", "hash_key", hSetKey, "error", err)
			// Record the cache failure; the PostgreSQL fallback below may still recover a value.
			for _, queryItem := range queryItems {
				if _, exists := queryResults[queryItem.token]; !exists {
					queryResults[queryItem.token] = queryResult{err: errcode.ErrCachedQueryFailed.WithCause(err)}
				}
			}
		}
		for _, queryItem := range queryItems {
			value, exists := cacheItems[queryItem.attributeName]
			if !exists {
				// Cache miss: queue the item for a secondary query against PostgreSQL.
				secondaryQueryCount++
				dbQueryMap[queryItem.attributeCompTag] = append(dbQueryMap[queryItem.attributeCompTag], queryItem)
				continue
			}
			queryResults[queryItem.token] = queryResult{value: value}
		}
	}
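
	// Second pass: resolve the remaining cache misses from PostgreSQL inside a
	// single transaction.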
	tx := pgClient.WithContext(c).Begin()
	if tx.Error != nil {
		logger.Error(c, "begin postgres transaction failed", "error", tx.Error)
		renderFailure(c, constants.RespCodeServerError, "begin transaction failed", nil)
		return
	}

	allCompTags := slices.Collect(maps.Keys(dbQueryMap))
	compModelMap, err := database.QueryComponentByCompTags(c, tx, allCompTags)
	if err != nil {
		tx.Rollback()
		logger.Error(c, "query components by comp tags failed", "error", err)
		renderFailure(c, constants.RespCodeServerError, "query components failed", nil)
		return
	}

	identifiers := make([]orm.ProjectIdentifier, 0, secondaryQueryCount)
	for compTag, queryItems := range dbQueryMap {
		compInfo, exists := compModelMap[compTag]
		if !exists {
			// No component metadata for this comp tag: mark its pending tokens as failed.
			for _, queryItem := range queryItems {
				if _, recorded := queryResults[queryItem.token]; !recorded {
					queryResults[queryItem.token] = queryResult{err: errcode.ErrFoundTargetFailed}
				}
			}
			continue
		}
		for index, queryItem := range queryItems {
			identifiers = append(identifiers, orm.ProjectIdentifier{
				Token:     queryItem.token,
				Tag:       queryItem.attributeCompTag,
				MetaModel: compInfo.ModelName,
			})
			queryItems[index].attributeModelName = compInfo.ModelName
			queryItems[index].globalUUID = compInfo.GlobalUUID
		}
	}
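
	// Resolve the backing table name for every identifier in one batch, so each
	// attribute can be read from the table that actually stores it.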
	tableNameMap, err := database.BatchGetProjectNames(tx, identifiers)
	if err != nil {
		tx.Rollback()
		logger.Error(c, "batch get project names failed", "error", err)
		for _, id := range identifiers {
			if _, exists := queryResults[id.Token]; !exists {
				queryResults[id.Token] = queryResult{err: errcode.ErrRetrieveFailed.WithCause(err)}
			}
		}
	}
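
	// Query each missing attribute value from its table, record it in the results,
	// and collect it for write-back into the corresponding Redis hash.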
	redisUpdateMap := make(map[string][]cacheQueryItem)
	for _, queryItems := range dbQueryMap {
		for _, queryItem := range queryItems {
			// The lookup key mirrors the Tag/MetaModel pair used when building identifiers above.
			id := orm.ProjectIdentifier{Tag: queryItem.attributeCompTag, MetaModel: queryItem.attributeModelName}
			tableName, exists := tableNameMap[id]
			if !exists {
				// Do not overwrite an error already recorded for this token (e.g. by the batch lookup above).
				if _, recorded := queryResults[queryItem.token]; !recorded {
					queryResults[queryItem.token] = queryResult{err: errcode.ErrFoundTargetFailed}
				}
				continue
			}
			cacheValue := make(map[string]any)
			// Take is used instead of First because GORM's First does not support scanning into a map.
			result := tx.Table(tableName).Select(queryItem.attributeName).
				Where("global_uuid = ?", queryItem.globalUUID).Take(&cacheValue)
			if result.Error != nil {
				queryResults[queryItem.token] = queryResult{err: errcode.ErrDBQueryFailed}
				continue
			}
			if result.RowsAffected == 0 {
				queryResults[queryItem.token] = queryResult{err: errcode.ErrDBzeroAffectedRows}
				continue
			}
			attributeValue, ok := cacheValue[queryItem.attributeName].(string)
			if !ok {
				queryResults[queryItem.token] = queryResult{err: errcode.ErrRetrieveFailed}
				continue
			}
			queryResults[queryItem.token] = queryResult{value: attributeValue}
			// Queue the value for write-back into the Redis hash it was missing from,
			// keyed the same way as the initial cache lookup.
			queryItem.attributeVal = attributeValue
			hSetKey := fmt.Sprintf("%s_%s", queryItem.attributeCompTag, queryItem.attributeExtendType)
			redisUpdateMap[hSetKey] = append(redisUpdateMap[hSetKey], queryItem)
		}
	}

	// commit transaction
	if err := tx.Commit().Error; err != nil {
		logger.Error(c, "commit postgres transaction failed", "error", err)
		renderFailure(c, constants.RespCodeServerError, "transaction commit failed", nil)
		return
	}
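
	// Write the values recovered from PostgreSQL back into their Redis hashes so
	// subsequent requests can be served from the cache.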
	for hSetKey, items := range redisUpdateMap {
		hset := diagram.NewRedisHash(c, hSetKey, 5000, false)
		fields := make(map[string]any, len(items))
		for _, item := range items {
			fields[item.attributeName] = item.attributeVal
		}
		if err := hset.SetRedisHashByMap(fields); err != nil {
			logger.Error(c, "batch sync redis failed", "hash_key", hSetKey, "error", err)
			for _, item := range items {
				if _, exists := queryResults[item.token]; exists {
					queryResults[item.token] = queryResult{err: errcode.ErrCacheSyncWarn.WithCause(err)}
				}
			}
		}
	}

	// payload := generateRespPayload(queryResults, request.AttributeConfigs)
	// renderRespSuccess(c, constants.RespCodeSuccess, "process completed", payload)
}

// cacheQueryItem carries the parsed token fields needed for the cache and database lookups.
type cacheQueryItem struct {
	globalUUID          uuid.UUID
	token               string
	attributeCompTag    string
	attributeModelName  string
	attributeExtendType string
	attributeName       string
	attributeVal        string
}

// queryResult holds either the resolved attribute value or the error encountered for a token.
type queryResult struct {
	err   *errcode.AppError
	value string
}