diff --git a/models/models.go b/models/models.go
index 362d46618..e8839572d 100755
--- a/models/models.go
+++ b/models/models.go
@@ -153,6 +153,8 @@ func init() {
new(UserBusinessAnalysisCurrentWeek),
new(UserBusinessAnalysisYesterday),
new(UserLoginLog),
+ new(UserMetrics),
+ new(UserAnalysisPara),
)
gonicNames := []string{"SSL", "UID"}
diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go
index 288762161..65ce642d5 100644
--- a/models/user_business_analysis.go
+++ b/models/user_business_analysis.go
@@ -6,7 +6,6 @@ import (
"strconv"
"time"
- "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/builder"
@@ -81,6 +80,19 @@ type UserBusinessAnalysisAll struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`
+
+ //cloudbraintask
+ CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
+ GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
+ CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
+ CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
+ UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+ UserLocation string `xorm:"NULL"`
}
type UserBusinessAnalysis struct {
@@ -146,6 +158,18 @@ type UserBusinessAnalysis struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`
+
+ CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
+ GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
+ CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
+ CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
+ UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+ UserLocation string `xorm:"NULL"`
}
type UserBusinessAnalysisQueryOptions struct {
@@ -183,6 +207,29 @@ func getLastCountDate() int64 {
return pageStartTime.Unix()
}
+// QueryMetrics returns the UserMetrics rows whose count_date lies strictly
+// between start and end (both exclusive), ordered newest first, together
+// with the number of rows returned. Returns (nil, 0) on query failure.
+func QueryMetrics(start int64, end int64) ([]*UserMetrics, int64) {
+	statictisSess := xStatistic.NewSession()
+	defer statictisSess.Close()
+	userMetricsList := make([]*UserMetrics, 0)
+	// Bind the bounds as placeholders instead of concatenating them into SQL,
+	// and log the failure instead of dropping it silently.
+	if err := statictisSess.Table(new(UserMetrics)).Where("count_date > ? and count_date < ?", start, end).OrderBy("count_date desc").
+		Find(&userMetricsList); err != nil {
+		log.Info("query user metrics error." + err.Error())
+		return nil, 0
+	}
+	return userMetricsList, int64(len(userMetricsList))
+}
+
+// QueryRankList returns the top `limit` rows of `tableName` ordered by the
+// numeric column `key` (descending, id descending as tiebreaker), plus the
+// number of rows returned. key and tableName are spliced into the SQL text,
+// so they MUST be trusted, code-supplied constants — never raw user input.
+// Returns (nil, 0) on query failure.
+func QueryRankList(key string, tableName string, limit int) ([]*UserBusinessAnalysisAll, int64) {
+	statictisSess := xStatistic.NewSession()
+	defer statictisSess.Close()
+
+	userBusinessAnalysisAllList := make([]*UserBusinessAnalysisAll, 0)
+	if err := statictisSess.Table(tableName).OrderBy(key+" desc,id desc").Limit(limit, 0).
+		Find(&userBusinessAnalysisAllList); err != nil {
+		log.Info("query rank list error." + err.Error())
+		return nil, 0
+	}
+	return userBusinessAnalysisAllList, int64(len(userBusinessAnalysisAllList))
+}
+
func QueryUserStaticDataByTableName(start int, pageSize int, tableName string, queryObj interface{}, userName string) ([]*UserBusinessAnalysisAll, int64) {
statictisSess := xStatistic.NewSession()
defer statictisSess.Close()
@@ -334,6 +381,7 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus
resultMap[userRecord.ID].WatchedCount += userRecord.WatchedCount
resultMap[userRecord.ID].CommitCodeSize += userRecord.CommitCodeSize
resultMap[userRecord.ID].CommitDatasetSize += userRecord.CommitDatasetSize
+ resultMap[userRecord.ID].CommitDatasetNum += userRecord.CommitDatasetNum
resultMap[userRecord.ID].CommitModelCount += userRecord.CommitModelCount
resultMap[userRecord.ID].SolveIssueCount += userRecord.SolveIssueCount
resultMap[userRecord.ID].EncyclopediasCount += userRecord.EncyclopediasCount
@@ -353,7 +401,7 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus
return userBusinessAnalysisReturnList, count
}
-func refreshUserStaticTable(wikiCountMap map[string]int, CommitCodeSizeMap map[string]*git.UserKPIStats, tableName string, pageStartTime time.Time, pageEndTime time.Time) {
+func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageStartTime time.Time, pageEndTime time.Time, userMetrics map[string]int) {
sess := x.NewSession()
defer sess.Close()
@@ -379,14 +427,15 @@ func refreshUserStaticTable(wikiCountMap map[string]int, CommitCodeSizeMap map[s
FocusRepoCountMap := queryWatch(start_unix, end_unix)
StarRepoCountMap := queryStar(start_unix, end_unix)
WatchedCountMap := queryFollow(start_unix, end_unix)
-
- CommitDatasetSizeMap := queryDatasetSize(start_unix, end_unix)
+ CommitCodeSizeMap := queryCommitCodeSize(start_unix, end_unix)
+ CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)
OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix)
-
+ CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
+ AiModelManageMap := queryUserModel(start_unix, end_unix)
DataDate := currentTimeNow.Format("2006-01-02") + " 00:01"
cond := "type != 1 and is_active=true"
@@ -395,6 +444,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, CommitCodeSizeMap map[s
log.Info("query user error. return.")
return
}
+ ParaWeight := getParaWeight()
var indexTotal int64
indexTotal = 0
insertCount := 0
@@ -412,84 +462,22 @@ func refreshUserStaticTable(wikiCountMap map[string]int, CommitCodeSizeMap map[s
dateRecordAll.Name = userRecord.Name
dateRecordAll.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime())
dateRecordAll.DataDate = DataDate
-
- if _, ok := CodeMergeCountMap[dateRecordAll.ID]; !ok {
- dateRecordAll.CodeMergeCount = 0
- } else {
- dateRecordAll.CodeMergeCount = CodeMergeCountMap[dateRecordAll.ID]
- }
-
- if _, ok := CommitCountMap[dateRecordAll.ID]; !ok {
- dateRecordAll.CommitCount = 0
- } else {
- dateRecordAll.CommitCount = CommitCountMap[dateRecordAll.ID]
- }
-
- if _, ok := IssueCountMap[dateRecordAll.ID]; !ok {
- dateRecordAll.IssueCount = 0
- } else {
- dateRecordAll.IssueCount = IssueCountMap[dateRecordAll.ID]
- }
-
- if _, ok := CommentCountMap[dateRecordAll.ID]; !ok {
- dateRecordAll.CommentCount = 0
- } else {
- dateRecordAll.CommentCount = CommentCountMap[dateRecordAll.ID]
- }
-
- if _, ok := FocusRepoCountMap[dateRecordAll.ID]; !ok {
- dateRecordAll.FocusRepoCount = 0
- } else {
- dateRecordAll.FocusRepoCount = FocusRepoCountMap[dateRecordAll.ID]
- }
-
- if _, ok := StarRepoCountMap[dateRecordAll.ID]; !ok {
- dateRecordAll.StarRepoCount = 0
- } else {
- dateRecordAll.StarRepoCount = StarRepoCountMap[dateRecordAll.ID]
- }
-
- if _, ok := WatchedCountMap[dateRecordAll.ID]; !ok {
- dateRecordAll.WatchedCount = 0
- } else {
- dateRecordAll.WatchedCount = WatchedCountMap[dateRecordAll.ID]
- }
-
- if _, ok := CommitCodeSizeMap[dateRecordAll.Email]; !ok {
- dateRecordAll.CommitCodeSize = 0
- } else {
- dateRecordAll.CommitCodeSize = int(CommitCodeSizeMap[dateRecordAll.Email].CommitLines)
- }
-
- if _, ok := CommitDatasetSizeMap[dateRecordAll.ID]; !ok {
- dateRecordAll.CommitDatasetSize = 0
- } else {
- dateRecordAll.CommitDatasetSize = CommitDatasetSizeMap[dateRecordAll.ID]
- }
-
- if _, ok := SolveIssueCountMap[dateRecordAll.ID]; !ok {
- dateRecordAll.SolveIssueCount = 0
- } else {
- dateRecordAll.SolveIssueCount = SolveIssueCountMap[dateRecordAll.ID]
- }
-
- if _, ok := wikiCountMap[dateRecordAll.Name]; !ok {
- dateRecordAll.EncyclopediasCount = 0
- } else {
- dateRecordAll.EncyclopediasCount = wikiCountMap[dateRecordAll.Name]
- }
-
- if _, ok := CreateRepoCountMap[dateRecordAll.ID]; !ok {
- dateRecordAll.CreateRepoCount = 0
- } else {
- dateRecordAll.CreateRepoCount = CreateRepoCountMap[dateRecordAll.ID]
- }
-
- if _, ok := LoginCountMap[dateRecordAll.ID]; !ok {
- dateRecordAll.LoginCount = 0
- } else {
- dateRecordAll.LoginCount = LoginCountMap[dateRecordAll.ID]
- }
+ dateRecordAll.UserLocation = userRecord.Location
+
+ dateRecordAll.CodeMergeCount = getMapValue(dateRecordAll.ID, CodeMergeCountMap)
+ dateRecordAll.CommitCount = getMapValue(dateRecordAll.ID, CommitCountMap)
+ dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap)
+ dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap)
+ dateRecordAll.FocusRepoCount = getMapValue(dateRecordAll.ID, FocusRepoCountMap)
+ dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap)
+ dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap)
+ dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap)
+ dateRecordAll.CommitDatasetSize = getMapValue(dateRecordAll.ID, CommitDatasetSizeMap)
+ dateRecordAll.CommitDatasetNum = getMapValue(dateRecordAll.ID, CommitDatasetNumMap)
+ dateRecordAll.SolveIssueCount = getMapValue(dateRecordAll.ID, SolveIssueCountMap)
+ dateRecordAll.EncyclopediasCount = getMapKeyStringValue(dateRecordAll.Name, wikiCountMap)
+ dateRecordAll.CreateRepoCount = getMapValue(dateRecordAll.ID, CreateRepoCountMap)
+ dateRecordAll.LoginCount = getMapValue(dateRecordAll.ID, LoginCountMap)
if _, ok := OpenIIndexMap[dateRecordAll.ID]; !ok {
dateRecordAll.OpenIIndex = 0
@@ -497,7 +485,15 @@ func refreshUserStaticTable(wikiCountMap map[string]int, CommitCodeSizeMap map[s
dateRecordAll.OpenIIndex = OpenIIndexMap[dateRecordAll.ID]
}
- dateRecordAll.CommitModelCount = 0
+ dateRecordAll.CloudBrainTaskNum = getMapValue(dateRecordAll.ID, CloudBrainTaskMap)
+ dateRecordAll.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuDebugJob", CloudBrainTaskItemMap)
+ dateRecordAll.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuDebugJob", CloudBrainTaskItemMap)
+ dateRecordAll.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuTrainJob", CloudBrainTaskItemMap)
+ dateRecordAll.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
+ dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
+ dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
+ dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap)
+ dateRecordAll.UserIndex = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight)
dateRecordBatch = append(dateRecordBatch, dateRecordAll)
if len(dateRecordBatch) >= BATCH_INSERT_SIZE {
@@ -508,6 +504,11 @@ func refreshUserStaticTable(wikiCountMap map[string]int, CommitCodeSizeMap map[s
}
dateRecordBatch = make([]UserBusinessAnalysisAll, 0)
}
+ if tableName == "user_business_analysis_all" {
+ if dateRecordAll.UserIndex > 0 || dateRecordAll.LoginCount > 0 {
+ userMetrics["TotalHasActivityUser"] = getMapKeyStringValue("TotalHasActivityUser", userMetrics) + 1
+ }
+ }
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
@@ -529,7 +530,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static
insertBatchSql := "INSERT INTO public." + tableName +
"(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " +
- "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date) " +
+ "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location) " +
"VALUES"
for i, record := range dateRecords {
@@ -537,7 +538,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static
", " + fmt.Sprint(record.IssueCount) + ", " + fmt.Sprint(record.CommentCount) + ", " + fmt.Sprint(record.FocusRepoCount) + ", " + fmt.Sprint(record.StarRepoCount) +
", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) +
", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) +
- ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "')"
+ ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "')"
if i < (len(dateRecords) - 1) {
insertBatchSql += ","
}
@@ -546,36 +547,36 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static
statictisSess.Exec(insertBatchSql)
}
-func RefreshUserStaticAllTabel(wikiCountMap map[string]int, CommitCodeSizeMap map[string]*git.UserKPIStats) {
+func RefreshUserStaticAllTabel(wikiCountMap map[string]int, userMetrics map[string]int) {
currentTimeNow := time.Now()
pageStartTime := time.Date(2021, 11, 5, 0, 0, 0, 0, currentTimeNow.Location())
pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location())
- refreshUserStaticTable(wikiCountMap, CommitCodeSizeMap, "user_business_analysis_all", pageStartTime, pageEndTime)
+ refreshUserStaticTable(wikiCountMap, "user_business_analysis_all", pageStartTime, pageEndTime, userMetrics)
log.Info("refresh all data finished.")
pageStartTime = time.Date(currentTimeNow.Year(), 1, 1, 0, 0, 0, 0, currentTimeNow.Location())
- refreshUserStaticTable(wikiCountMap, CommitCodeSizeMap, "user_business_analysis_current_year", pageStartTime, pageEndTime)
+ refreshUserStaticTable(wikiCountMap, "user_business_analysis_current_year", pageStartTime, pageEndTime, userMetrics)
thisMonth := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), 1, 0, 0, 0, 0, currentTimeNow.Location())
- refreshUserStaticTable(wikiCountMap, CommitCodeSizeMap, "user_business_analysis_current_month", thisMonth, pageEndTime)
+ refreshUserStaticTable(wikiCountMap, "user_business_analysis_current_month", thisMonth, pageEndTime, userMetrics)
offset := int(time.Monday - currentTimeNow.Weekday())
if offset > 0 {
offset = -6
}
pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, offset)
- refreshUserStaticTable(wikiCountMap, CommitCodeSizeMap, "user_business_analysis_current_week", pageStartTime, pageEndTime)
+ refreshUserStaticTable(wikiCountMap, "user_business_analysis_current_week", pageStartTime, pageEndTime, userMetrics)
pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -30)
- refreshUserStaticTable(wikiCountMap, CommitCodeSizeMap, "user_business_analysis_last30_day", pageStartTime, pageEndTime)
+ refreshUserStaticTable(wikiCountMap, "user_business_analysis_last30_day", pageStartTime, pageEndTime, userMetrics)
pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -1)
pageEndTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()).AddDate(0, 0, -1)
- refreshUserStaticTable(wikiCountMap, CommitCodeSizeMap, "user_business_analysis_yesterday", pageStartTime, pageEndTime)
+ refreshUserStaticTable(wikiCountMap, "user_business_analysis_yesterday", pageStartTime, pageEndTime, userMetrics)
pageStartTime = thisMonth.AddDate(0, -1, 0)
pageEndTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), 1, 23, 59, 59, 0, currentTimeNow.Location()).AddDate(0, 0, -1)
- refreshUserStaticTable(wikiCountMap, CommitCodeSizeMap, "user_business_analysis_last_month", pageStartTime, pageEndTime)
+ refreshUserStaticTable(wikiCountMap, "user_business_analysis_last_month", pageStartTime, pageEndTime, userMetrics)
}
@@ -613,12 +614,13 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
} else {
log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
}
- CommitDatasetSizeMap := queryDatasetSize(start_unix, end_unix)
+ CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
-
+ CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
+ AiModelManageMap := queryUserModel(start_unix, end_unix)
statictisSess := xStatistic.NewSession()
defer statictisSess.Close()
@@ -628,6 +630,9 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
log.Info("query user error. return.")
return err
}
+
+ ParaWeight := getParaWeight()
+ userMetrics := make(map[string]int)
var indexTotal int64
indexTotal = 0
for {
@@ -648,47 +653,14 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
dateRecord.Name = userRecord.Name
dateRecord.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime())
dateRecord.DataDate = DataDate
- if _, ok := CodeMergeCountMap[dateRecord.ID]; !ok {
- dateRecord.CodeMergeCount = 0
- } else {
- dateRecord.CodeMergeCount = CodeMergeCountMap[dateRecord.ID]
- }
-
- if _, ok := CommitCountMap[dateRecord.ID]; !ok {
- dateRecord.CommitCount = 0
- } else {
- dateRecord.CommitCount = CommitCountMap[dateRecord.ID]
- }
-
- if _, ok := IssueCountMap[dateRecord.ID]; !ok {
- dateRecord.IssueCount = 0
- } else {
- dateRecord.IssueCount = IssueCountMap[dateRecord.ID]
- }
-
- if _, ok := CommentCountMap[dateRecord.ID]; !ok {
- dateRecord.CommentCount = 0
- } else {
- dateRecord.CommentCount = CommentCountMap[dateRecord.ID]
- }
-
- if _, ok := FocusRepoCountMap[dateRecord.ID]; !ok {
- dateRecord.FocusRepoCount = 0
- } else {
- dateRecord.FocusRepoCount = FocusRepoCountMap[dateRecord.ID]
- }
- if _, ok := StarRepoCountMap[dateRecord.ID]; !ok {
- dateRecord.StarRepoCount = 0
- } else {
- dateRecord.StarRepoCount = StarRepoCountMap[dateRecord.ID]
- }
-
- if _, ok := WatchedCountMap[dateRecord.ID]; !ok {
- dateRecord.WatchedCount = 0
- } else {
- dateRecord.WatchedCount = WatchedCountMap[dateRecord.ID]
- }
+ dateRecord.CodeMergeCount = getMapValue(dateRecord.ID, CodeMergeCountMap)
+ dateRecord.CommitCount = getMapValue(dateRecord.ID, CommitCountMap)
+ dateRecord.IssueCount = getMapValue(dateRecord.ID, IssueCountMap)
+ dateRecord.CommentCount = getMapValue(dateRecord.ID, CommentCountMap)
+ dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap)
+ dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap)
+ dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap)
if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok {
dateRecord.CommitCodeSize = 0
@@ -696,35 +668,15 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines)
}
- if _, ok := CommitDatasetSizeMap[dateRecord.ID]; !ok {
- dateRecord.CommitDatasetSize = 0
- } else {
- dateRecord.CommitDatasetSize = CommitDatasetSizeMap[dateRecord.ID]
- }
+ dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap)
+ dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap)
+ dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap)
- if _, ok := SolveIssueCountMap[dateRecord.ID]; !ok {
- dateRecord.SolveIssueCount = 0
- } else {
- dateRecord.SolveIssueCount = SolveIssueCountMap[dateRecord.ID]
- }
+ dateRecord.EncyclopediasCount = getMapKeyStringValue(dateRecord.Name, wikiCountMap)
- if _, ok := wikiCountMap[dateRecord.Name]; !ok {
- dateRecord.EncyclopediasCount = 0
- } else {
- dateRecord.EncyclopediasCount = wikiCountMap[dateRecord.Name]
- }
+ dateRecord.CreateRepoCount = getMapValue(dateRecord.ID, CreateRepoCountMap)
- if _, ok := CreateRepoCountMap[dateRecord.ID]; !ok {
- dateRecord.CreateRepoCount = 0
- } else {
- dateRecord.CreateRepoCount = CreateRepoCountMap[dateRecord.ID]
- }
-
- if _, ok := LoginCountMap[dateRecord.ID]; !ok {
- dateRecord.LoginCount = 0
- } else {
- dateRecord.LoginCount = LoginCountMap[dateRecord.ID]
- }
+ dateRecord.LoginCount = getMapValue(dateRecord.ID, LoginCountMap)
if _, ok := OpenIIndexMap[dateRecord.ID]; !ok {
dateRecord.OpenIIndex = 0
@@ -732,8 +684,17 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
dateRecord.OpenIIndex = OpenIIndexMap[dateRecord.ID]
}
- dateRecord.CommitModelCount = 0
-
+ dateRecord.CloudBrainTaskNum = getMapValue(dateRecord.ID, CloudBrainTaskMap)
+ dateRecord.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuDebugJob", CloudBrainTaskItemMap)
+ dateRecord.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuDebugJob", CloudBrainTaskItemMap)
+ dateRecord.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuTrainJob", CloudBrainTaskItemMap)
+ dateRecord.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
+ dateRecord.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
+ dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
+ dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
+ dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap)
+ dateRecord.UserIndex = getUserIndex(dateRecord, ParaWeight)
+ setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord)
_, err = statictisSess.Insert(&dateRecord)
if err != nil {
log.Info("insert daterecord failed." + err.Error())
@@ -747,11 +708,142 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
}
}
- RefreshUserStaticAllTabel(wikiCountMap, CommitCodeSizeMap)
+ RefreshUserStaticAllTabel(wikiCountMap, userMetrics)
+
+ //insert userMetrics table
+ var useMetrics UserMetrics
+ useMetrics.CountDate = CountDate.Unix()
+ statictisSess.Delete(&useMetrics)
+
+ useMetrics.ActivateRegistUser = getMapKeyStringValue("ActivateRegistUser", userMetrics)
+ useMetrics.HasActivityUser = getMapKeyStringValue("HasActivityUser", userMetrics)
+ useMetrics.NotActivateRegistUser = getMapKeyStringValue("NotActivateRegistUser", userMetrics)
+ useMetrics.TotalActivateRegistUser = getMapKeyStringValue("TotalActivateRegistUser", userMetrics)
+ useMetrics.TotalHasActivityUser = getMapKeyStringValue("TotalHasActivityUser", userMetrics)
+ statictisSess.Insert(&useMetrics)
return nil
}
+// setUserMetrics accumulates this user's contribution to the per-run counters:
+//   ActivateRegistUser / NotActivateRegistUser — users whose registration time
+//     falls inside [start_time, end_time], split by activation state.
+//   TotalActivateRegistUser — every activated user seen this run.
+//   HasActivityUser — users with any activity (positive user index or logins)
+//     in the counted period.
+func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, end_time int64, dateRecord UserBusinessAnalysis) {
+	regist_time := user.CreatedUnix.AsTime().Unix()
+	if regist_time >= start_time && regist_time <= end_time {
+		// Indexing a map with a missing key yields 0, so a plain increment
+		// is equivalent to the lookup-then-add-one dance.
+		if user.IsActive {
+			userMetrics["ActivateRegistUser"]++
+		} else {
+			userMetrics["NotActivateRegistUser"]++
+		}
+	}
+	if user.IsActive {
+		userMetrics["TotalActivateRegistUser"]++
+	}
+
+	if dateRecord.UserIndex > 0 || dateRecord.LoginCount > 0 {
+		userMetrics["HasActivityUser"]++
+	}
+}
+
+// getParaWeight loads every UserAnalysisPara row from the statistics DB and
+// returns it as a key -> weight map. On query failure (or an empty table) the
+// map is empty and callers fall back to their per-key default weights.
+func getParaWeight() map[string]float64 {
+	result := make(map[string]float64)
+	statictisSess := xStatistic.NewSession()
+	defer statictisSess.Close()
+	paraList := make([]*UserAnalysisPara, 0)
+	// Check the query error instead of silently proceeding with an empty list.
+	if err := statictisSess.Table(new(UserAnalysisPara)).Find(&paraList); err != nil {
+		log.Info("query user analysis para error." + err.Error())
+		return result
+	}
+	for _, paraRecord := range paraList {
+		result[paraRecord.Key] = paraRecord.Value
+	}
+	return result
+}
+
+// getUserIndexFromAnalysisAll computes the weighted user-activity index for an
+// aggregated (UserBusinessAnalysisAll) record. Each metric's weight is taken
+// from ParaWeight when present, otherwise the hard-coded default below is used.
+// Default weights: PR count 0.20, commit count 0.20, issue count 0.20,
+// comment count 0.20, watched-repo count 0.10, starred-repo count 0.10,
+// login count 0.10 (remaining metrics carry their own inline defaults).
+func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight map[string]float64) float64 {
+	var result float64
+	result = float64(dateRecord.CodeMergeCount) * getParaWeightValue("CodeMergeCount", ParaWeight, 0.2)
+	result += float64(dateRecord.CommitCount) * getParaWeightValue("CommitCount", ParaWeight, 0.2)
+	result += float64(dateRecord.IssueCount) * getParaWeightValue("IssueCount", ParaWeight, 0.2)
+	result += float64(dateRecord.CommentCount) * getParaWeightValue("CommentCount", ParaWeight, 0.2)
+	result += float64(dateRecord.FocusRepoCount) * getParaWeightValue("FocusRepoCount", ParaWeight, 0.1)
+	result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1)
+	result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1)
+	result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3)
+	result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
+	result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2)
+	result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1)
+	result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
+	result += float64(dateRecord.CloudBrainTaskNum) * getParaWeightValue("CloudBrainTaskNum", ParaWeight, 0.3)
+	result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
+	result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)
+
+	return result
+}
+
+// getUserIndex computes the weighted user-activity index for a single-period
+// (UserBusinessAnalysis) record, mirroring getUserIndexFromAnalysisAll.
+// Each metric's weight is taken from ParaWeight when present, otherwise the
+// hard-coded default below is used. Default weights: PR count 0.20, commit
+// count 0.20, issue count 0.20, comment count 0.20, watched-repo count 0.10,
+// starred-repo count 0.10, login count 0.10.
+func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64) float64 {
+	var result float64
+	result = float64(dateRecord.CodeMergeCount) * getParaWeightValue("CodeMergeCount", ParaWeight, 0.2)
+	result += float64(dateRecord.CommitCount) * getParaWeightValue("CommitCount", ParaWeight, 0.2)
+	result += float64(dateRecord.IssueCount) * getParaWeightValue("IssueCount", ParaWeight, 0.2)
+	result += float64(dateRecord.CommentCount) * getParaWeightValue("CommentCount", ParaWeight, 0.2)
+	result += float64(dateRecord.FocusRepoCount) * getParaWeightValue("FocusRepoCount", ParaWeight, 0.1)
+	result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1)
+	result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1)
+	result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3)
+	result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
+	result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2)
+	result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1)
+	result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
+	result += float64(dateRecord.CloudBrainTaskNum) * getParaWeightValue("CloudBrainTaskNum", ParaWeight, 0.3)
+	result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
+	result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)
+
+	return result
+}
+
+// getParaWeightValue returns valueMap[key], or defaultValue when the key is
+// absent.
+func getParaWeightValue(key string, valueMap map[string]float64, defaultValue float64) float64 {
+	// Comma-ok idiom: one map lookup instead of two.
+	if v, ok := valueMap[key]; ok {
+		return v
+	}
+	return defaultValue
+}
+
+// getMapKeyStringValue returns valueMap[key], or 0 when the key is absent.
+func getMapKeyStringValue(key string, valueMap map[string]int) int {
+	// Indexing a map with a missing key already yields the zero value,
+	// so no explicit existence check is needed.
+	return valueMap[key]
+}
+
+// getMapValue returns valueMap[userId], or 0 when the id is absent.
+func getMapValue(userId int64, valueMap map[int64]int) int {
+	// Map indexing returns the zero value for missing keys, making the
+	// explicit existence check redundant.
+	return valueMap[userId]
+}
+
func getInt(str string) int {
re, err := strconv.ParseInt(str, 10, 32)
if err != nil {
@@ -1052,16 +1144,17 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int {
return resultMap
}
-func queryDatasetSize(start_unix int64, end_unix int64) map[int64]int {
+func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
sess := x.NewSession()
defer sess.Close()
- resultMap := make(map[int64]int)
+ resultSizeMap := make(map[int64]int)
+ resultNumMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
count, err := sess.Where(cond).Count(new(Attachment))
if err != nil {
log.Info("query attachment error. return.")
- return resultMap
+ return resultSizeMap, resultNumMap
}
var indexTotal int64
indexTotal = 0
@@ -1072,10 +1165,12 @@ func queryDatasetSize(start_unix int64, end_unix int64) map[int64]int {
log.Info("query Attachment size=" + fmt.Sprint(len(attachmentList)))
for _, attachRecord := range attachmentList {
- if _, ok := resultMap[attachRecord.UploaderID]; !ok {
- resultMap[attachRecord.UploaderID] = int(attachRecord.Size / (1024 * 1024)) //MB
+ if _, ok := resultSizeMap[attachRecord.UploaderID]; !ok {
+ resultSizeMap[attachRecord.UploaderID] = int(attachRecord.Size / (1024 * 1024)) //MB
+ resultNumMap[attachRecord.UploaderID] = 1
} else {
- resultMap[attachRecord.UploaderID] += int(attachRecord.Size / (1024 * 1024)) //MB
+ resultSizeMap[attachRecord.UploaderID] += int(attachRecord.Size / (1024 * 1024)) //MB
+ resultNumMap[attachRecord.UploaderID] += 1
}
}
@@ -1085,7 +1180,7 @@ func queryDatasetSize(start_unix int64, end_unix int64) map[int64]int {
}
}
- return resultMap
+ return resultSizeMap, resultNumMap
}
func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int {
@@ -1212,6 +1307,133 @@ func queryLoginCount(start_unix int64, end_unix int64) map[int64]int {
return resultMap
}
+func queryCommitCodeSize(start_unix int64, end_unix int64) map[int64]int {
+ statictisSess := xStatistic.NewSession()
+ defer statictisSess.Close()
+
+ resultMap := make(map[int64]int)
+ cond := "count_date>=" + fmt.Sprint(start_unix) + " and count_date<=" + fmt.Sprint(end_unix)
+ count, err := statictisSess.Where(cond).Count(new(UserBusinessAnalysis))
+ if err != nil {
+ log.Info("query commit code size error. return.")
+ return resultMap
+ }
+ var indexTotal int64
+ indexTotal = 0
+ for {
+ statictisSess.Select("id,commit_code_size").Table("user_business_analysis").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+ userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0)
+ statictisSess.Find(&userBusinessAnalysisList)
+ log.Info("query user login size=" + fmt.Sprint(len(userBusinessAnalysisList)))
+ for _, analysisRecord := range userBusinessAnalysisList {
+ if _, ok := resultMap[analysisRecord.ID]; !ok {
+ resultMap[analysisRecord.ID] = analysisRecord.CommitCodeSize
+ } else {
+ resultMap[analysisRecord.ID] += analysisRecord.CommitCodeSize
+ }
+ }
+ indexTotal += PAGE_SIZE
+ if indexTotal >= count {
+ break
+ }
+ }
+ log.Info("user commit code size=" + fmt.Sprint(len(resultMap)))
+ return resultMap
+}
+
+func queryUserModel(start_unix int64, end_unix int64) map[int64]int {
+ sess := x.NewSession()
+ defer sess.Close()
+ resultMap := make(map[int64]int)
+ cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
+ count, err := sess.Where(cond).Count(new(AiModelManage))
+ if err != nil {
+ log.Info("query AiModelManage error. return.")
+ return resultMap
+ }
+ var indexTotal int64
+ indexTotal = 0
+ for {
+ sess.Select("id,user_id").Table("ai_model_manage").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+ aiModelList := make([]*AiModelManage, 0)
+ sess.Find(&aiModelList)
+ log.Info("query AiModelManage size=" + fmt.Sprint(len(aiModelList)))
+ for _, aiModelRecord := range aiModelList {
+ if _, ok := resultMap[aiModelRecord.UserId]; !ok {
+ resultMap[aiModelRecord.UserId] = 1
+ } else {
+ resultMap[aiModelRecord.UserId] += 1
+ }
+ }
+ indexTotal += PAGE_SIZE
+ if indexTotal >= count {
+ break
+ }
+ }
+ return resultMap
+}
+
+func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[string]int) {
+ sess := x.NewSession()
+ defer sess.Close()
+ resultMap := make(map[int64]int)
+ resultItemMap := make(map[string]int)
+
+ cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
+ count, err := sess.Where(cond).Count(new(Cloudbrain))
+ if err != nil {
+ log.Info("query cloudbrain error. return.")
+ return resultMap, resultItemMap
+ }
+ var indexTotal int64
+ indexTotal = 0
+ for {
+ sess.Select("id,job_type,user_id,duration,train_job_duration,type").Table("cloudbrain").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+ cloudTaskList := make([]*Cloudbrain, 0)
+ sess.Find(&cloudTaskList)
+ log.Info("query cloudbrain size=" + fmt.Sprint(len(cloudTaskList)))
+ for _, cloudTaskRecord := range cloudTaskList {
+ if _, ok := resultMap[cloudTaskRecord.UserID]; !ok {
+ resultMap[cloudTaskRecord.UserID] = 1
+ } else {
+ resultMap[cloudTaskRecord.UserID] += 1
+ }
+ setMapKey("CloudBrainRunTime", cloudTaskRecord.UserID, int(cloudTaskRecord.Duration), resultItemMap)
+ if cloudTaskRecord.Type == 1 { //npu
+ if cloudTaskRecord.JobType == "TRAIN" {
+ setMapKey("NpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
+ } else if cloudTaskRecord.JobType == "INFERENCE" {
+ setMapKey("NpuInferenceJob", cloudTaskRecord.UserID, 1, resultItemMap)
+ } else {
+ setMapKey("NpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
+ }
+ } else { //type=0 gpu
+ if cloudTaskRecord.JobType == "TRAIN" {
+ setMapKey("GpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
+ } else if cloudTaskRecord.JobType == "BENCHMARK" {
+ setMapKey("GpuBenchMarkJob", cloudTaskRecord.UserID, 1, resultItemMap)
+ } else {
+ setMapKey("GpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
+ }
+ }
+ }
+ indexTotal += PAGE_SIZE
+ if indexTotal >= count {
+ break
+ }
+ }
+
+ return resultMap, resultItemMap
+}
// setMapKey accumulates value under the composite key "<userId>_<key>" in
// resultItemMap. A missing entry reads as zero, so a single += handles both
// the first write and subsequent additions.
func setMapKey(key string, userId int64, value int, resultItemMap map[string]int) {
	resultItemMap[fmt.Sprint(userId)+"_"+key] += value
}
+
func subMonth(t1, t2 time.Time) (month int) {
y1 := t1.Year()
y2 := t2.Year()
diff --git a/models/user_business_struct.go b/models/user_business_struct.go
index c435c0b07..17d9f046f 100644
--- a/models/user_business_struct.go
+++ b/models/user_business_struct.go
@@ -44,6 +44,18 @@ type UserBusinessAnalysisCurrentYear struct {
//user
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`
+
+ CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
+ GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
+ CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
+ CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
+ UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+ UserLocation string `xorm:"NULL"`
}
type UserBusinessAnalysisLast30Day struct {
@@ -88,6 +100,18 @@ type UserBusinessAnalysisLast30Day struct {
//user
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`
+
+ CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
+ GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
+ CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
+ CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
+ UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+ UserLocation string `xorm:"NULL"`
}
type UserBusinessAnalysisLastMonth struct {
@@ -132,6 +156,18 @@ type UserBusinessAnalysisLastMonth struct {
//user
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`
+
+ CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
+ GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
+ CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
+ CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
+ UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+ UserLocation string `xorm:"NULL"`
}
type UserBusinessAnalysisCurrentMonth struct {
@@ -176,6 +212,18 @@ type UserBusinessAnalysisCurrentMonth struct {
//user
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`
+
+ CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
+ GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
+ CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
+ CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
+ UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+ UserLocation string `xorm:"NULL"`
}
type UserBusinessAnalysisCurrentWeek struct {
@@ -220,6 +268,18 @@ type UserBusinessAnalysisCurrentWeek struct {
//user
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`
+
+ CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
+ GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
+ CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
+ CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
+ UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+ UserLocation string `xorm:"NULL"`
}
type UserBusinessAnalysisYesterday struct {
@@ -264,4 +324,30 @@ type UserBusinessAnalysisYesterday struct {
//user
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`
+
+ CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
+ GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
+ CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
+ CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
+ UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+ UserLocation string `xorm:"NULL"`
+}
+
// UserAnalysisPara is a named numeric parameter for the user-analysis
// computation, stored as a key/value row (e.g. a weight used when deriving
// UserIndex — TODO confirm which keys are read by the analysis job).
type UserAnalysisPara struct {
	Key   string  `xorm:"NOT NULL"`          // parameter name
	Value float64 `xorm:"NOT NULL DEFAULT 0"` // parameter value, defaults to 0
}
+
// UserMetrics is a per-day snapshot of user registration/activity counters,
// keyed by the statistics date.
type UserMetrics struct {
	CountDate               int64 `xorm:"pk"`                  // statistics day (unix time), primary key
	ActivateRegistUser      int   `xorm:"NOT NULL DEFAULT 0"`  // users registered that day who activated
	NotActivateRegistUser   int   `xorm:"NOT NULL DEFAULT 0"`  // users registered that day who did not activate
	HasActivityUser         int   `xorm:"NOT NULL DEFAULT 0"`  // users with activity that day
	TotalActivateRegistUser int   `xorm:"NOT NULL DEFAULT 0"`  // cumulative activated registrations
	TotalHasActivityUser    int   `xorm:"NOT NULL DEFAULT 0"`  // cumulative users that ever had activity
}
diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini
index 94214a25c..00cc4e273 100755
--- a/options/locale/locale_en-US.ini
+++ b/options/locale/locale_en-US.ini
@@ -493,6 +493,11 @@ static.encyclopediascount=Encyclopedias Count
static.createrepocount=Create Repo Count
static.openiindex=OpenI Index
static.registdate=Regist Date
+static.CloudBrainTaskNum=CloudBrain Task Count
+static.CloudBrainRunTime=CloudBrain Run Time (Hours)
+static.CommitDatasetNum=Commit Dataset Count
+static.CommitModelCount=Commit Model Count
+static.UserIndex=User Index
static.countdate=Count Date
static.all=All
static.public.user_business_analysis_current_month=Current_Month
diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini
index e93b05ffb..b410aa52f 100755
--- a/options/locale/locale_zh-CN.ini
+++ b/options/locale/locale_zh-CN.ini
@@ -496,6 +496,11 @@ static.encyclopediascount=百科页面贡献次数
static.createrepocount=创建项目数
static.openiindex=OpenI指数
static.registdate=用户注册时间
+static.CloudBrainTaskNum=云脑任务数
+static.CloudBrainRunTime=云脑运行时间(小时)
+static.CommitDatasetNum=上传(提交)数据集文件数
+static.CommitModelCount=提交模型数
+static.UserIndex=用户指数
static.countdate=系统统计时间
static.all=所有
static.public.user_business_analysis_current_month=本月
diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go
index 1868edcb5..9de65662f 100755
--- a/routers/api/v1/api.go
+++ b/routers/api/v1/api.go
@@ -547,6 +547,8 @@ func RegisterRoutes(m *macaron.Macaron) {
})
}, operationReq)
+ m.Get("/query_user_metrics", operationReq, repo_ext.QueryMetrics)
+ m.Get("/query_user_rank_list", operationReq, repo_ext.QueryRankingList)
m.Get("/query_user_static_page", operationReq, repo_ext.QueryUserStaticDataPage)
m.Get("/query_user_current_month", operationReq, repo_ext.QueryUserStaticCurrentMonth)
m.Get("/query_user_current_week", operationReq, repo_ext.QueryUserStaticCurrentWeek)
diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go
index b4adfc347..9d906270f 100755
--- a/routers/repo/user_data_analysis.go
+++ b/routers/repo/user_data_analysis.go
@@ -54,7 +54,12 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
"N1": ctx.Tr("user.static.createrepocount"),
"O1": ctx.Tr("user.static.openiindex"),
"P1": ctx.Tr("user.static.registdate"),
- "Q1": ctx.Tr("user.static.countdate"),
+ "Q1": ctx.Tr("user.static.CloudBrainTaskNum"),
+ "R1": ctx.Tr("user.static.CloudBrainRunTime"),
+ "S1": ctx.Tr("user.static.CommitDatasetNum"),
+ "T1": ctx.Tr("user.static.CommitModelCount"),
+ "U1": ctx.Tr("user.static.UserIndex"),
+ "V1": ctx.Tr("user.static.countdate"),
}
for k, v := range dataHeader {
//设置单元格的值
@@ -89,8 +94,14 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, "P"+rows, formatTime[0:len(formatTime)-3])
+ xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum)
+ xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
+ xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum)
+ xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount)
+ xlsx.SetCellValue(sheetName, "U"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
+
formatTime = userRecord.DataDate
- xlsx.SetCellValue(sheetName, "Q"+rows, formatTime)
+ xlsx.SetCellValue(sheetName, "V"+rows, formatTime)
}
indexTotal += PAGE_SIZE
@@ -115,6 +126,30 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
}
}
+func QueryMetrics(ctx *context.Context) {
+ startDate := ctx.Query("startDate")
+ endDate := ctx.Query("endDate")
+ startTime, _ := time.ParseInLocation("2006-01-02", startDate, time.Local)
+ endTime, _ := time.ParseInLocation("2006-01-02", endDate, time.Local)
+ result, count := models.QueryMetrics(startTime.Unix(), endTime.Unix())
+ mapInterface := make(map[string]interface{})
+ mapInterface["data"] = result
+ mapInterface["count"] = count
+ ctx.JSON(http.StatusOK, mapInterface)
+}
+
+func QueryRankingList(ctx *context.Context) {
+ key := ctx.Query("key")
+ tableName := ctx.Query("tableName")
+ limit := ctx.QueryInt("limit")
+
+ result, count := models.QueryRankList(key, tableName, limit)
+ mapInterface := make(map[string]interface{})
+ mapInterface["data"] = result
+ mapInterface["count"] = count
+ ctx.JSON(http.StatusOK, mapInterface)
+}
+
func QueryUserStaticCurrentMonth(ctx *context.Context) {
queryUserDataPage(ctx, "public.user_business_analysis_current_month", new(models.UserBusinessAnalysisCurrentMonth))
}
@@ -221,7 +256,12 @@ func QueryUserStaticDataPage(ctx *context.Context) {
"N1": ctx.Tr("user.static.createrepocount"),
"O1": ctx.Tr("user.static.openiindex"),
"P1": ctx.Tr("user.static.registdate"),
- "Q1": ctx.Tr("user.static.countdate"),
+ "Q1": ctx.Tr("user.static.CloudBrainTaskNum"),
+ "R1": ctx.Tr("user.static.CloudBrainRunTime"),
+ "S1": ctx.Tr("user.static.CommitDatasetNum"),
+ "T1": ctx.Tr("user.static.CommitModelCount"),
+ "U1": ctx.Tr("user.static.UserIndex"),
+ "V1": ctx.Tr("user.static.countdate"),
}
for k, v := range dataHeader {
//设置单元格的值
@@ -249,9 +289,13 @@ func QueryUserStaticDataPage(ctx *context.Context) {
formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, "P"+rows, formatTime[0:len(formatTime)-3])
-
+ xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum)
+ xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
+ xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum)
+ xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount)
+ xlsx.SetCellValue(sheetName, "U"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
formatTime = userRecord.DataDate
- xlsx.SetCellValue(sheetName, "Q"+rows, formatTime+" 00:01")
+ xlsx.SetCellValue(sheetName, "V"+rows, formatTime)
}
//设置默认打开的表单
diff --git a/web_src/js/components/UserAnalysis.vue b/web_src/js/components/UserAnalysis.vue
index 682dbc78c..241768c15 100755
--- a/web_src/js/components/UserAnalysis.vue
+++ b/web_src/js/components/UserAnalysis.vue
@@ -133,7 +133,43 @@
{{scope.row.RegistDate | transformTimestamp}}
+
+
+
+
+ {{scope.row.CloudBrainRunTime | roundingToHour}}
+
+
+
+
+
+
+
+
+ {{scope.row.UserIndex | rounding}}
+
+