| @@ -1631,12 +1631,12 @@ func CloudbrainAll(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { | |||
| var err error | |||
| condition := "cloudbrain.user_id = `user`.id" | |||
| if len(opts.Keyword) == 0 { | |||
| count, err = sess.Where(cond).Count(new(Cloudbrain)) | |||
| count, err = sess.Unscoped().Where(cond).Count(new(Cloudbrain)) | |||
| } else { | |||
| lowerKeyWord := strings.ToLower(opts.Keyword) | |||
| cond = cond.And(builder.Or(builder.Like{"LOWER(cloudbrain.job_name)", lowerKeyWord}, builder.Like{"LOWER(cloudbrain.display_job_name)", lowerKeyWord}, builder.Like{"`user`.lower_name", lowerKeyWord})) | |||
| count, err = sess.Table(&Cloudbrain{}).Where(cond). | |||
| count, err = sess.Table(&Cloudbrain{}).Unscoped().Where(cond). | |||
| Join("left", "`user`", condition).Count(new(CloudbrainInfo)) | |||
| } | |||
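For context, `Unscoped()` in xorm lifts the implicit soft-delete filter, so both counts above include records whose deleted column is set. A minimal sketch of that behavior, under stated assumptions: the `Task` model, sqlite driver, and soft-delete column below are illustrative, not this codebase's types.

```go
package main

import (
	"fmt"
	"time"

	_ "github.com/mattn/go-sqlite3"
	"xorm.io/xorm"
)

// Task is a stand-in for a soft-deletable record like Cloudbrain.
type Task struct {
	ID        int64     `xorm:"pk autoincr"`
	DeletedAt time.Time `xorm:"deleted"` // xorm soft-delete marker column
}

func main() {
	engine, _ := xorm.NewEngine("sqlite3", ":memory:")
	engine.Sync2(new(Task))
	engine.Insert(&Task{})
	engine.ID(1).Delete(new(Task)) // soft delete: stamps DeletedAt, keeps the row

	visible, _ := engine.Count(new(Task))        // 0: deleted rows filtered out
	all, _ := engine.Unscoped().Count(new(Task)) // 1: filter lifted
	fmt.Println(visible, all)
}
```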
| @@ -82,17 +82,27 @@ type UserBusinessAnalysisAll struct { | |||
| DataDate string `xorm:"NULL"` | |||
| //cloudbraintask | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| } | |||
| type UserBusinessAnalysis struct { | |||
| @@ -159,17 +169,27 @@ type UserBusinessAnalysis struct { | |||
| DataDate string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| } | |||
| type UserBusinessAnalysisQueryOptions struct { | |||
| @@ -410,8 +430,10 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||
| log.Info("truncate all data from table: " + tableName) | |||
| statictisSess.Exec("TRUNCATE TABLE " + tableName) | |||
| log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05")) | |||
| log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05")) | |||
| StartTimeNextDay := pageStartTime.AddDate(0, 0, 1) | |||
| EndTimeNextDay := pageEndTime.AddDate(0, 0, 1) | |||
| log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05") + " nextDay:" + StartTimeNextDay.Format("2006-01-02 15:04:05")) | |||
| log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05") + " nextDay:" + EndTimeNextDay.Format("2006-01-02 15:04:05")) | |||
| start_unix := pageStartTime.Unix() | |||
| end_unix := pageEndTime.Unix() | |||
| @@ -426,8 +448,8 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||
| CommentCountMap := queryComment(start_unix, end_unix) | |||
| FocusRepoCountMap := queryWatch(start_unix, end_unix) | |||
| StarRepoCountMap := queryStar(start_unix, end_unix) | |||
| WatchedCountMap := queryFollow(start_unix, end_unix) | |||
| CommitCodeSizeMap := queryCommitCodeSize(start_unix, end_unix) | |||
| WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) | |||
| CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix()) | |||
| CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix) | |||
| SolveIssueCountMap := querySolveIssue(start_unix, end_unix) | |||
| CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix) | |||
| @@ -436,6 +458,12 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||
| OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix) | |||
| CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) | |||
| AiModelManageMap := queryUserModel(start_unix, end_unix) | |||
| CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix) | |||
| RecommendDataset := queryRecommedDataSet(start_unix, end_unix) | |||
| CollectImage, CollectedImage := queryImageStars(start_unix, end_unix) | |||
| RecommendImage := queryRecommedImage(start_unix, end_unix) | |||
| DataDate := currentTimeNow.Format("2006-01-02") + " 00:01" | |||
| cond := "type != 1 and is_active=true" | |||
| @@ -472,6 +500,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||
| dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap) | |||
| dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap) | |||
| dateRecordAll.FocusRepoCount = getMapValue(dateRecordAll.ID, FocusRepoCountMap) | |||
| dateRecordAll.FocusOtherUser = getMapValue(dateRecordAll.ID, WatchOtherMap) | |||
| dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap) | |||
| dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap) | |||
| dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap) | |||
| @@ -496,13 +525,20 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||
| dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap) | |||
| dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) | |||
| dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap) | |||
| dateRecordAll.UserIndex = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight) | |||
| userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndex | |||
| if maxUserIndex < dateRecordAll.UserIndex { | |||
| maxUserIndex = dateRecordAll.UserIndex | |||
| dateRecordAll.CollectDataset = getMapValue(dateRecordAll.ID, CollectDataset) | |||
| dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset) | |||
| dateRecordAll.RecommendDataset = getMapValue(dateRecordAll.ID, RecommendDataset) | |||
| dateRecordAll.CollectImage = getMapValue(dateRecordAll.ID, CollectImage) | |||
| dateRecordAll.CollectedImage = getMapValue(dateRecordAll.ID, CollectedImage) | |||
| dateRecordAll.RecommendImage = getMapValue(dateRecordAll.ID, RecommendImage) | |||
| dateRecordAll.UserIndexPrimitive = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight) | |||
| userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndexPrimitive | |||
| if maxUserIndex < dateRecordAll.UserIndexPrimitive { | |||
| maxUserIndex = dateRecordAll.UserIndexPrimitive | |||
| } | |||
| if minUserIndex > dateRecordAll.UserIndex { | |||
| minUserIndex = dateRecordAll.UserIndex | |||
| if minUserIndex > dateRecordAll.UserIndexPrimitive { | |||
| minUserIndex = dateRecordAll.UserIndexPrimitive | |||
| } | |||
| dateRecordBatch = append(dateRecordBatch, dateRecordAll) | |||
| if len(dateRecordBatch) >= BATCH_INSERT_SIZE { | |||
| @@ -552,7 +588,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static | |||
| insertBatchSql := "INSERT INTO public." + tableName + | |||
| "(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " + | |||
| "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location) " + | |||
| "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive) " + | |||
| "VALUES" | |||
| for i, record := range dateRecords { | |||
| @@ -560,7 +596,8 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static | |||
| ", " + fmt.Sprint(record.IssueCount) + ", " + fmt.Sprint(record.CommentCount) + ", " + fmt.Sprint(record.FocusRepoCount) + ", " + fmt.Sprint(record.StarRepoCount) + | |||
| ", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) + | |||
| ", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) + | |||
| ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "')" | |||
| ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "'," + | |||
| fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ")" | |||
| if i < (len(dateRecords) - 1) { | |||
| insertBatchSql += "," | |||
| } | |||
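The batch insert above interpolates values directly into the SQL string. As a hedged alternative sketch only (hypothetical helper, abbreviated four-column list, `fmt` and `strings` assumed imported), the same multi-row VALUES shape can be built with numbered Postgres placeholders so string fields need no manual quoting:

```go
// buildBatchInsert is a hypothetical helper: it emits
// "INSERT INTO public.<table> (...) VALUES ($1,$2,$3,$4),($5,$6,$7,$8)..."
// for use with a driver-level Exec and a flat argument slice.
func buildBatchInsert(table string, rows int) string {
	var b strings.Builder
	fmt.Fprintf(&b, "INSERT INTO public.%s (id, email, name, user_index) VALUES ", table)
	for i := 0; i < rows; i++ {
		if i > 0 {
			b.WriteByte(',')
		}
		n := i * 4
		fmt.Fprintf(&b, "($%d,$%d,$%d,$%d)", n+1, n+2, n+3, n+4)
	}
	return b.String()
}
```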
| @@ -628,7 +665,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||
| CommentCountMap := queryComment(start_unix, end_unix) | |||
| FocusRepoCountMap := queryWatch(start_unix, end_unix) | |||
| StarRepoCountMap := queryStar(start_unix, end_unix) | |||
| WatchedCountMap := queryFollow(start_unix, end_unix) | |||
| WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) | |||
| CommitCodeSizeMap, err := GetAllUserKPIStats() | |||
| if err != nil { | |||
| @@ -643,6 +680,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||
| OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix) | |||
| CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) | |||
| AiModelManageMap := queryUserModel(start_unix, end_unix) | |||
| CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix) | |||
| RecommendDataset := queryRecommedDataSet(start_unix, end_unix) | |||
| CollectImage, CollectedImage := queryImageStars(start_unix, end_unix) | |||
| RecommendImage := queryRecommedImage(start_unix, end_unix) | |||
| statictisSess := xStatistic.NewSession() | |||
| defer statictisSess.Close() | |||
| @@ -683,13 +726,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||
| dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap) | |||
| dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap) | |||
| dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap) | |||
| dateRecord.FocusOtherUser = getMapValue(dateRecord.ID, WatchOtherMap) | |||
| if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok { | |||
| dateRecord.CommitCodeSize = 0 | |||
| } else { | |||
| dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines) | |||
| } | |||
| dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap) | |||
| dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap) | |||
| dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap) | |||
| @@ -715,7 +757,15 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||
| dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) | |||
| dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap) | |||
| dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap) | |||
| dateRecord.UserIndex = getUserIndex(dateRecord, ParaWeight) | |||
| dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset) | |||
| dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset) | |||
| dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset) | |||
| dateRecord.CollectImage = getMapValue(dateRecord.ID, CollectImage) | |||
| dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage) | |||
| dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage) | |||
| dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight) | |||
| setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord) | |||
| _, err = statictisSess.Insert(&dateRecord) | |||
| if err != nil { | |||
| @@ -765,7 +815,7 @@ func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, en | |||
| userMetrics["TotalActivateRegistUser"] = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) + 1 | |||
| } | |||
| if dateRecord.UserIndex > 0 || dateRecord.LoginCount > 0 { | |||
| if getUserActivate(dateRecord) > 0 { | |||
| userMetrics["HasActivityUser"] = getMapKeyStringValue("HasActivityUser", userMetrics) + 1 | |||
| } | |||
| @@ -802,7 +852,12 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight | |||
| result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) | |||
| result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) | |||
| codeLine := float64(dateRecord.CommitCodeSize) | |||
| limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000) | |||
| if codeLine >= limitCodeLine { | |||
| codeLine = limitCodeLine | |||
| } | |||
| result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01) | |||
| result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) | |||
| result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) | |||
| @@ -810,6 +865,34 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight | |||
| result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) | |||
| result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) | |||
| result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1) | |||
| result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1) | |||
| result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2) | |||
| result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1) | |||
| result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1) | |||
| result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2) | |||
| return result | |||
| } | |||
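A short worked example of the code-size cap introduced above, using this diff's default weights (LimitCommitCodeSize 1000, CommitCodeSize 0.01): a 50,000-line commit is clamped to 1000 lines and contributes the same 10 points as a 1,000-line commit, so a single bulk import can no longer dominate the index.

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	// Defaults from this diff: LimitCommitCodeSize=1000, CommitCodeSize weight=0.01.
	codeLine := math.Min(50000, 1000) // a 50k-line commit is clamped to 1000 lines
	fmt.Println(codeLine * 0.01)      // 10 points, same as a 1,000-line commit
}
```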
| func getUserActivate(dateRecord UserBusinessAnalysis) int { | |||
| var result int | |||
| result += dateRecord.CodeMergeCount | |||
| result += dateRecord.CommitCount | |||
| result += dateRecord.IssueCount | |||
| result += dateRecord.CommentCount | |||
| result += dateRecord.FocusRepoCount | |||
| result += dateRecord.StarRepoCount | |||
| result += dateRecord.SolveIssueCount | |||
| result += dateRecord.EncyclopediasCount | |||
| result += dateRecord.CreateRepoCount | |||
| result += dateRecord.CloudBrainTaskNum | |||
| result += dateRecord.CommitModelCount | |||
| result += dateRecord.CommitDatasetNum | |||
| result += dateRecord.FocusOtherUser | |||
| result += dateRecord.CollectDataset | |||
| result += dateRecord.CollectImage | |||
| result += dateRecord.CommitCodeSize | |||
| return result | |||
| } | |||
| @@ -831,12 +914,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64 | |||
| result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) | |||
| codeLine := float64(dateRecord.CommitCodeSize) / 1000 | |||
| limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 100) | |||
| codeLine := float64(dateRecord.CommitCodeSize) | |||
| limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000) | |||
| if codeLine >= limitCodeLine { | |||
| codeLine = limitCodeLine | |||
| } | |||
| result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) | |||
| result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01) | |||
| result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) | |||
| result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) | |||
| @@ -844,6 +927,13 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64 | |||
| result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) | |||
| result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) | |||
| result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1) | |||
| result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1) | |||
| result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2) | |||
| result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1) | |||
| result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1) | |||
| result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2) | |||
| return result | |||
| } | |||
| @@ -1134,17 +1224,18 @@ func queryStar(start_unix int64, end_unix int64) map[int64]int { | |||
| return resultMap | |||
| } | |||
| func queryFollow(start_unix int64, end_unix int64) map[int64]int { | |||
| func queryFollow(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| resultMap := make(map[int64]int) | |||
| resultFocusedByOtherMap := make(map[int64]int) | |||
| cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
| count, err := sess.Where(cond).Count(new(Follow)) | |||
| if err != nil { | |||
| log.Info("query follow error. return.") | |||
| return resultMap | |||
| return resultMap, resultFocusedByOtherMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| @@ -1160,6 +1251,11 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int { | |||
| } else { | |||
| resultMap[followRecord.FollowID] += 1 | |||
| } | |||
| if _, ok := resultFocusedByOtherMap[followRecord.UserID]; !ok { | |||
| resultFocusedByOtherMap[followRecord.UserID] = 1 | |||
| } else { | |||
| resultFocusedByOtherMap[followRecord.UserID] += 1 | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| @@ -1168,7 +1264,215 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int { | |||
| } | |||
| } | |||
| return resultMap | |||
| return resultMap, resultFocusedByOtherMap | |||
| } | |||
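The helper now tallies each Follow row in both directions: the FollowID user gains a follower (WatchedCount) and the UserID user gains a followee (FocusOtherUser). A condensed sketch of the same tally; note that Go's `m[k]++` already covers the missing-key case the if/else above spells out:

```go
// tallyFollows condenses the loop above (Follow per this codebase:
// UserID follows FollowID).
func tallyFollows(follows []*Follow) (watched, focusOther map[int64]int) {
	watched = make(map[int64]int)    // user -> followers gained (WatchedCount)
	focusOther = make(map[int64]int) // user -> users they follow (FocusOtherUser)
	for _, f := range follows {
		watched[f.FollowID]++
		focusOther[f.UserID]++
	}
	return
}
```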
| func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| userIdDdatasetMap := make(map[int64]int) | |||
| cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and recommend=true" | |||
| count, err := sess.Where(cond).Count(new(Dataset)) | |||
| if err != nil { | |||
| log.Info("query recommend dataset error. return.") | |||
| return userIdDdatasetMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,user_id,recommend").Where(cond).Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| datasetList := make([]*Dataset, 0) | |||
| sess.Find(&datasetList) | |||
| log.Info("query datasetList size=" + fmt.Sprint(len(datasetList))) | |||
| for _, datasetRecord := range datasetList { | |||
| if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok { | |||
| userIdDdatasetMap[datasetRecord.UserID] = 1 | |||
| } else { | |||
| userIdDdatasetMap[datasetRecord.UserID] += 1 | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return userIdDdatasetMap | |||
| } | |||
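All the new query helpers share this count-then-page shape. A generic hedged sketch of the loop (xorm session, `Dataset`, and PAGE_SIZE as elsewhere in this file; unlike the originals, it surfaces the `Find` error instead of ignoring it):

```go
// pageCount is a hedged generalization of the count-then-page loop these
// helpers share, not code from this PR.
func pageCount(sess *xorm.Session, cond string) (map[int64]int, error) {
	counts := make(map[int64]int)
	total, err := sess.Where(cond).Count(new(Dataset))
	if err != nil {
		return counts, err
	}
	for offset := int64(0); offset < total; offset += PAGE_SIZE {
		page := make([]*Dataset, 0, PAGE_SIZE)
		if err := sess.Select("id,user_id").Where(cond).Table(new(Dataset)).
			OrderBy("id asc").Limit(PAGE_SIZE, int(offset)).Find(&page); err != nil {
			return counts, err
		}
		for _, d := range page {
			counts[d.UserID]++
		}
	}
	return counts, nil
}
```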
| func queryAllDataSet() (map[int64]int64, map[int64]int64) { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| datasetUserIdMap := make(map[int64]int64) | |||
| userIdDdatasetMap := make(map[int64]int64) | |||
| count, err := sess.Count(new(Dataset)) | |||
| if err != nil { | |||
| log.Info("query dataset error. return.") | |||
| return datasetUserIdMap, userIdDdatasetMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,user_id").Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| datasetList := make([]*Dataset, 0) | |||
| sess.Find(&datasetList) | |||
| log.Info("query datasetList size=" + fmt.Sprint(len(datasetList))) | |||
| for _, datasetRecord := range datasetList { | |||
| datasetUserIdMap[datasetRecord.ID] = datasetRecord.UserID | |||
| if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok { | |||
| userIdDdatasetMap[datasetRecord.UserID] = 1 | |||
| } else { | |||
| userIdDdatasetMap[datasetRecord.UserID] += 1 | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return datasetUserIdMap, userIdDdatasetMap | |||
| } | |||
| func queryRecommedImage(start_unix int64, end_unix int64) map[int64]int { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| userIdImageMap := make(map[int64]int) | |||
| cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and type=5" | |||
| count, err := sess.Where(cond).Count(new(Image)) | |||
| if err != nil { | |||
| log.Info("query recommend image error. return.") | |||
| return userIdImageMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,uid,type").Where(cond).Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| imageList := make([]*Image, 0) | |||
| sess.Find(&imageList) | |||
| log.Info("query imageList size=" + fmt.Sprint(len(imageList))) | |||
| for _, imageRecord := range imageList { | |||
| if _, ok := userIdImageMap[imageRecord.UID]; !ok { | |||
| userIdImageMap[imageRecord.UID] = 1 | |||
| } else { | |||
| userIdImageMap[imageRecord.UID] += 1 | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return userIdImageMap | |||
| } | |||
| func queryAllImage() (map[int64]int64, map[int64]int64) { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| imageUserIdMap := make(map[int64]int64) | |||
| userIdDImageMap := make(map[int64]int64) | |||
| count, err := sess.Count(new(Image)) | |||
| if err != nil { | |||
| log.Info("query image error. return.") | |||
| return imageUserIdMap, userIdDImageMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,uid").Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| imageList := make([]*Image, 0) | |||
| sess.Find(&imageList) | |||
| log.Info("query imageList size=" + fmt.Sprint(len(imageList))) | |||
| for _, imageRecord := range imageList { | |||
| imageUserIdMap[imageRecord.ID] = imageRecord.UID | |||
| if _, ok := userIdDImageMap[imageRecord.UID]; !ok { | |||
| userIdDImageMap[imageRecord.UID] = 1 | |||
| } else { | |||
| userIdDImageMap[imageRecord.UID] += 1 | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return imageUserIdMap, userIdDImageMap | |||
| } | |||
| func queryDatasetStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| datasetCollect := make(map[int64]int) | |||
| datasetCollected := make(map[int64]int) | |||
| datasetUserIdMap, _ := queryAllDataSet() | |||
| cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
| count, err := sess.Where(cond).Count(new(DatasetStar)) | |||
| if err != nil { | |||
| log.Info("query follow error. return.") | |||
| return datasetCollect, datasetCollected | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,uid,dataset_id").Table(new(DatasetStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| datasetStarList := make([]*DatasetStar, 0) | |||
| sess.Find(&datasetStarList) | |||
| log.Info("query datasetStarList size=" + fmt.Sprint(len(datasetStarList))) | |||
| for _, datasetStarRecord := range datasetStarList { | |||
| if _, ok := datasetCollect[datasetStarRecord.UID]; !ok { | |||
| datasetCollect[datasetStarRecord.UID] = 1 | |||
| } else { | |||
| datasetCollect[datasetStarRecord.UID] += 1 | |||
| } | |||
| if _, ok := datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]]; !ok { | |||
| datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] = 1 | |||
| } else { | |||
| datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] += 1 | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return datasetCollect, datasetCollected | |||
| } | |||
| func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| imageCollect := make(map[int64]int) | |||
| imageCollected := make(map[int64]int) | |||
| imageUserIdMap, _ := queryAllImage() | |||
| cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
| count, err := sess.Where(cond).Count(new(ImageStar)) | |||
| if err != nil { | |||
| log.Info("query follow error. return.") | |||
| return imageCollect, imageCollected | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,uid,dataset_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| imageStarList := make([]*ImageStar, 0) | |||
| sess.Find(&imageStarList) | |||
| log.Info("query imageStarList size=" + fmt.Sprint(len(imageStarList))) | |||
| for _, imageStarRecord := range imageStarList { | |||
| if _, ok := imageCollect[imageStarRecord.UID]; !ok { | |||
| imageCollect[imageStarRecord.UID] = 1 | |||
| } else { | |||
| imageCollect[imageStarRecord.UID] += 1 | |||
| } | |||
| if _, ok := imageCollected[imageUserIdMap[imageStarRecord.ImageID]]; !ok { | |||
| imageCollected[imageUserIdMap[imageStarRecord.ImageID]] = 1 | |||
| } else { | |||
| imageCollected[imageUserIdMap[imageStarRecord.ImageID]] += 1 | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return imageCollect, imageCollected | |||
| } | |||
| func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { | |||
| @@ -45,17 +45,26 @@ type UserBusinessAnalysisCurrentYear struct { | |||
| Name string `xorm:"NOT NULL"` | |||
| DataDate string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| } | |||
| type UserBusinessAnalysisLast30Day struct { | |||
| @@ -101,17 +110,26 @@ type UserBusinessAnalysisLast30Day struct { | |||
| Name string `xorm:"NOT NULL"` | |||
| DataDate string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| } | |||
| type UserBusinessAnalysisLastMonth struct { | |||
| @@ -157,17 +175,26 @@ type UserBusinessAnalysisLastMonth struct { | |||
| Name string `xorm:"NOT NULL"` | |||
| DataDate string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| } | |||
| type UserBusinessAnalysisCurrentMonth struct { | |||
| @@ -213,17 +240,26 @@ type UserBusinessAnalysisCurrentMonth struct { | |||
| Name string `xorm:"NOT NULL"` | |||
| DataDate string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| } | |||
| type UserBusinessAnalysisCurrentWeek struct { | |||
| @@ -269,17 +305,27 @@ type UserBusinessAnalysisCurrentWeek struct { | |||
| Name string `xorm:"NOT NULL"` | |||
| DataDate string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| } | |||
| type UserBusinessAnalysisYesterday struct { | |||
| @@ -325,17 +371,27 @@ type UserBusinessAnalysisYesterday struct { | |||
| Name string `xorm:"NOT NULL"` | |||
| DataDate string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||
| UserLocation string `xorm:"NULL"` | |||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| } | |||
| type UserAnalysisPara struct { | |||
| @@ -19,7 +19,11 @@ import ( | |||
| "xorm.io/builder" | |||
| ) | |||
| func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) { | |||
| const ( | |||
| SIZE_LIMIT_SCRIPT_NAME = "size_limit" | |||
| ) | |||
| func getHookTemplates() (hookNames, hookTpls, giteaHookTpls, sizeLimitTpls []string) { | |||
| hookNames = []string{"pre-receive", "update", "post-receive"} | |||
| hookTpls = []string{ | |||
| fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType), | |||
| @@ -31,6 +35,11 @@ func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) { | |||
| fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf), | |||
| fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf), | |||
| } | |||
| sizeLimitTpls = []string{ | |||
| fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=0\n\n\nstatus=\"$EXIT_SUCCESS\"\n\nfunction readINI()\n{\n FILENAME='%s'; SECTION=$1; KEY=$2\n RESULT=`awk -F '=' '/\\['$SECTION'\\]/{a=1}a==1&&$1~/'$KEY'/{print $2;exit}' $FILENAME`\n echo $RESULT\n}\n\n# skip this hook entirely if shell check is not open\ncheck_flag=$(readINI 'repository.upload' 'SHELL_FLAG')\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get the maximum filesize configured for this repository or the default\nfunction get_file_maxsize() {\n local value;\n value=$(readINI 'repository.upload' 'FILE_MAX_SIZE')\n if [[ \"$?\" != $EXIT_SUCCESS ]] || [[ -z \"$value\" ]]; then\n echo \"$DEFAULT_FILE_MAXSIZE_MB\"\n return \"$EXIT_SUCCESS\"\n fi\n echo \"$value\"\n return \"$EXIT_SUCCESS\"\n}\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"$(get_file_maxsize)\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \"<oldref> <newref> <refname>\\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. are referenced by\n# another branch or tag).\n\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" --tags=\\* | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)')\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\n# rewrite IFS to seperate line in $files\nIFS=$'\\n'\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n unset IFS\n temp_array=(${file})\n # add all commit files size\n push_size=`expr $push_size + ${temp_array[2]}`\n if [[ ${temp_array[2]} -gt $maxsize ]]; then\n\t if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\n\t fi\n\t echo -e \"\\033[31m- ${temp_array[3]} \\033[0m (ref: ${refname}) \"\n fi\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\nfunction get_repo_maxsize() {\n local value;\n value=$(readINI 'repository' 'REPO_MAX_SIZE')\n if [[ \"$?\" != $EXIT_SUCCESS ]] || [[ -z \"$value\" ]]; then\n echo \"$DEFAULT_FILE_MAXSIZE\"\n return 
\"$EXIT_SUCCESS\"\n fi\n echo \"$value\"\n return \"$EXIT_SUCCESS\"\n}\n\n\nsizelimit_mb=\"$(get_repo_maxsize)\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nstr=`du -sb .`\narr=($str)\nreposize_b=${arr[0]}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS\n", setting.ScriptType, setting.CustomConf), | |||
| fmt.Sprintf(""), | |||
| fmt.Sprintf(""), | |||
| } | |||
| return | |||
| } | |||
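getHookTemplates now returns four parallel slices indexed by hook: entry i of every slice belongs to hookNames[i], and an empty sizeLimitTpls entry means that hook ships no size-limit script (only pre-receive carries one). A tiny sketch of consuming that contract:

```go
// listSizeLimitHooks sketches the parallel-slice contract above.
func listSizeLimitHooks() {
	names, _, _, sizeTpls := getHookTemplates()
	for i, name := range names {
		if sizeTpls[i] == "" {
			continue // update and post-receive carry no size-limit script
		}
		fmt.Printf("hook %s ships a size-limit script (%d bytes)\n", name, len(sizeTpls[i]))
	}
}
```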
| @@ -41,7 +50,7 @@ func CreateDelegateHooks(repoPath string) error { | |||
| // createDelegateHooks creates all the hooks scripts for the repo | |||
| func createDelegateHooks(repoPath string) (err error) { | |||
| hookNames, hookTpls, giteaHookTpls := getHookTemplates() | |||
| hookNames, hookTpls, giteaHookTpls, sizeLimitTpls := getHookTemplates() | |||
| hookDir := filepath.Join(repoPath, "hooks") | |||
| for i, hookName := range hookNames { | |||
| @@ -74,8 +83,26 @@ func createDelegateHooks(repoPath string) (err error) { | |||
| if err = ensureExecutable(newHookPath); err != nil { | |||
| return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err) | |||
| } | |||
| if err = writeHookTpl(generateHookScriptPath(hookDir, hookName, SIZE_LIMIT_SCRIPT_NAME), sizeLimitTpls[i]); err != nil { | |||
| return err | |||
| } | |||
| } | |||
| return nil | |||
| } | |||
| func writeHookTpl(hookPath, content string) error { | |||
| if content == "" { | |||
| return nil | |||
| } | |||
| if err := ioutil.WriteFile(hookPath, []byte(content), 0777); err != nil { | |||
| return fmt.Errorf("write new hook file '%s': %v", hookPath, err) | |||
| } | |||
| if err := ensureExecutable(hookPath); err != nil { | |||
| return fmt.Errorf("Unable to set %s executable. Error %v", hookPath, err) | |||
| } | |||
| return nil | |||
| } | |||
| @@ -101,7 +128,7 @@ func ensureExecutable(filename string) error { | |||
| // CheckDelegateHooks checks the hooks scripts for the repo | |||
| func CheckDelegateHooks(repoPath string) ([]string, error) { | |||
| hookNames, hookTpls, giteaHookTpls := getHookTemplates() | |||
| hookNames, hookTpls, giteaHookTpls, sizeLimitTpls := getHookTemplates() | |||
| hookDir := filepath.Join(repoPath, "hooks") | |||
| results := make([]string, 0, 10) | |||
| @@ -146,10 +173,34 @@ func CheckDelegateHooks(repoPath string) ([]string, error) { | |||
| if !checkExecutable(newHookPath) { | |||
| results = append(results, fmt.Sprintf("new hook file %s is not executable", newHookPath)) | |||
| } | |||
| if results, err = checkHookFile(generateHookScriptPath(hookDir, hookName, SIZE_LIMIT_SCRIPT_NAME), sizeLimitTpls[i], results); err != nil { | |||
| return results, err | |||
| } | |||
| } | |||
| return results, nil | |||
| } | |||
| func generateHookScriptPath(hookDir, hookName, fileName string) string { | |||
| return filepath.Join(hookDir, hookName+".d", fileName) | |||
| } | |||
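So for a repository at a hypothetical /repos/demo.git, the pre-receive size-limit script lands next to the other delegate hooks:

```go
// Illustrative only: where the size-limit script lands for pre-receive.
func exampleSizeLimitPath() string {
	// returns "/repos/demo.git/hooks/pre-receive.d/size_limit"
	return generateHookScriptPath("/repos/demo.git/hooks", "pre-receive", SIZE_LIMIT_SCRIPT_NAME)
}
```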
| func checkHookFile(filePath, tpl string, results []string) ([]string, error) { | |||
| if tpl == "" { | |||
| return results, nil | |||
| } | |||
| contents, err := ioutil.ReadFile(filePath) | |||
| if err != nil { | |||
| return results, err | |||
| } | |||
| if string(contents) != tpl { | |||
| // return the grown slice: appending to the by-value parameter alone is not visible to the caller | |||
| results = append(results, fmt.Sprintf("old hook file %s is out of date", filePath)) | |||
| } | |||
| if !checkExecutable(filePath) { | |||
| results = append(results, fmt.Sprintf("old hook file %s is not executable", filePath)) | |||
| } | |||
| return results, nil | |||
| } | |||
| // SyncRepositoryHooks rewrites all repositories' pre-receive, update and post-receive hooks | |||
| // to make sure the binary and custom conf path are up-to-date. | |||
| func SyncRepositoryHooks(ctx context.Context) error { | |||
| @@ -2,6 +2,7 @@ package storage | |||
| import ( | |||
| "encoding/xml" | |||
| "errors" | |||
| "path" | |||
| "sort" | |||
| "strconv" | |||
| @@ -129,7 +130,7 @@ func NewMultiPartUpload(uuid string) (string, error) { | |||
| return core.NewMultipartUpload(bucketName, objectName, miniov6.PutObjectOptions{}) | |||
| } | |||
| func CompleteMultiPartUpload(uuid string, uploadID string) (string, error) { | |||
| func CompleteMultiPartUpload(uuid string, uploadID string, totalChunks int) (string, error) { | |||
| client, core, err := getClients() | |||
| if err != nil { | |||
| log.Error("getClients failed:", err.Error()) | |||
| @@ -146,6 +147,11 @@ func CompleteMultiPartUpload(uuid string, uploadID string) (string, error) { | |||
| return "", err | |||
| } | |||
| if len(partInfos) != totalChunks { | |||
| log.Error("ListObjectParts number(%d) is not equal the set total chunk number(%d)", len(partInfos), totalChunks) | |||
| return "", errors.New("the parts is not complete") | |||
| } | |||
| var complMultipartUpload completeMultipartUpload | |||
| for _, partInfo := range partInfos { | |||
| complMultipartUpload.Parts = append(complMultipartUpload.Parts, miniov6.CompletePart{ | |||
| @@ -85,7 +85,7 @@ func listAllParts(uuid, uploadID, key string) (output *obs.ListPartsOutput, err | |||
| }) | |||
| } | |||
| if len(temp.Parts) < temp.MaxParts { | |||
| if !temp.IsTruncated { | |||
| break | |||
| } else { | |||
| continue | |||
| @@ -128,7 +128,7 @@ func NewObsMultiPartUpload(uuid, fileName string) (string, error) { | |||
| return output.UploadId, nil | |||
| } | |||
| func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error { | |||
| func CompleteObsMultiPartUpload(uuid, uploadID, fileName string, totalChunks int) error { | |||
| input := &obs.CompleteMultipartUploadInput{} | |||
| input.Bucket = setting.Bucket | |||
| input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") | |||
| @@ -140,6 +140,11 @@ func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error { | |||
| return err | |||
| } | |||
| if len(allParts.Parts) != totalChunks { | |||
| log.Error("listAllParts number(%d) is not equal the set total chunk number(%d)", len(allParts.Parts), totalChunks) | |||
| return errors.New("the parts is not complete") | |||
| } | |||
| input.Parts = allParts.Parts | |||
| output, err := ObsCli.CompleteMultipartUpload(input) | |||
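Both completion paths now refuse to stitch the upload when the server-side part listing differs from the client's recorded TotalChunks, turning a silently truncated file into a hard error. A hedged distillation of that shared guard (hypothetical helper, not code in this PR):

```go
// verifyParts is a hypothetical distillation of the new checks in
// CompleteMultiPartUpload and CompleteObsMultiPartUpload.
func verifyParts(listed, totalChunks int) error {
	if listed != totalChunks {
		return fmt.Errorf("upload incomplete: %d parts uploaded, %d expected", listed, totalChunks)
	}
	return nil
}
```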
| @@ -507,8 +507,16 @@ static.CloudBrainTaskNum=CloudBrain Task Count | |||
| static.CloudBrainRunTime=CloudBrain Run Time | |||
| static.CommitDatasetNum=Commit Dataset Count | |||
| static.CommitModelCount=Commit Model Count | |||
| static.UserIndex=User Index | |||
| static.UserIndex=Normalized User Index | |||
| static.UserIndexPrimitive=User Index | |||
| static.countdate=Count Date | |||
| static.FocusOtherUser=Focus Other User Count | |||
| static.CollectDataset=Collect Dataset Count | |||
| static.CollectedDataset=Collected Dataset Count | |||
| static.RecommendDataset=Recommended Dataset Count | |||
| static.CollectImage=Collect Image Count | |||
| static.CollectedImage=Collected Image Count | |||
| static.RecommendImage=Recommended Image Count | |||
| static.all=All | |||
| static.public.user_business_analysis_current_month=Current_Month | |||
| static.public.user_business_analysis_current_week=Current_Week | |||
| @@ -512,8 +512,16 @@ static.CloudBrainTaskNum=云脑任务数 | |||
| static.CloudBrainRunTime=云脑运行时间(小时) | |||
| static.CommitDatasetNum=上传(提交)数据集文件数 | |||
| static.CommitModelCount=提交模型数 | |||
| static.UserIndex=用户指数 | |||
| static.UserIndex=归一化用户指数 | |||
| static.UserIndexPrimitive=用户指数 | |||
| static.countdate=系统统计时间 | |||
| static.FocusOtherUser=关注他人数 | |||
| static.CollectDataset=收藏数据集 | |||
| static.CollectedDataset=被收藏数据集 | |||
| static.RecommendDataset=被推荐数据集数 | |||
| static.CollectImage=收藏镜像数 | |||
| static.CollectedImage=被收藏镜像数 | |||
| static.RecommendImage=被推荐镜像数 | |||
| static.all=所有 | |||
| static.public.user_business_analysis_current_month=本月 | |||
| static.public.user_business_analysis_current_week=本周 | |||
| @@ -810,14 +810,7 @@ var repoAndOrgEN={ | |||
| function page(current){ | |||
| currentPage=current; | |||
| startIndex = currentPage -1; | |||
| if(startIndex < 1){ | |||
| startIndex = 1; | |||
| } | |||
| endIndex = currentPage + 2; | |||
| if(endIndex >= totalPage){ | |||
| endIndex = totalPage; | |||
| } | |||
| doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel); | |||
| } | |||
| @@ -888,9 +881,14 @@ function getYPosition(e){ | |||
| var html =""; | |||
| console.log("currentPage=" + currentPage); | |||
| console.log("privateTotal=" + privateTotal); | |||
| // if(totalPage==0){ | |||
| // return; | |||
| // } | |||
| startIndex = currentPage -1; | |||
| if(startIndex < 1){ | |||
| startIndex = 1; | |||
| } | |||
| endIndex = currentPage + 2; | |||
| if(endIndex >= totalPage){ | |||
| endIndex = totalPage; | |||
| } | |||
| html += "<span class=\"item\">" + getLabel(isZh,"search_input_total") + " " + totalNum + " " + getLabel(isZh,"search_srtip") + "</span>" | |||
| if(currentPage > 1){ | |||
| html += "<a class=\"item navigation\" href=\"javascript:page(1)\"><span class=\"navigation_label\">" + getLabel(isZh,"search_home_page") + "</span></a>"; | |||
| @@ -23,7 +23,7 @@ func DownloadCloudBrainBoard(ctx *context.Context) { | |||
| _, total, err := models.CloudbrainAll(&models.CloudbrainsOptions{ | |||
| ListOptions: models.ListOptions{ | |||
| Page: page, | |||
| PageSize: 1, | |||
| PageSize: pageSize, | |||
| }, | |||
| Type: models.TypeCloudBrainAll, | |||
| NeedRepoInfo: false, | |||
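The hard-coded PageSize of 1 meant the board export fetched a single record per query; forwarding the caller's pageSize restores full pages. Below is the usual drain loop over such a paged query, with fetchPage as a hypothetical stand-in for models.CloudbrainAll, not the repository's function:

```go
package main

import "fmt"

// fetchPage stands in for models.CloudbrainAll: it returns one page of
// record IDs out of `total`, or nil once the offset runs past the end.
func fetchPage(page, pageSize, total int) []int {
	start := (page - 1) * pageSize
	if start >= total {
		return nil
	}
	end := start + pageSize
	if end > total {
		end = total
	}
	ids := make([]int, 0, end-start)
	for i := start; i < end; i++ {
		ids = append(ids, i)
	}
	return ids
}

func main() {
	const total, pageSize = 7, 3
	for page := 1; ; page++ {
		batch := fetchPage(page, pageSize, total)
		if len(batch) == 0 {
			break
		}
		fmt.Println("page", page, "->", batch)
	}
}
```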
| @@ -855,13 +855,13 @@ func CompleteMultipart(ctx *context.Context) { | |||
| } | |||
| if typeCloudBrain == models.TypeCloudBrainOne { | |||
| _, err = storage.CompleteMultiPartUpload(uuid, uploadID) | |||
| _, err = storage.CompleteMultiPartUpload(uuid, uploadID, fileChunk.TotalChunks) | |||
| if err != nil { | |||
| ctx.Error(500, fmt.Sprintf("CompleteMultiPartUpload failed: %v", err)) | |||
| return | |||
| } | |||
| } else { | |||
| err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName) | |||
| err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName, fileChunk.TotalChunks) | |||
| if err != nil { | |||
| ctx.Error(500, fmt.Sprintf("CompleteObsMultiPartUpload failed: %v", err)) | |||
| return | |||
| @@ -19,6 +19,130 @@ const ( | |||
| PAGE_SIZE = 2000 | |||
| ) | |||
| func getExcelHeader(ctx *context.Context) map[string]string { | |||
| excelHeader := make([]string, 0) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.id")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.name")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndex")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndexPrimitive")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.codemergecount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.commitcount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.issuecount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.commentcount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.focusrepocount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.starrepocount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.logincount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.watchedcount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.commitcodesize")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.solveissuecount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.encyclopediascount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.createrepocount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.openiindex")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainTaskNum")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainRunTime")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CommitDatasetNum")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CommitModelCount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.FocusOtherUser")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CollectDataset")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedDataset")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendDataset")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CollectImage")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedImage")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendImage")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.registdate")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.countdate")) | |||
| excelHeaderMap := make(map[string]string) | |||
| var i byte | |||
| for _, value := range excelHeader { | |||
| excelColumn := getColumn(i) + "1" | |||
| excelHeaderMap[excelColumn] = value | |||
| i++ | |||
| } | |||
| return excelHeaderMap | |||
| } | |||
| func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysisAll) { | |||
| rows := fmt.Sprint(row) | |||
| var tmp byte | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive)) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CodeMergeCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.IssueCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommentCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusRepoCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.StarRepoCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.WatchedCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCodeSize) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SolveIssueCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.EncyclopediasCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CreateRepoCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CloudBrainTaskNum) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitDatasetNum) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedDataset) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendDataset) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectImage) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedImage) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage) | |||
| tmp = tmp + 1 | |||
| formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3]) | |||
| tmp = tmp + 1 | |||
| formatTime = userRecord.DataDate | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime) | |||
| } | |||
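getExcelHeader and writeExcel must be kept in the same column order by hand: both walk a byte counter, and inserting a header without the matching SetCellValue shifts every later column. One way to make that coupling structural, sketched with a two-field stand-in for models.UserBusinessAnalysisAll, is to pair each header key with its value extractor:

```go
package main

import "fmt"

// record stands in for models.UserBusinessAnalysisAll; only two fields
// are shown to keep the sketch short.
type record struct {
	ID   int64
	Name string
}

// column pairs a header key with the function that extracts its value,
// so the header row and the data rows can never drift out of order.
type column struct {
	headerKey string
	value     func(r record) interface{}
}

var cols = []column{
	{"user.static.id", func(r record) interface{} { return r.ID }},
	{"user.static.name", func(r record) interface{} { return r.Name }},
}

func main() {
	r := record{ID: 1, Name: "alice"}
	for i, c := range cols {
		fmt.Printf("col %d header=%s value=%v\n", i, c.headerKey, c.value(r))
	}
}
```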
| // getColumn maps a zero-based column index to an Excel column name. | |||
| // It only covers A-Z and AA-AZ (52 columns), enough for the 30 headers exported here. | |||
| func getColumn(tmp byte) string { | |||
| const tmpA byte = 'A' | |||
| if tmp < 26 { | |||
| return string(tmpA + tmp) | |||
| } | |||
| return "A" + string(tmpA+(tmp-26)) | |||
| } | |||
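As noted in the comment, getColumn stops at AZ. Should the export ever grow past 52 columns, the fully general base-26 conversion is short; this variant is an illustration, not code from the diff:

```go
package main

import "fmt"

// excelColumn converts a zero-based column index to an Excel column name
// (A..Z, AA, AB, ...) with no width limit, unlike the two-letter helper.
func excelColumn(n int) string {
	name := ""
	for n >= 0 {
		name = string(rune('A'+n%26)) + name
		n = n/26 - 1
	}
	return name
}

func main() {
	for _, i := range []int{0, 25, 26, 29, 51, 52} {
		fmt.Printf("%d -> %s\n", i, excelColumn(i))
	}
}
```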
| func queryUserDataPage(ctx *context.Context, tableName string, queryObj interface{}) { | |||
| page := ctx.QueryInt("page") | |||
| if page <= 0 { | |||
| @@ -37,30 +161,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac | |||
| sheetName := ctx.Tr("user.static.sheetname") | |||
| index := xlsx.NewSheet(sheetName) | |||
| xlsx.DeleteSheet("Sheet1") | |||
| dataHeader := map[string]string{ | |||
| "A1": ctx.Tr("user.static.id"), | |||
| "B1": ctx.Tr("user.static.name"), | |||
| "C1": ctx.Tr("user.static.UserIndex"), | |||
| "D1": ctx.Tr("user.static.codemergecount"), | |||
| "E1": ctx.Tr("user.static.commitcount"), | |||
| "F1": ctx.Tr("user.static.issuecount"), | |||
| "G1": ctx.Tr("user.static.commentcount"), | |||
| "H1": ctx.Tr("user.static.focusrepocount"), | |||
| "I1": ctx.Tr("user.static.starrepocount"), | |||
| "J1": ctx.Tr("user.static.logincount"), | |||
| "K1": ctx.Tr("user.static.watchedcount"), | |||
| "L1": ctx.Tr("user.static.commitcodesize"), | |||
| "M1": ctx.Tr("user.static.solveissuecount"), | |||
| "N1": ctx.Tr("user.static.encyclopediascount"), | |||
| "O1": ctx.Tr("user.static.createrepocount"), | |||
| "P1": ctx.Tr("user.static.openiindex"), | |||
| "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), | |||
| "R1": ctx.Tr("user.static.CloudBrainRunTime"), | |||
| "S1": ctx.Tr("user.static.CommitDatasetNum"), | |||
| "T1": ctx.Tr("user.static.CommitModelCount"), | |||
| "U1": ctx.Tr("user.static.registdate"), | |||
| "V1": ctx.Tr("user.static.countdate"), | |||
| } | |||
| dataHeader := getExcelHeader(ctx) | |||
| for k, v := range dataHeader { | |||
| // set the cell value | |||
| xlsx.SetCellValue(sheetName, k, v) | |||
| @@ -74,31 +175,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac | |||
| log.Info("return count=" + fmt.Sprint(count)) | |||
| for _, userRecord := range re { | |||
| row++ | |||
| rows := fmt.Sprint(row) | |||
| xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) | |||
| xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) | |||
| xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||
| xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) | |||
| xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) | |||
| xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) | |||
| xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) | |||
| xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) | |||
| xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) | |||
| xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) | |||
| xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) | |||
| xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) | |||
| xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) | |||
| xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) | |||
| xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) | |||
| xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) | |||
| xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) | |||
| xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) | |||
| xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) | |||
| xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) | |||
| formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") | |||
| xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) | |||
| formatTime = userRecord.DataDate | |||
| xlsx.SetCellValue(sheetName, "V"+rows, formatTime) | |||
| writeExcel(row, xlsx, sheetName, userRecord) | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
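Each fetched page is written with a row counter that survives page boundaries: row 1 holds the header, so the first record lands in row 2, and later pages keep incrementing the same counter instead of restarting. A toy version of that bookkeeping:

```go
package main

import "fmt"

const pageSize = 3 // the real code uses PAGE_SIZE = 2000

// writeRow stands in for writeExcel; it just reports where a record lands.
func writeRow(row int, record string) {
	fmt.Printf("row %d <- %s\n", row, record)
}

func main() {
	pages := [][]string{{"a", "b", "c"}, {"d", "e"}} // two fetched batches
	row := 1                                         // row 1 holds the header
	for _, page := range pages {
		for _, rec := range page {
			row++ // continuous counter: first data row is 2, across pages
			writeRow(row, rec)
		}
	}
}
```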
| @@ -236,62 +313,16 @@ func QueryUserStaticDataPage(ctx *context.Context) { | |||
| sheetName := ctx.Tr("user.static.sheetname") | |||
| index := xlsx.NewSheet(sheetName) | |||
| xlsx.DeleteSheet("Sheet1") | |||
| dataHeader := map[string]string{ | |||
| "A1": ctx.Tr("user.static.id"), | |||
| "B1": ctx.Tr("user.static.name"), | |||
| "C1": ctx.Tr("user.static.UserIndex"), | |||
| "D1": ctx.Tr("user.static.codemergecount"), | |||
| "E1": ctx.Tr("user.static.commitcount"), | |||
| "F1": ctx.Tr("user.static.issuecount"), | |||
| "G1": ctx.Tr("user.static.commentcount"), | |||
| "H1": ctx.Tr("user.static.focusrepocount"), | |||
| "I1": ctx.Tr("user.static.starrepocount"), | |||
| "J1": ctx.Tr("user.static.logincount"), | |||
| "K1": ctx.Tr("user.static.watchedcount"), | |||
| "L1": ctx.Tr("user.static.commitcodesize"), | |||
| "M1": ctx.Tr("user.static.solveissuecount"), | |||
| "N1": ctx.Tr("user.static.encyclopediascount"), | |||
| "O1": ctx.Tr("user.static.createrepocount"), | |||
| "P1": ctx.Tr("user.static.openiindex"), | |||
| "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), | |||
| "R1": ctx.Tr("user.static.CloudBrainRunTime"), | |||
| "S1": ctx.Tr("user.static.CommitDatasetNum"), | |||
| "T1": ctx.Tr("user.static.CommitModelCount"), | |||
| "U1": ctx.Tr("user.static.registdate"), | |||
| "V1": ctx.Tr("user.static.countdate"), | |||
| } | |||
| dataHeader := getExcelHeader(ctx) | |||
| for k, v := range dataHeader { | |||
| // set the cell value | |||
| xlsx.SetCellValue(sheetName, k, v) | |||
| } | |||
| for i, userRecord := range re { | |||
| rows := fmt.Sprint(i + 2) | |||
| xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) | |||
| xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) | |||
| xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||
| xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) | |||
| xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) | |||
| xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) | |||
| xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) | |||
| xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) | |||
| xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) | |||
| xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) | |||
| xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) | |||
| xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) | |||
| xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) | |||
| xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) | |||
| xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) | |||
| xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) | |||
| xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) | |||
| xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) | |||
| xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) | |||
| xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) | |||
| formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") | |||
| xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) | |||
| formatTime = userRecord.DataDate | |||
| xlsx.SetCellValue(sheetName, "V"+rows, formatTime) | |||
| row := i + 2 | |||
| writeExcel(row, xlsx, sheetName, userRecord) | |||
| } | |||
| // set the sheet opened by default | |||
| @@ -804,6 +804,7 @@ func Cloudbrains(ctx *context.Context) { | |||
| repos, _, err := models.SearchRepository(&models.SearchRepoOptions{ | |||
| Actor: ctx.User, | |||
| OwnerID: ctxUser.ID, | |||
| Private: true, | |||
| }) | |||
| if err != nil { | |||
| ctx.ServerError("SearchRepository", err) | |||
| @@ -162,9 +162,9 @@ | |||
| {{end}} | |||
| </div> | |||
| <!-- delete task --> | |||
| <form class="ui compact buttons" id="delForm-{{$JobID}}" action='{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Name}}{{if eq .JobType "BENCHMARK"}}/cloudbrain/benchmark{{else if eq .JobType "DEBUG"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}{{else if eq .JobType "TRAIN"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}/train-job{{end}}/{{$JobID}}/del?isadminpage=true' method="post"> | |||
| <form class="ui compact buttons" id="delForm-{{$JobID}}" action='{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Name}}{{if eq .JobType "BENCHMARK"}}/cloudbrain/benchmark{{else if or (eq .JobType "SNN4IMAGENET") (eq .JobType "BRAINSCORE")}}/cloudbrain{{else if eq .JobType "DEBUG"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}{{else if eq .JobType "TRAIN"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}/train-job{{end}}/{{$JobID}}/del?isadminpage=true' method="post"> | |||
| {{$.CsrfTokenHtml}} | |||
| <a style="padding: 0.5rem 1rem;margin-left:0.2rem" id="ai-delete-{{$JobID}}" data-repopath="{{.Repo.OwnerName}}/{{.Repo.Name}}/modelarts/inference-job/{{$JobID}}/del_version?isadminpage=true" data-version="{{.VersionName}}" class="ui basic ai_delete blue button" style="border-radius: .28571429rem;"> | |||
| <a style="padding: 0.5rem 1rem;margin-left:0.2rem" id="ai-delete-{{$JobID}}" data-repopath="{{.Repo.OwnerName}}/{{.Repo.Name}}/modelarts/inference-job/{{$JobID}}/del_version?isadminpage=true" data-version="" class="ui basic ai_delete blue button" style="border-radius: .28571429rem;"> | |||
| {{$.i18n.Tr "repo.delete"}} | |||
| </a> | |||
| </form> | |||
| @@ -179,7 +179,26 @@ | |||
| </div> | |||
| </div> | |||
| <input id="store_category" type="hidden" name="get_benchmark_category"> | |||
| <div class="inline required field"> | |||
| <label>{{.i18n.Tr "repo.modelarts.code_version"}}</label> | |||
| <select class="ui dropdown width80 left2" id="code_version" name="branch_name"> | |||
| {{if .branch_name}} | |||
| <option name="branch_name" value="{{.branch_name}}">{{.branch_name}}</option> | |||
| {{range $k, $v :=.Branches}} | |||
| {{ if ne $v $.branch_name }} | |||
| <option name="branch_name" value="{{$v}}">{{$v}}</option> | |||
| {{end}} | |||
| {{end}} | |||
| {{else}} | |||
| <option name="branch_name" value="{{.branchName}}">{{.branchName}}</option> | |||
| {{range $k, $v :=.Branches}} | |||
| {{ if ne $v $.branchName }} | |||
| <option name="branch_name" value="{{$v}}">{{$v}}</option> | |||
| {{end}} | |||
| {{end}} | |||
| {{end}} | |||
| </select> | |||
| </div> | |||
| <div class="inline required field"> | |||
| <label>{{.i18n.Tr "cloudbrain.gpu_type"}}</label> | |||
| <select id="cloudbrain_gpu_type" class="ui search dropdown" placeholder="选择GPU类型" style='width:385px' name="gpu_type"> | |||
| @@ -249,7 +249,7 @@ td, th { | |||
| <div class="ui pointing secondary menu" style="border-bottom: 1px solid rgba(34,36,38,.15);"> | |||
| <a class="active item" data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a> | |||
| <a class="item" data-tab="second{{$k}}" onclick="loadLog({{.VersionName}})">{{$.i18n.Tr "repo.modelarts.log"}}</a> | |||
| <a class="item log_bottom" data-tab="second{{$k}}" data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a> | |||
| <a class="item" data-tab="third{{$k}}" onclick="loadModelFile({{.VersionName}},'','','init')">{{$.i18n.Tr "repo.model_download"}}</a> | |||
| </div> | |||
| <div class="ui tab active" data-tab="first{{$k}}"> | |||
| @@ -420,7 +420,13 @@ td, th { | |||
| </div> | |||
| </div> | |||
| <div class="ui tab" data-tab="second{{$k}}"> | |||
| <div> | |||
| <div style="position: relative;"> | |||
| <span> | |||
| <a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | |||
| </span> | |||
| <span> | |||
| <a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" class="log_bottom" data-version="{{.VersionName}}"><i class="icon-to-bottom"></i></a> | |||
| </span> | |||
| <div class="ui message message{{.VersionName}}" style="display: none;"> | |||
| <div id="header"></div> | |||
| </div> | |||
| @@ -861,9 +867,12 @@ td, th { | |||
| console.log(err); | |||
| }); | |||
| } | |||
| if(scrollTop == 0 && scrollLeft==0){ | |||
| if(scrollTop == 1 && scrollLeft==0){ | |||
| let start_line = $(`#log${version_name} input[name=start_line]`).val() | |||
| $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${start_line}&lines=50&order=asc`, (data) => { | |||
| console.log("11111") | |||
| if (data.Lines == 0){ | |||
| $(`.message${version_name} #header`).text('您已翻阅至日志顶部') | |||
| $(`.message${version_name}`).css('display', 'block') | |||
| @@ -879,4 +888,73 @@ td, th { | |||
| }); | |||
| } | |||
| } | |||
| function scrollAnimation(dom, currentY, targetY, currentX) { | |||
| let needScrollTop = targetY - currentY; | |||
| let _currentY = currentY; | |||
| setTimeout(() => { | |||
| // scroll step for this frame; it differs on every call | |||
| // take one tenth of the total remaining distance | |||
| const dist = Math.ceil(needScrollTop / 10); | |||
| _currentY += dist; | |||
| // move by that one tenth | |||
| dom.scrollTo(currentX || 0, _currentY); | |||
| // if the move is within ten pixels, jump straight to the target; otherwise recurse to animate | |||
| if (needScrollTop > 10 || needScrollTop < -10) { | |||
| scrollAnimation(dom, _currentY, targetY) | |||
| } else { | |||
| dom.scrollTo(0, targetY) | |||
| } | |||
| }, 1) | |||
| } | |||
| $('.log_top').click(function(){ | |||
| let logContentDom = document.querySelector('.log') | |||
| if(!logContentDom) | |||
| return | |||
| let version_name = $(this).data('version') | |||
| $(`#log_file${version_name}`).siblings('pre').remove() | |||
| $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=asc`, (data) => { | |||
| $(`#log${version_name} input[name=end_line]`).val(data.EndLine) // update the stored line bounds when they change | |||
| $(`#log${version_name} input[name=start_line]`).val(data.StartLine) | |||
| $(`#log${version_name}`).prepend('<pre>' + data.Content) | |||
| scrollAnimation(logContentDom, logContentDom.scrollTop, 0); | |||
| }) | |||
| }) | |||
| $('.log_bottom').click(function(){ | |||
| let logContentDom = document.querySelector('.log') | |||
| let version_name = $(this).data('version') | |||
| $(`#log_file${version_name}`).siblings('pre').remove() | |||
| $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=desc`, (data) => { | |||
| $(`#log${version_name} input[name=end_line]`).val(data.EndLine) // update the stored line bounds when they change | |||
| $(`#log${version_name} input[name=start_line]`).val(data.StartLine) | |||
| $(`#log${version_name}`).append('<pre>' + data.Content) | |||
| const end_line = data.EndLine // capture the outer end line; the nested callback shadows `data` | |||
| $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${data.EndLine}&lines=50&order=desc`, (data) => { | |||
| if (data.Lines == 0){ | |||
| $(`.message${version_name} #header`).text('您已翻阅至日志底部') | |||
| $(`.message${version_name}`).css('display', 'block') | |||
| setTimeout(function(){ | |||
| $(`.message${version_name}`).css('display', 'none') | |||
| }, 1000) | |||
| }else{ | |||
| if(end_line===data.EndLine){ | |||
| return | |||
| } | |||
| else{ | |||
| $(`#log${version_name} input[name=end_line]`).val(data.EndLine) | |||
| $(`#log${version_name}`).append('<pre>' + data.Content) | |||
| } | |||
| } | |||
| }).fail(function(err) { | |||
| console.log(err); | |||
| }); | |||
| scrollAnimation(logContentDom, logContentDom.scrollTop+1, logContentDom.scrollHeight - logContentDom.clientHeight); | |||
| }) | |||
| }) | |||
| </script> | |||
| @@ -147,9 +147,9 @@ | |||
| {{end}} | |||
| </div> | |||
| <!-- delete task --> | |||
| <form class="ui compact buttons" id="delForm-{{$JobID}}" action='{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Name}}{{if eq .JobType "BENCHMARK"}}/cloudbrain/benchmark{{else if eq .JobType "DEBUG"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}{{else if eq .JobType "TRAIN"}}{{if eq .ComputeResource "NPU"}}/modelarts/train-job{{else}}/cloudbrain/train-job{{end}}{{end}}/{{$JobID}}/del?ishomepage=true' method="post"> | |||
| <form class="ui compact buttons" id="delForm-{{$JobID}}" action='{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Name}}{{if eq .JobType "BENCHMARK"}}/cloudbrain/benchmark{{else if or (eq .JobType "SNN4IMAGENET") (eq .JobType "BRAINSCORE")}}/cloudbrain{{else if eq .JobType "DEBUG"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}{{else if eq .JobType "TRAIN"}}{{if eq .ComputeResource "NPU"}}/modelarts/train-job{{else}}/cloudbrain/train-job{{end}}{{end}}/{{$JobID}}/del?ishomepage=true' method="post"> | |||
| {{$.CsrfTokenHtml}} | |||
| <a style="padding: 0.5rem 1rem;margin-left:0.2rem" id="ai-delete-{{$JobID}}" data-repopath="{{.Repo.OwnerName}}/{{.Repo.Name}}/modelarts/inference-job/{{$JobID}}/del_version?ishomepage=true" data-version="{{.VersionName}}" class="ui basic ai_delete blue button" style="border-radius: .28571429rem;"> | |||
| <a style="padding: 0.5rem 1rem;margin-left:0.2rem" id="ai-delete-{{$JobID}}" data-repopath="{{.Repo.OwnerName}}/{{.Repo.Name}}/modelarts/inference-job/{{$JobID}}/del_version?ishomepage=true" data-version="" class="ui basic ai_delete blue button" style="border-radius: .28571429rem;"> | |||
| {{$.i18n.Tr "repo.delete"}} | |||
| </a> | |||
| </form> | |||
| @@ -193,6 +193,9 @@ export default { | |||
| const time = new Date().getTime(); | |||
| this.status = this.dropzoneParams.data('md5-computing'); | |||
| file.totalChunkCounts = chunks; | |||
| if (file.size==0) { | |||
| file.totalChunkCounts = 1 | |||
| } | |||
| loadMd5Next(); | |||
| fileReader.onload = (e) => { | |||
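A zero-byte file divides into zero chunks, so the MD5 loop and the multipart completion would never run; forcing totalChunkCounts to 1 gives the flow one (empty) chunk to carry. The arithmetic, transcribed to Go with an assumed 8 MB chunk size (the real chunk size lives in the uploader's configuration):

```go
package main

import "fmt"

// chunkCount mirrors the uploader fix: a zero-byte file still needs one
// (empty) chunk so the multipart flow can complete.
func chunkCount(size, chunkSize int64) int64 {
	if size == 0 {
		return 1
	}
	return (size + chunkSize - 1) / chunkSize // ceiling division
}

func main() {
	const chunkSize = 8 << 20 // assumed 8 MB chunks
	for _, size := range []int64{0, 1, 8 << 20, (8 << 20) + 1} {
		fmt.Println(size, "->", chunkCount(size, chunkSize))
	}
}
```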
| @@ -64,13 +64,22 @@ | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="UserIndex" | |||
| label="用户指数" | |||
| label="归一化用户指数" | |||
| width="120px" | |||
| align="center"> | |||
| <template slot-scope="scope"> | |||
| {{scope.row.UserIndex | rounding}} | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="UserIndexPrimitive" | |||
| label="用户指数" | |||
| width="120px" | |||
| align="center"> | |||
| <template slot-scope="scope"> | |||
| {{scope.row.UserIndexPrimitive | rounding}} | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="CodeMergeCount" | |||
| label="PR数" | |||
| @@ -160,6 +169,48 @@ | |||
| label="提交模型数" | |||
| width="120px" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="FocusOtherUser" | |||
| label="关注他人数" | |||
| width="120px" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="CollectDataset" | |||
| label="收藏数据集" | |||
| width="120px" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="CollectedDataset" | |||
| label="被收藏数据集" | |||
| width="120px" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="RecommendDataset" | |||
| label="被推荐数据集数" | |||
| width="120px" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="CollectImage" | |||
| label="收藏镜像数" | |||
| width="120px" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="CollectedImage" | |||
| label="被收藏镜像数" | |||
| width="120px" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="RecommendImage" | |||
| label="被推荐镜像数" | |||
| width="120px" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="RegistDate" | |||
| @@ -214,7 +265,7 @@ | |||
| value_time: '', | |||
| search:'', | |||
| data:'', | |||
| columns: [{title: 'ID',key: 'ID'},{title: '用户名',key: 'Name'},{title: 'PR数',key: 'CodeMergeCount'},{title: 'commit数',key:'CommitCount'},{title: '提出任务数',key: 'IssueCount'},{title: '评论数',key: 'CommentCount'},{title: '关注项目数',key: 'FocusRepoCount'},{title: '点赞项目数',key: 'StarRepoCount'},{title: '登录次数',key: 'LoginCount'},{title:'关注者数',key:'WatchedCount'},{title:'commit代码行数',key:'CommitCodeSize'},{title:'已解决任务数',key:'SolveIssueCount'},{title:'百科页面贡献次数',key:'EncyclopediasCount'},{title:'创建项目',key:'CreateRepoCount'},{title:'用户注册时间',key:'RegistDate'},{title:'云脑任务数',key:'CloudBrainTaskNum'},{title:'云脑运行时间(小时)',key:'CloudBrainRunTime'},{title:'上传(提交)数据集文件数',key:'CommitDatasetNum'},{title:'提交模型数',key:'CommitModelCount'},{title:'用户指数',key:'UserIndex'},{title:'系统统计时间',key:'CountDate'}], | |||
| columns: [{title: 'ID',key: 'ID'},{title: '用户名',key: 'Name'},{title: 'PR数',key: 'CodeMergeCount'},{title: 'commit数',key:'CommitCount'},{title: '提出任务数',key: 'IssueCount'},{title: '评论数',key: 'CommentCount'},{title: '关注项目数',key: 'FocusRepoCount'},{title: '点赞项目数',key: 'StarRepoCount'},{title: '登录次数',key: 'LoginCount'},{title:'关注者数',key:'WatchedCount'},{title:'commit代码行数',key:'CommitCodeSize'},{title:'已解决任务数',key:'SolveIssueCount'},{title:'百科页面贡献次数',key:'EncyclopediasCount'},{title:'创建项目',key:'CreateRepoCount'},{title:'用户注册时间',key:'RegistDate'},{title:'云脑任务数',key:'CloudBrainTaskNum'},{title:'云脑运行时间(小时)',key:'CloudBrainRunTime'},{title:'上传(提交)数据集文件数',key:'CommitDatasetNum'},{title:'提交模型数',key:'CommitModelCount'},{title:'归一化用户指数',key:'UserIndex'},{title:'用户指数',key:'UserIndexPrimitive'},{title:'关注他人数',key:'FocusOtherUser'},{title:'收藏数据集',key:'CollectDataset'},{title:'被收藏数据集',key:'CollectedDataset'},{title:'被推荐数据集数',key:'RecommendDataset'},{title:'收藏镜像数',key:'CollectImage'},{title:'被收藏镜像数',key:'CollectedImage'},{title:'被推荐镜像数',key:'RecommendImage'},{title:'系统统计时间',key:'CountDate'}], | |||
| blob:'', | |||
| fileName:'', | |||
| dynamic:7, | |||
| @@ -3898,7 +3898,7 @@ function initVueDataset() { | |||
| }, | |||
| }, | |||
| components: { | |||
| MinioUploader, | |||
| MinioUploader | |||
| }, | |||
| mounted(){ | |||
| // if(document.getElementById('postPath')){ | |||
| @@ -4453,7 +4453,6 @@ function initVueDataAnalysis() { | |||
| render: h => h(DataAnalysis) | |||
| }); | |||
| } | |||
| function initVueWxAutorize() { | |||
| const el = document.getElementById('WxAutorize'); | |||
| if (!el) { | |||
| @@ -248,7 +248,22 @@ footer .column{margin-bottom:0!important; padding-bottom:0!important;} | |||
| .icon-bind{background-position: -550px -52px;} | |||
| .icon-unbind{background-position: -568px -52px;} | |||
| .CREATING, .STOPPING, .DELETING, .STARTING, i.WAITING ,.INIT,.KILLING{display:inline-block;background-image:url('/img/loading.gif');background-repeat:no-repeat;width:16px;height:16px;background-size:16px 16px;margin-right:5px;} | |||
| .icon-to-top{ | |||
| background:url("/img/icons.svg"); | |||
| background-position: -540px -208px; | |||
| width: 30px; | |||
| height: 30px; | |||
| display: inline-block; | |||
| cursor: pointer; | |||
| } | |||
| .icon-to-bottom{ | |||
| background:url("/img/icons.svg"); | |||
| background-position: -574px -208px; | |||
| width: 30px; | |||
| height: 30px; | |||
| display: inline-block; | |||
| cursor: pointer; | |||
| } | |||
| i.COMPLETED,i.SUCCEEDED{display:inline-block;width:18px;height:18px;background:url("/img/icons.svg");background-position: -496px -52px;background-position: -441px -52px;} | |||
| .text_over{ | |||
| overflow: hidden; | |||