diff --git a/models/cloudbrain.go b/models/cloudbrain.go index e28ba3ea5..810e68d30 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -1566,6 +1566,14 @@ func GetCloudbrainCountByUserID(userID int64, jobType string) (int, error) { return int(count), err } +func GetCloudbrainRunCountByRepoID(repoID int64) (int, error) { + count, err := x.In("status", JobWaiting, JobRunning, ModelArtsCreateQueue, ModelArtsCreating, ModelArtsStarting, + ModelArtsReadyToStart, ModelArtsResizing, ModelArtsStartQueuing, ModelArtsRunning, ModelArtsRestarting, ModelArtsTrainJobInit, + ModelArtsTrainJobImageCreating, ModelArtsTrainJobSubmitTrying, ModelArtsTrainJobWaiting, ModelArtsTrainJobRunning, + ModelArtsTrainJobScaling, ModelArtsTrainJobCheckInit, ModelArtsTrainJobCheckRunning, ModelArtsTrainJobCheckRunningCompleted).And("repo_id = ?", repoID).Count(new(Cloudbrain)) + return int(count), err +} + func GetBenchmarkCountByUserID(userID int64) (int, error) { count, err := x.In("status", JobWaiting, JobRunning).And("(job_type = ? or job_type = ? or job_type = ?) and user_id = ? and type = ?", string(JobTypeBenchmark), string(JobTypeBrainScore), string(JobTypeSnn4imagenet), userID, TypeCloudBrainOne).Count(new(Cloudbrain)) return int(count), err diff --git a/models/custom_migrations.go b/models/custom_migrations.go index d0158530b..412bedce1 100644 --- a/models/custom_migrations.go +++ b/models/custom_migrations.go @@ -1,8 +1,6 @@ package models import ( - "fmt" - "code.gitea.io/gitea/modules/log" "xorm.io/xorm" ) @@ -22,7 +20,6 @@ var customMigrations = []CustomMigration{ } var customMigrationsStatic = []CustomMigrationStatic{ - {"Delete organization user history data ", deleteNotDisplayUser}, {"update issue_fixed_rate to 1 if num_issues is 0 ", updateIssueFixedRate}, } @@ -36,7 +33,6 @@ func MigrateCustom(x *xorm.Engine) { } } - } func MigrateCustomStatic(x *xorm.Engine, static *xorm.Engine) { @@ -58,24 +54,6 @@ func syncTopicStruct(x *xorm.Engine) error { return err } -func deleteNotDisplayUser(x *xorm.Engine, static *xorm.Engine) error { - - querySQL := "select id,name from public.user where type=1" - rows, err := x.Query(querySQL) - if err != nil { - log.Info("select db failed,err:", err) - return err - } - - for i, userRow := range rows { - log.Info("delete zuzi user, i=" + fmt.Sprint(i) + " userName=" + string(userRow["name"])) - deleteSql := "delete from user_business_analysis where id=" + string(userRow["id"]) + " and name='" + string(userRow["name"]) + "'" - static.Exec(deleteSql) - } - - return nil -} - func updateIssueFixedRate(x *xorm.Engine, static *xorm.Engine) error { updateSQL := "update repo_statistic set issue_fixed_rate=1.0 where num_issues=0" _, err := static.Exec(updateSQL) diff --git a/models/dataset.go b/models/dataset.go index d3a142742..d4a7748d3 100755 --- a/models/dataset.go +++ b/models/dataset.go @@ -1,10 +1,12 @@ package models import ( - "code.gitea.io/gitea/modules/log" "errors" "fmt" "sort" + "strings" + + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/timeutil" "xorm.io/builder" @@ -179,7 +181,7 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { func generateFilterCond(opts *SearchDatasetOptions, cond builder.Cond) builder.Cond { if len(opts.Keyword) > 0 { - cond = cond.And(builder.Or(builder.Like{"dataset.title", opts.Keyword}, builder.Like{"dataset.description", opts.Keyword})) + cond = cond.And(builder.Or(builder.Like{"LOWER(dataset.title)", strings.ToLower(opts.Keyword)}, builder.Like{"LOWER(dataset.description)", 
strings.ToLower(opts.Keyword)})) } if len(opts.Category) > 0 { diff --git a/models/models.go b/models/models.go index 2a2e119fb..9d255c5e6 100755 --- a/models/models.go +++ b/models/models.go @@ -157,6 +157,7 @@ func init() { new(UserBusinessAnalysisCurrentMonth), new(UserBusinessAnalysisCurrentWeek), new(UserBusinessAnalysisYesterday), + new(UserBusinessAnalysisLastWeek), new(UserLoginLog), new(UserMetrics), new(UserAnalysisPara), diff --git a/models/org.go b/models/org.go index 2a6528023..c956f1f89 100755 --- a/models/org.go +++ b/models/org.go @@ -160,7 +160,11 @@ func UpdateOrgStatistics() { has, _ := x.Get(orgStat) orgStat.NumScore = numScore - if has { + + count, err := GetPublicRepositoryCount(&org) + if err != nil || count == 0 { + x.ID(orgStat.ID).Delete(new(OrgStatistic)) + } else if has { x.ID(orgStat.ID).Cols("num_score").Update(&orgStat) } else { x.Insert(orgStat) diff --git a/models/repo_activity_custom.go b/models/repo_activity_custom.go index ac39a8de7..cbe00b9d9 100644 --- a/models/repo_activity_custom.go +++ b/models/repo_activity_custom.go @@ -211,7 +211,7 @@ func setKeyContributerDict(contributorDistinctDict map[string]int, email string, } } -func GetAllUserKPIStats() (map[string]*git.UserKPIStats, error) { +func GetAllUserKPIStats(startTime time.Time, endTime time.Time) (map[string]*git.UserKPIStats, error) { authors := make(map[string]*git.UserKPIStats) repositorys, err := GetAllRepositoriesByFilterCols("owner_name", "name") if err != nil { } for _, repository := range repositorys { - authorsOneRepo, err1 := git.GetUserKPIStats(repository.RepoPath()) + authorsOneRepo, err1 := git.GetUserKPIStats(repository.RepoPath(), startTime, endTime) if err1 != nil { log.Warn("get user kpi status err:"+repository.RepoPath(), err1.Error()) continue diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index 5d77e876b..333867fb2 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -407,15 +407,147 @@ func QueryUserStaticDataAll(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusi return userBusinessAnalysisReturnList, allCount } +func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wikiCountMap map[string]int) ([]*UserBusinessAnalysis, int64) { + log.Info("start to count other user info data") + sess := x.NewSession() + defer sess.Close() + + currentTimeNow := time.Now() + log.Info("current time:" + currentTimeNow.Format("2006-01-02 15:04:05")) + + start_unix := opts.StartTime + + end_unix := opts.EndTime + CountDate := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 1, 0, 0, currentTimeNow.Location()) + DataDate := currentTimeNow.Format("2006-01-02 15:04") + + CodeMergeCountMap := queryPullRequest(start_unix, end_unix) + CommitCountMap := queryCommitAction(start_unix, end_unix, 5) + IssueCountMap := queryCreateIssue(start_unix, end_unix) + + CommentCountMap := queryComment(start_unix, end_unix) + FocusRepoCountMap := queryWatch(start_unix, end_unix) + StarRepoCountMap := queryStar(start_unix, end_unix) + WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) + + StartTime := time.Unix(start_unix, 0) + EndTime := time.Unix(end_unix, 0) + CommitCodeSizeMap, err := GetAllUserKPIStats(StartTime, EndTime) + if err != nil { + log.Info("query commit code error.") + } else { + log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap))) + 
CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap) + log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson)) + } + CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix) + SolveIssueCountMap := querySolveIssue(start_unix, end_unix) + CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix) + LoginCountMap := queryLoginCount(start_unix, end_unix) + OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix) + CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) + AiModelManageMap := queryUserModel(start_unix, end_unix) + + CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix) + RecommendDataset := queryRecommedDataSet(start_unix, end_unix) + CollectImage, CollectedImage := queryImageStars(start_unix, end_unix) + RecommendImage := queryRecommedImage(start_unix, end_unix) + + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + + cond := "type != 1 and is_active=true" + count, err := sess.Where(cond).Count(new(User)) + + ParaWeight := getParaWeight() + ResultList := make([]*UserBusinessAnalysis, 0) + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + userList := make([]*User, 0) + sess.Find(&userList) + + for i, userRecord := range userList { + var dateRecord UserBusinessAnalysis + dateRecord.ID = userRecord.ID + log.Info("i=" + fmt.Sprint(i) + " userName=" + userRecord.Name) + dateRecord.CountDate = CountDate.Unix() + dateRecord.DataDate = DataDate + dateRecord.Email = userRecord.Email + dateRecord.RegistDate = userRecord.CreatedUnix + dateRecord.Name = userRecord.Name + dateRecord.UserLocation = userRecord.Location + dateRecord.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime()) + + dateRecord.CodeMergeCount = getMapValue(dateRecord.ID, CodeMergeCountMap) + dateRecord.CommitCount = getMapValue(dateRecord.ID, CommitCountMap) + dateRecord.IssueCount = getMapValue(dateRecord.ID, IssueCountMap) + dateRecord.CommentCount = getMapValue(dateRecord.ID, CommentCountMap) + dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap) + dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap) + dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap) + dateRecord.FocusOtherUser = getMapValue(dateRecord.ID, WatchOtherMap) + if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok { + dateRecord.CommitCodeSize = 0 + } else { + dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines) + } + dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap) + dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap) + dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap) + + dateRecord.EncyclopediasCount = getMapKeyStringValue(dateRecord.Name, wikiCountMap) + + dateRecord.CreateRepoCount = getMapValue(dateRecord.ID, CreateRepoCountMap) + + dateRecord.LoginCount = getMapValue(dateRecord.ID, LoginCountMap) + + if _, ok := OpenIIndexMap[dateRecord.ID]; !ok { + dateRecord.OpenIIndex = 0 + } else { + dateRecord.OpenIIndex = OpenIIndexMap[dateRecord.ID] + } + + dateRecord.CloudBrainTaskNum = getMapValue(dateRecord.ID, CloudBrainTaskMap) + dateRecord.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuDebugJob", CloudBrainTaskItemMap) + dateRecord.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuDebugJob", 
CloudBrainTaskItemMap) + dateRecord.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuTrainJob", CloudBrainTaskItemMap) + dateRecord.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuTrainJob", CloudBrainTaskItemMap) + dateRecord.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap) + dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) + dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap) + dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap) + + dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset) + dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset) + dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset) + dateRecord.CollectImage = getMapValue(dateRecord.ID, CollectImage) + dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage) + dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage) + + dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight) + ResultList = append(ResultList, &dateRecord) + } + + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + log.Info("query user define,count=" + fmt.Sprint(len(ResultList))) + return ResultList, int64(len(ResultList)) +} + func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusinessAnalysis, int64) { log.Info("query startTime =" + fmt.Sprint(opts.StartTime) + " endTime=" + fmt.Sprint(opts.EndTime) + " isAll=" + fmt.Sprint(opts.IsAll)) statictisSess := xStatistic.NewSession() defer statictisSess.Close() - currentTimeNow := time.Now() - pageStartTime := getLastCountDate() - pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()).Unix() + //currentTimeNow := time.Now() + //pageStartTime := getLastCountDate() + //pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()).Unix() var cond = builder.NewCond() if len(opts.UserName) > 0 { @@ -424,10 +556,10 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus ) } cond = cond.And( - builder.Gte{"count_date": pageStartTime}, + builder.Gte{"count_date": opts.StartTime}, ) cond = cond.And( - builder.Lte{"count_date": pageEndTime}, + builder.Lte{"count_date": opts.EndTime}, ) count, err := statictisSess.Where(cond).Count(new(UserBusinessAnalysis)) @@ -447,7 +579,7 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus } userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0) - if err := statictisSess.Table("user_business_analysis").Where(cond).OrderBy("id desc"). + if err := statictisSess.Table("user_business_analysis").Where(cond).OrderBy("count_date,id desc"). 
Find(&userBusinessAnalysisList); err != nil { return nil, 0 } @@ -532,10 +664,8 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS log.Info("truncate all data from table: " + tableName) statictisSess.Exec("TRUNCATE TABLE " + tableName) - StartTimeNextDay := pageStartTime.AddDate(0, 0, 1) - EndTimeNextDay := pageEndTime.AddDate(0, 0, 1) - log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05") + " nextDay:" + StartTimeNextDay.Format("2006-01-02 15:04:05")) - log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05") + " nextDay:" + EndTimeNextDay.Format("2006-01-02 15:04:05")) + log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05")) + log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05")) start_unix := pageStartTime.Unix() end_unix := pageEndTime.Unix() @@ -551,7 +681,15 @@ FocusRepoCountMap := queryWatch(start_unix, end_unix) StarRepoCountMap := queryStar(start_unix, end_unix) WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) - CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix()) + CommitCodeSizeMap, err := GetAllUserKPIStats(pageStartTime, pageEndTime) + if err != nil { + log.Info("query commit code error.") + } else { + log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap))) + CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap) + log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson)) + } + //CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix()) CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix) SolveIssueCountMap := querySolveIssue(start_unix, end_unix) CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix) @@ -605,7 +743,12 @@ dateRecordAll.FocusOtherUser = getMapValue(dateRecordAll.ID, WatchOtherMap) dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap) dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap) - dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap) + if _, ok := CommitCodeSizeMap[dateRecordAll.Email]; !ok { + dateRecordAll.CommitCodeSize = 0 + } else { + dateRecordAll.CommitCodeSize = int(CommitCodeSizeMap[dateRecordAll.Email].CommitLines) + } + //dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap) dateRecordAll.CommitDatasetSize = getMapValue(dateRecordAll.ID, CommitDatasetSizeMap) dateRecordAll.CommitDatasetNum = getMapValue(dateRecordAll.ID, CommitDatasetNumMap) dateRecordAll.SolveIssueCount = getMapValue(dateRecordAll.ID, SolveIssueCountMap) @@ -626,6 +769,7 @@ dateRecordAll.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuTrainJob", CloudBrainTaskItemMap) dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap) dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) + dateRecordAll.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap) dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap) dateRecordAll.CollectDataset = 
getMapValue(dateRecordAll.ID, CollectDataset) dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset) @@ -733,7 +877,12 @@ func RefreshUserStaticAllTabel(wikiCountMap map[string]int, userMetrics map[stri pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, offset) refreshUserStaticTable(wikiCountMap, "user_business_analysis_current_week", pageStartTime, pageEndTime, userMetrics) + pageEndTime = pageStartTime + pageStartTime = pageStartTime.AddDate(0, 0, -7) + refreshUserStaticTable(wikiCountMap, "user_business_analysis_last_week", pageStartTime, pageEndTime, userMetrics) + pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -30) + pageEndTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) refreshUserStaticTable(wikiCountMap, "user_business_analysis_last30_day", pageStartTime, pageEndTime, userMetrics) pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -1) @@ -774,7 +923,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, StarRepoCountMap := queryStar(start_unix, end_unix) WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) - CommitCodeSizeMap, err := GetAllUserKPIStats() + CommitCodeSizeMap, err := GetAllUserKPIStats(startTime, endTime) if err != nil { log.Info("query commit code errr.") } else { @@ -825,7 +974,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, dateRecord.Name = userRecord.Name dateRecord.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime()) dateRecord.DataDate = DataDate - + dateRecord.UserLocation = userRecord.Location dateRecord.CodeMergeCount = getMapValue(dateRecord.ID, CodeMergeCountMap) dateRecord.CommitCount = getMapValue(dateRecord.ID, CommitCountMap) dateRecord.IssueCount = getMapValue(dateRecord.ID, IssueCountMap) @@ -878,15 +1027,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, log.Info("has activity." + userRecord.Name) addUserToMap(userNewAddActivity, userRecord.CreatedUnix, dateRecord.ID) } - if userRecord.IsActive { - continue - } - statictisSess.Delete(&dateRecord) - _, err = statictisSess.Insert(&dateRecord) - if err != nil { - log.Info("insert daterecord failed." 
+ err.Error()) - return err - } + } indexTotal += PAGE_SIZE @@ -971,7 +1112,7 @@ func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, en //HasActivityUser int `xorm:"NOT NULL DEFAULT 0"` //TotalActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"` //TotalHasActivityUser - regist_time := user.CreatedUnix.AsTime().Unix() + regist_time := int64(user.CreatedUnix) if regist_time >= start_time && regist_time <= end_time { if user.IsActive { userMetrics["ActivateRegistUser"] = getMapKeyStringValue("ActivateRegistUser", userMetrics) + 1 @@ -1638,7 +1779,7 @@ func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64 var indexTotal int64 indexTotal = 0 for { - sess.Select("id,uid,dataset_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + sess.Select("id,uid,image_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) imageStarList := make([]*ImageStar, 0) sess.Find(&imageStarList) log.Info("query imageStarList size=" + fmt.Sprint(len(imageStarList))) @@ -1906,7 +2047,7 @@ func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[s var indexTotal int64 indexTotal = 0 for { - sess.Select("id,job_type,user_id,duration,train_job_duration,type").Table("cloudbrain").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + sess.Select("id,job_type,user_id,duration,train_job_duration,type").Table("cloudbrain").Unscoped().Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) cloudTaskList := make([]*Cloudbrain, 0) sess.Find(&cloudTaskList) log.Info("query cloudbrain size=" + fmt.Sprint(len(cloudTaskList))) diff --git a/models/user_business_struct.go b/models/user_business_struct.go index 70f806c78..fec361bca 100644 --- a/models/user_business_struct.go +++ b/models/user_business_struct.go @@ -394,6 +394,72 @@ type UserBusinessAnalysisYesterday struct { RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } +type UserBusinessAnalysisLastWeek struct { + ID int64 `xorm:"pk"` + CountDate int64 `xorm:"pk"` + //action :ActionMergePullRequest // 11 + CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"` + //action :ActionCommitRepo + CommitCount int `xorm:"NOT NULL DEFAULT 0"` + //issue // 10 + IssueCount int `xorm:"NOT NULL DEFAULT 0"` + //comment table current date + CommentCount int `xorm:"NOT NULL DEFAULT 0"` + //watch table current date + FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"` + //star table current date + StarRepoCount int `xorm:"NOT NULL DEFAULT 0"` + //follow table + WatchedCount int `xorm:"NOT NULL DEFAULT 0"` + // user table + GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"` + // + CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"` + //attachment table + CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"` + //0 + CommitModelCount int `xorm:"NOT NULL DEFAULT 0"` + //issue, issueassignees + SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"` + //baike + EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"` + //user + RegistDate timeutil.TimeStamp `xorm:"NOT NULL"` + //repo + CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"` + //login count, from elk + LoginCount int `xorm:"NOT NULL DEFAULT 0"` + //openi index + OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"` + //user + Email string `xorm:"NOT NULL"` + //user + Name string `xorm:"NOT NULL"` + DataDate string `xorm:"NULL"` + + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + 
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` +} + type UserAnalysisPara struct { Key string `xorm:"NOT NULL"` Value float64 `xorm:"NOT NULL DEFAULT 0"` diff --git a/modules/auth/wechat/access_token.go b/modules/auth/wechat/access_token.go index 0a63bc2de..f9516e3e1 100644 --- a/modules/auth/wechat/access_token.go +++ b/modules/auth/wechat/access_token.go @@ -9,7 +9,7 @@ import ( const EMPTY_REDIS_VAL = "Nil" -var accessTokenLock = redis_lock.NewDistributeLock() +var accessTokenLock = redis_lock.NewDistributeLock(redis_key.AccessTokenLockKey()) func GetWechatAccessToken() string { token, _ := redis_client.Get(redis_key.WechatAccessTokenKey()) @@ -28,15 +28,15 @@ func GetWechatAccessToken() string { } func refreshAccessToken() { - if ok := accessTokenLock.Lock(redis_key.AccessTokenLockKey(), 3*time.Second); ok { - defer accessTokenLock.UnLock(redis_key.AccessTokenLockKey()) + if ok := accessTokenLock.Lock(3 * time.Second); ok { + defer accessTokenLock.UnLock() callAccessTokenAndUpdateCache() } } func refreshAndGetAccessToken() string { - if ok := accessTokenLock.LockWithWait(redis_key.AccessTokenLockKey(), 3*time.Second, 3*time.Second); ok { - defer accessTokenLock.UnLock(redis_key.AccessTokenLockKey()) + if ok := accessTokenLock.LockWithWait(3*time.Second, 3*time.Second); ok { + defer accessTokenLock.UnLock() token, _ := redis_client.Get(redis_key.WechatAccessTokenKey()) if token != "" { if token == EMPTY_REDIS_VAL { diff --git a/modules/git/repo_stats_custom.go b/modules/git/repo_stats_custom.go index d70a17052..1a7b657d5 100644 --- a/modules/git/repo_stats_custom.go +++ b/modules/git/repo_stats_custom.go @@ -58,12 +58,11 @@ func SetDevelopAge(repoPath string, stats *RepoKPIStats, fromTime time.Time) err return nil } -//Get the user contribution metrics within one day -func GetUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) { - timeUntil := time.Now() - oneDayAgo := timeUntil.AddDate(0, 0, -1) - since := oneDayAgo.Format(time.RFC3339) - args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--since='%s'", since)} +func GetUserKPIStats(repoPath string, startTime time.Time, endTime time.Time) (map[string]*UserKPIStats, error) { + + after := startTime.Format(time.RFC3339) + until := endTime.Format(time.RFC3339) + args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--after='%s'", after), fmt.Sprintf("--until='%s'", until)} stdout, err := NewCommand(args...).RunInDirBytes(repoPath) if err != nil { return nil, err @@ -124,6 +123,14 @@ func GetUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) { } +//Get the user contribution metrics within one day +func getUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) { + timeUntil := time.Now() + oneDayAgo := timeUntil.AddDate(0, 0, -1) + + return 
GetUserKPIStats(repoPath, oneDayAgo, timeUntil) + } + func SetRepoKPIStats(repoPath string, fromTime time.Time, stats *RepoKPIStats, newContributers map[string]struct{}) error { since := fromTime.Format(time.RFC3339) args := []string{"log", "--numstat", "--no-merges", "HEAD", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--since='%s'", since)} diff --git a/modules/redis/redis_lock/lock.go b/modules/redis/redis_lock/lock.go index 0faed3237..b8cd837f1 100644 --- a/modules/redis/redis_lock/lock.go +++ b/modules/redis/redis_lock/lock.go @@ -6,22 +6,23 @@ import ( ) type DistributeLock struct { + lockKey string } -func NewDistributeLock() *DistributeLock { - return &DistributeLock{} +func NewDistributeLock(lockKey string) *DistributeLock { + return &DistributeLock{lockKey: lockKey} } -func (lock *DistributeLock) Lock(lockKey string, expireTime time.Duration) bool { - isOk, _ := redis_client.Setnx(lockKey, "", expireTime) +func (lock *DistributeLock) Lock(expireTime time.Duration) bool { + isOk, _ := redis_client.Setnx(lock.lockKey, "", expireTime) return isOk } -func (lock *DistributeLock) LockWithWait(lockKey string, expireTime time.Duration, waitTime time.Duration) bool { +func (lock *DistributeLock) LockWithWait(expireTime time.Duration, waitTime time.Duration) bool { start := time.Now().Unix() * 1000 duration := waitTime.Milliseconds() for { - isOk, _ := redis_client.Setnx(lockKey, "", expireTime) + isOk, _ := redis_client.Setnx(lock.lockKey, "", expireTime) if isOk { return true } @@ -34,7 +35,7 @@ func (lock *DistributeLock) LockWithWait(lockKey string, expireTime time.Duratio return false } -func (lock *DistributeLock) UnLock(lockKey string) error { - _, err := redis_client.Del(lockKey) +func (lock *DistributeLock) UnLock() error { + _, err := redis_client.Del(lock.lockKey) return err } diff --git a/modules/repofiles/update.go b/modules/repofiles/update.go index d7751d50e..07440301a 100644 --- a/modules/repofiles/update.go +++ b/modules/repofiles/update.go @@ -783,6 +783,7 @@ func RenameRepoFile(repo *models.Repository, doer *models.User, opts *RenameRepo // Check that the path given in opts.treePath is valid (not a git path) treePath := CleanUploadFileName(opts.TreePath) + treePath = strings.ReplaceAll(treePath, " ", "") if treePath == "" { return models.ErrFilenameInvalid{ Path: opts.TreePath, @@ -942,16 +943,16 @@ func moveAndAddFiles(oldTreePath, newTreePath string, t *TemporaryUploadReposito } //example for v(mode SHA-1 stage file) //100755 d294c88235ac05d3dece028d8a65590f28ec46ac 0 custom/conf/app.ini - v = strings.ReplaceAll(v, "0\t", "") - tmpArray := strings.Split(v, " ") - oldPath := tmpArray[2] + tempArray := strings.Split(v, "0\t") + leftArray := strings.Split(tempArray[0], " ") + oldPath := tempArray[1] newPath := newTreePath + strings.TrimPrefix(oldPath, oldTreePath) // mode 0 means remove file stdIn.WriteString("0 0000000000000000000000000000000000000000\t") stdIn.WriteString(oldPath) stdIn.WriteByte('\000') - stdIn.WriteString(tmpArray[0] + " ") - stdIn.WriteString(tmpArray[1] + "\t") + stdIn.WriteString(leftArray[0] + " ") + stdIn.WriteString(leftArray[1] + "\t") stdIn.WriteString(newPath) stdIn.WriteByte('\000') } diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index ef3ad7705..c52a369ce 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -519,15 +519,18 @@ static.RecommendDataset=Recommended Dataset Count static.CollectImage=Collect Image Count static.CollectedImage=Collected Image 
Count static.RecommendImage=Recommended Image Count +static.email=Email +static.location=Location static.all=All static.public.user_business_analysis_current_month=Current_Month static.public.user_business_analysis_current_week=Current_Week +static.public.user_business_analysis_last_week=Last_Week static.public.user_business_analysis_current_year=Current_Year static.public.user_business_analysis_last30_day=Last_30_day static.public.user_business_analysis_last_month=Last_Month static.public.user_business_analysis_yesterday=Yesterday static.public.user_business_analysis_all=All - +static.downloadinfo=Because the customized time period generates a large amount of data and the calculation takes a long time, please download the exported data from the following address: metrics.sheetname=User Trend Analysis metrics.date=Count Date metrics.newregistuser=New registered user @@ -536,6 +539,7 @@ metrics.hasactivateuser=New contributing activities metrics.newregistnotactiveuser=New inactive metrics.averageuser=Average new users metrics.newuseractiveindex=Activation rate of new users +metrics.currentdayactivity=Current day contributing activities metrics.totalregistuser=Cumulative registered users metrics.totalactiveduser=Cumulative activated users metrics.totalhasactivityuser=Cumulative active users @@ -813,6 +817,7 @@ settings.delete_notices_1= - This operation CANNOT be undone. settings.delete_notices_2= - This operation will permanently delete the %s dataset. settings.delete_notices_fork_1= - Forks of this dataset will become independent after deletion. settings.deletion_success= The dataset has been deleted. +settings.deletion_notice_cloudbrain = Please stop the running cloudbrain tasks in the project before deleting the project. task.machine_translation= machine translation task.question_answering_system= question answering system task.information_retrieval= information retrieval @@ -2945,6 +2950,7 @@ raw_minutes = minutes [dropzone] default_message = Drop files or click here to upload. +default_dataset_message = Click to add files or directly drag and drop files here. invalid_input_type = You can not upload files of this type. file_too_big = File size ({{filesize}} MB) exceeds the maximum size of ({{maxFilesize}} MB). 
remove_file = Remove file diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index 2f0bbe91d..cb1c7565a 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -524,15 +524,18 @@ static.RecommendDataset=被推荐数据集数 static.CollectImage=收藏镜像数 static.CollectedImage=被收藏镜像数 static.RecommendImage=被推荐镜像数 +static.email=Email +static.location=所在地区 static.all=所有 static.public.user_business_analysis_current_month=本月 static.public.user_business_analysis_current_week=本周 +static.public.user_business_analysis_last_week=上周 static.public.user_business_analysis_current_year=今年 static.public.user_business_analysis_last30_day=近30天 static.public.user_business_analysis_last_month=上月 static.public.user_business_analysis_yesterday=昨天 static.public.user_business_analysis_all=所有 - +static.downloadinfo=因自定义时间段产生的数据量比较大,计算时间比较长,请您从如下地址下载导出数据,导出数据下载地址: metrics.sheetname=用户趋势分析 metrics.date=日期 metrics.newregistuser=新增注册用户 @@ -541,6 +544,7 @@ metrics.hasactivateuser=新增有贡献活动 metrics.newregistnotactiveuser=新增未激活 metrics.averageuser=平均新增用户 metrics.newuseractiveindex=新增用户激活率 +metrics.currentdayactivity=当日有贡献活动 metrics.totalregistuser=累计注册用户 metrics.totalactiveduser=累计已激活 metrics.totalhasactivityuser=累计有贡献活动 @@ -1927,6 +1931,7 @@ settings.delete_notices_1=- 此操作 不可以 被回滚。 settings.delete_notices_2=- 此操作将永久删除项目 %s,包括 Git 数据、 任务、评论、百科和协作者的操作权限。 settings.delete_notices_fork_1=- 在此项目删除后,它的派生项目将变成独立项目。 settings.deletion_success=项目已被删除。 +settings.deletion_notice_cloudbrain=请先停止项目内正在运行的云脑任务,然后再删除项目。 settings.update_settings_success=项目设置已更新。 settings.transfer_owner=新拥有者 settings.make_transfer=开始转移 @@ -2955,6 +2960,7 @@ raw_minutes=分钟 [dropzone] default_message=拖动文件或者点击此处上传。 +default_dataset_message=点击添加文件或直接拖拽文件到此处。 invalid_input_type=您不能上传该类型的文件 file_too_big=文件体积({{filesize}} MB)超过了最大允许体积({{maxFilesize}} MB) remove_file=移除文件 diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index d2c6e3633..9a05aa8ae 100755 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -559,10 +559,12 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/query_metrics_all", operationReq, repo_ext.QueryUserMetricsAll) m.Get("/query_user_metrics_page", operationReq, repo_ext.QueryUserMetricDataPage) + m.Get("/download_user_define_file", operationReq, repo_ext.DownloadUserDefineFile) m.Get("/query_user_rank_list", operationReq, repo_ext.QueryRankingList) m.Get("/query_user_static_page", operationReq, repo_ext.QueryUserStaticDataPage) m.Get("/query_user_current_month", operationReq, repo_ext.QueryUserStaticCurrentMonth) m.Get("/query_user_current_week", operationReq, repo_ext.QueryUserStaticCurrentWeek) + m.Get("/query_user_last_week", operationReq, repo_ext.QueryUserStaticLastWeek) m.Get("/query_user_current_year", operationReq, repo_ext.QueryUserStaticCurrentYear) m.Get("/query_user_last30_day", operationReq, repo_ext.QueryUserStaticLast30Day) m.Get("/query_user_last_month", operationReq, repo_ext.QueryUserStaticLastMonth) diff --git a/routers/repo/attachment.go b/routers/repo/attachment.go index aa52a1400..dc2c417e4 100755 --- a/routers/repo/attachment.go +++ b/routers/repo/attachment.go @@ -902,16 +902,17 @@ func CompleteMultipart(ctx *context.Context) { if err != nil { log.Error("SendDecompressTask(%s) failed:%s", uuid, err.Error()) } else { - attachment.DecompressState = models.DecompressStateIng - err = models.UpdateAttachment(attachment) - if err != nil { - log.Error("UpdateAttachment state(%s) failed:%s", uuid, err.Error()) - } + updateAttachmentDecompressStateIng(attachment) } } if 
typeCloudBrain == models.TypeCloudBrainTwo { attachjson, _ := json.Marshal(attachment) - labelmsg.SendDecompressAttachToLabelOBS(string(attachjson)) + err = labelmsg.SendDecompressAttachToLabelOBS(string(attachjson)) + if err != nil { + log.Error("SendDecompressTask to labelsystem (%s) failed:%s", attachment.UUID, err.Error()) + } else { + updateAttachmentDecompressStateIng(attachment) + } } } else { var labelMap map[string]string diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 12d254812..a5dd52956 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -206,7 +206,7 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { ctx.Data["PageIsCloudBrain"] = true displayJobName := form.DisplayJobName jobName := util.ConvertDisplayJobNameToJobName(displayJobName) - image := form.Image + image := strings.TrimSpace(form.Image) uuid := form.Attachment jobType := form.JobType gpuQueue := form.GpuType @@ -283,30 +283,6 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { mkModelPath(modelPath) uploadCodeToMinio(modelPath, jobName, cloudbrain.ModelMountPath+"/") - benchmarkPath := setting.JobPath + jobName + cloudbrain.BenchMarkMountPath - if setting.IsBenchmarkEnabled && jobType == string(models.JobTypeBenchmark) { - var gpuType string - for _, gpuInfo := range gpuInfos.GpuInfo { - if gpuInfo.Queue == gpuQueue { - gpuType = gpuInfo.Value - } - } - downloadRateCode(repo, jobName, setting.BenchmarkOwner, setting.BenchmarkName, benchmarkPath, form.BenchmarkCategory, gpuType) - uploadCodeToMinio(benchmarkPath+"/", jobName, cloudbrain.BenchMarkMountPath+"/") - } - - snn4imagenetPath := setting.JobPath + jobName + cloudbrain.Snn4imagenetMountPath - if setting.IsSnn4imagenetEnabled && jobType == string(models.JobTypeSnn4imagenet) { - downloadRateCode(repo, jobName, setting.Snn4imagenetOwner, setting.Snn4imagenetName, snn4imagenetPath, "", "") - uploadCodeToMinio(snn4imagenetPath+"/", jobName, cloudbrain.Snn4imagenetMountPath+"/") - } - - brainScorePath := setting.JobPath + jobName + cloudbrain.BrainScoreMountPath - if setting.IsBrainScoreEnabled && jobType == string(models.JobTypeBrainScore) { - downloadRateCode(repo, jobName, setting.BrainScoreOwner, setting.BrainScoreName, brainScorePath, "", "") - uploadCodeToMinio(brainScorePath+"/", jobName, cloudbrain.BrainScoreMountPath+"/") - } - err = cloudbrain.GenerateTask(ctx, displayJobName, jobName, image, command, uuid, storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"), storage.GetMinioPath(jobName, cloudbrain.ModelMountPath+"/"), storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"), storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"), @@ -482,6 +458,17 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo ctx.Data["resource_type"] = resourceType.Value } } + } else if cloudbrain.IsBenchmarkJob(task.JobType) { + if benchmarkGpuInfos == nil { + json.Unmarshal([]byte(setting.BenchmarkGpuTypes), &benchmarkGpuInfos) + } + + for _, resourceType := range benchmarkGpuInfos.GpuInfo { + if resourceType.Queue == jobRes.Config.GpuType { + ctx.Data["resource_type"] = resourceType.Value + } + } + } else { if gpuInfos == nil { json.Unmarshal([]byte(setting.GpuTypes), &gpuInfos) @@ -1241,7 +1228,7 @@ func downloadCode(repo *models.Repository, codePath, branchName string) error { return nil } -func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepoName, codePath, benchmarkCategory, 
gpuType string) error { +func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepoName, codePath, benchmarkCategory, gpuType, userName string) error { err := os.MkdirAll(codePath, os.ModePerm) if err != nil { log.Error("mkdir codePath failed", err.Error()) @@ -1269,7 +1256,7 @@ func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepo defer f.Close() data, err := json.Marshal(models.TaskInfo{ - Username: repo.Owner.Name, + Username: userName, TaskName: taskName, CodeName: repo.Name, BenchmarkCategory: strings.Split(benchmarkCategory, ","), @@ -1845,7 +1832,7 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo ctx.Data["PageIsCloudBrain"] = true displayJobName := form.DisplayJobName jobName := util.ConvertDisplayJobNameToJobName(displayJobName) - image := form.Image + image := strings.TrimSpace(form.Image) gpuQueue := form.GpuType command := cloudbrain.CommandBenchmark codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath @@ -1970,7 +1957,7 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo } } - if err := downloadRateCode(repo, jobName, childInfo.Owner, childInfo.RepoName, benchmarkPath, form.BenchmarkCategory, gpuType); err != nil { + if err := downloadRateCode(repo, jobName, childInfo.Owner, childInfo.RepoName, benchmarkPath, form.BenchmarkCategory, gpuType, ctx.User.Name); err != nil { log.Error("downloadRateCode failed, %v", err, ctx.Data["MsgID"]) //cloudBrainNewDataPrepare(ctx) //ctx.RenderWithErr("system error", tplCloudBrainBenchmarkNew, &form) @@ -2068,7 +2055,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm) snn4imagenetPath := setting.JobPath + jobName + cloudbrain.Snn4imagenetMountPath if setting.IsSnn4imagenetEnabled && jobType == string(models.JobTypeSnn4imagenet) { - downloadRateCode(repo, jobName, setting.Snn4imagenetOwner, setting.Snn4imagenetName, snn4imagenetPath, "", "") + downloadRateCode(repo, jobName, setting.Snn4imagenetOwner, setting.Snn4imagenetName, snn4imagenetPath, "", "", ctx.User.Name) uploadCodeToMinio(snn4imagenetPath+"/", jobName, cloudbrain.Snn4imagenetMountPath+"/") command = fmt.Sprintf(cloudbrain.Snn4imagenetCommand, displayJobName, trimSpaceNewlineInString(form.Description)) @@ -2076,7 +2063,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm) benchmarkChildTypeID := 0 brainScorePath := setting.JobPath + jobName + cloudbrain.BrainScoreMountPath if setting.IsBrainScoreEnabled && jobType == string(models.JobTypeBrainScore) { - downloadRateCode(repo, jobName, setting.BrainScoreOwner, setting.BrainScoreName, brainScorePath, "", "") + downloadRateCode(repo, jobName, setting.BrainScoreOwner, setting.BrainScoreName, brainScorePath, "", "", ctx.User.Name) uploadCodeToMinio(brainScorePath+"/", jobName, cloudbrain.BrainScoreMountPath+"/") benchmarkChildTypeID = form.BenchmarkChildTypeID command = fmt.Sprintf(cloudbrain.BrainScoreCommand, getBrainRegion(benchmarkChildTypeID), displayJobName, trimSpaceNewlineInString(form.Description)) @@ -2136,7 +2123,7 @@ func CloudBrainTrainJobNew(ctx *context.Context) { func getTrainJobCommand(form auth.CreateCloudBrainForm) (string, error) { var command string - bootFile := form.BootFile + bootFile := strings.TrimSpace(form.BootFile) params := form.Params if !strings.HasSuffix(bootFile, ".py") { diff --git a/routers/repo/dataset.go b/routers/repo/dataset.go index 73036a2cc..0e57fe1a0 100755 --- a/routers/repo/dataset.go +++ 
b/routers/repo/dataset.go @@ -106,6 +106,8 @@ func DatasetIndex(ctx *context.Context) { MustEnableDataset(ctx) ctx.Data["PageIsDataset"] = true + ctx.Data["SortType"] = ctx.Query("sort") + repo := ctx.Repo.Repository dataset, err := models.GetDatasetByRepo(repo) @@ -128,9 +130,31 @@ func DatasetIndex(ctx *context.Context) { attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo) - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix > attachments[j].CreatedUnix - }) + if ctx.Data["SortType"] == "nameAsc" { + sort.Slice(attachments, func(i, j int) bool { + return strings.ToLower(attachments[i].Name) < strings.ToLower(attachments[j].Name) + }) + } else if ctx.Data["SortType"] == "nameDesc" { + sort.Slice(attachments, func(i, j int) bool { + return strings.ToLower(attachments[i].Name) > strings.ToLower(attachments[j].Name) + }) + } else if ctx.Data["SortType"] == "sizeAsc" { + sort.Slice(attachments, func(i, j int) bool { + return attachments[i].Size < attachments[j].Size + }) + } else if ctx.Data["SortType"] == "sizeDesc" { + sort.Slice(attachments, func(i, j int) bool { + return attachments[i].Size > attachments[j].Size + }) + } else if ctx.Data["SortType"] == "timeAsc" { + sort.Slice(attachments, func(i, j int) bool { + return attachments[i].CreatedUnix < attachments[j].CreatedUnix + }) + } else { + sort.Slice(attachments, func(i, j int) bool { + return attachments[i].CreatedUnix > attachments[j].CreatedUnix + }) + } page := ctx.QueryInt("page") if page <= 0 { diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index bcc6f6156..95ca8df62 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -764,6 +764,7 @@ func trainJobErrorNewDataPrepare(ctx *context.Context, form auth.CreateModelArts ctx.Data["bootFile"] = form.BootFile ctx.Data["uuid"] = form.Attachment ctx.Data["branch_name"] = form.BranchName + ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo return nil } @@ -954,6 +955,7 @@ func versionErrorDataPrepare(ctx *context.Context, form auth.CreateModelArtsTrai return err } ctx.Data["config_list"] = configList.ParaConfigs + ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo return nil } @@ -967,7 +969,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) description := form.Description workServerNumber := form.WorkServerNumber engineID := form.EngineID - bootFile := form.BootFile + bootFile := strings.TrimSpace(form.BootFile) flavorCode := form.Flavor params := form.Params poolID := form.PoolID @@ -1210,7 +1212,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ description := form.Description workServerNumber := form.WorkServerNumber engineID := form.EngineID - bootFile := form.BootFile + bootFile := strings.TrimSpace(form.BootFile) flavorCode := form.Flavor params := form.Params poolID := form.PoolID @@ -1284,7 +1286,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ var parameters models.Parameters param := make([]models.Parameter, 0) - existDeviceTarget := true + existDeviceTarget := false if len(params) != 0 { err := json.Unmarshal([]byte(params), ¶meters) if err != nil { @@ -1471,8 +1473,8 @@ func obsMkdir(dir string) error { } func paramCheckCreateTrainJob(form auth.CreateModelArtsTrainJobForm) error { - if !strings.HasSuffix(form.BootFile, ".py") { - log.Error("the boot file(%s) must be a python file", form.BootFile) + if !strings.HasSuffix(strings.TrimSpace(form.BootFile), ".py") { + 
log.Error("the boot file(%s) must be a python file", strings.TrimSpace(form.BootFile)) return errors.New("启动文件必须是python文件") } @@ -1489,8 +1491,8 @@ func paramCheckCreateTrainJob(form auth.CreateModelArtsTrainJobForm) error { } func paramCheckCreateInferenceJob(form auth.CreateModelArtsInferenceJobForm) error { - if !strings.HasSuffix(form.BootFile, ".py") { - log.Error("the boot file(%s) must be a python file", form.BootFile) + if !strings.HasSuffix(strings.TrimSpace(form.BootFile), ".py") { + log.Error("the boot file(%s) must be a python file", strings.TrimSpace(form.BootFile)) return errors.New("启动文件必须是python文件") } @@ -1803,7 +1805,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference description := form.Description workServerNumber := form.WorkServerNumber engineID := form.EngineID - bootFile := form.BootFile + bootFile := strings.TrimSpace(form.BootFile) flavorCode := form.Flavor params := form.Params poolID := form.PoolID @@ -2175,6 +2177,7 @@ func inferenceJobErrorNewDataPrepare(ctx *context.Context, form auth.CreateModel ctx.Data["model_version"] = form.ModelVersion ctx.Data["ckpt_name"] = form.CkptName ctx.Data["train_url"] = form.TrainUrl + ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo return nil } diff --git a/routers/repo/setting.go b/routers/repo/setting.go index af28f3290..fed89513a 100644 --- a/routers/repo/setting.go +++ b/routers/repo/setting.go @@ -6,7 +6,6 @@ package repo import ( - "code.gitea.io/gitea/modules/notification" "errors" "fmt" "io/ioutil" @@ -15,6 +14,8 @@ import ( "strings" "time" + "code.gitea.io/gitea/modules/notification" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/auth" "code.gitea.io/gitea/modules/base" @@ -477,16 +478,27 @@ func SettingsPost(ctx *context.Context, form auth.RepoSettingForm) { ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil) return } - - if err := repo_service.DeleteRepository(ctx.User, ctx.Repo.Repository); err != nil { - ctx.ServerError("DeleteRepository", err) + count, err := models.GetCloudbrainRunCountByRepoID(repo.ID) + if err != nil { + ctx.ServerError("GetCloudbrainCountByRepoID failed", err) return - } - log.Trace("Repository deleted: %s/%s", ctx.Repo.Owner.Name, repo.Name) - go StopJobsByRepoID(repo.ID) + } else { + if count >= 1 { + ctx.Data["Err_RepoName"] = nil + ctx.Flash.Error(ctx.Tr("repo.settings.deletion_notice_cloudbrain")) + ctx.Redirect(ctx.Repo.RepoLink + "/settings") + return + } + if err := repo_service.DeleteRepository(ctx.User, ctx.Repo.Repository); err != nil { + ctx.ServerError("DeleteRepository", err) + return + } + log.Trace("Repository deleted: %s/%s", ctx.Repo.Owner.Name, repo.Name) + go StopJobsByRepoID(repo.ID) - ctx.Flash.Success(ctx.Tr("repo.settings.deletion_success")) - ctx.Redirect(ctx.Repo.Owner.DashboardLink()) + ctx.Flash.Success(ctx.Tr("repo.settings.deletion_success")) + ctx.Redirect(ctx.Repo.Owner.DashboardLink()) + } case "delete-wiki": if !ctx.Repo.IsOwner() { diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 1bebb9f3e..207727af1 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -4,6 +4,7 @@ import ( "fmt" "net/http" "net/url" + "os" "time" "code.gitea.io/gitea/models" @@ -16,7 +17,8 @@ import ( ) const ( - PAGE_SIZE = 2000 + PAGE_SIZE = 2000 + Excel_File_Path = "/useranalysis/" ) func getUserMetricsExcelHeader(ctx *context.Context) map[string]string { @@ -27,6 +29,7 @@ func getUserMetricsExcelHeader(ctx 
*context.Context) map[string]string { excelHeader = append(excelHeader, ctx.Tr("user.metrics.hasactivateuser")) excelHeader = append(excelHeader, ctx.Tr("user.metrics.newregistnotactiveuser")) excelHeader = append(excelHeader, ctx.Tr("user.metrics.newuseractiveindex")) + excelHeader = append(excelHeader, ctx.Tr("user.metrics.currentdayactivity")) excelHeader = append(excelHeader, ctx.Tr("user.metrics.totalregistuser")) excelHeader = append(excelHeader, ctx.Tr("user.metrics.totalactiveduser")) excelHeader = append(excelHeader, ctx.Tr("user.metrics.totalhasactivityuser")) @@ -65,6 +68,10 @@ func writeUserMetricsExcel(row int, xlsx *excelize.File, sheetName string, userM } xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, value) tmp = tmp + 1 + + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.HasActivityUser) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.TotalUser) tmp = tmp + 1 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.TotalActivateRegistUser) @@ -104,6 +111,9 @@ func getExcelHeader(ctx *context.Context) map[string]string { excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedImage")) excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendImage")) + excelHeader = append(excelHeader, ctx.Tr("user.static.email")) + excelHeader = append(excelHeader, ctx.Tr("user.static.location")) + excelHeader = append(excelHeader, ctx.Tr("user.static.registdate")) excelHeader = append(excelHeader, ctx.Tr("user.static.countdate")) @@ -179,6 +189,13 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode tmp = tmp + 1 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage) tmp = tmp + 1 + + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Email) + tmp = tmp + 1 + + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.UserLocation) + tmp = tmp + 1 + formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3]) tmp = tmp + 1 @@ -186,6 +203,82 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode formatTime = userRecord.DataDate xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime) } + +func writeExcelPage(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysis) { + rows := fmt.Sprint(row) + var tmp byte + tmp = 0 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CodeMergeCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.IssueCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommentCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.StarRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, 
userRecord.WatchedCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCodeSize) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SolveIssueCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.EncyclopediasCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CreateRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CloudBrainTaskNum) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitDatasetNum) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectImage) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedImage) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage) + tmp = tmp + 1 + + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Email) + tmp = tmp + 1 + + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.UserLocation) + tmp = tmp + 1 + + formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3]) + tmp = tmp + 1 + + formatTime = userRecord.DataDate + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime) +} + func getColumn(tmp byte) string { var tmpA byte tmpA = 'A' @@ -330,6 +423,23 @@ func QueryRankingList(ctx *context.Context) { ctx.JSON(http.StatusOK, mapInterface) } +func DownloadUserDefineFile(ctx *context.Context) { + filename := ctx.Query("filename") + length := len(filename) + if filename[0:1] == "\"" { + filename = filename[1 : length-1] + } + allFilename := setting.AppDataPath + Excel_File_Path + filename + log.Info("allFilename=" + allFilename) + _, err := os.Stat(allFilename) + if err != nil { //file does not exist + log.Info("file does not exist.") + ctx.JSON(http.StatusOK, "File Not Exist.") + } else { + ctx.ServeFile(allFilename, url.QueryEscape(filename)) + } +} + func QueryUserMetricsCurrentMonth(ctx *context.Context) { currentTimeNow := time.Now() @@ -365,6 +475,10 @@ func QueryUserMetricsCurrentWeek(ctx *context.Context) { func QueryUserStaticCurrentWeek(ctx *context.Context) { queryUserDataPage(ctx, "public.user_business_analysis_current_week", new(models.UserBusinessAnalysisCurrentWeek)) } +func QueryUserStaticLastWeek(ctx *context.Context) { + queryUserDataPage(ctx, "public.user_business_analysis_last_week", new(models.UserBusinessAnalysisLastWeek)) +} + func QueryUserMetricsCurrentYear(ctx *context.Context) { currentTimeNow := time.Now() pageStartTime := time.Date(currentTimeNow.Year(), 1, 1, 0, 0, 0, 0, currentTimeNow.Location()) @@ -450,14 +564,13 @@ func QueryUserStaticDataPage(ctx *context.Context) { endTime = time.Now() } else { 
startTime, _ = time.ParseInLocation("2006-01-02", startDate, time.Local) - startTime = time.Date(startTime.Year(), startTime.Month(), startTime.Day(), 12, 0, 0, 0, startTime.Location()) + startTime = time.Date(startTime.Year(), startTime.Month(), startTime.Day(), 0, 0, 0, 0, startTime.Location()) settingStartTime, _ := time.Parse("2006-01-02", setting.RadarMap.RecordBeginTime) if startTime.Unix() < settingStartTime.Unix() { startTime = settingStartTime startDate = settingStartTime.Format("2006-01-02") } endTime, _ = time.ParseInLocation("2006-01-02", endDate, time.Local) - endTime = endTime.AddDate(0, 0, 1) endTime = time.Date(endTime.Year(), endTime.Month(), endTime.Day(), 23, 59, 59, 0, startTime.Location()) isAll = false @@ -481,36 +594,14 @@ func QueryUserStaticDataPage(ctx *context.Context) { } if IsReturnFile { - re, count := models.QueryUserStaticDataAll(pageOpts) - log.Info("return count=" + fmt.Sprint(count)) - //writer exec file. - xlsx := excelize.NewFile() + //re, count := models.QueryUserStaticDataAll(pageOpts) + wikiMap, _ := queryWikiCountMap(startTime, endTime) + re, count := models.QueryUserStaticDataForUserDefine(pageOpts, wikiMap) sheetName := ctx.Tr("user.static.sheetname") - index := xlsx.NewSheet(sheetName) - xlsx.DeleteSheet("Sheet1") - - dataHeader := getExcelHeader(ctx) - for k, v := range dataHeader { - //设置单元格的值 - xlsx.SetCellValue(sheetName, k, v) - } - - for i, userRecord := range re { - row := i + 2 - writeExcel(row, xlsx, sheetName, userRecord) - } - - //设置默认打开的表单 - xlsx.SetActiveSheet(index) - - filename := sheetName + "_" + ctx.Tr("user.static.all") + ".xlsx" - - ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(filename)) - ctx.Resp.Header().Set("Content-Type", "application/octet-stream") - if _, err := xlsx.WriteTo(ctx.Resp); err != nil { - log.Info("writer exel error." + err.Error()) - } - + filename := sheetName + "_" + startDate + "_" + endDate + ".xlsx" + os.Remove(setting.AppDataPath + Excel_File_Path + filename) + go writeFileToDisk(ctx, count, re, filename) + ctx.JSON(http.StatusOK, ctx.Tr("user.static.downloadinfo")+setting.AppURL+"api/v1/download_user_define_file?filename="+filename) } else { mapInterface := make(map[string]interface{}) re, count := models.QueryUserStaticDataPage(pageOpts) @@ -520,22 +611,47 @@ } } -func TimingCountDataByDateAndReCount(date string, isReCount bool) { +func writeFileToDisk(ctx *context.Context, count int64, re []*models.UserBusinessAnalysis, filename string) { + log.Info("return count=" + fmt.Sprint(count)) + //write excel file. 
+ xlsx := excelize.NewFile() + sheetName := ctx.Tr("user.static.sheetname") + index := xlsx.NewSheet(sheetName) + xlsx.DeleteSheet("Sheet1") + + dataHeader := getExcelHeader(ctx) + for k, v := range dataHeader { + //set the cell value + xlsx.SetCellValue(sheetName, k, v) + } - t, _ := time.Parse("2006-01-02", date) - startTime := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location()) + for i, userRecord := range re { + row := i + 2 + writeExcelPage(row, xlsx, sheetName, userRecord) + } - endTime := time.Date(t.Year(), t.Month(), t.Day(), 23, 59, 59, 0, t.Location()) + //set the sheet opened by default + xlsx.SetActiveSheet(index) - //query wiki data - log.Info("start to time count data") + //ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(filename)) + //ctx.Resp.Header().Set("Content-Type", "application/octet-stream") + filename = setting.AppDataPath + Excel_File_Path + filename + os.Mkdir(setting.AppDataPath+Excel_File_Path, 0755) + if err := xlsx.SaveAs(filename); err != nil { + log.Info("write excel error." + err.Error()) + } else { + log.Info("write to file succeeded, filepath=" + filename) + } +} + +func queryWikiCountMap(startTime time.Time, endTime time.Time) (map[string]int, error) { wikiMap := make(map[string]int) warnEmailMessage := "用户统计信息入库失败,请尽快定位。" repoList, err := models.GetAllRepositories() if err != nil { log.Error("query repo error." + err.Error()) mailer.SendWarnNotifyMail(setting.Warn_Notify_Mails, warnEmailMessage) - return + return nil, err } log.Info("start to query wiki data") for _, repoRecord := range repoList { @@ -543,7 +659,7 @@ time, err := git.GetLatestCommitTime(wikiPath) if err == nil { log.Info("last commit time:" + time.Format("2006-01-02 15:04:05") + " wikiPath=" + wikiPath) - if time.After(startTime) { + if time.After(startTime) && time.Before(endTime) { wikiRepo, _, err := FindWikiRepoCommitByWikiPath(wikiPath) if err != nil { log.Error("wiki not exist. wikiPath=" + wikiPath) @@ -568,14 +684,29 @@ } } } + return wikiMap, nil +} + +func TimingCountDataByDateAndReCount(date string, isReCount bool) { + + t, _ := time.Parse("2006-01-02", date) + startTime := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location()) + startTime = startTime.UTC() + endTime := time.Date(t.Year(), t.Month(), t.Day(), 23, 59, 59, 0, t.Location()) + endTime = endTime.UTC() + log.Info("startTime time:" + startTime.Format("2006-01-02 15:04:05")) + log.Info("endTime time:" + endTime.Format("2006-01-02 15:04:05")) + warnEmailMessage := "用户统计信息入库失败,请尽快定位。" + //query wiki data + log.Info("start to time count data") + wikiMap, err := queryWikiCountMap(startTime, endTime) //other user info data err = models.CounDataByDateAndReCount(wikiMap, startTime, endTime, isReCount) if err != nil { log.Error("count user info error." + err.Error()) mailer.SendWarnNotifyMail(setting.Warn_Notify_Mails, warnEmailMessage) } - log.Info("start to count all user info data") - //models.RefreshUserStaticAllTabel(wikiMap) + log.Info("end to count all user info data") } diff --git a/templates/custom/select_dataset.tmpl b/templates/custom/select_dataset.tmpl index befd186c5..d545f487e 100644 --- a/templates/custom/select_dataset.tmpl +++ b/templates/custom/select_dataset.tmpl @@ -1,138 +1,171 @@ - -
+
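Reviewer note on models/dataset.go: the keyword filter becomes case-insensitive by lowering both the column and the search term before the LIKE comparison. A minimal sketch of the same pattern with xorm's builder, pulled out of the full SearchDatasetCondition (the helper name keywordCond is made up):

package models

import (
	"strings"

	"xorm.io/builder"
)

// keywordCond matches title or description regardless of stored casing:
// LOWER(column) LIKE '%keyword%'.
func keywordCond(keyword string) builder.Cond {
	kw := strings.ToLower(keyword)
	return builder.Or(
		builder.Like{"LOWER(dataset.title)", kw},
		builder.Like{"LOWER(dataset.description)", kw},
	)
}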
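Reviewer note on queryCloudBrainTask in models/user_business_analysis.go: adding Unscoped() makes the statistics include soft-deleted cloudbrain rows, assuming the Cloudbrain model carries xorm's deleted tag (xorm otherwise appends a deleted-is-zero filter to every query). A sketch of the difference:

// Default behaviour: rows marked deleted are filtered out.
visible, _ := sess.Where(cond).Count(new(Cloudbrain))

// Unscoped: deleted tasks are counted too, so jobs a user removed
// after completion still contribute to the usage statistics.
all, _ := sess.Unscoped().Where(cond).Count(new(Cloudbrain))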
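Reviewer note on the GetUserKPIStats/GetAllUserKPIStats signature change: the time window is now explicit instead of a hard-coded last 24 hours. A sketch of a caller computing a last-week window, roughly as RefreshUserStaticAllTabel does before invoking refreshUserStaticTable (variable names are illustrative):

now := time.Now()
weekEnd := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, time.Local)
weekStart := weekEnd.AddDate(0, 0, -7)

// Per-author commit KPIs over all repositories in the window, keyed by e-mail.
stats, err := models.GetAllUserKPIStats(weekStart, weekEnd)
if err != nil {
	log.Error("GetAllUserKPIStats: %v", err)
	return
}
for email, s := range stats {
	// CommitLines is the field the analysis code copies into CommitCodeSize.
	log.Info("author " + email + " changed " + fmt.Sprint(s.CommitLines) + " lines")
}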
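Reviewer note on modules/redis/redis_lock: binding the key at construction removes the risk of locking and unlocking under different keys, which the old API allowed. A usage sketch against the new methods (the key string is made up):

lock := redis_lock.NewDistributeLock("demo:refresh:lock")

// Non-blocking attempt; the key auto-expires after 3s if the holder dies.
if ok := lock.Lock(3 * time.Second); ok {
	defer lock.UnLock()
	// ... critical section ...
}

// Waiting variant: retries for up to 3s before giving up.
if ok := lock.LockWithWait(3*time.Second, 3*time.Second); ok {
	defer lock.UnLock()
	// ... critical section ...
}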
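Reviewer note on moveAndAddFiles in modules/repofiles/update.go: the old code stripped the "0\t" stage separator and then split the whole line on spaces, so a path containing a space was truncated at tmpArray[2]. Splitting on "0\t" first keeps mode and SHA-1 on the left and the complete path on the right. A runnable sketch (the space in the sample path is deliberate and invented):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// `git ls-files --stage` line format: <mode> <SHA-1> <stage>\t<path>
	v := "100755 d294c88235ac05d3dece028d8a65590f28ec46ac 0\tcustom/conf/app ini.ini"

	tempArray := strings.Split(v, "0\t")
	leftArray := strings.Split(tempArray[0], " ") // ["100755", "d294c...", ""]
	oldPath := tempArray[1]                       // spaces in the path survive

	fmt.Println(leftArray[0]) // 100755
	fmt.Println(leftArray[1]) // d294c88235ac05d3dece028d8a65590f28ec46ac
	fmt.Println(oldPath)      // custom/conf/app ini.ini
}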
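Reviewer note on DatasetIndex in routers/repo/dataset.go: the if/else chain over ctx.Query("sort") could also be written as a comparator table, which keeps the handler short as sort keys accumulate. A sketch of that alternative, with the same behaviour for the keys the patch covers (not what the patch does):

less := map[string]func(i, j int) bool{
	"nameAsc":  func(i, j int) bool { return strings.ToLower(attachments[i].Name) < strings.ToLower(attachments[j].Name) },
	"nameDesc": func(i, j int) bool { return strings.ToLower(attachments[i].Name) > strings.ToLower(attachments[j].Name) },
	"sizeAsc":  func(i, j int) bool { return attachments[i].Size < attachments[j].Size },
	"sizeDesc": func(i, j int) bool { return attachments[i].Size > attachments[j].Size },
	"timeAsc":  func(i, j int) bool { return attachments[i].CreatedUnix < attachments[j].CreatedUnix },
}
cmp, ok := less[ctx.Query("sort")]
if !ok {
	cmp = func(i, j int) bool { return attachments[i].CreatedUnix > attachments[j].CreatedUnix } // default: newest first
}
sort.Slice(attachments, cmp)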