diff --git a/custom/conf/app.ini.sample b/custom/conf/app.ini.sample index 9d6689493..8bc971f2c 100755 --- a/custom/conf/app.ini.sample +++ b/custom/conf/app.ini.sample @@ -1103,3 +1103,41 @@ PROJECT_NAME = cn-south-222_test USERNAME = test1 PASSWORD = Qizhi@test. DOMAIN = cn-south-222 + +[radar_map] +impact=0.3 +impact_watch=0.1 +impact_star=0.3 +impact_fork=0.3 +impact_code_download=0.2 +impact_comments=0.1 +impact_browser=0.1 + +completeness=0.1 +completeness_issues_closed=0.2 +completeness_releases=0.3 +completeness_develop_age=0.1 +completeness_dataset=0.1 +completeness_model=0.1 +completeness_wiki=0.1 + +liveness=0.3 +liveness_commit=0.2 +liveness_issue=0.2 +liveness_pr=0.2 +liveness_release=0.4 + +project_health=0.1 +project_health_issue_complete_ratio=100 + +team_health=0.1 +team_health_contributors=0.2 +team_health_key_contributors=0.6 +team_health_contributors_added=0.2 + +growth=0.1 +growth_code_lines=0.2 +growth_issue=0.2 +growth_contributors=0.2 +growth_commit=0.2 +growth_comments=0.2 diff --git a/models/attachment.go b/models/attachment.go index db486e8f3..75e937913 100755 --- a/models/attachment.go +++ b/models/attachment.go @@ -473,3 +473,7 @@ func GetAttachmentSizeByDatasetID(datasetID int64) (int64, error) { return total, nil } + +func GetAllAttachmentSize() (int64, error) { + return x.SumInt(&Attachment{}, "size") +} diff --git a/models/cloudbrain.go b/models/cloudbrain.go index 22665b797..4b39dbd11 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -28,6 +28,7 @@ const ( JobTypeDebug JobType = "DEBUG" JobTypeBenchmark JobType = "BENCHMARK" JobTypeSnn4imagenet JobType = "SNN4IMAGENET" + JobTypeBrainScore JobType = "BRAINSCORE" ModelArtsCreateQueue ModelArtsJobStatus = "CREATE_QUEUING" //免费资源创建排队中 ModelArtsCreating ModelArtsJobStatus = "CREATING" //创建中 @@ -156,23 +157,42 @@ type TaskPod struct { TaskRoleStatus struct { Name string `json:"name"` } `json:"taskRoleStatus"` - TaskStatuses []struct { - TaskIndex int `json:"taskIndex"` - 
PodUID string `json:"podUid"` - PodIP string `json:"podIp"` - PodName string `json:"podName"` - ContainerID string `json:"containerId"` - ContainerIP string `json:"containerIp"` - ContainerGpus string `json:"containerGpus"` - State string `json:"state"` - StartAt time.Time `json:"startAt"` - FinishedAt time.Time `json:"finishedAt"` - ExitCode int `json:"exitCode"` - ExitDiagnostics string `json:"exitDiagnostics"` - RetriedCount int `json:"retriedCount"` - StartTime string - FinishedTime string - } `json:"taskStatuses"` + //TaskStatuses []struct { + // TaskIndex int `json:"taskIndex"` + // PodUID string `json:"podUid"` + // PodIP string `json:"podIp"` + // PodName string `json:"podName"` + // ContainerID string `json:"containerId"` + // ContainerIP string `json:"containerIp"` + // ContainerGpus string `json:"containerGpus"` + // State string `json:"state"` + // StartAt time.Time `json:"startAt"` + // FinishedAt time.Time `json:"finishedAt"` + // ExitCode int `json:"exitCode"` + // ExitDiagnostics string `json:"exitDiagnostics"` + // RetriedCount int `json:"retriedCount"` + // StartTime string + // FinishedTime string + //} `json:"taskStatuses"` + TaskStatuses []TaskStatuses `json:"taskStatuses"` +} + +type TaskStatuses struct { + TaskIndex int `json:"taskIndex"` + PodUID string `json:"podUid"` + PodIP string `json:"podIp"` + PodName string `json:"podName"` + ContainerID string `json:"containerId"` + ContainerIP string `json:"containerIp"` + ContainerGpus string `json:"containerGpus"` + State string `json:"state"` + StartAt time.Time `json:"startAt"` + FinishedAt time.Time `json:"finishedAt"` + ExitCode int `json:"exitCode"` + ExitDiagnostics string `json:"exitDiagnostics"` + RetriedCount int `json:"retriedCount"` + StartTime string + FinishedTime string } type TaskInfo struct { @@ -260,6 +280,11 @@ func ConvertToJobResultPayload(input map[string]interface{}) (JobResultPayload, err := json.Unmarshal(data, &jobResultPayload) jobResultPayload.JobStatus.StartTime = 
time.Unix(jobResultPayload.JobStatus.CreatedTime/1000, 0).Format("2006-01-02 15:04:05") jobResultPayload.JobStatus.EndTime = time.Unix(jobResultPayload.JobStatus.CompletedTime/1000, 0).Format("2006-01-02 15:04:05") + + if jobResultPayload.JobStatus.State == string(JobWaiting) { + jobResultPayload.JobStatus.StartTime = "-" + jobResultPayload.JobStatus.EndTime = "-" + } return jobResultPayload, err } @@ -949,7 +974,7 @@ func GetCloudbrainByName(jobName string) (*Cloudbrain, error) { } func CanDelJob(isSigned bool, user *User, job *CloudbrainInfo) bool { - if !isSigned || job.Status != string(JobStopped) { + if !isSigned || (job.Status != string(JobStopped) && job.Status != string(JobFailed) && job.Status != string(ModelArtsStartFailed) && job.Status != string(ModelArtsCreateFailed)){ return false } repo, err := GetRepositoryByID(job.RepoID) diff --git a/models/dataset.go b/models/dataset.go index e7160006d..402a548ef 100755 --- a/models/dataset.go +++ b/models/dataset.go @@ -139,7 +139,14 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { if opts.IncludePublic { cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) if opts.OwnerID > 0 { - cond = cond.Or(builder.Eq{"repository.owner_id": opts.OwnerID}) + if len(opts.Keyword) == 0 { + cond = cond.Or(builder.Eq{"repository.owner_id": opts.OwnerID}) + } else { + subCon := builder.NewCond() + subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID}, builder.Like{"dataset.title", opts.Keyword}) + cond = cond.Or(subCon) + + } } } else if opts.OwnerID > 0 { cond = cond.And(builder.Eq{"repository.owner_id": opts.OwnerID}) diff --git a/models/models.go b/models/models.go index 412148235..0cdcee4fd 100755 --- a/models/models.go +++ b/models/models.go @@ -137,6 +137,7 @@ func init() { tablesStatistic = append(tablesStatistic, new(RepoStatistic), + new(SummaryStatistic), new(UserBusinessAnalysis), ) diff --git a/models/repo.go b/models/repo.go index 744e2744f..c8629875e 100755 --- 
a/models/repo.go +++ b/models/repo.go @@ -1431,6 +1431,15 @@ func GetAllRepositoriesByFilterCols(columns ...string) ([]*Repository, error) { } +func GetAllRepositoriesCount() (int64, error) { + repo := new(Repository) + return x.Count(repo) +} + +func GetAllRepositoriesSize() (int64, error) { + return x.SumInt(&Repository{}, "size") +} + func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err error) { repo.LowerName = strings.ToLower(repo.Name) diff --git a/models/repo_statistic.go b/models/repo_statistic.go index b987f4f46..9863c331a 100755 --- a/models/repo_statistic.go +++ b/models/repo_statistic.go @@ -1,38 +1,62 @@ package models import ( - "code.gitea.io/gitea/modules/timeutil" "fmt" + "time" + + "code.gitea.io/gitea/modules/timeutil" ) // RepoStatistic statistic info of all repository type RepoStatistic struct { - ID int64 `xorm:"pk autoincr"` - RepoID int64 `xorm:"unique(s) NOT NULL"` - Date string `xorm:"unique(s) NOT NULL"` - NumWatches int64 `xorm:"NOT NULL DEFAULT 0"` - NumStars int64 `xorm:"NOT NULL DEFAULT 0"` - NumForks int64 `xorm:"NOT NULL DEFAULT 0"` - NumDownloads int64 `xorm:"NOT NULL DEFAULT 0"` - NumComments int64 `xorm:"NOT NULL DEFAULT 0"` - NumVisits int64 `xorm:"NOT NULL DEFAULT 0"` - NumClosedIssues int64 `xorm:"NOT NULL DEFAULT 0"` - NumVersions int64 `xorm:"NOT NULL DEFAULT 0"` - //develop months - NumDevMonths int64 `xorm:"NOT NULL DEFAULT 0"` - RepoSize int64 `xorm:"NOT NULL DEFAULT 0"` - DatasetSize int64 `xorm:"NOT NULL DEFAULT 0"` - NumModels int64 `xorm:"NOT NULL DEFAULT 0"` - NumWikiViews int64 `xorm:"NOT NULL DEFAULT 0"` - NumCommits int64 `xorm:"NOT NULL DEFAULT 0"` - NumIssues int64 `xorm:"NOT NULL DEFAULT 0"` - NumPulls int64 `xorm:"NOT NULL DEFAULT 0"` - IssueFixedRate float32 `xorm:"NOT NULL"` - NumContributor int64 `xorm:"NOT NULL DEFAULT 0"` - NumKeyContributor int64 `xorm:"NOT NULL DEFAULT 0"` - - CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` - UpdatedUnix timeutil.TimeStamp `xorm:"INDEX 
updated"` + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"unique(s) NOT NULL"` + Date string `xorm:"unique(s) NOT NULL"` + NumWatches int64 `xorm:"NOT NULL DEFAULT 0"` + NumWatchesAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumStars int64 `xorm:"NOT NULL DEFAULT 0"` + NumStarsAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumForks int64 `xorm:"NOT NULL DEFAULT 0"` + NumForksAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumDownloads int64 `xorm:"NOT NULL DEFAULT 0"` + NumDownloadsAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumComments int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommentsAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumVisits int64 `xorm:"NOT NULL DEFAULT 0"` + NumClosedIssues int64 `xorm:"NOT NULL DEFAULT 0"` + NumClosedIssuesAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumVersions int64 `xorm:"NOT NULL DEFAULT 0"` + NumDevMonths int64 `xorm:"NOT NULL DEFAULT 0"` + RepoSize int64 `xorm:"NOT NULL DEFAULT 0"` + DatasetSize int64 `xorm:"NOT NULL DEFAULT 0"` + NumModels int64 `xorm:"NOT NULL DEFAULT 0"` + NumWikiViews int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommits int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommitsAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumIssues int64 `xorm:"NOT NULL DEFAULT 0"` + NumIssuesAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumPulls int64 `xorm:"NOT NULL DEFAULT 0"` + NumPullsAdded int64 `xorm:"NOT NULL DEFAULT 0"` + IssueFixedRate float32 `xorm:"NOT NULL"` + NumContributor int64 `xorm:"NOT NULL DEFAULT 0"` + NumContributorAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumKeyContributor int64 `xorm:"NOT NULL DEFAULT 0"` + + NumContributorsGrowth int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommitsGrowth int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommitLinesGrowth int64 `xorm:"NOT NULL DEFAULT 0"` + NumIssuesGrowth int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommentsGrowth int64 `xorm:"NOT NULL DEFAULT 0"` + + Impact float64 `xorm:"NOT NULL DEFAULT 0"` + Completeness float64 `xorm:"NOT NULL DEFAULT 0"` + Liveness float64 `xorm:"NOT NULL DEFAULT 0"` + ProjectHealth float64 
`xorm:"NOT NULL DEFAULT 0"` + TeamHealth float64 `xorm:"NOT NULL DEFAULT 0"` + Growth float64 `xorm:"NOT NULL DEFAULT 0"` + RadarTotal float64 `xorm:"NOT NULL DEFAULT 0"` + CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` + UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` } func DeleteRepoStatDaily(date string) error { @@ -55,6 +79,35 @@ func DeleteRepoStatDaily(date string) error { return nil } +func GetRepoStatisticByDate(date string) ([]*RepoStatistic, error) { + repoStatistics := make([]*RepoStatistic, 0) + err := xStatistic.Where("date = ?", date).Find(&repoStatistics) + return repoStatistics, err + +} + +func GetOneRepoStatisticBeforeTime(time time.Time) (*RepoStatistic, error) { + repoStatistics := make([]*RepoStatistic, 0) + err := xStatistic.Where("created_unix >= ?", time.Unix()).OrderBy("created_unix").Limit(1).Find(&repoStatistics) + if err != nil { + return nil, err + } else { + if len(repoStatistics) == 0 { + return nil, fmt.Errorf("the repo statistic record count is 0") + } else { + return repoStatistics[0], nil + } + } + +} + func InsertRepoStat(repoStat *RepoStatistic) (int64, error) { return xStatistic.Insert(repoStat) } + +func UpdateRepoStat(repoStat *RepoStatistic) error { + sql := "update repo_statistic set impact=?,completeness=?,liveness=?,project_health=?,team_health=?,growth=?,radar_total=? where repo_id=? and date=?" 
+ + _, err := xStatistic.Exec(sql, repoStat.Impact, repoStat.Completeness, repoStat.Liveness, repoStat.ProjectHealth, repoStat.TeamHealth, repoStat.Growth, repoStat.RadarTotal, repoStat.RepoID, repoStat.Date) + return err +} diff --git a/models/summary_statistic.go b/models/summary_statistic.go new file mode 100644 index 000000000..0addd472b --- /dev/null +++ b/models/summary_statistic.go @@ -0,0 +1,69 @@ +package models + +import ( + "fmt" + + "code.gitea.io/gitea/modules/timeutil" +) + +var DomainMap = map[string]int{ + "大模型": 0, + "ai开发工具": 1, + "计算机视觉": 2, + "自然语言处理": 3, + "机器学习": 4, + "神经网络": 5, + "自动驾驶": 6, + "机器人": 7, + "联邦学习": 8, + "数据挖掘": 9, + "risc-v开发": 10, +} + +type SummaryStatistic struct { + ID int64 `xorm:"pk autoincr"` + Date string `xorm:"unique(s) NOT NULL"` + NumUsers int64 `xorm:"NOT NULL DEFAULT 0"` + RepoSize int64 `xorm:"NOT NULL DEFAULT 0"` + DatasetSize int64 `xorm:"NOT NULL DEFAULT 0"` + NumOrganizations int64 `xorm:"NOT NULL DEFAULT 0"` + NumModels int64 `xorm:"NOT NULL DEFAULT 0"` + NumRepos int64 `xorm:"NOT NULL DEFAULT 0"` + NumRepoBigModel int `xorm:"NOT NULL DEFAULT 0"` + NumRepoAI int `xorm:"NOT NULL DEFAULT 0"` + NumRepoVision int `xorm:"NOT NULL DEFAULT 0"` + NumRepoNLP int `xorm:"NOT NULL DEFAULT 0"` + NumRepoML int `xorm:"NOT NULL DEFAULT 0"` + NumRepoNN int `xorm:"NOT NULL DEFAULT 0"` + NumRepoAutoDrive int `xorm:"NOT NULL DEFAULT 0"` + NumRepoRobot int `xorm:"NOT NULL DEFAULT 0"` + NumRepoLeagueLearn int `xorm:"NOT NULL DEFAULT 0"` + NumRepoDataMining int `xorm:"NOT NULL DEFAULT 0"` + NumRepoRISC int `xorm:"NOT NULL DEFAULT 0"` + CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` + UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` +} + +func DeleteSummaryStatisticDaily(date string) error { + sess := xStatistic.NewSession() + defer sess.Close() + if err := sess.Begin(); err != nil { + return fmt.Errorf("Begin: %v", err) + } + + if _, err := sess.Where("date = ?", date).Delete(&SummaryStatistic{}); err != nil { + return 
fmt.Errorf("Delete: %v", err) + } + + if err := sess.Commit(); err != nil { + sess.Close() + return fmt.Errorf("Commit: %v", err) + } + + sess.Close() + return nil +} + +func InsertSummaryStatistic(summaryStatistic *SummaryStatistic) (int64, error) { + return xStatistic.Insert(summaryStatistic) +} diff --git a/models/topic.go b/models/topic.go index b8d3d9d85..5533da7bc 100644 --- a/models/topic.go +++ b/models/topic.go @@ -98,6 +98,13 @@ func GetTopicByName(name string) (*Topic, error) { return &topic, nil } +func GetAllUsedTopics() ([]*Topic, error) { + topics := make([]*Topic, 0) + err := x.Where("repo_count > ?", 0).Find(&topics) + return topics, err + +} + // addTopicByNameToRepo adds a topic name to a repo and increments the topic count. // Returns topic after the addition func addTopicByNameToRepo(e Engine, repoID int64, topicName string) (*Topic, error) { @@ -178,7 +185,7 @@ func (opts *FindTopicOptions) toConds() builder.Cond { } if opts.Keyword != "" { - cond = cond.And(builder.Like{"topic.name", opts.Keyword}) + cond = cond.And(builder.Like{"topic.name", strings.ToLower(opts.Keyword)}) } return cond diff --git a/models/user.go b/models/user.go index 78ab4627a..1ee20d74c 100755 --- a/models/user.go +++ b/models/user.go @@ -2071,6 +2071,18 @@ func SyncExternalUsers(ctx context.Context, updateExisting bool) error { return nil } +func GetUsersCount() (int64, error) { + user := new(User) + return x.Where("type=0").Count(user) + +} + +func GetOrganizationsCount() (int64, error) { + user := new(User) + return x.Where("type=1").Count(user) + +} + func GetBlockChainUnSuccessUsers() ([]*User, error) { users := make([]*User, 0, 10) err := x.Where("public_key = ''"). 
diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index a7d549cd4..8b7691677 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -71,6 +71,49 @@ type UserBusinessAnalysis struct { Name string `xorm:"NOT NULL"` } +func QueryUserStaticData(startTime int64, endTime int64) []*UserBusinessAnalysis { + log.Info("query startTime =" + fmt.Sprint(startTime) + " endTime=" + fmt.Sprint(endTime)) + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + + statictisSess.Select("*").Table("user_business_analysis").Where(" count_date>=" + fmt.Sprint(startTime) + " and count_date<=" + fmt.Sprint(endTime)).OrderBy("count_date desc") + + userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0) + statictisSess.Find(&userBusinessAnalysisList) + + resultMap := make(map[int64]*UserBusinessAnalysis) + log.Info("query result size=" + fmt.Sprint(len(userBusinessAnalysisList))) + for _, userRecord := range userBusinessAnalysisList { + if _, ok := resultMap[userRecord.ID]; !ok { + resultMap[userRecord.ID] = userRecord + } else { + resultMap[userRecord.ID].CodeMergeCount += userRecord.CodeMergeCount + resultMap[userRecord.ID].CommitCount += userRecord.CommitCount + resultMap[userRecord.ID].IssueCount += userRecord.IssueCount + resultMap[userRecord.ID].CommentCount += userRecord.CommentCount + resultMap[userRecord.ID].FocusRepoCount += userRecord.FocusRepoCount + resultMap[userRecord.ID].StarRepoCount += userRecord.StarRepoCount + resultMap[userRecord.ID].WatchedCount += userRecord.WatchedCount + resultMap[userRecord.ID].CommitCodeSize += userRecord.CommitCodeSize + resultMap[userRecord.ID].CommitDatasetSize += userRecord.CommitDatasetSize + resultMap[userRecord.ID].CommitModelCount += userRecord.CommitModelCount + resultMap[userRecord.ID].SolveIssueCount += userRecord.SolveIssueCount + resultMap[userRecord.ID].EncyclopediasCount += userRecord.EncyclopediasCount + resultMap[userRecord.ID].CreateRepoCount += 
userRecord.CreateRepoCount + resultMap[userRecord.ID].LoginCount += userRecord.LoginCount + } + } + + userBusinessAnalysisReturnList := make([]*UserBusinessAnalysis, len(resultMap)) + index := 0 + for _, v := range resultMap { + userBusinessAnalysisReturnList[index] = v + index += 1 + } + log.Info("return size=" + fmt.Sprint(len(userBusinessAnalysisReturnList))) + return userBusinessAnalysisReturnList +} + func CountData(wikiCountMap map[string]int) { log.Info("start to count other user info data") sess := x.NewSession() @@ -92,7 +135,7 @@ func CountData(wikiCountMap map[string]int) { CountDate := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 1, 0, 0, currentTimeNow.Location()) - CodeMergeCountMap := queryAction(start_unix, end_unix, 11) + CodeMergeCountMap := queryPullRequest(start_unix, end_unix) CommitCountMap := queryAction(start_unix, end_unix, 5) IssueCountMap := queryAction(start_unix, end_unix, 10) @@ -223,6 +266,28 @@ func querySolveIssue(start_unix int64, end_unix int64) map[int64]int { } +func queryPullRequest(start_unix int64, end_unix int64) map[int64]int { + sess := x.NewSession() + defer sess.Close() + + sess.Select("issue.*").Table("issue"). + Join("inner", "pull_request", "issue.id=pull_request.issue_id"). 
+ Where("pull_request.merged_unix>=" + fmt.Sprint(start_unix) + " and pull_request.merged_unix<=" + fmt.Sprint(end_unix)) + + issueList := make([]*Issue, 0) + sess.Find(&issueList) + resultMap := make(map[int64]int) + log.Info("query issue(PR) size=" + fmt.Sprint(len(issueList))) + for _, issueRecord := range issueList { + if _, ok := resultMap[issueRecord.PosterID]; !ok { + resultMap[issueRecord.PosterID] = 1 + } else { + resultMap[issueRecord.PosterID] += 1 + } + } + return resultMap +} + func queryAction(start_unix int64, end_unix int64, actionType int64) map[int64]int { sess := x.NewSession() defer sess.Close() @@ -341,7 +406,7 @@ func queryDatasetSize(start_unix int64, end_unix int64) map[int64]int { func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int { sess := x.NewSession() defer sess.Close() - sess.Select("id,owner_id,name").Table("repository").Where(" created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)) + sess.Select("id,owner_id,name").Table("repository").Where("is_fork=false and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)) repoList := make([]*Repository, 0) sess.Find(&repoList) resultMap := make(map[int64]int) @@ -354,7 +419,6 @@ func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int { } } return resultMap - } func subMonth(t1, t2 time.Time) (month int) { diff --git a/modules/base/tool.go b/modules/base/tool.go index 8145522e2..cf2972990 100644 --- a/modules/base/tool.go +++ b/modules/base/tool.go @@ -224,7 +224,7 @@ func SizedAvatarLinkWithDomain(email string, size int) string { // FileSize calculates the file size and generate user-friendly string. 
func FileSize(s int64) string { - return humanize.IBytes(uint64(s)) + return humanize.Bytes(uint64(s)) } // PrettyNumber produces a string form of the given number in base 10 with diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go index 749b86720..8f6bf4e17 100755 --- a/modules/cloudbrain/cloudbrain.go +++ b/modules/cloudbrain/cloudbrain.go @@ -16,6 +16,7 @@ const ( ModelMountPath = "/model" BenchMarkMountPath = "/benchmark" Snn4imagenetMountPath = "/snn4imagenet" + BrainScoreMountPath = "/brainscore" TaskInfoName = "/taskInfo" SubTaskName = "task1" @@ -27,7 +28,7 @@ var ( ResourceSpecs *models.ResourceSpecs ) -func GenerateTask(ctx *context.Context, jobName, image, command, uuid, codePath, modelPath, benchmarkPath, snn4imagenetPath, jobType, gpuQueue string, resourceSpecId int) error { +func GenerateTask(ctx *context.Context, jobName, image, command, uuid, codePath, modelPath, benchmarkPath, snn4imagenetPath, brainScorePath, jobType, gpuQueue string, resourceSpecId int) error { dataActualPath := setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.Attachment.Minio.BasePath + @@ -103,6 +104,13 @@ func GenerateTask(ctx *context.Context, jobName, image, command, uuid, codePath, ReadOnly: true, }, }, + { + HostPath: models.StHostPath{ + Path: brainScorePath, + MountPath: BrainScoreMountPath, + ReadOnly: true, + }, + }, }, }) if err != nil { diff --git a/modules/context/context.go b/modules/context/context.go old mode 100644 new mode 100755 index 71c8986fb..6877780e3 --- a/modules/context/context.go +++ b/modules/context/context.go @@ -310,9 +310,11 @@ func Contexter() macaron.Handler { ctx.Data["SignedUserID"] = ctx.User.ID ctx.Data["SignedUserName"] = ctx.User.Name ctx.Data["IsAdmin"] = ctx.User.IsAdmin + c.Data["SignedUserName"] = ctx.User.Name } else { ctx.Data["SignedUserID"] = int64(0) ctx.Data["SignedUserName"] = "" + c.Data["SignedUserName"] = "" } // If request sends files, parse them here otherwise 
the Query() can't be parsed and the CsrfToken will be invalid. diff --git a/modules/cron/tasks_basic.go b/modules/cron/tasks_basic.go index 26cd16778..ed9829cef 100755 --- a/modules/cron/tasks_basic.go +++ b/modules/cron/tasks_basic.go @@ -174,6 +174,16 @@ func registerHandleRepoStatistic() { }) } +func registerHandleSummaryStatistic() { + RegisterTaskFatal("handle_summary_statistic", &BaseConfig{ + Enabled: true, + RunAtStart: false, + Schedule: "@daily", + }, func(ctx context.Context, _ *models.User, _ Config) error { + repo.SummaryStatistic() + return nil + }) +} func registerHandleUserStatistic() { RegisterTaskFatal("handle_user_statistic", &BaseConfig{ Enabled: true, @@ -202,4 +212,5 @@ func initBasicTasks() { registerHandleRepoStatistic() registerHandleUserStatistic() + registerHandleSummaryStatistic() } diff --git a/modules/normalization/normalization.go b/modules/normalization/normalization.go new file mode 100644 index 000000000..f651cb5f1 --- /dev/null +++ b/modules/normalization/normalization.go @@ -0,0 +1,83 @@ +package normalization + +import ( + "code.gitea.io/gitea/modules/setting" +) + +func Normalization(value float64, minValue float64, maxValue float64) float64 { + + min := int64(minValue * 100) + max := int64(maxValue * 100) + + if min == max { + return 100.0 + } else { + return 100 * (value - minValue) / (maxValue - minValue) + } + +} + +func GetRadarValue(impactValue float64, completeValue float64, livenessValue float64, projectHealthValue float64, teamHealthValue float64, growthValue float64) float64 { + return setting.RadarMap.Impact*impactValue + + setting.RadarMap.Completeness*completeValue + + setting.RadarMap.Liveness*livenessValue + + setting.RadarMap.ProjectHealth*projectHealthValue + + setting.RadarMap.TeamHealth*teamHealthValue + + setting.RadarMap.Growth*growthValue + +} + +func GetImpactInitValue(watch int64, star int64, fork int64, download int64, comments int64, browser int64) float64 { + + return 
setting.RadarMap.ImpactWatch*float64(watch) + + setting.RadarMap.ImpactStar*float64(star) + + setting.RadarMap.ImpactFork*float64(fork) + + setting.RadarMap.ImpactCodeDownload*float64(download)*0.001 + + setting.RadarMap.ImpactComments*float64(comments) + + setting.RadarMap.ImpactBrowser*float64(browser) + +} + +func GetCompleteInitValue(issuesClosed int64, releases int64, developAge int64, dataset int64, model int64, wiki int64) float64 { + + return setting.RadarMap.CompletenessIssuesClosed*float64(issuesClosed) + + setting.RadarMap.CompletenessReleases*float64(releases) + + setting.RadarMap.CompletenessDevelopAge*float64(developAge) + + setting.RadarMap.CompletenessDataset*float64(dataset/(1024*1024)) + + setting.RadarMap.CompletenessModel*float64(model) + + setting.RadarMap.CompletenessWiki*float64(wiki) + +} + +func GetLivenessInitValue(commits int64, issues int64, pr int64, release int64) float64 { + + return setting.RadarMap.LivenessCommit*float64(commits) + + setting.RadarMap.LivenessIssue*float64(issues) + + setting.RadarMap.LivenessPR*float64(pr) + + setting.RadarMap.LivenessRelease*float64(release) + +} + +func GetProjectHealthInitValue(issueClosedRatio float32) float64 { + + return setting.RadarMap.ProjectHealthIssueCompleteRatio * float64(issueClosedRatio) + +} + +func GetTeamHealthInitValue(contributors int64, keyContributors int64, newContributors int64) float64 { + + return setting.RadarMap.TeamHealthContributors*float64(contributors) + + setting.RadarMap.TeamHealthKeyContributors*float64(keyContributors) + + setting.RadarMap.TeamHealthContributorsAdded*float64(newContributors) + +} + +func GetRepoGrowthInitValue(codelinesGrowth int64, issueGrowth int64, commitsGrowth int64, newContributors int64, commentsGrowth int64) float64 { + + return setting.RadarMap.GrowthCodeLines*float64(codelinesGrowth) + + setting.RadarMap.GrowthIssue*float64(issueGrowth) + + setting.RadarMap.GrowthCommit*float64(commitsGrowth) + + 
setting.RadarMap.GrowthContributors*float64(newContributors) + + setting.RadarMap.GrowthComments*float64(commentsGrowth) + +} diff --git a/modules/repository/elk_pagedata.go b/modules/repository/elk_pagedata.go index bb027726d..03a61be1f 100644 --- a/modules/repository/elk_pagedata.go +++ b/modules/repository/elk_pagedata.go @@ -4,10 +4,10 @@ import ( "bytes" "encoding/base64" "encoding/json" - "fmt" "io/ioutil" "net/http" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" ) @@ -99,8 +99,8 @@ type ResultInfo struct { //elk输出的json结构end -//发送post请求到elk -func SendReqToElk(jsonStr []byte) (content string) { +//处理返回的elk数据,只保留totalView,即访问量;loaded是分片载入次数,用来判断返回的数据是否准确 +func GetResultFromElk(resultInfo ResultInfo, jsonStr []byte) (loaded int, totalView int, err error) { ElkBase64Init := setting.ElkUser + ":" + setting.ElkPassword ElkBase64 := base64.StdEncoding.EncodeToString([]byte(ElkBase64Init)) BasicElkBase64 := "Basic" + " " + ElkBase64 @@ -117,15 +117,11 @@ func SendReqToElk(jsonStr []byte) (content string) { } defer resp.Body.Close() body, _ := ioutil.ReadAll(resp.Body) - return string(body) -} -//处理返回的elk数据,只保留totalView,即访问量;loaded是分片载入次数,用来判断返回的数据是否准确 -func GetResultFromElk(resultinfo ResultInfo, jobResult string) (loaded int, totalView int) { - var resultTest ResultInfo - errs := json.Unmarshal([]byte(jobResult), &resultTest) - fmt.Println(errs) - return resultTest.Result.Loaded, resultTest.Result.RawResponse.Hits.Total + errs := json.Unmarshal([]byte(string(body)), &resultInfo) + log.Info("Get resultJson failed", errs) + + return resultInfo.Result.Loaded, resultInfo.Result.RawResponse.Hits.Total, err } //初始化传给elk的数据结构,给定用户名和项目名,查询的起止时间,返回初始化后的结构 @@ -211,23 +207,23 @@ func TagNameInit(MessageInfo string, Tagname string, Gte string, Lte string) (pr } //向elk发送请求,将获取的结果只保留访问量,输入是初始化后的数据结构,返回访问量 -func ViewInfo(viewInfo InputInfo) (totalView int) { +func ViewInfo(viewInfo InputInfo) (totalView int, err error) { jsons, errs := 
json.Marshal(viewInfo) if errs != nil { - fmt.Println("errs:", errs.Error()) + log.Info("errs:", errs) } - // fmt.Println("viewInfoInit:",string(jsons)) var jsonStr = []byte(jsons) var resultInfo ResultInfo - loaded, totalView := GetResultFromElk(resultInfo, SendReqToElk(jsonStr)) + loaded, totalView, err := GetResultFromElk(resultInfo, jsonStr) + time := 0 for { if loaded == 0 { - loaded_next, totalView := GetResultFromElk(resultInfo, SendReqToElk(jsonStr)) + loaded_next, totalView, err := GetResultFromElk(resultInfo, jsonStr) time++ + log.Info("time:", time) if loaded_next != 0 && time < 100 { - fmt.Println("totalView:", totalView) - return totalView + return totalView, err } if time > 100 { break @@ -236,20 +232,20 @@ func ViewInfo(viewInfo InputInfo) (totalView int) { break } } - fmt.Println("loaded:", loaded) - return totalView + return totalView, err } -// @title ProjectView +// @title AppointProjectView // @description 获取指定用户和项目的访问量 // @param User string "用户名" // @param Project string "项目名" // @param Gte string "起始时间" 如time.Now().AddDate(0, 0, -1).Format(time.RFC3339) // @param Lte string "结束时间" 如time.Now().Format(time.RFC3339) // @return totalView int "访问量" -func AppointProjectView(User string, Project string, Gte string, Lte string) (totalView int) { - InitInfo := ProjectViewInit(User, Project, Gte, Lte) - return ViewInfo(InitInfo) +func AppointProjectView(User string, Project string, Gte string, Lte string) (totalView int, err error) { + ProjectViewInitInfo := ProjectViewInit(User, Project, Gte, Lte) + ProjectTotalView, err := ViewInfo(ProjectViewInitInfo) + return ProjectTotalView, err } //统计项目相关页面的访问量 @@ -287,26 +283,44 @@ type ProjectInfo struct { Project_forks int } +type ErrorInfo struct { + Project_dataset_type_0 error + Project_dataset_type_1 error + Project_issues error + Project_labels error + Project_milestones error + Project_pulls error + Project_release error + Project_wiki error + Project_activity error + Project_cloudbrain error + 
Project_modelarts error + Project_blockchain error + Project_watchers error + Project_stars error + Project_forks error +} + // @title AllProjectView // @description 获取指定用户和项目的访问量 // @param Gte string "起始时间" 如time.Now().AddDate(0, 0, -1).Format(time.RFC3339) // @param Lte string "结束时间" // @return projectInfo ProjectInfo "统计所有项目中页面的浏览情况,不需要区分项目" -func AllProjectView(Gte string, Lte string) (projectInfo ProjectInfo) { - projectInfo.Project_dataset_type_0 = ViewInfo(AllProjectViewInit("/datasets?type=0", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_dataset_type_1 = ViewInfo(AllProjectViewInit("/datasets?type=1", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_issues = ViewInfo(AllProjectViewInit("/issues HTTP/2.0", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_labels = ViewInfo(TagNameInit("/labels HTTP/2.0", "labels", Gte, Lte)) - projectInfo.Project_milestones = ViewInfo(AllProjectViewInit("/milestones HTTP/2.0", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_pulls = ViewInfo(AllProjectViewInit("/pulls HTTP/2.0", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_release = ViewInfo(AllProjectViewInit("/release HTTP/2.0", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_wiki = ViewInfo(AllProjectViewInit("/wiki HTTP/2.0", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_activity = ViewInfo(AllProjectViewInit("/activity HTTP/2.0", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_cloudbrain = ViewInfo(AllProjectViewInit("/cloudbrain HTTP/2.0", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_modelarts = ViewInfo(AllProjectViewInit("/modelarts HTTP/2.0", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_blockchain = ViewInfo(AllProjectViewInit("/blockchain HTTP/2.0", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_watchers = ViewInfo(AllProjectViewInit("/watchers HTTP/2.0", "%{[request][2]}", Gte, Lte)) - projectInfo.Project_stars = ViewInfo(AllProjectViewInit("/stars HTTP/2.0", "%{[request][2]}", Gte, Lte)) - 
projectInfo.Project_forks = ViewInfo(AllProjectViewInit("/forks HTTP/2.0", "%{[request][2]}", Gte, Lte)) - return projectInfo +func AllProjectView(Gte string, Lte string) (projectViewInfo ProjectInfo, errorInfo ErrorInfo) { + projectViewInfo.Project_dataset_type_0, errorInfo.Project_dataset_type_0 = ViewInfo(AllProjectViewInit("/datasets?type=0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_dataset_type_1, errorInfo.Project_dataset_type_1 = ViewInfo(AllProjectViewInit("/datasets?type=1", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_issues, errorInfo.Project_issues = ViewInfo(AllProjectViewInit("/issues HTTP/2.0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_labels, errorInfo.Project_labels = ViewInfo(TagNameInit("/labels HTTP/2.0", "labels", Gte, Lte)) + projectViewInfo.Project_milestones, errorInfo.Project_milestones = ViewInfo(AllProjectViewInit("/milestones HTTP/2.0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_pulls, errorInfo.Project_pulls = ViewInfo(AllProjectViewInit("/pulls HTTP/2.0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_release, errorInfo.Project_release = ViewInfo(AllProjectViewInit("/release HTTP/2.0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_wiki, errorInfo.Project_wiki = ViewInfo(AllProjectViewInit("/wiki HTTP/2.0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_activity, errorInfo.Project_activity = ViewInfo(AllProjectViewInit("/activity HTTP/2.0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_cloudbrain, errorInfo.Project_cloudbrain = ViewInfo(AllProjectViewInit("/cloudbrain HTTP/2.0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_modelarts, errorInfo.Project_modelarts = ViewInfo(AllProjectViewInit("/modelarts HTTP/2.0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_blockchain, errorInfo.Project_blockchain = ViewInfo(AllProjectViewInit("/blockchain HTTP/2.0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_watchers, 
errorInfo.Project_watchers = ViewInfo(AllProjectViewInit("/watchers HTTP/2.0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_stars, errorInfo.Project_stars = ViewInfo(AllProjectViewInit("/stars HTTP/2.0", "%{[request][2]}", Gte, Lte)) + projectViewInfo.Project_forks, errorInfo.Project_forks = ViewInfo(AllProjectViewInit("/forks HTTP/2.0", "%{[request][2]}", Gte, Lte)) + return projectViewInfo, errorInfo } diff --git a/modules/setting/radarmap.go b/modules/setting/radarmap.go new file mode 100644 index 000000000..26624d143 --- /dev/null +++ b/modules/setting/radarmap.go @@ -0,0 +1,7 @@ +package setting + +func UpdateRadarMap() { + Cfg.DeleteSection("radar_map") + Cfg.Reload() + SetRadarMapConfig() +} diff --git a/modules/setting/setting.go b/modules/setting/setting.go index 5f0000667..2e512eae0 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -457,6 +457,11 @@ var ( Snn4imagenetCode string Snn4imagenetServerHost string + //snn4imagenet config + IsBrainScoreEnabled bool + BrainScoreCode string + BrainScoreServerHost string + //blockchain config BlockChainHost string CommitValidDate string @@ -499,6 +504,44 @@ var ( //nginx proxy PROXYURL string + RadarMap = struct { + Impact float64 + ImpactWatch float64 + ImpactStar float64 + ImpactFork float64 + ImpactCodeDownload float64 + ImpactComments float64 + ImpactBrowser float64 + + Completeness float64 + CompletenessIssuesClosed float64 + CompletenessReleases float64 + CompletenessDevelopAge float64 + CompletenessDataset float64 + CompletenessModel float64 + CompletenessWiki float64 + + Liveness float64 + LivenessCommit float64 + LivenessIssue float64 + LivenessPR float64 + LivenessRelease float64 + + ProjectHealth float64 + ProjectHealthIssueCompleteRatio float64 + + TeamHealth float64 + TeamHealthContributors float64 + TeamHealthKeyContributors float64 + TeamHealthContributorsAdded float64 + + Growth float64 + GrowthCodeLines float64 + GrowthIssue float64 + GrowthContributors float64 
+ GrowthCommit float64 + GrowthComments float64 + }{} ) // DateLang transforms standard language locale name to corresponding value in datetime plugin. @@ -1190,8 +1233,13 @@ func NewContext() { sec = Cfg.Section("snn4imagenet") IsSnn4imagenetEnabled = sec.Key("ENABLED").MustBool(false) - Snn4imagenetCode = sec.Key("SNN4IMAGENETCODE").MustString("https://yangzhx:justfortest123@git.openi.org.cn/yangzhx/detection_benchmark_script.git") - Snn4imagenetServerHost = sec.Key("HOST").MustString("http://192.168.202.90:3366/") + Snn4imagenetCode = sec.Key("SNN4IMAGENETCODE").MustString("https://yult:19910821ylt@git.openi.org.cn/yult/snn4imagenet_script.git") + Snn4imagenetServerHost = sec.Key("HOST").MustString("http://192.168.207.76:8080/") + + sec = Cfg.Section("brainscore") + IsBrainScoreEnabled = sec.Key("ENABLED").MustBool(false) + BrainScoreCode = sec.Key("BRAINSCORECODE").MustString("https://yult:19910821ylt@git.openi.org.cn/yult/brainscore_script.git") + BrainScoreServerHost = sec.Key("HOST").MustString("http://192.168.207.76:8080/") sec = Cfg.Section("blockchain") BlockChainHost = sec.Key("HOST").MustString("http://192.168.136.66:3302/") @@ -1233,6 +1281,44 @@ func NewContext() { TimeField = sec.Key("TIMEFIELD").MustString(" @timestamptest") ElkTimeFormat = sec.Key("ELKTIMEFORMAT").MustString("date_time") + SetRadarMapConfig() +} + +func SetRadarMapConfig() { + sec := Cfg.Section("radar_map") + + RadarMap.Impact = sec.Key("impact").MustFloat64(0.3) + RadarMap.ImpactWatch = sec.Key("impact_watch").MustFloat64(0.1) + RadarMap.ImpactStar = sec.Key("impact_star").MustFloat64(0.3) + RadarMap.ImpactFork = sec.Key("impact_fork").MustFloat64(0.3) + RadarMap.ImpactCodeDownload = sec.Key("impact_code_download").MustFloat64(0.2) + RadarMap.ImpactComments = sec.Key("impact_comments").MustFloat64(0.1) + RadarMap.ImpactBrowser = sec.Key("impact_browser").MustFloat64(0.1) + RadarMap.Completeness = sec.Key("completeness").MustFloat64(0.1) + RadarMap.CompletenessIssuesClosed = 
sec.Key("completeness_issues_closed").MustFloat64(0.2) + RadarMap.CompletenessReleases = sec.Key("completeness_releases").MustFloat64(0.3) + RadarMap.CompletenessDevelopAge = sec.Key("completeness_develop_age").MustFloat64(0.1) + RadarMap.CompletenessDataset = sec.Key("completeness_dataset").MustFloat64(0.1) + RadarMap.CompletenessModel = sec.Key("completeness_model").MustFloat64(0.1) + RadarMap.CompletenessWiki = sec.Key("completeness_wiki").MustFloat64(0.1) + RadarMap.Liveness = sec.Key("liveness").MustFloat64(0.3) + RadarMap.LivenessCommit = sec.Key("liveness_commit").MustFloat64(0.2) + RadarMap.LivenessIssue = sec.Key("liveness_issue").MustFloat64(0.2) + RadarMap.LivenessPR = sec.Key("liveness_pr").MustFloat64(0.2) + RadarMap.LivenessRelease = sec.Key("liveness_release").MustFloat64(0.4) + RadarMap.ProjectHealth = sec.Key("project_health").MustFloat64(0.1) + RadarMap.ProjectHealthIssueCompleteRatio = sec.Key("project_health_issue_complete_ratio").MustFloat64(100) + RadarMap.TeamHealth = sec.Key("team_health").MustFloat64(0.1) + RadarMap.TeamHealthContributors = sec.Key("team_health_contributors").MustFloat64(0.2) + RadarMap.TeamHealthKeyContributors = sec.Key("team_health_key_contributors").MustFloat64(0.6) + RadarMap.TeamHealthContributorsAdded = sec.Key("team_health_contributors_added").MustFloat64(0.2) + RadarMap.Growth = sec.Key("growth").MustFloat64(0.1) + RadarMap.GrowthCodeLines = sec.Key("growth_code_lines").MustFloat64(0.2) + RadarMap.GrowthIssue = sec.Key("growth_issue").MustFloat64(0.2) + RadarMap.GrowthContributors = sec.Key("growth_contributors").MustFloat64(0.2) + RadarMap.GrowthCommit = sec.Key("growth_commit").MustFloat64(0.2) + RadarMap.GrowthComments = sec.Key("growth_comments").MustFloat64(0.2) + } func loadInternalToken(sec *ini.Section) string { diff --git a/modules/storage/obs.go b/modules/storage/obs.go index 813e9d017..d5817fd3f 100755 --- a/modules/storage/obs.go +++ b/modules/storage/obs.go @@ -123,7 +123,7 @@ func 
ObsMultiPartUpload(uuid string, uploadId string, partNumber int, fileName s log.Info(obsError.Message) return obsError } else { - log.Error("error:", err) + log.Error("error:", err.Error()) return err } } diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index 62b502866..8ac895ec9 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -777,6 +777,7 @@ cloudbrain_task=任务名称 cloudbrain_operate=操作 cloudbrain_status_createtime=状态/创建时间 cloudbrain_status_runtime = 运行时长 +cloudbrain_jobname_err=只能以小写字母或数字开头且只包含小写字母、数字、_和-,不能以_结尾,最长36个字符。 modelarts.notebook=调试作业 modelarts.train_job=训练作业 @@ -798,6 +799,7 @@ modelarts.train_job.algorithm_origin=算法来源 modelarts.train_job.AI_driver=AI引擎 modelarts.train_job.start_file=启动文件 modelarts.train_job.boot_file_helper=启动文件是您程序执行的入口文件,必须是以.py结尾的文件。 +modelarts.train_job.boot_file_place=填写启动文件路径,默认为train.py modelarts.train_job.dataset=数据集 modelarts.train_job.run_parameter=运行参数 modelarts.train_job.add_run_parameter=增加运行参数 diff --git a/public/img/org-jd@2x-80.jpg b/public/img/org-jd@2x-80.jpg new file mode 100644 index 000000000..4c99c8acc Binary files /dev/null and b/public/img/org-jd@2x-80.jpg differ diff --git a/public/self/labelTaskPage.js b/public/self/labelTaskPage.js index a160c3961..03acd6df1 100644 --- a/public/self/labelTaskPage.js +++ b/public/self/labelTaskPage.js @@ -309,11 +309,11 @@ function label_task_create(task_name, relate_task_id, taskType,assign_user_id,la success:function(res){ console.log(res); if(res.code == 0){ - alert("自动标注任务创建成功!"); + alert("标注任务创建成功!"); createsucced = true; } else{ - alert("创建自动标注任务失败," + res.message); + alert("创建标注任务失败," + res.message); createsucced = false; } }, diff --git a/routers/api/v1/repo/cloudbrain.go b/routers/api/v1/repo/cloudbrain.go index f4364f1e4..bfba5236b 100755 --- a/routers/api/v1/repo/cloudbrain.go +++ b/routers/api/v1/repo/cloudbrain.go @@ -65,12 +65,15 @@ func GetCloudbrainTask(ctx *context.APIContext) { return } - 
taskRoles := result.TaskRoles - taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) + job.Status = result.JobStatus.State + if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) { + taskRoles := result.TaskRoles + taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) - job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP - job.ContainerID = taskRes.TaskStatuses[0].ContainerID - job.Status = taskRes.TaskStatuses[0].State + job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP + job.ContainerID = taskRes.TaskStatuses[0].ContainerID + job.Status = taskRes.TaskStatuses[0].State + } if result.JobStatus.State != string(models.JobWaiting) { err = models.UpdateJob(job) diff --git a/routers/home.go b/routers/home.go index 70fea437c..71ba5b6c5 100755 --- a/routers/home.go +++ b/routers/home.go @@ -281,10 +281,10 @@ func ExploreDatasets(ctx *context.Context) { } pager := context.NewPagination(int(count), opts.PageSize, page, 5) + ctx.Data["Keyword"] = opts.Keyword pager.SetDefaultParams(ctx) ctx.Data["Page"] = pager - ctx.Data["Keyword"] = opts.Keyword ctx.Data["Datasets"] = datasets ctx.Data["Total"] = count ctx.Data["PageIsDatasets"] = true diff --git a/routers/repo/attachment.go b/routers/repo/attachment.go index 5b946f832..3c54f982e 100755 --- a/routers/repo/attachment.go +++ b/routers/repo/attachment.go @@ -286,7 +286,6 @@ func GetAttachment(ctx *context.Context) { return } defer fr.Close() - log.Info("go here to download.") if err = increaseDownloadCount(attach, dataSet); err != nil { ctx.ServerError("Update", err) return @@ -673,6 +672,7 @@ func PutOBSProxyUpload(ctx *context.Context) { fileName := ctx.Query("file_name") RequestBody := ctx.Req.Body() + if RequestBody == nil { ctx.Error(500, fmt.Sprintf("FormFile: %v", RequestBody)) return diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 
803f2a4ec..33e8b807f 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -40,6 +40,8 @@ var ( categories *models.Categories ) +var jobNamePattern = regexp.MustCompile(`^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$`) + // MustEnableDataset check if repository enable internal cb func MustEnableCloudbrain(ctx *context.Context) { if !ctx.Repo.CanRead(models.UnitTypeCloudBrain) { @@ -175,6 +177,9 @@ func cloudBrainNewDataPrepare(ctx *context.Context) error { ctx.Data["snn4imagenet_path"] = cloudbrain.Snn4imagenetMountPath ctx.Data["is_snn4imagenet_enabled"] = setting.IsSnn4imagenetEnabled + ctx.Data["brainscore_path"] = cloudbrain.BrainScoreMountPath + ctx.Data["is_brainscore_enabled"] = setting.IsBrainScoreEnabled + return nil } @@ -197,8 +202,13 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { gpuQueue := setting.JobType codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath resourceSpecId := form.ResourceSpecId + + if !jobNamePattern.MatchString(jobName) { + ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tplModelArtsNew, &form) + return + } - if jobType != string(models.JobTypeBenchmark) && jobType != string(models.JobTypeDebug) && jobType != string(models.JobTypeSnn4imagenet) { + if jobType != string(models.JobTypeBenchmark) && jobType != string(models.JobTypeDebug) && jobType != string(models.JobTypeSnn4imagenet) && jobType != string(models.JobTypeBrainScore) { log.Error("jobtype error:", jobType, ctx.Data["MsgID"]) cloudBrainNewDataPrepare(ctx) ctx.RenderWithErr("jobtype error", tplCloudBrainNew, &form) @@ -247,7 +257,12 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { downloadRateCode(repo, jobName, setting.Snn4imagenetCode, snn4imagenetPath, "", "") } - err = cloudbrain.GenerateTask(ctx, jobName, image, command, uuid, codePath, modelPath, benchmarkPath, snn4imagenetPath, jobType, gpuQueue, resourceSpecId) + brainScorePath := setting.JobPath + jobName + 
cloudbrain.BrainScoreMountPath + if setting.IsBrainScoreEnabled && jobType == string(models.JobTypeBrainScore) { + downloadRateCode(repo, jobName, setting.BrainScoreCode, brainScorePath, "", "") + } + + err = cloudbrain.GenerateTask(ctx, jobName, image, command, uuid, codePath, modelPath, benchmarkPath, snn4imagenetPath, brainScorePath, jobType, gpuQueue, resourceSpecId) if err != nil { cloudBrainNewDataPrepare(ctx) ctx.RenderWithErr(err.Error(), tplCloudBrainNew, &form) @@ -273,17 +288,30 @@ func CloudBrainShow(ctx *context.Context) { if result != nil { jobRes, _ := models.ConvertToJobResultPayload(result.Payload) jobRes.Resource.Memory = strings.ReplaceAll(jobRes.Resource.Memory, "Mi", "MB") - ctx.Data["result"] = jobRes taskRoles := jobRes.TaskRoles - taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) - ctx.Data["taskRes"] = taskRes - task.Status = taskRes.TaskStatuses[0].State - task.ContainerID = taskRes.TaskStatuses[0].ContainerID - task.ContainerIp = taskRes.TaskStatuses[0].ContainerIP - err = models.UpdateJob(task) - if err != nil { - ctx.Data["error"] = err.Error() + if jobRes.JobStatus.State != string(models.JobFailed) { + taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) + ctx.Data["taskRes"] = taskRes + task.Status = taskRes.TaskStatuses[0].State + task.ContainerID = taskRes.TaskStatuses[0].ContainerID + task.ContainerIp = taskRes.TaskStatuses[0].ContainerIP + err = models.UpdateJob(task) + if err != nil { + ctx.Data["error"] = err.Error() + } + } else { + task.Status = jobRes.JobStatus.State + taskRes := models.TaskPod{TaskStatuses: []models.TaskStatuses{ + { + State: jobRes.JobStatus.State, + }, + }} + ctx.Data["taskRes"] = taskRes + jobRes.JobStatus.StartTime = time.Unix(int64(task.CreatedUnix), 0).Format("2006-01-02 15:04:05") + jobRes.JobStatus.EndTime = time.Unix(int64(task.UpdatedUnix), 0).Format("2006-01-02 15:04:05") } + + ctx.Data["result"] = jobRes 
} ctx.Data["task"] = task @@ -343,7 +371,7 @@ func CloudBrainStop(ctx *context.Context) { return } - if task.Status == string(models.JobStopped) { + if task.Status == string(models.JobStopped) || task.Status == string(models.JobFailed) { log.Error("the job(%s) has been stopped", task.JobName, ctx.Data["msgID"]) ctx.ServerError("the job has been stopped", errors.New("the job has been stopped")) return @@ -446,7 +474,7 @@ func CloudBrainDel(ctx *context.Context) { return } - if task.Status != string(models.JobStopped) { + if task.Status != string(models.JobStopped) && task.Status != string(models.JobFailed){ log.Error("the job(%s) has not been stopped", task.JobName, ctx.Data["msgID"]) ctx.ServerError("the job has not been stopped", errors.New("the job has not been stopped")) return @@ -583,6 +611,8 @@ func GetRate(ctx *context.Context) { ctx.Redirect(setting.BenchmarkServerHost + "?username=" + ctx.User.Name) } else if job.JobType == string(models.JobTypeSnn4imagenet) { ctx.Redirect(setting.Snn4imagenetServerHost) + } else if job.JobType == string(models.JobTypeBrainScore) { + ctx.Redirect(setting.BrainScoreServerHost) } else { log.Error("JobType error:%s", job.JobType, ctx.Data["msgID"]) } diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index a15f5e0cb..117210621 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -119,7 +119,10 @@ func ModelArtsCreate(ctx *context.Context, form auth.CreateModelArtsForm) { uuid := form.Attachment description := form.Description //repo := ctx.Repo.Repository - + if !jobNamePattern.MatchString(jobName) { + ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tplModelArtsNew, &form) + return + } err := modelarts.GenerateTask(ctx, jobName, uuid, description) if err != nil { ctx.RenderWithErr(err.Error(), tplModelArtsNew, &form) @@ -247,7 +250,7 @@ func ModelArtsDel(ctx *context.Context) { return } - if task.Status != string(models.JobStopped) { + if task.Status != 
string(models.ModelArtsCreateFailed) && task.Status != string(models.ModelArtsStartFailed) && task.Status != string(models.ModelArtsStopped){ log.Error("the job(%s) has not been stopped", task.JobName) ctx.ServerError("the job has not been stopped", errors.New("the job has not been stopped")) return diff --git a/routers/repo/repo_statistic.go b/routers/repo/repo_statistic.go index ceb410958..4d23e79f2 100755 --- a/routers/repo/repo_statistic.go +++ b/routers/repo/repo_statistic.go @@ -3,19 +3,27 @@ package repo import ( "time" + "code.gitea.io/gitea/modules/setting" + + "code.gitea.io/gitea/modules/normalization" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/repository" ) //auto daily or manually func RepoStatisticAuto() { log.Info("", time.Now()) yesterday := time.Now().AddDate(0, 0, -1).Format("2006-01-02") + setting.UpdateRadarMap() RepoStatisticDaily(yesterday) } func RepoStatisticDaily(date string) { log.Info("%s", date) + log.Info("begin Repo Statistic") + t, _ := time.Parse("2006-01-02", date) if err := models.DeleteRepoStatDaily(date); err != nil { log.Error("DeleteRepoStatDaily failed: %v", err.Error()) return @@ -27,13 +35,25 @@ func RepoStatisticDaily(date string) { return } - for _, repo := range repos { + var reposRadar = make([]*models.RepoStatistic, 0) + + var minRepoRadar models.RepoStatistic + var maxRepoRadar models.RepoStatistic + + for i, repo := range repos { log.Info("start statistic: %s", repo.Name) + var numDevMonths, numWikiViews, numContributor, numKeyContributor, numCommitsGrowth, numCommitLinesGrowth, numContributorsGrowth int64 repoGitStat, err := models.GetRepoKPIStats(repo) if err != nil { log.Error("GetRepoKPIStats failed: %s", repo.Name) - log.Error("failed statistic: %s", repo.Name) - continue + } else { + numDevMonths = repoGitStat.DevelopAge + numKeyContributor = repoGitStat.KeyContributors + numWikiViews = repoGitStat.WikiPages + numContributor = repoGitStat.Contributors + 
numCommitsGrowth = repoGitStat.CommitsAdded + numCommitLinesGrowth = repoGitStat.CommitLinesModified + numContributorsGrowth = repoGitStat.ContributorsAdded } var issueFixedRate float32 @@ -41,63 +61,178 @@ func RepoStatisticDaily(date string) { issueFixedRate = float32(repo.NumClosedIssues) / float32(repo.NumIssues) } - numVersions, err := models.GetReleaseCountByRepoID(repo.ID, models.FindReleasesOptions{}) + var numVersions int64 + numVersions, err = models.GetReleaseCountByRepoID(repo.ID, models.FindReleasesOptions{}) if err != nil { - log.Error("GetReleaseCountByRepoID failed: %s", repo.Name) - log.Error("failed statistic: %s", repo.Name) - continue + log.Error("GetReleaseCountByRepoID failed(%s): %v", repo.Name, err) } - datasetSize, err := getDatasetSize(repo) + var datasetSize int64 + datasetSize, err = getDatasetSize(repo) if err != nil { - log.Error("getDatasetSize failed: %s", repo.Name) - log.Error("failed statistic: %s", repo.Name) - continue + log.Error("getDatasetSize failed(%s): %v", repo.Name, err) } - numComments, err := models.GetCommentCountByRepoID(repo.ID) + var numComments int64 + numComments, err = models.GetCommentCountByRepoID(repo.ID) if err != nil { - log.Error("GetCommentCountByRepoID failed: %s", repo.Name) - log.Error("failed statistic: %s", repo.Name) - continue + log.Error("GetCommentCountByRepoID failed(%s): %v", repo.Name, err) } - //beginTime, endTime := getStatTime(date) - //numVisits := repository.AppointProjectView(repo.OwnerName, repo.Name, beginTime, endTime) - numVisits := 0 + beginTime, endTime := getStatTime(date) + var numVisits int + numVisits, err = repository.AppointProjectView(repo.OwnerName, repo.Name, beginTime, endTime) + if err != nil { + log.Error("AppointProjectView failed(%s): %v", repo.Name, err) + } repoStat := models.RepoStatistic{ - RepoID: repo.ID, - Date: date, - NumWatches: int64(repo.NumWatches), - NumStars: int64(repo.NumStars), - NumDownloads: repo.CloneCnt, - NumComments: numComments, - NumVisits: 
int64(numVisits), - NumClosedIssues: int64(repo.NumClosedIssues), - NumVersions: numVersions, - NumDevMonths: repoGitStat.DevelopAge, - RepoSize: repo.Size, - DatasetSize: datasetSize, - NumModels: 0, - NumWikiViews: repoGitStat.WikiPages, - NumCommits: repo.NumCommit, - NumIssues: int64(repo.NumIssues), - NumPulls: int64(repo.NumPulls), - IssueFixedRate: issueFixedRate, - NumContributor: repoGitStat.Contributors, - NumKeyContributor: repoGitStat.KeyContributors, + RepoID: repo.ID, + Date: date, + NumWatches: int64(repo.NumWatches), + NumStars: int64(repo.NumStars), + NumDownloads: repo.CloneCnt, + NumComments: numComments, + NumVisits: int64(numVisits), + NumClosedIssues: int64(repo.NumClosedIssues), + NumVersions: numVersions, + NumDevMonths: numDevMonths, + RepoSize: repo.Size, + DatasetSize: datasetSize, + NumModels: 0, + NumWikiViews: numWikiViews, + NumCommits: repo.NumCommit, + NumIssues: int64(repo.NumIssues), + NumPulls: int64(repo.NumPulls), + IssueFixedRate: issueFixedRate, + NumContributor: numContributor, + NumKeyContributor: numKeyContributor, + NumCommitsGrowth: numCommitsGrowth, + NumCommitLinesGrowth: numCommitLinesGrowth, + NumContributorsGrowth: numContributorsGrowth, + } + + dayBeforeDate := t.AddDate(0, 0, -1).Format("2006-01-02") + repoStatisticsBefore, err := models.GetRepoStatisticByDate(dayBeforeDate) + + if err != nil { + log.Error("get data of day before the date failed ", err) + } else { + if len(repoStatisticsBefore) > 0 { + repoStatisticBefore := repoStatisticsBefore[0] + repoStat.NumWatchesAdded = repoStat.NumWatches - repoStatisticBefore.NumWatches + repoStat.NumStarsAdded = repoStat.NumStars - repoStatisticBefore.NumStars + repoStat.NumForksAdded = repoStat.NumForks - repoStatisticBefore.NumForks + repoStat.NumDownloadsAdded = repoStat.NumDownloads - repoStatisticBefore.NumDownloads + repoStat.NumCommentsAdded = repoStat.NumComments - repoStatisticBefore.NumComments + repoStat.NumClosedIssuesAdded = repoStat.NumClosedIssues - 
repoStatisticBefore.NumClosedIssues + repoStat.NumCommitsAdded = repoStat.NumCommits - repoStatisticBefore.NumCommits + repoStat.NumIssuesAdded = repoStat.NumIssues - repoStatisticBefore.NumIssues + repoStat.NumPullsAdded = repoStat.NumPulls - repoStatisticBefore.NumPulls + repoStat.NumContributorAdded = repoStat.NumContributor - repoStatisticBefore.NumContributor + } + } + day4MonthsAgo := t.AddDate(0, -4, 0) + repoStatisticFourMonthsAgo, err := models.GetOneRepoStatisticBeforeTime(day4MonthsAgo) + if err != nil { + log.Error("Get data of 4 moth ago failed.", err) + } else { + repoStat.NumCommentsGrowth = repoStat.NumComments - repoStatisticFourMonthsAgo.NumComments + repoStat.NumIssuesGrowth = repoStat.NumIssues - repoStatisticFourMonthsAgo.NumIssues } if _, err = models.InsertRepoStat(&repoStat); err != nil { - log.Error("InsertRepoStat failed: %s", repo.Name) + log.Error("InsertRepoStat failed(%s): %v", repo.Name, err) log.Error("failed statistic: %s", repo.Name) continue } + tempRepoStat := models.RepoStatistic{ + RepoID: repoStat.RepoID, + Date: repoStat.Date, + Impact: normalization.GetImpactInitValue(repoStat.NumWatches, repoStat.NumStars, repoStat.NumForks, repoStat.NumDownloads, repoStat.NumComments, repoStat.NumVisits), + Completeness: normalization.GetCompleteInitValue(repoStat.NumClosedIssues, repoStat.NumVersions, repoStat.NumDevMonths, repoStat.DatasetSize, repoStat.NumModels, repoStat.NumWikiViews), + Liveness: normalization.GetLivenessInitValue(repoStat.NumCommits, repoStat.NumIssues, repoStat.NumPulls, repoStat.NumVisits), + ProjectHealth: normalization.GetProjectHealthInitValue(repoStat.IssueFixedRate), + TeamHealth: normalization.GetTeamHealthInitValue(repoStat.NumContributor, repoStat.NumKeyContributor, repoStat.NumContributorsGrowth), + Growth: normalization.GetRepoGrowthInitValue(repoStat.NumCommitLinesGrowth, repoStat.NumIssuesGrowth, repoStat.NumCommitsGrowth, repoStat.NumContributorsGrowth, repoStat.NumCommentsGrowth), + } + + reposRadar = 
append(reposRadar, &tempRepoStat) + + if i == 0 { + minRepoRadar = tempRepoStat + maxRepoRadar = tempRepoStat + } else { + + if tempRepoStat.Impact < minRepoRadar.Impact { + minRepoRadar.Impact = tempRepoStat.Impact + } + + if tempRepoStat.Impact > maxRepoRadar.Impact { + maxRepoRadar.Impact = tempRepoStat.Impact + } + + if tempRepoStat.Completeness < minRepoRadar.Completeness { + minRepoRadar.Completeness = tempRepoStat.Completeness + } + + if tempRepoStat.Completeness > maxRepoRadar.Completeness { + maxRepoRadar.Completeness = tempRepoStat.Completeness + } + + if tempRepoStat.Liveness < minRepoRadar.Liveness { + minRepoRadar.Liveness = tempRepoStat.Liveness + } + + if tempRepoStat.Liveness > maxRepoRadar.Liveness { + maxRepoRadar.Liveness = tempRepoStat.Liveness + } + + if tempRepoStat.ProjectHealth < minRepoRadar.ProjectHealth { + minRepoRadar.ProjectHealth = tempRepoStat.ProjectHealth + } + + if tempRepoStat.ProjectHealth > maxRepoRadar.ProjectHealth { + maxRepoRadar.ProjectHealth = tempRepoStat.ProjectHealth + } + + if tempRepoStat.TeamHealth < minRepoRadar.TeamHealth { + minRepoRadar.TeamHealth = tempRepoStat.TeamHealth + } + + if tempRepoStat.TeamHealth > maxRepoRadar.TeamHealth { + maxRepoRadar.TeamHealth = tempRepoStat.TeamHealth + } + + if tempRepoStat.Growth < minRepoRadar.Growth { + minRepoRadar.Growth = tempRepoStat.Growth + } + + if tempRepoStat.Growth > maxRepoRadar.Growth { + maxRepoRadar.Growth = tempRepoStat.Growth + } + + } + log.Info("finish statistic: %s", repo.Name) } + //radar map + log.Info("begin statistic radar") + for _, radarInit := range reposRadar { + radarInit.Impact = normalization.Normalization(radarInit.Impact, minRepoRadar.Impact, maxRepoRadar.Impact) + radarInit.Completeness = normalization.Normalization(radarInit.Completeness, minRepoRadar.Completeness, maxRepoRadar.Completeness) + radarInit.Liveness = normalization.Normalization(radarInit.Liveness, minRepoRadar.Liveness, maxRepoRadar.Liveness) + radarInit.ProjectHealth = 
normalization.Normalization(radarInit.ProjectHealth, minRepoRadar.ProjectHealth, maxRepoRadar.ProjectHealth) + radarInit.TeamHealth = normalization.Normalization(radarInit.TeamHealth, minRepoRadar.TeamHealth, maxRepoRadar.TeamHealth) + radarInit.Growth = normalization.Normalization(radarInit.Growth, minRepoRadar.Growth, maxRepoRadar.Growth) + radarInit.RadarTotal = normalization.GetRadarValue(radarInit.Impact, radarInit.Completeness, radarInit.Liveness, radarInit.ProjectHealth, radarInit.TeamHealth, radarInit.Growth) + models.UpdateRepoStat(radarInit) + } + + log.Info("finish statistic: radar") + } func getDatasetSize(repo *models.Repository) (int64, error) { diff --git a/routers/repo/repo_summary_statistic.go b/routers/repo/repo_summary_statistic.go new file mode 100644 index 000000000..53270664c --- /dev/null +++ b/routers/repo/repo_summary_statistic.go @@ -0,0 +1,94 @@ +package repo + +import ( + "time" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/log" +) + +func SummaryStatistic() { + log.Info("Generate summary statistic begin") + yesterday := time.Now().AddDate(0, 0, -1).Format("2006-01-02") + SummaryStatisticDaily(yesterday) + log.Info("Generate summary statistic end") +} + +func SummaryStatisticDaily(date string) { + log.Info("%s", date) + if err := models.DeleteSummaryStatisticDaily(date); err != nil { + log.Error("DeleteRepoStatDaily failed: %v", err.Error()) + return + } + + //user number + userNumber, err := models.GetUsersCount() + if err != nil { + log.Error("can not get user number", err) + userNumber = 0 + } + //organization number + organizationNumber, err := models.GetOrganizationsCount() + if err != nil { + log.Error("can not get orgnazition number", err) + organizationNumber = 0 + } + // repository number + repositoryNumer, err := models.GetAllRepositoriesCount() + if err != nil { + log.Error("can not get repository number", err) + repositoryNumer = 0 + } + //repository size + repositorySize, err := 
models.GetAllRepositoriesSize() + if err != nil { + log.Error("can not get repository size", err) + repositorySize = 0 + } + // dataset size + allDatasetSize, err := models.GetAllAttachmentSize() + if err != nil { + log.Error("can not get dataset size", err) + allDatasetSize = 0 + } + //topic repo number + topics, err := models.GetAllUsedTopics() + if err != nil { + log.Error("can not get topics", err) + } + var topicsCount [11]int + for _, topic := range topics { + + index, exists := models.DomainMap[topic.Name] + if exists { + topicsCount[index] = topic.RepoCount + } + + } + + summaryStat := models.SummaryStatistic{ + Date: date, + NumUsers: userNumber, + RepoSize: repositorySize, + DatasetSize: allDatasetSize, + NumOrganizations: organizationNumber, + NumRepos: repositoryNumer, + NumRepoBigModel: topicsCount[0], + NumRepoAI: topicsCount[1], + NumRepoVision: topicsCount[2], + NumRepoNLP: topicsCount[3], + NumRepoML: topicsCount[4], + NumRepoNN: topicsCount[5], + NumRepoAutoDrive: topicsCount[6], + NumRepoRobot: topicsCount[7], + NumRepoLeagueLearn: topicsCount[8], + NumRepoDataMining: topicsCount[9], + NumRepoRISC: topicsCount[10], + } + + if _, err = models.InsertSummaryStatistic(&summaryStat); err != nil { + log.Error("Insert summary Stat failed: %v", err.Error()) + } + + log.Info("finish summary statistic") +} diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 878868d6d..b7b9333d0 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -114,14 +114,14 @@ func RouterHandler(level log.Level) func(ctx *macaron.Context) { } // SetLogMsgID set msgID in Context -func SetLogMsgID() func(ctx *macaron.Context) { +func SetLogMsgID() macaron.Handler { return func(ctx *macaron.Context) { start := time.Now() uuid := gouuid.NewV4().String() ctx.Data["MsgID"] = uuid - log.Info("Started %s %s for %s", log.ColoredMethod(ctx.Req.Method), ctx.Req.URL.RequestURI(), ctx.RemoteAddr(), ctx.Data["MsgID"]) + log.Info("%s Started %s %s for %s", 
ctx.Data["SignedUserName"], log.ColoredMethod(ctx.Req.Method), ctx.Req.URL.RequestURI(), ctx.RemoteAddr(), ctx.Data["MsgID"]) rw := ctx.Resp.(macaron.ResponseWriter) ctx.Next() @@ -149,7 +149,7 @@ func NewMacaron() *macaron.Macaron { m.Use(macaron.Logger()) } } - m.Use(SetLogMsgID()) + //m.Use(SetLogMsgID()) // Access Logger is similar to Router Log but more configurable and by default is more like the NCSA Common Log format if setting.EnableAccessLog { setupAccessLogger(m) @@ -257,6 +257,7 @@ func NewMacaron() *macaron.Macaron { DisableDebug: !setting.EnablePprof, })) m.Use(context.Contexter()) + m.Use(SetLogMsgID()) // OK we are now set-up enough to allow us to create a nicer recovery than // the default macaron recovery m.Use(context.Recovery()) diff --git a/routers/user/auth.go b/routers/user/auth.go index 126d0a4c8..13e338565 100755 --- a/routers/user/auth.go +++ b/routers/user/auth.go @@ -11,8 +11,6 @@ import ( "net/http" "strings" - "code.gitea.io/gitea/routers/repo" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/auth" "code.gitea.io/gitea/modules/auth/oauth2" @@ -1058,7 +1056,6 @@ func SignOut(ctx *context.Context) { }) } HandleSignOut(ctx) - go repo.StopJobsByUserID(ctx.User.ID) ctx.Redirect(setting.AppSubURL + "/") } diff --git a/routers/user/setting/account.go b/routers/user/setting/account.go index 6165bfc5b..0a0fc558c 100644 --- a/routers/user/setting/account.go +++ b/routers/user/setting/account.go @@ -8,6 +8,8 @@ package setting import ( "errors" + "code.gitea.io/gitea/routers/repo" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/auth" "code.gitea.io/gitea/modules/base" @@ -240,6 +242,7 @@ func DeleteAccount(ctx *context.Context) { ctx.ServerError("DeleteUser", err) } } else { + go repo.StopJobsByUserID(ctx.User.ID) log.Trace("Account deleted: %s", ctx.User.Name) ctx.Redirect(setting.AppSubURL + "/") } diff --git a/templates/explore/dataset_list.tmpl b/templates/explore/dataset_list.tmpl index 48ae78127..7abc03363 100755 --- 
a/templates/explore/dataset_list.tmpl +++ b/templates/explore/dataset_list.tmpl @@ -29,8 +29,12 @@ {{.Repo.OwnerName}} / {{.Title}}
- {{svg "octicon-tasklist" 16}} {{$.i18n.Tr (printf "dataset.task.%s" .Task)}} - {{svg "octicon-tag" 16}}{{$.i18n.Tr (printf "dataset.category.%s" .Category)}} + {{if .Task}} + {{svg "octicon-tasklist" 16}} {{$.i18n.Tr (printf "dataset.task.%s" .Task)}} + {{end}} + {{if .Category}} + {{svg "octicon-tag" 16}}{{$.i18n.Tr (printf "dataset.category.%s" .Category)}} + {{end}} {{svg "octicon-flame" 16}} {{.DownloadTimes}}
diff --git a/templates/explore/repo_orgtop.tmpl b/templates/explore/repo_orgtop.tmpl index f0e6d9118..df132e811 100755 --- a/templates/explore/repo_orgtop.tmpl +++ b/templates/explore/repo_orgtop.tmpl @@ -50,6 +50,13 @@ +
+
+ + 京东 + +
+
diff --git a/templates/repo/cloudbrain/index.tmpl b/templates/repo/cloudbrain/index.tmpl index 8dd104ef8..795e35aac 100755 --- a/templates/repo/cloudbrain/index.tmpl +++ b/templates/repo/cloudbrain/index.tmpl @@ -361,9 +361,9 @@ 调试 -
+ {{$.CsrfTokenHtml}} - + 停止
@@ -515,7 +515,7 @@ console.log("---------",index,job) const jobID = job.dataset.jobid; const repoPath = job.dataset.repopath; - if (job.textContent.trim() == 'STOPPED') { + if (job.textContent.trim() == 'STOPPED' || job.textContent.trim() == 'FAILED') { return } @@ -523,7 +523,6 @@ $.get(`/api/v1/repos/${repoPath}/cloudbrain/${jobID}`, (data) => { const jobID = data.JobID const status = data.JobStatus - console.log("status",status) if (status != job.textContent.trim()) { //$('#' + jobID).text(status) //if (status == 'STOPPED') { diff --git a/templates/repo/cloudbrain/new.tmpl b/templates/repo/cloudbrain/new.tmpl index c45776b89..b3b827558 100755 --- a/templates/repo/cloudbrain/new.tmpl +++ b/templates/repo/cloudbrain/new.tmpl @@ -89,6 +89,10 @@ display: none; } + .inline.required.field.cloudbrain_brainscore { + display: none; + } + .select2-container .select2-selection--single{ height:38px !important; } @@ -116,7 +120,7 @@
{{template "base/alert" .}} -
+

@@ -130,12 +134,19 @@
-
+
@@ -209,11 +220,15 @@
- +
- + +
+
+ +
- {{if ne .DecompressState 0}} + {{if eq .DecompressState 1}}
{{svg "octicon-file-directory" 16}} {{if $.IsSigned}} diff --git a/templates/repo/datasets/label/index.tmpl b/templates/repo/datasets/label/index.tmpl index cb268c5b9..9a0863974 100644 --- a/templates/repo/datasets/label/index.tmpl +++ b/templates/repo/datasets/label/index.tmpl @@ -123,7 +123,7 @@ diff --git a/templates/repo/modelarts/new.tmpl b/templates/repo/modelarts/new.tmpl index 8cfa680f7..79a31286f 100755 --- a/templates/repo/modelarts/new.tmpl +++ b/templates/repo/modelarts/new.tmpl @@ -100,7 +100,7 @@
{{template "base/alert" .}} -
+

@@ -179,11 +179,11 @@ let value_task = $("input[name='job_name']").val() - let re = /^[a-z0-9][a-z0-9-_]{1,36}$/ + let re = /^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$/ let flag = re.test(value_task) if(!flag){ $('#messageInfo').css('display','block') - let str = '只能以小写字母或数字开头且只包含小写字母、数字、_和-、最长36个字符。' + let str = '只能以小写字母或数字开头且只包含小写字母、数字、_和-,不能以_结尾,最长36个字符。' $('#messageInfo p').text(str) return false } diff --git a/web_src/js/components/EditTopics.vue b/web_src/js/components/EditTopics.vue index d2c871e03..d2109e178 100644 --- a/web_src/js/components/EditTopics.vue +++ b/web_src/js/components/EditTopics.vue @@ -11,16 +11,16 @@
-
{{arr.topic_name}}
+
{{arr.topic_name.toLowerCase()}}
- 点击或回车添加{{input}}标签 + 点击或回车添加{{input.toLowerCase()}}标签
-
{{input}}
+
{{input.toLowerCase()}}
@@ -134,7 +134,7 @@ export default { this.showSearchTopic = true } - else if(this.arrayTopics.indexOf(this.input)>-1){ + else if(this.arrayTopics.indexOf(this.input.toLowerCase())>-1){ this.showInputValue = false this.showSearchTopic = false @@ -142,7 +142,7 @@ export default { this.showInitTopic = [] let timestamp=new Date().getTime() - this.params.q = this.input + this.params.q = this.input.toLowerCase() this.params._ = timestamp this.$axios.get('/api/v1/topics/search',{ params:this.params @@ -164,7 +164,7 @@ export default { let findelement = this.array.some((item)=>{ - return item.topic_name===this.input + return item.topic_name===this.input.toLowerCase() }) this.showInputValue = !findelement @@ -224,11 +224,11 @@ export default { return }else{ let topic = this.input - if(this.arrayTopics.includes(topic)){ + if(this.arrayTopics.includes(topic.toLowerCase())){ return } else{ - this.arrayTopics.push(topic) + this.arrayTopics.push(topic.toLowerCase()) let topics = this.arrayTopics let strTopics = topics.join(',') @@ -250,7 +250,10 @@ export default { addPostTopic(){ if(this.showAddFlage){ - this.arrayTopics.pop() + // this.arrayTopics.pop() + + let cancleIndex = this.arrayTopics.indexOf(this.input) + this.arrayTopics.splice(cancleIndex,1) let topics = this.arrayTopics let strTopics = topics.join(',') let data = this.qs.stringify({ @@ -268,7 +271,7 @@ export default { } else if(!this.showAddFlage){ let topic = this.input - this.arrayTopics.push(topic) + this.arrayTopics.push(topic.toLowerCase()) let topics = this.arrayTopics let strTopics = topics.join(',')