| @@ -10,6 +10,7 @@ import ( | |||
| "io" | |||
| "path" | |||
| "strings" | |||
| "time" | |||
| "code.gitea.io/gitea/modules/log" | |||
| "code.gitea.io/gitea/modules/obs" | |||
| @@ -104,6 +105,14 @@ func (a *Attachment) IncreaseDownloadCount() error { | |||
| return nil | |||
| } | |||
| func (a *Attachment) UpdateDatasetUpdateUnix() error { | |||
| // Update the dataset's updated_unix timestamp to the current time. | |||
| if _, err := x.Exec("UPDATE `dataset` SET updated_unix="+fmt.Sprint(time.Now().Unix())+" WHERE id=?", a.DatasetID); err != nil { | |||
| return fmt.Errorf("UpdateDatasetUpdateUnix: %v", err) | |||
| } | |||
| return nil | |||
| } | |||
| // APIFormat converts models.Attachment to api.Attachment | |||
| func (a *Attachment) APIFormat() *api.Attachment { | |||
| return &api.Attachment{ | |||
| @@ -831,7 +831,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64 | |||
| result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) | |||
| result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) | |||
| codeLine := float64(dateRecord.CommitCodeSize) / 1000 | |||
| limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 100) | |||
| if codeLine >= limitCodeLine { | |||
| codeLine = limitCodeLine | |||
| } | |||
| result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) | |||
| result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) | |||
| result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) | |||
| @@ -2854,7 +2854,7 @@ mirror_sync_create = synced new reference <a href="%s/src/%s">%[2]s</a> to <a hr | |||
| mirror_sync_delete = synced and deleted reference <code>%[2]s</code> at <a href="%[1]s">%[3]s</a> from mirror | |||
| approve_pull_request = `approved <a href="%s/pulls/%s">%s#%[2]s</a>` | |||
| reject_pull_request = `suggested changes for <a href="%s/pulls/%s">%s#%[2]s</a>` | |||
| upload_dataset=`upload dataset <a href="%s/datasets?type=%s">%s</a>` | |||
| upload_dataset=`upload dataset <a href="%s/datasets">%s</a>` | |||
| task_gpudebugjob=`created CPU/GPU type debugging task<a href="%s/cloudbrain/%s">%s</a>` | |||
| task_npudebugjob=`created NPU type debugging task <a href="%s/modelarts/notebook/%s">%s</a>` | |||
| task_nputrainjob=`created NPU training task<a href="%s/modelarts/train-job/%s">%s</a>` | |||
| @@ -2864,7 +2864,7 @@ mirror_sync_create=从镜像同步了新的引用 <a href="%s/src/%s">%[2]s</a> | |||
| mirror_sync_delete=从镜像同步并从 <a href="%[1]s">%[3]s</a> 删除了引用 <code>%[2]s</code> | |||
| approve_pull_request=`同意了 <a href="%s/pulls/%s">%s#%[2]s</a>` | |||
| reject_pull_request=`建议变更 <a href="%s/pulls/%s">%s#%[2]s</a>` | |||
| upload_dataset=`上传了数据集文件 <a href="%s/datasets?type=%s">%s</a>` | |||
| upload_dataset=`上传了数据集文件 <a href="%s/datasets">%s</a>` | |||
| task_gpudebugjob=`创建了CPU/GPU类型调试任务 <a href="%s/cloudbrain/%s">%s</a>` | |||
| task_npudebugjob=`创建了NPU类型调试任务 <a href="%s/modelarts/notebook/%s">%s</a>` | |||
| task_nputrainjob=`创建了NPU类型训练任务 <a href="%s/modelarts/train-job/%s">%s</a>` | |||
| @@ -99,6 +99,11 @@ socket.onmessage = function (e) { | |||
| console.log("receive action type=" + record.OpType + " name=" + actionName + " but user is null."); | |||
| continue; | |||
| } | |||
| if(record.OpType == "24"){ | |||
| if(record.Content.indexOf("true") != -1){ | |||
| continue; | |||
| } | |||
| } | |||
| var recordPrefix = getMsg(record); | |||
| if(record.OpType == "6" || record.OpType == "10" || record.OpType == "12" || record.OpType == "13"){ | |||
| html += recordPrefix + actionName; | |||
| @@ -162,7 +167,7 @@ socket.onmessage = function (e) { | |||
| function getTaskLink(record){ | |||
| var re = getRepoLink(record); | |||
| if(record.OpType == 24){ | |||
| re = re + "/datasets?type=" + record.Content; | |||
| re = re + "/datasets"; | |||
| }else if(record.OpType == 25){ | |||
| re = re + "/cloudbrain/" + record.Content; | |||
| }else if(record.OpType == 26){ | |||
| @@ -101,16 +101,20 @@ function initPageInfo(){ | |||
| function searchItem(type,sortType){ | |||
| console.log("enter item 2."); | |||
| currentSearchKeyword = document.getElementById("keyword_input").value; | |||
| if(!isEmpty(currentSearchKeyword)){ | |||
| initPageInfo(); | |||
| currentSearchTableName = itemType[type]; | |||
| currentSearchSortBy = sortBy[sortType]; | |||
| currentSearchAscending = sortAscending[sortType]; | |||
| OnlySearchLabel =false; | |||
| page(currentPage); | |||
| if(OnlySearchLabel){ | |||
| doSearchLabel(currentSearchTableName,currentSearchKeyword,sortBy[sortType],sortAscending[sortType]) | |||
| }else{ | |||
| emptySearch(); | |||
| currentSearchKeyword = document.getElementById("keyword_input").value; | |||
| if(!isEmpty(currentSearchKeyword)){ | |||
| initPageInfo(); | |||
| currentSearchTableName = itemType[type]; | |||
| currentSearchSortBy = sortBy[sortType]; | |||
| currentSearchAscending = sortAscending[sortType]; | |||
| OnlySearchLabel =false; | |||
| page(currentPage); | |||
| }else{ | |||
| emptySearch(); | |||
| } | |||
| } | |||
| } | |||
| @@ -806,17 +810,21 @@ var repoAndOrgEN={ | |||
| function page(current){ | |||
| currentPage=current; | |||
| startIndex = currentPage -1; | |||
| if(startIndex < 1){ | |||
| startIndex = 1; | |||
| } | |||
| endIndex = currentPage + 2; | |||
| if(endIndex >= totalPage){ | |||
| endIndex = totalPage; | |||
| } | |||
| doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel); | |||
| } | |||
| function nextPage(){ | |||
| currentPage = currentPage+1; | |||
| console.log("currentPage=" + currentPage); | |||
| if(currentPage >= endIndex){ | |||
| startIndex=startIndex+1; | |||
| endIndex = endIndex +1; | |||
| } | |||
| page(currentPage); | |||
| } | |||
| @@ -824,10 +832,6 @@ function page(current){ | |||
| console.log("currentPage=" + currentPage); | |||
| if(currentPage > 1){ | |||
| currentPage = currentPage-1; | |||
| if(currentPage <= startIndex && startIndex > 1){ | |||
| startIndex = startIndex -1; | |||
| endIndex = endIndex - 1; | |||
| } | |||
| console.log("currentPage=" + (currentPage)); | |||
| page(currentPage); | |||
| } | |||
| @@ -862,7 +866,7 @@ function getYPosition(e){ | |||
| showTip(getLabel(isZh,"search_input_large_0"),"warning",left+5,top); | |||
| } | |||
| else if(goNum<=totalPage){ | |||
| page(goNum); | |||
| page(parseInt(goNum,10)); | |||
| } | |||
| else{ | |||
| showTip(getLabel(isZh,"search_input_maxed"),"warning",left+5,top); | |||
| @@ -908,6 +912,11 @@ function getYPosition(e){ | |||
| } | |||
| } | |||
| if (endIndex < totalPage-1){ | |||
| html += "..."; | |||
| html += "<a id=\"page_" + totalPage+ "\" class=\"item\" href=\"javascript:page(" + totalPage +")\">" + totalPage + "</a>"; | |||
| } | |||
| if(currentPage >=totalPage){ | |||
| html += "<a class=\"disabled item navigation\" href=\"javascript:nextPage()\"><i class=\"icon right arrow\"></i></a>"; | |||
| html += "<a class=\"disabled item navigation\" href=\"javascript:page(" + totalPage + ")\"><span class=\"navigation_label\">" + getLabel(isZh,"search_last_page") + "</span></a>"; | |||
| @@ -892,10 +892,9 @@ func CompleteMultipart(ctx *context.Context) { | |||
| ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err)) | |||
| return | |||
| } | |||
| attachment.UpdateDatasetUpdateUnix() | |||
| repository, _ := models.GetRepositoryByID(dataset.RepoID) | |||
| notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(attachment.Type), attachment.Name, models.ActionUploadAttachment) | |||
| notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment) | |||
| if attachment.DatasetID != 0 { | |||
| if isCanDecompress(attachment.Name) { | |||
| if typeCloudBrain == models.TypeCloudBrainOne { | |||
| @@ -247,7 +247,9 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm | |||
| func NotebookShow(ctx *context.Context) { | |||
| ctx.Data["PageIsCloudBrain"] = true | |||
| debugListType := ctx.Query("debugListType") | |||
| if debugListType == "" { | |||
| debugListType = "all" | |||
| } | |||
| var ID = ctx.Params(":id") | |||
| task, err := models.GetCloudbrainByIDWithDeleted(ID) | |||
| if err != nil { | |||
| @@ -40,8 +40,8 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac | |||
| dataHeader := map[string]string{ | |||
| "A1": ctx.Tr("user.static.id"), | |||
| "B1": ctx.Tr("user.static.name"), | |||
| "C1": ctx.Tr("user.static.codemergecount"), | |||
| "D1": ctx.Tr("user.static.UserIndex"), | |||
| "C1": ctx.Tr("user.static.UserIndex"), | |||
| "D1": ctx.Tr("user.static.codemergecount"), | |||
| "E1": ctx.Tr("user.static.commitcount"), | |||
| "F1": ctx.Tr("user.static.issuecount"), | |||
| "G1": ctx.Tr("user.static.commentcount"), | |||
| @@ -77,8 +77,8 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac | |||
| rows := fmt.Sprint(row) | |||
| xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) | |||
| xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) | |||
| xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) | |||
| xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||
| xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||
| xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) | |||
| xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) | |||
| xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) | |||
| xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) | |||
| @@ -239,8 +239,8 @@ func QueryUserStaticDataPage(ctx *context.Context) { | |||
| dataHeader := map[string]string{ | |||
| "A1": ctx.Tr("user.static.id"), | |||
| "B1": ctx.Tr("user.static.name"), | |||
| "C1": ctx.Tr("user.static.codemergecount"), | |||
| "D1": ctx.Tr("user.static.UserIndex"), | |||
| "C1": ctx.Tr("user.static.UserIndex"), | |||
| "D1": ctx.Tr("user.static.codemergecount"), | |||
| "E1": ctx.Tr("user.static.commitcount"), | |||
| "F1": ctx.Tr("user.static.issuecount"), | |||
| "G1": ctx.Tr("user.static.commentcount"), | |||
| @@ -270,8 +270,8 @@ func QueryUserStaticDataPage(ctx *context.Context) { | |||
| xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) | |||
| xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) | |||
| xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) | |||
| xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||
| xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||
| xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) | |||
| xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) | |||
| xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) | |||
| xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) | |||
| @@ -183,7 +183,7 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int) | |||
| topicsQuery := elastic.NewMatchQuery("topics", Key) | |||
| boolQ.Should(topicsQuery) | |||
| res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context()) | |||
| res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -200,15 +200,18 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int) | |||
| } | |||
| } | |||
| func getSort(SortBy string, ascending bool) elastic.Sorter { | |||
| var sort elastic.Sorter | |||
| sort = elastic.NewScoreSort() | |||
| if SortBy != "" { | |||
| if SortBy == "default" { | |||
| return sort | |||
| func getSort(SortBy string, ascending bool, secondSortBy string, secondAscending bool) []elastic.Sorter { | |||
| sort := make([]elastic.Sorter, 0) | |||
| if SortBy == "default" || SortBy == "" { | |||
| sort = append(sort, elastic.NewScoreSort()) | |||
| if secondSortBy != "" { | |||
| log.Info("SortBy=" + SortBy + " secondSortBy=" + secondSortBy) | |||
| sort = append(sort, elastic.NewFieldSort(secondSortBy).Order(secondAscending)) | |||
| } | |||
| return elastic.NewFieldSort(SortBy).Order(ascending) | |||
| } else { | |||
| sort = append(sort, elastic.NewFieldSort(SortBy).Order(ascending)) | |||
| } | |||
| log.Info("sort size=" + fmt.Sprint(len(sort))) | |||
| return sort | |||
| } | |||
| @@ -308,7 +311,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa | |||
| topicsQuery := elastic.NewMatchQuery("topics", Key).Boost(1).QueryName("f_third") | |||
| boolQ.Should(nameQuery, descriptionQuery, topicsQuery) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -330,7 +333,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa | |||
| } else { | |||
| log.Info("query all content.") | |||
| //搜索的属性要指定{"timestamp":{"unmapped_type":"date"}} | |||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -691,7 +694,7 @@ func searchUserOrOrg(ctx *context.Context, TableName string, Key string, Page in | |||
| boolQ.Must(UserOrOrgQuery) | |||
| } | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -849,7 +852,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, | |||
| fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third") | |||
| categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth") | |||
| boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -864,7 +867,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, | |||
| } else { | |||
| log.Info("query all datasets.") | |||
| //搜索的属性要指定{"timestamp":{"unmapped_type":"date"}} | |||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -1057,7 +1060,7 @@ func searchIssueOrPr(ctx *context.Context, TableName string, Key string, Page in | |||
| boolQ.Must(isIssueQuery) | |||
| } | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -10,7 +10,7 @@ import ( | |||
| "github.com/elliotchance/orderedmap" | |||
| ) | |||
| var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 25, 26, 27, 28, 29, 30, 31} | |||
| var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31} | |||
| type ClientsManager struct { | |||
| Clients *orderedmap.OrderedMap | |||
| @@ -187,7 +187,7 @@ td, th { | |||
| {{.i18n.Tr "repo.cloudbrain"}} | |||
| </a> | |||
| <div class="divider"> / </div> | |||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}"> | |||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all"> | |||
| {{$.i18n.Tr "repo.modelarts.notebook"}} | |||
| </a> | |||
| <div class="divider"> / </div> | |||
| @@ -193,7 +193,7 @@ td, th { | |||
| {{.i18n.Tr "repo.cloudbrain"}} | |||
| </a> | |||
| <div class="divider"> / </div> | |||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}"> | |||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all"> | |||
| {{$.i18n.Tr "repo.modelarts.notebook"}} | |||
| </a> | |||
| <div class="divider"> / </div> | |||
| @@ -71,7 +71,7 @@ | |||
| {{ $index := index .GetIssueInfos 0}} | |||
| {{$.i18n.Tr "action.comment_pull" .GetRepoLink $index .ShortRepoPath | Str2html}} | |||
| {{else if eq .GetOpType 24}} | |||
| {{$.i18n.Tr "action.upload_dataset" .GetRepoLink .Content .RefName | Str2html}} | |||
| {{$.i18n.Tr "action.upload_dataset" .GetRepoLink .RefName | Str2html}} | |||
| {{else if eq .GetOpType 25}} | |||
| {{$.i18n.Tr "action.task_gpudebugjob" .GetRepoLink .Content .RefName | Str2html}} | |||
| {{else if eq .GetOpType 26}} | |||
| @@ -62,11 +62,6 @@ | |||
| <a :href="AppSubUrl +'../../../'+ scope.row.Name">{{scope.row.Name}} </a> | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="CodeMergeCount" | |||
| label="PR数" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="UserIndex" | |||
| label="用户指数" | |||
| @@ -76,6 +71,11 @@ | |||
| {{scope.row.UserIndex | rounding}} | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="CodeMergeCount" | |||
| label="PR数" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="CommitCount" | |||
| label="commit数" | |||