| @@ -10,6 +10,7 @@ import ( | |||
| "io" | |||
| "path" | |||
| "strings" | |||
| "time" | |||
| "code.gitea.io/gitea/modules/log" | |||
| "code.gitea.io/gitea/modules/obs" | |||
| @@ -64,6 +65,7 @@ type AttachmentInfo struct { | |||
| Repo *Repository `xorm:"extends"` | |||
| RelAvatarLink string `xorm:"extends"` | |||
| UserName string `xorm:"extends"` | |||
| Recommend bool `xorm:"-"` | |||
| } | |||
| type AttachmentsOptions struct { | |||
| @@ -78,6 +80,7 @@ type AttachmentsOptions struct { | |||
| JustNeedZipFile bool | |||
| NeedRepoInfo bool | |||
| Keyword string | |||
| RecommendOnly bool | |||
| } | |||
| func (a *Attachment) AfterUpdate() { | |||
| @@ -104,6 +107,14 @@ func (a *Attachment) IncreaseDownloadCount() error { | |||
| return nil | |||
| } | |||
| func (a *Attachment) UpdateDatasetUpdateUnix() error { | |||
| // Update download count. | |||
| if _, err := x.Exec("UPDATE `dataset` SET updated_unix="+fmt.Sprint(time.Now().Unix())+" WHERE id=?", a.DatasetID); err != nil { | |||
| return fmt.Errorf("UpdateDatasetUpdateUnix: %v", err) | |||
| } | |||
| return nil | |||
| } | |||
| // APIFormat converts models.Attachment to api.Attachment | |||
| func (a *Attachment) APIFormat() *api.Attachment { | |||
| return &api.Attachment{ | |||
| @@ -570,6 +581,11 @@ func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) { | |||
| builder.Eq{"attachment.is_private": opts.IsPrivate}, | |||
| ) | |||
| } | |||
| if opts.RecommendOnly { | |||
| cond = cond.And(builder.In("attachment.id", builder.Select("attachment.id"). | |||
| From("attachment"). | |||
| Join("INNER", "dataset", "attachment.dataset_id = dataset.id and dataset.recommend=true"))) | |||
| } | |||
| if opts.JustNeedZipFile { | |||
| var DecompressState []int32 | |||
| @@ -618,6 +634,7 @@ func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) { | |||
| if err != nil { | |||
| return nil, 0, fmt.Errorf("GetDatasetByID failed error: %v", err) | |||
| } | |||
| attachment.Recommend = dataset.Recommend | |||
| repo, err := GetRepositoryByID(dataset.RepoID) | |||
| if err == nil { | |||
| attachment.Repo = repo | |||
| @@ -580,6 +580,8 @@ type CommitImageParams struct { | |||
| Topics []string | |||
| CloudBrainType int | |||
| UID int64 | |||
| Place string | |||
| Type int | |||
| } | |||
| type CommitImageResult struct { | |||
| @@ -567,12 +567,12 @@ func isImageStaring(e Engine, userID, imageID int64) bool { | |||
| } | |||
| func RecommendImage(imageId int64, recommond bool) error { | |||
| image := Image{Type: getRecommondType(recommond)} | |||
| image := Image{Type: GetRecommondType(recommond)} | |||
| _, err := x.ID(imageId).Cols("type").Update(image) | |||
| return err | |||
| } | |||
| func getRecommondType(recommond bool) int { | |||
| func GetRecommondType(recommond bool) int { | |||
| if recommond { | |||
| return RECOMMOND_TYPE | |||
| @@ -23,7 +23,8 @@ type Dataset struct { | |||
| Category string | |||
| Description string `xorm:"TEXT"` | |||
| DownloadTimes int64 | |||
| NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"` | |||
| NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"` | |||
| Recommend bool `xorm:"INDEX NOT NULL DEFAULT false"` | |||
| License string | |||
| Task string | |||
| ReleaseID int64 `xorm:"INDEX"` | |||
| @@ -99,6 +100,7 @@ type SearchDatasetOptions struct { | |||
| OwnerID int64 | |||
| RepoID int64 | |||
| IncludePublic bool | |||
| RecommendOnly bool | |||
| Category string | |||
| Task string | |||
| License string | |||
| @@ -132,6 +134,13 @@ func CreateDataset(dataset *Dataset) (err error) { | |||
| } | |||
| func RecommendDataset(dataSetId int64, recommend bool) error { | |||
| dataset := Dataset{Recommend: recommend} | |||
| _, err := x.ID(dataSetId).Cols("recommend").Update(dataset) | |||
| return err | |||
| } | |||
| func SearchDataset(opts *SearchDatasetOptions) (DatasetList, int64, error) { | |||
| cond := SearchDatasetCondition(opts) | |||
| return SearchDatasetByCondition(opts, cond) | |||
| @@ -146,6 +155,9 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { | |||
| if opts.RepoID > 0 { | |||
| cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID}) | |||
| } | |||
| if opts.RecommendOnly { | |||
| cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly}) | |||
| } | |||
| if opts.IncludePublic { | |||
| cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) | |||
| @@ -198,7 +210,7 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da | |||
| defer sess.Close() | |||
| datasets := make(DatasetList, 0, opts.PageSize) | |||
| selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars" | |||
| selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars,dataset.recommend" | |||
| count, err := sess.Distinct("dataset.id").Join("INNER", "repository", "repository.id = dataset.repo_id"). | |||
| Join("INNER", "attachment", "attachment.dataset_id=dataset.id"). | |||
| @@ -1554,6 +1554,11 @@ func GetAllMirrorRepositoriesCount() (int64, error) { | |||
| return x.Where("is_mirror = ?", true).Count(repo) | |||
| } | |||
| func GetAllOrgRepositoriesCount() (int64, error) { | |||
| repo := new(Repository) | |||
| return x.Table("repository").Join("INNER", []string{"\"user\"", "u"}, "repository.owner_id = u.id and u.type=1").Count(repo) | |||
| } | |||
| func GetAllForkRepositoriesCount() (int64, error) { | |||
| repo := new(Repository) | |||
| return x.Where("is_fork = ?", true).Count(repo) | |||
| @@ -2,6 +2,8 @@ package models | |||
| import ( | |||
| "fmt" | |||
| "strconv" | |||
| "time" | |||
| "code.gitea.io/gitea/modules/timeutil" | |||
| ) | |||
| @@ -45,6 +47,7 @@ type SummaryStatistic struct { | |||
| NumRepoFork int64 `xorm:"NOT NULL DEFAULT 0"` | |||
| NumRepoMirror int64 `xorm:"NOT NULL DEFAULT 0"` | |||
| NumRepoSelf int64 `xorm:"NOT NULL DEFAULT 0"` | |||
| NumRepoOrg int64 `xorm:"NOT NULL DEFAULT 0"` | |||
| CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` | |||
| UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` | |||
| } | |||
| @@ -69,6 +72,37 @@ func DeleteSummaryStatisticDaily(date string) error { | |||
| return nil | |||
| } | |||
| func GetLatest2SummaryStatistic() ([]*SummaryStatistic, error) { | |||
| summaryStatistics := make([]*SummaryStatistic, 0) | |||
| err := xStatistic.Desc("created_unix").Limit(2).Find(&summaryStatistics) | |||
| return summaryStatistics, err | |||
| } | |||
| func GetSummaryStatisticByTimeCount(beginTime time.Time, endTime time.Time) (int64, error) { | |||
| summaryStatistics := new(SummaryStatistic) | |||
| total, err := xStatistic.Asc("created_unix").Where("created_unix>=" + strconv.FormatInt(beginTime.Unix(), 10) + " and created_unix<" + strconv.FormatInt(endTime.Unix(), 10)).Count(summaryStatistics) | |||
| return total, err | |||
| } | |||
| func GetSummaryStatisticByDateCount(dates []string) (int64, error) { | |||
| summaryStatistics := new(SummaryStatistic) | |||
| total, err := xStatistic.Asc("created_unix").In("date", dates).Count(summaryStatistics) | |||
| return total, err | |||
| } | |||
| func GetSummaryStatisticByTime(beginTime time.Time, endTime time.Time, page int, pageSize int) ([]*SummaryStatistic, error) { | |||
| summaryStatistics := make([]*SummaryStatistic, 0) | |||
| err := xStatistic.Asc("created_unix").Limit(pageSize+1, (page-1)*pageSize).Where("created_unix>=" + strconv.FormatInt(beginTime.Unix(), 10) + " and created_unix<" + strconv.FormatInt(endTime.Unix(), 10)).Find(&summaryStatistics) | |||
| return summaryStatistics, err | |||
| } | |||
| func GetSummaryStatisticByDates(dates []string, page int, pageSize int) ([]*SummaryStatistic, error) { | |||
| summaryStatistics := make([]*SummaryStatistic, 0) | |||
| err := xStatistic.Asc("created_unix").In("date", dates).Limit(pageSize+1, (page-1)*pageSize).Find(&summaryStatistics) | |||
| return summaryStatistics, err | |||
| } | |||
| func InsertSummaryStatistic(summaryStatistic *SummaryStatistic) (int64, error) { | |||
| return xStatistic.Insert(summaryStatistic) | |||
| } | |||
| @@ -831,7 +831,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64 | |||
| result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) | |||
| result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) | |||
| codeLine := float64(dateRecord.CommitCodeSize) / 1000 | |||
| limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 100) | |||
| if codeLine >= limitCodeLine { | |||
| codeLine = limitCodeLine | |||
| } | |||
| result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) | |||
| result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) | |||
| result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) | |||
| result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) | |||
| @@ -33,6 +33,16 @@ type CommitImageCloudBrainForm struct { | |||
| Topics string `form:"topics"` | |||
| } | |||
| type CommitAdminImageCloudBrainForm struct { | |||
| Description string `form:"description" binding:"Required"` | |||
| Type int `form:"type" binding:"Required"` | |||
| Tag string `form:"tag" binding:"Required;MaxSize(100)" ` | |||
| IsPrivate bool `form:"isPrivate" binding:"Required"` | |||
| Topics string `form:"topics"` | |||
| Place string `form:"place" binding:"Required"` | |||
| IsRecommend bool `form:"isRecommend" binding:"Required"` | |||
| } | |||
| type EditImageCloudBrainForm struct { | |||
| ID int64 `form:"id" binding:"Required"` | |||
| Description string `form:"description" binding:"Required"` | |||
| @@ -312,12 +312,51 @@ sendjob: | |||
| return nil | |||
| }) | |||
| if err == nil { | |||
| go updateImageStatus(image, isSetCreatedUnix, createTime) | |||
| } | |||
| return err | |||
| } | |||
| func CommitAdminImage(params models.CommitImageParams) error { | |||
| exist, err := models.IsImageExist(params.ImageTag) | |||
| if err != nil { | |||
| return fmt.Errorf("resty CommitImage: %v", err) | |||
| } | |||
| if exist { | |||
| return models.ErrorImageTagExist{ | |||
| Tag: params.ImageTag, | |||
| } | |||
| } | |||
| image := models.Image{ | |||
| CloudbrainType: params.CloudBrainType, | |||
| UID: params.UID, | |||
| IsPrivate: params.IsPrivate, | |||
| Tag: params.ImageTag, | |||
| Description: params.ImageDescription, | |||
| Place: params.Place, | |||
| Status: models.IMAGE_STATUS_SUCCESS, | |||
| Type: params.Type, | |||
| } | |||
| err = models.WithTx(func(ctx models.DBContext) error { | |||
| if err := models.CreateLocalImage(&image); err != nil { | |||
| log.Error("Failed to insert image record.", err) | |||
| return fmt.Errorf("resty CommitImage: %v", err) | |||
| } | |||
| if err := models.SaveImageTopics(image.ID, params.Topics...); err != nil { | |||
| log.Error("Failed to insert image record.", err) | |||
| return fmt.Errorf("resty CommitImage: %v", err) | |||
| } | |||
| return nil | |||
| }) | |||
| return err | |||
| } | |||
| func updateImageStatus(image models.Image, isSetCreatedUnix bool, createTime time.Time) { | |||
| attemps := 5 | |||
| commitSuccess := false | |||
| @@ -2,6 +2,7 @@ package storage | |||
| import ( | |||
| "encoding/xml" | |||
| "errors" | |||
| "path" | |||
| "sort" | |||
| "strconv" | |||
| @@ -129,7 +130,7 @@ func NewMultiPartUpload(uuid string) (string, error) { | |||
| return core.NewMultipartUpload(bucketName, objectName, miniov6.PutObjectOptions{}) | |||
| } | |||
| func CompleteMultiPartUpload(uuid string, uploadID string) (string, error) { | |||
| func CompleteMultiPartUpload(uuid string, uploadID string, totalChunks int) (string, error) { | |||
| client, core, err := getClients() | |||
| if err != nil { | |||
| log.Error("getClients failed:", err.Error()) | |||
| @@ -146,6 +147,11 @@ func CompleteMultiPartUpload(uuid string, uploadID string) (string, error) { | |||
| return "", err | |||
| } | |||
| if len(partInfos) != totalChunks { | |||
| log.Error("ListObjectParts number(%d) is not equal the set total chunk number(%d)", len(partInfos), totalChunks) | |||
| return "", errors.New("the parts is not complete") | |||
| } | |||
| var complMultipartUpload completeMultipartUpload | |||
| for _, partInfo := range partInfos { | |||
| complMultipartUpload.Parts = append(complMultipartUpload.Parts, miniov6.CompletePart{ | |||
| @@ -59,21 +59,55 @@ func ObsHasObject(path string) (bool, error) { | |||
| return hasObject, nil | |||
| } | |||
| func listAllParts(uuid, uploadID, key string) (output *obs.ListPartsOutput, err error) { | |||
| output = &obs.ListPartsOutput{} | |||
| partNumberMarker := 0 | |||
| for { | |||
| temp, err := ObsCli.ListParts(&obs.ListPartsInput{ | |||
| Bucket: setting.Bucket, | |||
| Key: key, | |||
| UploadId: uploadID, | |||
| MaxParts: MAX_LIST_PARTS, | |||
| PartNumberMarker: partNumberMarker, | |||
| }) | |||
| if err != nil { | |||
| log.Error("ListParts failed:", err.Error()) | |||
| return output, err | |||
| } | |||
| partNumberMarker = temp.NextPartNumberMarker | |||
| log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, temp.MaxParts, temp.PartNumberMarker, temp.NextPartNumberMarker, len(temp.Parts)) | |||
| for _, partInfo := range temp.Parts { | |||
| output.Parts = append(output.Parts, obs.Part{ | |||
| PartNumber: partInfo.PartNumber, | |||
| ETag: partInfo.ETag, | |||
| }) | |||
| } | |||
| if !temp.IsTruncated { | |||
| break | |||
| } else { | |||
| continue | |||
| } | |||
| break | |||
| } | |||
| return output, nil | |||
| } | |||
| func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) { | |||
| key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") | |||
| output, err := ObsCli.ListParts(&obs.ListPartsInput{ | |||
| Bucket: setting.Bucket, | |||
| Key: key, | |||
| UploadId: uploadID, | |||
| }) | |||
| allParts, err := listAllParts(uuid, uploadID, key) | |||
| if err != nil { | |||
| log.Error("ListParts failed:", err.Error()) | |||
| log.Error("listAllParts failed: %v", err) | |||
| return "", err | |||
| } | |||
| var chunks string | |||
| for _, partInfo := range output.Parts { | |||
| for _, partInfo := range allParts.Parts { | |||
| chunks += strconv.Itoa(partInfo.PartNumber) + "-" + partInfo.ETag + "," | |||
| } | |||
| @@ -94,45 +128,25 @@ func NewObsMultiPartUpload(uuid, fileName string) (string, error) { | |||
| return output.UploadId, nil | |||
| } | |||
| func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error { | |||
| func CompleteObsMultiPartUpload(uuid, uploadID, fileName string, totalChunks int) error { | |||
| input := &obs.CompleteMultipartUploadInput{} | |||
| input.Bucket = setting.Bucket | |||
| input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") | |||
| input.UploadId = uploadID | |||
| partNumberMarker := 0 | |||
| for { | |||
| output, err := ObsCli.ListParts(&obs.ListPartsInput{ | |||
| Bucket: setting.Bucket, | |||
| Key: input.Key, | |||
| UploadId: uploadID, | |||
| MaxParts: MAX_LIST_PARTS, | |||
| PartNumberMarker: partNumberMarker, | |||
| }) | |||
| if err != nil { | |||
| log.Error("ListParts failed:", err.Error()) | |||
| return err | |||
| } | |||
| partNumberMarker = output.NextPartNumberMarker | |||
| log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, output.MaxParts, output.PartNumberMarker, output.NextPartNumberMarker, len(output.Parts)) | |||
| for _, partInfo := range output.Parts { | |||
| input.Parts = append(input.Parts, obs.Part{ | |||
| PartNumber: partInfo.PartNumber, | |||
| ETag: partInfo.ETag, | |||
| }) | |||
| } | |||
| if len(output.Parts) < output.MaxParts { | |||
| break | |||
| } else { | |||
| continue | |||
| } | |||
| allParts, err := listAllParts(uuid, uploadID, input.Key) | |||
| if err != nil { | |||
| log.Error("listAllParts failed: %v", err) | |||
| return err | |||
| } | |||
| break | |||
| if len(allParts.Parts) != totalChunks { | |||
| log.Error("listAllParts number(%d) is not equal the set total chunk number(%d)", len(allParts.Parts), totalChunks) | |||
| return errors.New("the parts is not complete") | |||
| } | |||
| input.Parts = allParts.Parts | |||
| output, err := ObsCli.CompleteMultipartUpload(input) | |||
| if err != nil { | |||
| log.Error("CompleteMultipartUpload failed:", err.Error()) | |||
| @@ -937,6 +937,15 @@ model_manager = Model | |||
| model_noright=No right | |||
| model_rename=Duplicate model name, please modify model name. | |||
| date=Date | |||
| repo_add=Project Increment | |||
| repo_total=Project Total | |||
| repo_public_add=Public Project Increment | |||
| repo_private_add=Private Project Increment | |||
| repo_fork_add=Fork Project Increment | |||
| repo_mirror_add=Mirror Project Increment | |||
| repo_self_add=Custom Project Increment | |||
| debug=Debug | |||
| debug_again=Restart | |||
| stop=Stop | |||
| @@ -1001,7 +1010,9 @@ get_repo_stat_error=Can not get the statistics of the repository. | |||
| get_repo_info_error=Can not get the information of the repository. | |||
| generate_statistic_file_error=Failed to generate file. | |||
| repo_stat_inspect=ProjectAnalysis | |||
| repo_stat_develop=ProjectGrowthAnalysis | |||
| all=All | |||
| current_year=Current_Year | |||
| computing.all = All | |||
| computing.Introduction=Introduction | |||
| @@ -1389,6 +1400,7 @@ issues.filter_sort.feweststars = Fewest stars | |||
| issues.filter_sort.mostforks = Most forks | |||
| issues.filter_sort.fewestforks = Fewest forks | |||
| issues.filter_sort.downloadtimes = Most downloaded | |||
| issues.filter_sort.moststars = Most star | |||
| issues.action_open = Open | |||
| issues.action_close = Close | |||
| issues.action_label = Label | |||
| @@ -2502,11 +2514,15 @@ repos.contributor=Contributor | |||
| repos.yes=Yes | |||
| repos.no=No | |||
| images.recommend = Recommend | |||
| images.unrecommend = Unrecommend | |||
| datasets.dataset_manage_panel= Dataset Manage | |||
| datasets.owner=Owner | |||
| datasets.name=name | |||
| datasets.private=Private | |||
| datasets.recommend=Set recommend | |||
| datasets.unrecommend=Set unrecommend | |||
| cloudbrain.all_task_types=All Task Types | |||
| cloudbrain.all_computing_resources=All Computing Resources | |||
| @@ -2854,7 +2870,7 @@ mirror_sync_create = synced new reference <a href="%s/src/%s">%[2]s</a> to <a hr | |||
| mirror_sync_delete = synced and deleted reference <code>%[2]s</code> at <a href="%[1]s">%[3]s</a> from mirror | |||
| approve_pull_request = `approved <a href="%s/pulls/%s">%s#%[2]s</a>` | |||
| reject_pull_request = `suggested changes for <a href="%s/pulls/%s">%s#%[2]s</a>` | |||
| upload_dataset=`upload dataset <a href="%s/datasets?type=%s">%s</a>` | |||
| upload_dataset=`upload dataset <a href="%s/datasets">%s</a>` | |||
| task_gpudebugjob=`created CPU/GPU type debugging task<a href="%s/cloudbrain/%s">%s</a>` | |||
| task_npudebugjob=`created NPU type debugging task <a href="%s/modelarts/notebook/%s">%s</a>` | |||
| task_nputrainjob=`created NPU training task<a href="%s/modelarts/train-job/%s">%s</a>` | |||
| @@ -2964,6 +2980,7 @@ snn4imagenet_path = Snn4imagenet script path | |||
| brainscore_path = Brainscore script path | |||
| start_command = Start command | |||
| choose_mirror = select mirror or enter mirror path | |||
| input_mirror = Please enter image path | |||
| select_dataset = select dataset | |||
| specification = specification | |||
| select_specification = select specification | |||
| @@ -938,6 +938,16 @@ model_manager = 模型 | |||
| model_noright=无权限操作 | |||
| model_rename=模型名称重复,请修改模型名称 | |||
| date=日期 | |||
| repo_add=新增项目 | |||
| repo_total=累计项目 | |||
| repo_public_add=新增公开项目 | |||
| repo_private_add=新增私有项目 | |||
| repo_fork_add=新增派生项目 | |||
| repo_mirror_add=新增镜像项目 | |||
| repo_self_add=新增自建项目 | |||
| debug=调试 | |||
| debug_again=再次调试 | |||
| stop=停止 | |||
| @@ -1009,7 +1019,9 @@ get_repo_stat_error=查询当前仓库的统计信息失败。 | |||
| get_repo_info_error=查询当前仓库信息失败。 | |||
| generate_statistic_file_error=生成文件失败。 | |||
| repo_stat_inspect=项目分析 | |||
| repo_stat_develop=项目增长趋势 | |||
| all=所有 | |||
| current_year=今年 | |||
| computing.all=全部 | |||
| computing.Introduction=简介 | |||
| @@ -1401,6 +1413,7 @@ issues.filter_sort.feweststars=点赞由少到多 | |||
| issues.filter_sort.mostforks=派生由多到少 | |||
| issues.filter_sort.fewestforks=派生由少到多 | |||
| issues.filter_sort.downloadtimes=下载次数 | |||
| issues.filter_sort.moststars=收藏数量 | |||
| issues.action_open=开启 | |||
| issues.action_close=关闭 | |||
| issues.action_label=标签 | |||
| @@ -2512,11 +2525,15 @@ repos.contributor=贡献者数 | |||
| repos.yes=是 | |||
| repos.no=否 | |||
| images.recommend = 推荐 | |||
| images.unrecommend = 不推荐 | |||
| datasets.dataset_manage_panel=数据集管理 | |||
| datasets.owner=所有者 | |||
| datasets.name=名称 | |||
| datasets.private=私有 | |||
| datasets.recommend=设为推荐 | |||
| datasets.unrecommend=取消推荐 | |||
| cloudbrain.all_task_types=全部任务类型 | |||
| cloudbrain.all_computing_resources=全部计算资源 | |||
| @@ -2864,7 +2881,7 @@ mirror_sync_create=从镜像同步了新的引用 <a href="%s/src/%s">%[2]s</a> | |||
| mirror_sync_delete=从镜像同步并从 <a href="%[1]s">%[3]s</a> 删除了引用 <code>%[2]s</code> | |||
| approve_pull_request=`同意了 <a href="%s/pulls/%s">%s#%[2]s</a>` | |||
| reject_pull_request=`建议变更 <a href="%s/pulls/%s">%s#%[2]s</a>` | |||
| upload_dataset=`上传了数据集文件 <a href="%s/datasets?type=%s">%s</a>` | |||
| upload_dataset=`上传了数据集文件 <a href="%s/datasets">%s</a>` | |||
| task_gpudebugjob=`创建了CPU/GPU类型调试任务 <a href="%s/cloudbrain/%s">%s</a>` | |||
| task_npudebugjob=`创建了NPU类型调试任务 <a href="%s/modelarts/notebook/%s">%s</a>` | |||
| task_nputrainjob=`创建了NPU类型训练任务 <a href="%s/modelarts/train-job/%s">%s</a>` | |||
| @@ -2974,6 +2991,7 @@ snn4imagenet_path = snn4imagenet脚本存放路径 | |||
| brainscore_path = brainscore脚本存放路径 | |||
| start_command = 启动命令 | |||
| choose_mirror = 选择镜像或输入镜像地址 | |||
| input_mirror = 请输入云脑镜像地址 | |||
| select_dataset = 选择数据集 | |||
| specification = 规格 | |||
| select_specification = 选择资源规格 | |||
| @@ -99,6 +99,11 @@ socket.onmessage = function (e) { | |||
| console.log("receive action type=" + record.OpType + " name=" + actionName + " but user is null."); | |||
| continue; | |||
| } | |||
| if(record.OpType == "24"){ | |||
| if(record.Content.indexOf("true") != -1){ | |||
| continue; | |||
| } | |||
| } | |||
| var recordPrefix = getMsg(record); | |||
| if(record.OpType == "6" || record.OpType == "10" || record.OpType == "12" || record.OpType == "13"){ | |||
| html += recordPrefix + actionName; | |||
| @@ -162,7 +167,7 @@ socket.onmessage = function (e) { | |||
| function getTaskLink(record){ | |||
| var re = getRepoLink(record); | |||
| if(record.OpType == 24){ | |||
| re = re + "/datasets?type=" + record.Content; | |||
| re = re + "/datasets"; | |||
| }else if(record.OpType == 25){ | |||
| re = re + "/cloudbrain/" + record.Content; | |||
| }else if(record.OpType == 26){ | |||
| @@ -101,16 +101,20 @@ function initPageInfo(){ | |||
| function searchItem(type,sortType){ | |||
| console.log("enter item 2."); | |||
| currentSearchKeyword = document.getElementById("keyword_input").value; | |||
| if(!isEmpty(currentSearchKeyword)){ | |||
| initPageInfo(); | |||
| currentSearchTableName = itemType[type]; | |||
| currentSearchSortBy = sortBy[sortType]; | |||
| currentSearchAscending = sortAscending[sortType]; | |||
| OnlySearchLabel =false; | |||
| page(currentPage); | |||
| if(OnlySearchLabel){ | |||
| doSearchLabel(currentSearchTableName,currentSearchKeyword,sortBy[sortType],sortAscending[sortType]) | |||
| }else{ | |||
| emptySearch(); | |||
| currentSearchKeyword = document.getElementById("keyword_input").value; | |||
| if(!isEmpty(currentSearchKeyword)){ | |||
| initPageInfo(); | |||
| currentSearchTableName = itemType[type]; | |||
| currentSearchSortBy = sortBy[sortType]; | |||
| currentSearchAscending = sortAscending[sortType]; | |||
| OnlySearchLabel =false; | |||
| page(currentPage); | |||
| }else{ | |||
| emptySearch(); | |||
| } | |||
| } | |||
| } | |||
| @@ -806,17 +810,21 @@ var repoAndOrgEN={ | |||
| function page(current){ | |||
| currentPage=current; | |||
| startIndex = currentPage -1; | |||
| if(startIndex < 1){ | |||
| startIndex = 1; | |||
| } | |||
| endIndex = currentPage + 2; | |||
| if(endIndex >= totalPage){ | |||
| endIndex = totalPage; | |||
| } | |||
| doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel); | |||
| } | |||
| function nextPage(){ | |||
| currentPage = currentPage+1; | |||
| console.log("currentPage=" + currentPage); | |||
| if(currentPage >= endIndex){ | |||
| startIndex=startIndex+1; | |||
| endIndex = endIndex +1; | |||
| } | |||
| page(currentPage); | |||
| } | |||
| @@ -824,10 +832,6 @@ function page(current){ | |||
| console.log("currentPage=" + currentPage); | |||
| if(currentPage > 1){ | |||
| currentPage = currentPage-1; | |||
| if(currentPage <= startIndex && startIndex > 1){ | |||
| startIndex = startIndex -1; | |||
| endIndex = endIndex - 1; | |||
| } | |||
| console.log("currentPage=" + (currentPage)); | |||
| page(currentPage); | |||
| } | |||
| @@ -862,7 +866,7 @@ function getYPosition(e){ | |||
| showTip(getLabel(isZh,"search_input_large_0"),"warning",left+5,top); | |||
| } | |||
| else if(goNum<=totalPage){ | |||
| page(goNum); | |||
| page(parseInt(goNum,10)); | |||
| } | |||
| else{ | |||
| showTip(getLabel(isZh,"search_input_maxed"),"warning",left+5,top); | |||
| @@ -908,6 +912,11 @@ function getYPosition(e){ | |||
| } | |||
| } | |||
| if (endIndex < totalPage-1){ | |||
| html += "..."; | |||
| html += "<a id=\"page_" + totalPage+ "\" class=\"item\" href=\"javascript:page(" + totalPage +")\">" + totalPage + "</a>"; | |||
| } | |||
| if(currentPage >=totalPage){ | |||
| html += "<a class=\"disabled item navigation\" href=\"javascript:nextPage()\"><i class=\"icon right arrow\"></i></a>"; | |||
| html += "<a class=\"disabled item navigation\" href=\"javascript:page(" + totalPage + ")\"><span class=\"navigation_label\">" + getLabel(isZh,"search_last_page") + "</span></a>"; | |||
| @@ -21,6 +21,7 @@ import ( | |||
| const ( | |||
| tplCloudBrains base.TplName = "admin/cloudbrain/list" | |||
| tplImages base.TplName = "admin/cloudbrain/images" | |||
| tplCommitImages base.TplName = "admin/cloudbrain/imagecommit" | |||
| EXCEL_DATE_FORMAT = "20060102150405" | |||
| CREATE_TIME_FORMAT = "2006/01/02 15:04:05" | |||
| ) | |||
| @@ -114,6 +115,12 @@ func Images(ctx *context.Context) { | |||
| } | |||
| func CloudBrainCommitImageShow(ctx *context.Context) { | |||
| ctx.Data["PageIsAdminImages"] = true | |||
| ctx.HTML(200, tplCommitImages) | |||
| } | |||
| func DownloadCloudBrains(ctx *context.Context) { | |||
| page := 1 | |||
| @@ -1,6 +1,8 @@ | |||
| package admin | |||
| import ( | |||
| "net/http" | |||
| "strconv" | |||
| "strings" | |||
| "code.gitea.io/gitea/models" | |||
| @@ -49,6 +51,8 @@ func Datasets(ctx *context.Context) { | |||
| orderBy = models.SearchOrderBySizeReverse | |||
| case "size": | |||
| orderBy = models.SearchOrderBySize | |||
| case "downloadtimes": | |||
| orderBy = models.SearchOrderByDownloadTimes | |||
| case "moststars": | |||
| orderBy = models.SearchOrderByStarsReverse | |||
| case "feweststars": | |||
| @@ -70,6 +74,7 @@ func Datasets(ctx *context.Context) { | |||
| PageSize: setting.UI.ExplorePagingNum, | |||
| }, | |||
| Keyword: keyword, | |||
| RecommendOnly: ctx.QueryBool("recommend"), | |||
| SearchOrderBy: orderBy, | |||
| }) | |||
| if err != nil { | |||
| @@ -80,7 +85,7 @@ func Datasets(ctx *context.Context) { | |||
| ctx.Data["Keyword"] = keyword | |||
| ctx.Data["Total"] = count | |||
| ctx.Data["Datasets"] = datasets | |||
| ctx.Data["Recommend"] = ctx.QueryBool("recommend") | |||
| pager := context.NewPagination(int(count), setting.UI.ExplorePagingNum, page, 5) | |||
| pager.SetDefaultParams(ctx) | |||
| ctx.Data["Page"] = pager | |||
| @@ -88,6 +93,23 @@ func Datasets(ctx *context.Context) { | |||
| ctx.HTML(200, tplDatasets) | |||
| } | |||
| func DatasetAction(ctx *context.Context) { | |||
| var err error | |||
| datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64) | |||
| switch ctx.Params(":action") { | |||
| case "recommend": | |||
| err = models.RecommendDataset(datasetId, true) | |||
| case "unrecommend": | |||
| err = models.RecommendDataset(datasetId, false) | |||
| } | |||
| if err != nil { | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action")))) | |||
| } else { | |||
| ctx.JSON(http.StatusOK, models.BaseOKMessage) | |||
| } | |||
| } | |||
| func DeleteDataset(ctx *context.Context) { | |||
| dataset, err := models.GetDatasetByID(ctx.QueryInt64("id")) | |||
| if err != nil { | |||
| @@ -535,6 +535,9 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Get("/restoreFork", repo.RestoreForkNumber) | |||
| m.Get("/downloadAll", repo.ServeAllProjectsPeriodStatisticsFile) | |||
| m.Get("/downloadAllOpenI", repo.ServeAllProjectsOpenIStatisticsFile) | |||
| m.Get("/summary", repo.GetLatestProjectsSummaryData) | |||
| m.Get("/summary/period", repo.GetProjectsSummaryData) | |||
| m.Get("/summary/download", repo.GetProjectsSummaryDataFile) | |||
| m.Group("/project", func() { | |||
| m.Get("", repo.GetAllProjectsPeriodStatistics) | |||
| m.Get("/numVisit", repo.ProjectNumVisit) | |||
| @@ -20,8 +20,10 @@ import ( | |||
| const DEFAULT_PAGE_SIZE = 10 | |||
| const DATE_FORMAT = "2006-01-02" | |||
| const MONTH_FORMAT = "2006-01" | |||
| const EXCEL_DATE_FORMAT = "20060102" | |||
| const CREATE_TIME_FORMAT = "2006/01/02 15:04:05" | |||
| const UPDATE_TIME_FORMAT = "2006-01-02 15:04:05" | |||
| type ProjectsPeriodData struct { | |||
| RecordBeginTime string `json:"recordBeginTime"` | |||
| @@ -60,6 +62,40 @@ type ProjectLatestData struct { | |||
| Top10 []UserInfo `json:"top10"` | |||
| } | |||
| type ProjectSummaryBaseData struct { | |||
| NumReposAdd int64 `json:"numReposAdd"` | |||
| NumRepoPublicAdd int64 `json:"numRepoPublicAdd"` | |||
| NumRepoPrivateAdd int64 `json:"numRepoPrivateAdd"` | |||
| NumRepoForkAdd int64 `json:"numRepoForkAdd"` | |||
| NumRepoMirrorAdd int64 `json:"numRepoMirrorAdd"` | |||
| NumRepoSelfAdd int64 `json:"numRepoSelfAdd"` | |||
| NumRepos int64 `json:"numRepos"` | |||
| CreatTime string `json:"creatTime"` | |||
| } | |||
| type ProjectSummaryData struct { | |||
| ProjectSummaryBaseData | |||
| NumRepoPublic int64 `json:"numRepoPublic"` | |||
| NumRepoPrivate int64 `json:"numRepoPrivate"` | |||
| NumRepoFork int64 `json:"numRepoFork"` | |||
| NumRepoMirror int64 `json:"numRepoMirror"` | |||
| NumRepoSelf int64 `json:"numRepoSelf"` | |||
| NumRepoOrgAdd int64 `json:"numRepoOrgAdd"` | |||
| NumRepoNotOrgAdd int64 `json:"numRepoNotOrgAdd"` | |||
| NumRepoOrg int64 `json:"numRepoOrg"` | |||
| NumRepoNotOrg int64 `json:"numRepoNotOrg"` | |||
| } | |||
// ProjectSummaryPeriodData is one page of summary rows together with paging
// metadata; it is the JSON response of GetProjectsSummaryData.
type ProjectSummaryPeriodData struct {
	RecordBeginTime string                    `json:"recordBeginTime"` // earliest date covered by the statistics records
	PageSize        int                       `json:"pageSize"`
	TotalPage       int                       `json:"totalPage"`
	TotalCount      int64                     `json:"totalCount"` // number of result rows (baseline record excluded)
	PageRecords     []*ProjectSummaryBaseData `json:"pageRecords"`
}
| func RestoreForkNumber(ctx *context.Context) { | |||
| repos, err := models.GetAllRepositories() | |||
| if err != nil { | |||
| @@ -73,6 +109,144 @@ func RestoreForkNumber(ctx *context.Context) { | |||
| ctx.JSON(http.StatusOK, struct{}{}) | |||
| } | |||
| func GetLatestProjectsSummaryData(ctx *context.Context) { | |||
| stat, err := models.GetLatest2SummaryStatistic() | |||
| data := ProjectSummaryData{} | |||
| if err == nil && len(stat) > 0 { | |||
| data.NumRepos = stat[0].NumRepos | |||
| data.NumRepoOrg = stat[0].NumRepoOrg | |||
| data.NumRepoNotOrg = stat[0].NumRepos - stat[0].NumRepoOrg | |||
| data.NumRepoFork = stat[0].NumRepoFork | |||
| data.NumRepoMirror = stat[0].NumRepoMirror | |||
| data.NumRepoSelf = stat[0].NumRepoSelf | |||
| data.NumRepoPrivate = stat[0].NumRepoPrivate | |||
| data.NumRepoPublic = stat[0].NumRepoPublic | |||
| data.CreatTime = stat[0].CreatedUnix.Format(UPDATE_TIME_FORMAT) | |||
| if len(stat) == 2 { | |||
| data.NumReposAdd = stat[0].NumRepos - stat[1].NumRepos | |||
| data.NumRepoOrgAdd = stat[0].NumRepoOrg - stat[1].NumRepoOrg | |||
| data.NumRepoNotOrgAdd = (stat[0].NumRepos - stat[0].NumRepoOrg) - (stat[1].NumRepos - stat[1].NumRepoOrg) | |||
| data.NumRepoForkAdd = stat[0].NumRepoFork - stat[1].NumRepoFork | |||
| data.NumRepoMirrorAdd = stat[0].NumRepoMirror - stat[1].NumRepoMirror | |||
| data.NumRepoSelfAdd = stat[0].NumRepoSelf - stat[1].NumRepoSelf | |||
| data.NumRepoPrivateAdd = stat[0].NumRepoPrivate - stat[1].NumRepoPrivate | |||
| data.NumRepoPublicAdd = stat[0].NumRepoPublic - stat[1].NumRepoPublic | |||
| } | |||
| } | |||
| ctx.JSON(200, data) | |||
| } | |||
| func GetProjectsSummaryData(ctx *context.Context) { | |||
| var datas = make([]*ProjectSummaryBaseData, 0) | |||
| recordBeginTime, err := getRecordBeginTime() | |||
| if err != nil { | |||
| log.Error("Can not get record begin time", err) | |||
| ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) | |||
| return | |||
| } | |||
| beginTime, endTime, err := getTimePeroid(ctx, recordBeginTime) | |||
| beginTime = beginTime.AddDate(0, 0, -1) | |||
| queryType := ctx.QueryTrim("type") | |||
| page := ctx.QueryInt("page") | |||
| if page <= 0 { | |||
| page = 1 | |||
| } | |||
| pageSize := ctx.QueryInt("pagesize") | |||
| if pageSize <= 0 { | |||
| pageSize = DEFAULT_PAGE_SIZE | |||
| } | |||
| var count int64 | |||
| if queryType == "all" || queryType == "current_year" { | |||
| dates := getEndOfMonthDates(beginTime, endTime) | |||
| count, _ = models.GetSummaryStatisticByDateCount(dates) | |||
| stats, err := models.GetSummaryStatisticByDates(dates, page, pageSize) | |||
| if err != nil { | |||
| log.Warn("can not get summary data", err) | |||
| } else { | |||
| for i, v := range stats { | |||
| if i == 0 { | |||
| continue | |||
| } | |||
| data := ProjectSummaryBaseData{} | |||
| setStatisticsData(&data, v, stats[i-1]) | |||
| createTime, _ := time.Parse(DATE_FORMAT, v.Date) | |||
| data.CreatTime = createTime.Format(MONTH_FORMAT) | |||
| datas = append(datas, &data) | |||
| } | |||
| } | |||
| } else { | |||
| count, _ = models.GetSummaryStatisticByTimeCount(beginTime, endTime) | |||
| stats, err := models.GetSummaryStatisticByTime(beginTime, endTime, page, pageSize) | |||
| if err != nil { | |||
| log.Warn("can not get summary data", err) | |||
| } else { | |||
| for i, v := range stats { | |||
| if i == 0 { | |||
| continue | |||
| } | |||
| data := ProjectSummaryBaseData{} | |||
| setStatisticsData(&data, v, stats[i-1]) | |||
| data.CreatTime = v.Date | |||
| datas = append(datas, &data) | |||
| } | |||
| } | |||
| } | |||
| projectSummaryPeriodData := ProjectSummaryPeriodData{ | |||
| TotalCount: count - 1, | |||
| TotalPage: getTotalPage(count-1, pageSize), | |||
| RecordBeginTime: recordBeginTime.Format(DATE_FORMAT), | |||
| PageSize: pageSize, | |||
| PageRecords: datas, | |||
| } | |||
| ctx.JSON(200, projectSummaryPeriodData) | |||
| } | |||
| func setStatisticsData(data *ProjectSummaryBaseData, v *models.SummaryStatistic, stats *models.SummaryStatistic) { | |||
| data.NumReposAdd = v.NumRepos - stats.NumRepos | |||
| data.NumRepoPublicAdd = v.NumRepoPublic - stats.NumRepoPublic | |||
| data.NumRepoPrivateAdd = v.NumRepoPrivate - stats.NumRepoPrivate | |||
| data.NumRepoMirrorAdd = v.NumRepoMirror - stats.NumRepoMirror | |||
| data.NumRepoForkAdd = v.NumRepoFork - stats.NumRepoFork | |||
| data.NumRepoSelfAdd = v.NumRepoSelf - stats.NumRepoSelf | |||
| data.NumRepos = v.NumRepos | |||
| } | |||
| func getEndOfMonthDates(beginTime time.Time, endTime time.Time) []string { | |||
| var dates = []string{} | |||
| date := endOfMonth(beginTime.AddDate(0, -1, 0)) | |||
| dates = append(dates, date.Format(DATE_FORMAT)) | |||
| tempDate := endOfMonth(beginTime) | |||
| for { | |||
| if tempDate.Before(endTime) { | |||
| dates = append(dates, tempDate.Format(DATE_FORMAT)) | |||
| tempDate = endOfMonth(tempDate.AddDate(0, 0, 1)) | |||
| } else { | |||
| break | |||
| } | |||
| } | |||
| return dates | |||
| } | |||
| func endOfMonth(date time.Time) time.Time { | |||
| return date.AddDate(0, 1, -date.Day()) | |||
| } | |||
| func GetAllProjectsPeriodStatistics(ctx *context.Context) { | |||
| recordBeginTime, err := getRecordBeginTime() | |||
| @@ -210,6 +384,118 @@ func ServeAllProjectsPeriodStatisticsFile(ctx *context.Context) { | |||
| } | |||
| func GetProjectsSummaryDataFile(ctx *context.Context) { | |||
| recordBeginTime, err := getRecordBeginTime() | |||
| if err != nil { | |||
| log.Error("Can not get record begin time", err) | |||
| ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) | |||
| return | |||
| } | |||
| beginTime, endTime, err := getTimePeroid(ctx, recordBeginTime) | |||
| beginTime = beginTime.AddDate(0, 0, -1) | |||
| if err != nil { | |||
| log.Error("Parameter is wrong", err) | |||
| ctx.Error(http.StatusBadRequest, ctx.Tr("repo.parameter_is_wrong")) | |||
| return | |||
| } | |||
| page := ctx.QueryInt("page") | |||
| if page <= 0 { | |||
| page = 1 | |||
| } | |||
| pageSize := 100 | |||
| if err != nil { | |||
| log.Error("Can not query the last updated time.", err) | |||
| ctx.Error(http.StatusBadRequest, ctx.Tr("repo.last_update_time_error")) | |||
| return | |||
| } | |||
| var projectAnalysis = ctx.Tr("repo.repo_stat_develop") | |||
| fileName := getSummaryFileName(ctx, beginTime, endTime, projectAnalysis) | |||
| f := excelize.NewFile() | |||
| index := f.NewSheet(projectAnalysis) | |||
| f.DeleteSheet("Sheet1") | |||
| for k, v := range allProjectsPeriodSummaryHeader(ctx) { | |||
| f.SetCellValue(projectAnalysis, k, v) | |||
| } | |||
| var total int64 | |||
| queryType := ctx.QueryTrim("type") | |||
| row := 2 | |||
| if queryType == "all" || queryType == "current_year" { | |||
| dates := getEndOfMonthDates(beginTime, endTime) | |||
| total, _ = models.GetSummaryStatisticByDateCount(dates) | |||
| totalPage := getTotalPage(total, pageSize) | |||
| for i := 0; i < totalPage; i++ { | |||
| stats, err := models.GetSummaryStatisticByDates(dates, i+1, pageSize) | |||
| if err != nil { | |||
| log.Warn("can not get summary data", err) | |||
| } else { | |||
| for j, v := range stats { | |||
| if j == 0 { | |||
| continue | |||
| } | |||
| data := ProjectSummaryBaseData{} | |||
| setStatisticsData(&data, v, stats[j-1]) | |||
| createTime, _ := time.Parse(DATE_FORMAT, v.Date) | |||
| data.CreatTime = createTime.Format(MONTH_FORMAT) | |||
| for k, v := range allProjectsPeriodSummaryValues(row, &data, ctx) { | |||
| f.SetCellValue(projectAnalysis, k, v) | |||
| } | |||
| row++ | |||
| } | |||
| } | |||
| } | |||
| } else { | |||
| total, _ = models.GetSummaryStatisticByTimeCount(beginTime, endTime) | |||
| totalPage := getTotalPage(total, pageSize) | |||
| for i := 0; i < totalPage; i++ { | |||
| stats, err := models.GetSummaryStatisticByTime(beginTime, endTime, i+1, pageSize) | |||
| if err != nil { | |||
| log.Warn("can not get summary data", err) | |||
| } else { | |||
| for j, v := range stats { | |||
| if j == 0 { | |||
| continue | |||
| } | |||
| data := ProjectSummaryBaseData{} | |||
| setStatisticsData(&data, v, stats[j-1]) | |||
| data.CreatTime = v.Date | |||
| for k, v := range allProjectsPeriodSummaryValues(row, &data, ctx) { | |||
| f.SetCellValue(projectAnalysis, k, v) | |||
| } | |||
| row++ | |||
| } | |||
| } | |||
| } | |||
| } | |||
| f.SetActiveSheet(index) | |||
| ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(fileName)) | |||
| ctx.Resp.Header().Set("Content-Type", "application/octet-stream") | |||
| f.WriteTo(ctx.Resp) | |||
| } | |||
| func ServeAllProjectsOpenIStatisticsFile(ctx *context.Context) { | |||
| page := ctx.QueryInt("page") | |||
| @@ -290,6 +576,20 @@ func getFileName(ctx *context.Context, beginTime time.Time, endTime time.Time, p | |||
| return frontName | |||
| } | |||
| func getSummaryFileName(ctx *context.Context, beginTime time.Time, endTime time.Time, projectAnalysis string) string { | |||
| baseName := projectAnalysis + "_" | |||
| if ctx.QueryTrim("type") == "all" { | |||
| baseName = baseName + ctx.Tr("repo.all") | |||
| } else if ctx.QueryTrim("type") == "current_year" { | |||
| baseName = baseName + ctx.Tr("repo.current_year") | |||
| } else { | |||
| baseName = baseName + beginTime.Format(EXCEL_DATE_FORMAT) + "_" + endTime.AddDate(0, 0, -1).Format(EXCEL_DATE_FORMAT) | |||
| } | |||
| frontName := baseName + ".xlsx" | |||
| return frontName | |||
| } | |||
| func allProjectsPeroidHeader(ctx *context.Context) map[string]string { | |||
| return map[string]string{"A1": ctx.Tr("admin.repos.id"), "B1": ctx.Tr("admin.repos.projectName"), "C1": ctx.Tr("repo.owner"), "D1": ctx.Tr("admin.repos.isPrivate"), "E1": ctx.Tr("admin.repos.openi"), "F1": ctx.Tr("admin.repos.visit"), "G1": ctx.Tr("admin.repos.download"), "H1": ctx.Tr("admin.repos.pr"), "I1": ctx.Tr("admin.repos.commit"), | |||
| @@ -297,6 +597,19 @@ func allProjectsPeroidHeader(ctx *context.Context) map[string]string { | |||
| } | |||
// allProjectsPeriodSummaryHeader returns the localized header row (cells
// A1-H1) of the exported summary sheet; the column order must match
// allProjectsPeriodSummaryValues.
func allProjectsPeriodSummaryHeader(ctx *context.Context) map[string]string {
	return map[string]string{"A1": ctx.Tr("repo.date"), "B1": ctx.Tr("repo.repo_add"), "C1": ctx.Tr("repo.repo_total"), "D1": ctx.Tr("repo.repo_public_add"), "E1": ctx.Tr("repo.repo_private_add"), "F1": ctx.Tr("repo.repo_self_add"), "G1": ctx.Tr("repo.repo_fork_add"), "H1": ctx.Tr("repo.repo_mirror_add")}
}
| func allProjectsPeriodSummaryValues(row int, rs *ProjectSummaryBaseData, ctx *context.Context) map[string]string { | |||
| return map[string]string{getCellName("A", row): rs.CreatTime, getCellName("B", row): strconv.FormatInt(rs.NumReposAdd, 10), getCellName("C", row): strconv.FormatInt(rs.NumRepos, 10), getCellName("D", row): strconv.FormatInt(rs.NumRepoPublicAdd, 10), getCellName("E", row): strconv.FormatInt(rs.NumRepoPrivateAdd, 10), | |||
| getCellName("F", row): strconv.FormatInt(rs.NumRepoSelfAdd, 10), getCellName("G", row): strconv.FormatInt(rs.NumRepoForkAdd, 10), getCellName("H", row): strconv.FormatInt(rs.NumRepoMirrorAdd, 10), | |||
| } | |||
| } | |||
| func allProjectsPeroidValues(row int, rs *models.RepoStatistic, ctx *context.Context) map[string]string { | |||
| return map[string]string{getCellName("A", row): strconv.FormatInt(rs.RepoID, 10), getCellName("B", row): rs.DisplayName(), getCellName("C", row): rs.OwnerName, getCellName("D", row): getBoolDisplay(rs.IsPrivate, ctx), getCellName("E", row): strconv.FormatFloat(rs.RadarTotal, 'f', 2, 64), | |||
| getCellName("F", row): strconv.FormatInt(rs.NumVisits, 10), getCellName("G", row): strconv.FormatInt(rs.NumDownloads, 10), getCellName("H", row): strconv.FormatInt(rs.NumPulls, 10), getCellName("I", row): strconv.FormatInt(rs.NumCommits, 10), | |||
| @@ -331,6 +331,7 @@ func ExploreDatasets(ctx *context.Context) { | |||
| Task: task, | |||
| License: license, | |||
| OwnerID: ownerID, | |||
| RecommendOnly: ctx.QueryBool("recommend"), | |||
| ListOptions: models.ListOptions{ | |||
| Page: page, | |||
| PageSize: 30, | |||
| @@ -357,6 +358,7 @@ func ExploreDatasets(ctx *context.Context) { | |||
| ctx.Data["Category"] = category | |||
| ctx.Data["Task"] = task | |||
| ctx.Data["License"] = license | |||
| ctx.Data["Recommend"] = ctx.QueryBool("recommend") | |||
| pager.SetDefaultParams(ctx) | |||
| ctx.Data["Page"] = pager | |||
| @@ -11,7 +11,6 @@ import ( | |||
| "fmt" | |||
| "mime/multipart" | |||
| "net/http" | |||
| "path" | |||
| "strconv" | |||
| "strings" | |||
| @@ -830,20 +829,6 @@ func GetMultipartUploadUrl(ctx *context.Context) { | |||
| }) | |||
| } | |||
// GetObsKey generates a fresh upload UUID and its object-storage key and
// returns them to the client together with the OBS connection settings so
// the browser can upload directly to the bucket.
//
// NOTE(review): this handler sends setting.SecretAccessKey to any signed-in
// client. Exposing the bucket's long-lived secret key to browsers is a
// serious credential leak — prefer server-side signing or temporary
// credentials. (The surrounding change set removes this endpoint.)
func GetObsKey(ctx *context.Context) {
	uuid := gouuid.NewV4().String()
	// Shard objects under BasePath by the first two characters of the UUID.
	key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, uuid)), "/")
	ctx.JSON(200, map[string]string{
		"uuid": uuid,
		"key": key,
		"access_key_id": setting.AccessKeyID,
		"secret_access_key": setting.SecretAccessKey,
		"server": setting.Endpoint,
		"bucket": setting.Bucket,
	})
}
| func CompleteMultipart(ctx *context.Context) { | |||
| uuid := ctx.Query("uuid") | |||
| uploadID := ctx.Query("uploadID") | |||
| @@ -870,13 +855,13 @@ func CompleteMultipart(ctx *context.Context) { | |||
| } | |||
| if typeCloudBrain == models.TypeCloudBrainOne { | |||
| _, err = storage.CompleteMultiPartUpload(uuid, uploadID) | |||
| _, err = storage.CompleteMultiPartUpload(uuid, uploadID, fileChunk.TotalChunks) | |||
| if err != nil { | |||
| ctx.Error(500, fmt.Sprintf("CompleteMultiPartUpload failed: %v", err)) | |||
| return | |||
| } | |||
| } else { | |||
| err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName) | |||
| err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName, fileChunk.TotalChunks) | |||
| if err != nil { | |||
| ctx.Error(500, fmt.Sprintf("CompleteObsMultiPartUpload failed: %v", err)) | |||
| return | |||
| @@ -907,10 +892,9 @@ func CompleteMultipart(ctx *context.Context) { | |||
| ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err)) | |||
| return | |||
| } | |||
| attachment.UpdateDatasetUpdateUnix() | |||
| repository, _ := models.GetRepositoryByID(dataset.RepoID) | |||
| notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(attachment.Type), attachment.Name, models.ActionUploadAttachment) | |||
| notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment) | |||
| if attachment.DatasetID != 0 { | |||
| if isCanDecompress(attachment.Name) { | |||
| if typeCloudBrain == models.TypeCloudBrainOne { | |||
| @@ -947,34 +931,6 @@ func CompleteMultipart(ctx *context.Context) { | |||
| }) | |||
| } | |||
| func UpdateMultipart(ctx *context.Context) { | |||
| uuid := ctx.Query("uuid") | |||
| partNumber := ctx.QueryInt("chunkNumber") | |||
| etag := ctx.Query("etag") | |||
| fileChunk, err := models.GetFileChunkByUUID(uuid) | |||
| if err != nil { | |||
| if models.IsErrFileChunkNotExist(err) { | |||
| ctx.Error(404) | |||
| } else { | |||
| ctx.ServerError("GetFileChunkByUUID", err) | |||
| } | |||
| return | |||
| } | |||
| fileChunk.CompletedParts = append(fileChunk.CompletedParts, strconv.Itoa(partNumber)+"-"+strings.Replace(etag, "\"", "", -1)) | |||
| err = models.UpdateFileChunk(fileChunk) | |||
| if err != nil { | |||
| ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err)) | |||
| return | |||
| } | |||
| ctx.JSON(200, map[string]string{ | |||
| "result_code": "0", | |||
| }) | |||
| } | |||
| func HandleUnDecompressAttachment() { | |||
| attachs, err := models.GetUnDecompressAttachments() | |||
| if err != nil { | |||
| @@ -59,6 +59,7 @@ var ( | |||
| ) | |||
| const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types" | |||
| const CLONE_FILE_PREFIX = "file:///" | |||
| var benchmarkTypesMap = make(map[string]*models.BenchmarkTypes, 0) | |||
| @@ -702,6 +703,53 @@ func CloudBrainCommitImageCheck(ctx *context.Context, form auth.CommitImageCloud | |||
| } | |||
| func CloudBrainAdminCommitImage(ctx *context.Context, form auth.CommitAdminImageCloudBrainForm) { | |||
| if !NamePattern.MatchString(form.Tag) { | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err"))) | |||
| return | |||
| } | |||
| if utf8.RuneCountInString(form.Description) > 255 { | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 255))) | |||
| return | |||
| } | |||
| validTopics, errMessage := checkTopics(form.Topics) | |||
| if errMessage != "" { | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr(errMessage))) | |||
| return | |||
| } | |||
| err := cloudbrain.CommitAdminImage(models.CommitImageParams{ | |||
| CommitImageCloudBrainParams: models.CommitImageCloudBrainParams{ | |||
| ImageDescription: form.Description, | |||
| ImageTag: form.Tag, | |||
| }, | |||
| IsPrivate: form.IsPrivate, | |||
| CloudBrainType: form.Type, | |||
| Topics: validTopics, | |||
| UID: ctx.User.ID, | |||
| Type: models.GetRecommondType(form.IsRecommend), | |||
| Place: form.Place, | |||
| }) | |||
| if err != nil { | |||
| log.Error("CommitImagefailed") | |||
| if models.IsErrImageTagExist(err) { | |||
| ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_exist"))) | |||
| } else if models.IsErrorImageCommitting(err) { | |||
| ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_committing"))) | |||
| } else { | |||
| ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_commit_fail"))) | |||
| } | |||
| return | |||
| } | |||
| ctx.JSON(200, models.BaseOKMessage) | |||
| } | |||
| func CloudBrainCommitImage(ctx *context.Context, form auth.CommitImageCloudBrainForm) { | |||
| if !NamePattern.MatchString(form.Tag) { | |||
| @@ -1142,7 +1190,8 @@ func GetRate(ctx *context.Context) { | |||
| } | |||
| func downloadCode(repo *models.Repository, codePath, branchName string) error { | |||
| if err := git.Clone(repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName}); err != nil { | |||
| //add "file:///" prefix to make the depth valid | |||
| if err := git.Clone(CLONE_FILE_PREFIX+repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName, Depth: 1}); err != nil { | |||
| log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err) | |||
| return err | |||
| } | |||
| @@ -1202,7 +1251,7 @@ func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepo | |||
| return err | |||
| } | |||
| if err := git.Clone(repoExt.RepoPath(), codePath, git.CloneRepoOptions{}); err != nil { | |||
| if err := git.Clone(CLONE_FILE_PREFIX+repoExt.RepoPath(), codePath, git.CloneRepoOptions{Depth: 1}); err != nil { | |||
| log.Error("Failed to clone repository: %s (%v)", repoExt.FullName(), err) | |||
| return err | |||
| } | |||
| @@ -358,6 +358,7 @@ func MyDatasets(ctx *context.Context) { | |||
| NeedIsPrivate: false, | |||
| JustNeedZipFile: true, | |||
| NeedRepoInfo: true, | |||
| RecommendOnly: ctx.QueryBool("recommend"), | |||
| }) | |||
| if err != nil { | |||
| ctx.ServerError("datasets", err) | |||
| @@ -398,6 +399,7 @@ func PublicDataset(ctx *context.Context) { | |||
| Type: cloudbrainType, | |||
| JustNeedZipFile: true, | |||
| NeedRepoInfo: true, | |||
| RecommendOnly: ctx.QueryBool("recommend"), | |||
| }) | |||
| if err != nil { | |||
| ctx.ServerError("datasets", err) | |||
| @@ -454,6 +456,7 @@ func MyFavoriteDataset(ctx *context.Context) { | |||
| Type: cloudbrainType, | |||
| JustNeedZipFile: true, | |||
| NeedRepoInfo: true, | |||
| RecommendOnly: ctx.QueryBool("recommend"), | |||
| }) | |||
| if err != nil { | |||
| ctx.ServerError("datasets", err) | |||
| @@ -247,7 +247,9 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm | |||
| func NotebookShow(ctx *context.Context) { | |||
| ctx.Data["PageIsCloudBrain"] = true | |||
| debugListType := ctx.Query("debugListType") | |||
| if debugListType == "" { | |||
| debugListType = "all" | |||
| } | |||
| var ID = ctx.Params(":id") | |||
| task, err := models.GetCloudbrainByIDWithDeleted(ID) | |||
| if err != nil { | |||
| @@ -1027,10 +1029,8 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) | |||
| gitRepo, _ := git.OpenRepository(repo.RepoPath()) | |||
| commitID, _ := gitRepo.GetBranchCommitID(branch_name) | |||
| if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{ | |||
| Branch: branch_name, | |||
| }); err != nil { | |||
| log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err) | |||
| if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { | |||
| log.Error("downloadCode failed, server timed out: %s (%v)", repo.FullName(), err) | |||
| trainJobErrorNewDataPrepare(ctx, form) | |||
| ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsTrainJobNew, &form) | |||
| return | |||
| @@ -1245,9 +1245,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ | |||
| gitRepo, _ := git.OpenRepository(repo.RepoPath()) | |||
| commitID, _ := gitRepo.GetBranchCommitID(branch_name) | |||
| if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{ | |||
| Branch: branch_name, | |||
| }); err != nil { | |||
| if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { | |||
| log.Error("Failed git clone repo to local(!: %s (%v)", repo.FullName(), err) | |||
| versionErrorDataPrepare(ctx, form) | |||
| ctx.RenderWithErr("Failed git clone repo to local!", tplModelArtsTrainJobVersionNew, &form) | |||
| @@ -1874,9 +1872,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference | |||
| gitRepo, _ := git.OpenRepository(repo.RepoPath()) | |||
| commitID, _ := gitRepo.GetBranchCommitID(branch_name) | |||
| if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{ | |||
| Branch: branch_name, | |||
| }); err != nil { | |||
| if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { | |||
| log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err) | |||
| inferenceJobErrorNewDataPrepare(ctx, form) | |||
| ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsInferenceJobNew, &form) | |||
| @@ -60,6 +60,12 @@ func SummaryStatisticDaily(date string) { | |||
| } | |||
| selfRepositoryNumber := repositoryNumer - mirrorRepositoryNumber - forkRepositoryNumber | |||
| organizationRepoNumber, err := models.GetAllOrgRepositoriesCount() | |||
| if err != nil { | |||
| log.Error("can not get org repository number", err) | |||
| organizationRepoNumber = 0 | |||
| } | |||
| //repository size | |||
| repositorySize, err := models.GetAllRepositoriesSize() | |||
| if err != nil { | |||
| @@ -99,6 +105,7 @@ func SummaryStatisticDaily(date string) { | |||
| NumRepoPrivate: privateRepositoryNumer, | |||
| NumRepoPublic: publicRepositoryNumer, | |||
| NumRepoSelf: selfRepositoryNumber, | |||
| NumRepoOrg: organizationRepoNumber, | |||
| NumRepoBigModel: topicsCount[0], | |||
| NumRepoAI: topicsCount[1], | |||
| NumRepoVision: topicsCount[2], | |||
| @@ -40,8 +40,8 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac | |||
| dataHeader := map[string]string{ | |||
| "A1": ctx.Tr("user.static.id"), | |||
| "B1": ctx.Tr("user.static.name"), | |||
| "C1": ctx.Tr("user.static.codemergecount"), | |||
| "D1": ctx.Tr("user.static.UserIndex"), | |||
| "C1": ctx.Tr("user.static.UserIndex"), | |||
| "D1": ctx.Tr("user.static.codemergecount"), | |||
| "E1": ctx.Tr("user.static.commitcount"), | |||
| "F1": ctx.Tr("user.static.issuecount"), | |||
| "G1": ctx.Tr("user.static.commentcount"), | |||
| @@ -77,8 +77,8 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac | |||
| rows := fmt.Sprint(row) | |||
| xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) | |||
| xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) | |||
| xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) | |||
| xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||
| xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||
| xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) | |||
| xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) | |||
| xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) | |||
| xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) | |||
| @@ -239,8 +239,8 @@ func QueryUserStaticDataPage(ctx *context.Context) { | |||
| dataHeader := map[string]string{ | |||
| "A1": ctx.Tr("user.static.id"), | |||
| "B1": ctx.Tr("user.static.name"), | |||
| "C1": ctx.Tr("user.static.codemergecount"), | |||
| "D1": ctx.Tr("user.static.UserIndex"), | |||
| "C1": ctx.Tr("user.static.UserIndex"), | |||
| "D1": ctx.Tr("user.static.codemergecount"), | |||
| "E1": ctx.Tr("user.static.commitcount"), | |||
| "F1": ctx.Tr("user.static.issuecount"), | |||
| "G1": ctx.Tr("user.static.commentcount"), | |||
| @@ -270,8 +270,8 @@ func QueryUserStaticDataPage(ctx *context.Context) { | |||
| xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) | |||
| xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) | |||
| xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) | |||
| xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||
| xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||
| xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) | |||
| xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) | |||
| xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) | |||
| xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) | |||
| @@ -525,6 +525,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Group("/datasets", func() { | |||
| m.Get("", admin.Datasets) | |||
| m.Put("/:id/action/:action", admin.DatasetAction) | |||
| // m.Post("/delete", admin.DeleteDataset) | |||
| }) | |||
| m.Group("/cloudbrains", func() { | |||
| @@ -534,6 +535,8 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Group("/images", func() { | |||
| m.Get("", admin.Images) | |||
| m.Get("/data", repo.GetAllImages) | |||
| m.Get("/commit_image", admin.CloudBrainCommitImageShow) | |||
| m.Post("/commit_image", bindIgnErr(auth.CommitAdminImageCloudBrainForm{}), repo.CloudBrainAdminCommitImage) | |||
| }) | |||
| m.Put("/image/:id/action/:action", image.Action) | |||
| @@ -608,12 +611,11 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Put("/obs_proxy_multipart", repo.PutOBSProxyUpload) | |||
| m.Get("/obs_proxy_download", repo.GetOBSProxyDownload) | |||
| m.Get("/get_multipart_url", repo.GetMultipartUploadUrl) | |||
| m.Post("/complete_multipart", repo.CompleteMultipart) | |||
| m.Post("/update_chunk", repo.UpdateMultipart) | |||
| }, reqSignIn) | |||
| m.Group("/attachments", func() { | |||
| m.Post("/decompress_done_notify", repo.UpdateAttachmentDecompressState) | |||
| m.Post("/complete_multipart", repo.CompleteMultipart) | |||
| }) | |||
| m.Group("/attachments", func() { | |||
| @@ -183,7 +183,7 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int) | |||
| topicsQuery := elastic.NewMatchQuery("topics", Key) | |||
| boolQ.Should(topicsQuery) | |||
| res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context()) | |||
| res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -200,15 +200,18 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int) | |||
| } | |||
| } | |||
| func getSort(SortBy string, ascending bool) elastic.Sorter { | |||
| var sort elastic.Sorter | |||
| sort = elastic.NewScoreSort() | |||
| if SortBy != "" { | |||
| if SortBy == "default" { | |||
| return sort | |||
| func getSort(SortBy string, ascending bool, secondSortBy string, secondAscending bool) []elastic.Sorter { | |||
| sort := make([]elastic.Sorter, 0) | |||
| if SortBy == "default" || SortBy == "" { | |||
| sort = append(sort, elastic.NewScoreSort()) | |||
| if secondSortBy != "" { | |||
| log.Info("SortBy=" + SortBy + " secondSortBy=" + secondSortBy) | |||
| sort = append(sort, elastic.NewFieldSort(secondSortBy).Order(secondAscending)) | |||
| } | |||
| return elastic.NewFieldSort(SortBy).Order(ascending) | |||
| } else { | |||
| sort = append(sort, elastic.NewFieldSort(SortBy).Order(ascending)) | |||
| } | |||
| log.Info("sort size=" + fmt.Sprint(len(sort))) | |||
| return sort | |||
| } | |||
| @@ -308,7 +311,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa | |||
| topicsQuery := elastic.NewMatchQuery("topics", Key).Boost(1).QueryName("f_third") | |||
| boolQ.Should(nameQuery, descriptionQuery, topicsQuery) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -330,7 +333,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa | |||
| } else { | |||
| log.Info("query all content.") | |||
| //搜索的属性要指定{"timestamp":{"unmapped_type":"date"}} | |||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -691,7 +694,7 @@ func searchUserOrOrg(ctx *context.Context, TableName string, Key string, Page in | |||
| boolQ.Must(UserOrOrgQuery) | |||
| } | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -849,7 +852,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, | |||
| fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third") | |||
| categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth") | |||
| boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -864,7 +867,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, | |||
| } else { | |||
| log.Info("query all datasets.") | |||
| //搜索的属性要指定{"timestamp":{"unmapped_type":"date"}} | |||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -1057,7 +1060,7 @@ func searchIssueOrPr(ctx *context.Context, TableName string, Key string, Page in | |||
| boolQ.Must(isIssueQuery) | |||
| } | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context()) | |||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context()) | |||
| if err == nil { | |||
| searchJson, _ := json.Marshal(res) | |||
| log.Info("searchJson=" + string(searchJson)) | |||
| @@ -106,9 +106,9 @@ func Profile(ctx *context.Context) { | |||
| for _, org := range orgs { | |||
| _, repoCount, err := models.SearchRepository(&models.SearchRepoOptions{ | |||
| OwnerID: org.ID, | |||
| Private: ctx.IsSigned, | |||
| Actor: ctx.User, | |||
| OwnerID: org.ID, | |||
| Private: ctx.IsSigned, | |||
| Actor: ctx.User, | |||
| }) | |||
| if err != nil { | |||
| ctx.ServerError("SearchRepository", err) | |||
| @@ -175,6 +175,8 @@ func Profile(ctx *context.Context) { | |||
| orderBy = models.SearchOrderByAlphabeticallyReverse | |||
| case "alphabetically": | |||
| orderBy = models.SearchOrderByAlphabetically | |||
| case "downloadtimes": | |||
| orderBy = models.SearchOrderByDownloadTimes | |||
| case "moststars": | |||
| orderBy = models.SearchOrderByStarsReverse | |||
| case "feweststars": | |||
| @@ -10,7 +10,7 @@ import ( | |||
| "github.com/elliotchance/orderedmap" | |||
| ) | |||
| var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 25, 26, 27, 28, 29, 30, 31} | |||
| var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31} | |||
| type ClientsManager struct { | |||
| Clients *orderedmap.OrderedMap | |||
| @@ -0,0 +1,129 @@ | |||
{{/* Submit/commit a cloudbrain image: image name, source image path,
     description, labels, and the recommend / visibility flags.
     Posts are handled by the page script bound to #form_image. */}}
<style>
    .label_color{
        color:#505559 !important;
        width: 6% !important;
        text-align: center;
    }
</style>
{{template "base/head" .}}
{{/* Full-screen loading mask shown while the page script initializes. */}}
<div id="mask">
    <div id="loadingPage">
        <div class="rect1"></div>
        <div class="rect2"></div>
        <div class="rect3"></div>
        <div class="rect4"></div>
        <div class="rect5"></div>
    </div>
</div>
<div class="repository">
    {{template "repo/header" .}}
    <div class="alert"></div>
    <div class="ui container">
        <div>
            {{/* Hidden message boxes toggled by the page script on submit results. */}}
            <div class="ui negative message" style="display: none;">
            </div>
            <div class="ui info message" style="display: none;">
            </div>
            <div class="ui positive message" style="display: none;">
            </div>
            <h4 class="ui top attached header">
                {{.i18n.Tr "repo.submit_image"}}
            </h4>
            <div class="submit-image-tmplvalue" style="display: none;" data-link="{{$.Link}}"></div>
            <div class="ui attached segment" style="padding: 2em 3em;padding-bottom: 7rem;">
                <div class="ui form" id="form_image">
                    <input type="hidden" name="edit" value="edit">
                    {{.CsrfTokenHtml}}
                    <div class="inline field">
                        <label class="label_color" for="">{{$.i18n.Tr "dataset.dataset_available_clusters"}}</label>
                        <div class="ui basic label" style="border: none !important;color:#3291f8;">
                            <svg class="svg" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="14" height="14"><path fill="none" d="M0 0h24v24H0z"></path><path d="M4 3h16a1 1 0 0 1 1 1v7H3V4a1 1 0 0 1 1-1zM3 13h18v7a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1v-7zm4 3v2h3v-2H7zM7 6v2h3V6H7z"></path></svg>
                            CPU/GPU
                        </div>
                        <input type="hidden" value="{{.Type}}" name="type">
                    </div>
                    <div class="inline required field">
                        <label class="label_color" for="">{{$.i18n.Tr "repo.images.name"}}</label>
                        <input type="text" name="tag" required placeholder="{{$.i18n.Tr "repo.images.name_placerholder"}}" style="width: 80%;" maxlength="100">
                        <span class="tooltips" style="display: block;padding-left: 0.5rem;">{{.i18n.Tr "repo.images.name_rule"}}</span>
                    </div>
                    <div class="inline required field">
                        <label class="label_color" for="">{{$.i18n.Tr "repo.images"}}</label>
                        <input type="text" name="place" required placeholder="{{$.i18n.Tr "cloudbrain.input_mirror"}}" style="width: 80%;" maxlength="100">
                    </div>
                    <div class="inline required field">
                        <label class="label_color" for="">{{$.i18n.Tr "dataset.description"}}</label>
                        {{/* BUG FIX: the placeholder attribute value was unquoted
                             (placeholder={{/**/}}...), so any translation containing a
                             space broke the attribute; it is now quoted. */}}
                        <textarea style="width: 80%;" required id="description" name="description" rows="3" maxlength="255" placeholder="{{.i18n.Tr "repo.modelarts.train_job.new_place"}}" onchange="this.value=this.value.substring(0, 255)" onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 255)"></textarea>
                    </div>
                    <div class="inline field" style="display: flex;align-items: center;">
                        <label class="label_color" for="">{{$.i18n.Tr "repo.model.manage.label"}}</label>
                        <div class="ui multiple search selection dropdown" id="dropdown_image" style="width: 80%;">
                            <input type="hidden" name="topics" value="" required>
                            <div class="default text" id="default_text">{{.i18n.Tr "repo.repo_label_helpe"}}</div>
                            <div class="menu" id="course_label_item"></div>
                        </div>
                    </div>
                    <span class="tooltips" style="display: block;padding-left: 0.5rem;margin-top: 0.5rem;margin-bottom: 1rem;">{{.i18n.Tr "repo.image.label_tooltips"}}</span>
                    {{/* Recommend flag; defaults to "recommend". */}}
                    <div class="inline fields">
                        <label class="label_color" for="" style="visibility: hidden;"></label>
                        <div class="field">
                            <div class="ui radio checkbox">
                                <input type="radio" name="isRecommend" checked="checked" value="true">
                                <label>{{.i18n.Tr "admin.images.recommend"}}</label>
                            </div>
                        </div>
                        <div class="field" style="flex: 0.15;">
                            <div class="ui radio checkbox" >
                                <input type="radio" name="isRecommend" value="false">
                                <label>{{.i18n.Tr "admin.images.unrecommend"}}</label>
                            </div>
                        </div>
                    </div>
                    {{/* Visibility flag; defaults to public (isPrivate=false). */}}
                    <div class="inline fields">
                        <label class="label_color" for="" style="visibility: hidden;"></label>
                        <div class="field">
                            <div class="ui radio checkbox">
                                <input type="radio" name="isPrivate" checked="checked" value="false">
                                <label>{{.i18n.Tr "org.settings.visibility.public"}}</label>
                            </div>
                        </div>
                        <div class="field" style="flex: 0.15;">
                            <div class="ui radio checkbox" >
                                <input type="radio" name="isPrivate" value="true">
                                <label>{{.i18n.Tr "home.show_private"}}</label>
                            </div>
                        </div>
                        <div class="field">
                            <span class="label_color">{{.i18n.Tr "repo.images.public_tooltips"}}</span>
                        </div>
                    </div>
                    <div class="inline required field" style="padding-top: 2rem;">
                        <label class="label_color" for="" style="visibility: hidden;"></label>
                        <button class="ui create_image green button" type="button">
                            {{.i18n.Tr "repo.cloudbrain.commit_image"}}
                        </button>
                        <a class="ui button" id="cancel_submit_image">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a>
                    </div>
                </div>
            </div>
        </div>
    </div>
</div>
{{/* Confirmation modal shown before overwriting an existing image. */}}
<div>
    <div class="ui modal image_confirm_submit">
        <div class="header">{{.i18n.Tr "repo.submit_image"}}</div>
        <div class="content text red center">
            <p><i class="exclamation icon"></i>{{.i18n.Tr "repo.image_overwrite"}}</p>
        </div>
        <div class="actions">
            <button class="ui deny small button">{{.i18n.Tr "cloudbrain.operate_cancel"}}</button>
            <button class="ui green small approve button">{{.i18n.Tr "cloudbrain.operate_confirm"}}</button>
        </div>
    </div>
</div>
{{template "base/footer" .}}
| @@ -3,6 +3,9 @@ | |||
| {{template "admin/navbar" .}} | |||
| <div class="ui container"> | |||
| {{template "base/alert" .}} | |||
| <div class="ui negative message" style="display: none;"> | |||
| </div> | |||
| <h4 class="ui top attached header"> | |||
| {{.i18n.Tr "admin.datasets.dataset_manage_panel"}} ({{.i18n.Tr "admin.total" .Total}}) | |||
| </h4> | |||
| @@ -24,10 +27,10 @@ | |||
| {{range .Datasets}} | |||
| <tr> | |||
| <td>{{.ID}}</td> | |||
| <td><a href="{{AppSubUrl}}/">{{.Title}}</a></td> | |||
| <td style="display: flex;align-items: center;"><a href="{{AppSubUrl}}/">{{.Title}}</a>{{if .Recommend}}<img src="/img/jian.svg" style="margin-left: 0.5rem;">{{end}}</td> | |||
| <td><i class="fa fa{{if .IsPrivate}}-check{{end}}-square-o"></i></td> | |||
| <td><span title="{{.CreatedUnix.FormatLong}}">{{.CreatedUnix.FormatShort}}</span></td> | |||
| <td></td> | |||
| <td>{{if .Recommend}}<span class="set_dataset" style="color: rgb(250, 140, 22);cursor: pointer;" data-url="{{$.Link}}/{{.ID}}/action/unrecommend">{{$.i18n.Tr "admin.datasets.unrecommend"}}</span>{{else}}<span class="set_dataset" style="color: rgb(19, 194, 141);cursor: pointer;" data-url="{{$.Link}}/{{.ID}}/action/recommend">{{$.i18n.Tr "admin.datasets.recommend"}}</span>{{end}}</td> | |||
| </tr> | |||
| {{end}} | |||
| </tbody> | |||
| @@ -37,16 +40,4 @@ | |||
| {{template "base/paginate" .}} | |||
| </div> | |||
| </div> | |||
| <div class="ui small basic delete modal"> | |||
| <div class="ui icon header"> | |||
| <i class="trash icon"></i> | |||
| {{.i18n.Tr "dataset.settings.delete"}} | |||
| </div> | |||
| <div class="content"> | |||
| <p>{{.i18n.Tr "dataset.settings.delete_desc"}}</p> | |||
| {{.i18n.Tr "dataset.settings.delete_notices_2" `<span class="name"></span>` | Safe}}<br> | |||
| </div> | |||
| {{template "base/delete_modal_actions" .}} | |||
| </div> | |||
| {{template "base/footer" .}} | |||
| {{template "base/footer" .}} | |||
| @@ -23,7 +23,7 @@ | |||
| <el-tab-pane label="{{.i18n.Tr "dataset.current_project"}}" name="first"> | |||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in currentRepoDataset" :key="index"> | |||
| <div style="width: 90%;"> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias} </span><span class="panel_dataset_name">${dataset.Name} </span></div> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name} </span></div> | |||
| <div style="margin-top: 8px;display: flex;"> | |||
| <a :title="dataset.UserName" style="cursor: default;"> | |||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | |||
| @@ -49,7 +49,7 @@ | |||
| <el-tab-pane label="{{.i18n.Tr "dataset.owner_dataset"}}" name="second"> | |||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myDataset" :key="index"> | |||
| <div style="width: 90%;"> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="margin-top: 8px;display: flex;"> | |||
| <a :title="dataset.UserName" style="cursor: default;"> | |||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | |||
| @@ -74,7 +74,7 @@ | |||
| <el-tab-pane label="{{.i18n.Tr "dataset.public_dataset"}}" name="third"> | |||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in publicDataset" :key="index"> | |||
| <div style="width: 90%;"> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="margin-top: 8px;display: flex;"> | |||
| <a :title="dataset.UserName" style="cursor: default;"> | |||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | |||
| @@ -99,7 +99,7 @@ | |||
| <el-tab-pane label="{{.i18n.Tr "dataset.I_liked"}}" name="fourth"> | |||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myFavoriteDataset" :key="index"> | |||
| <div style="width: 90%;"> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="margin-top: 8px;display: flex;"> | |||
| <a :title="dataset.UserName" style="cursor: default;"> | |||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | |||
| @@ -23,7 +23,7 @@ | |||
| <el-tab-pane label="{{.i18n.Tr "dataset.current_project"}}" name="first"> | |||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in currentRepoDataset" :key="index"> | |||
| <div style="width: 90%;"> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias} </span><span class="panel_dataset_name">${dataset.Name} </span></div> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias} </span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name} </span></div> | |||
| <div style="margin-top: 8px;display: flex;"> | |||
| <a :title="dataset.UserName" style="cursor: default;"> | |||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | |||
| @@ -49,7 +49,7 @@ | |||
| <el-tab-pane label="{{.i18n.Tr "dataset.owner_dataset"}}" name="second"> | |||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myDataset" :key="index"> | |||
| <div style="width: 90%;"> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="margin-top: 8px;display: flex;"> | |||
| <a :title="dataset.UserName" style="cursor: default;"> | |||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | |||
| @@ -74,7 +74,7 @@ | |||
| <el-tab-pane label="{{.i18n.Tr "dataset.public_dataset"}}" name="third"> | |||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in publicDataset" :key="index"> | |||
| <div style="width: 90%;"> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="margin-top: 8px;display: flex;"> | |||
| <a :title="dataset.UserName" style="cursor: default;"> | |||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | |||
| @@ -99,7 +99,7 @@ | |||
| <el-tab-pane label="{{.i18n.Tr "dataset.I_liked"}}" name="fourth"> | |||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myFavoriteDataset" :key="index"> | |||
| <div style="width: 90%;"> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||
| <div style="margin-top: 8px;display: flex;"> | |||
| <a :title="dataset.UserName" style="cursor: default;"> | |||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | |||
| @@ -125,7 +125,8 @@ | |||
| <a class="{{if eq .SortType "oldest"}}active{{end}} item" href="{{$.Link}}?sort=oldest&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.oldest"}}</a> | |||
| <a class="{{if eq .SortType "recentupdate"}}active{{end}} item" href="{{$.Link}}?sort=recentupdate&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.recentupdate"}}</a> | |||
| <a class="{{if eq .SortType "leastupdate"}}active{{end}} item" href="{{$.Link}}?sort=leastupdate&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.leastupdate"}}</a> | |||
| <!-- <a class="{{if eq .SortType "downloadtimes"}}active{{end}} item" href="{{$.Link}}?sort=downloadtimes&q={{$.Keyword}}&tab={{$.TabName}}">{{.i18n.Tr "repo.issues.filter_sort.downloadtimes"}}</a> --> | |||
| <a class="{{if eq .SortType "downloadtimes"}}active{{end}} item" href="{{$.Link}}?sort=downloadtimes&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.downloadtimes"}}</a> | |||
| <a class="{{if eq .SortType "moststars"}}active{{end}} item" href="{{$.Link}}?sort=moststars&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.moststars"}}</a> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| @@ -144,7 +145,8 @@ | |||
| <div class="ui card" @click="gotoDataset('{{.Repo.Link}}/datasets')" style="cursor: pointer;box-shadow: 0px 4px 4px 0px rgba(232,232,232,0.6);border: 1px solid rgba(232, 232, 232, 1);"> | |||
| <div class="content" style="border-bottom: none;"> | |||
| <div class="repo_dataset_header" style="display: flex;align-items: center;justify-content: space-between;"> | |||
| <a href="{{.Repo.Link}}/datasets" style="font-size: 12px;color: #3291F8;height: 24px;">{{.Repo.OwnerName}} / {{.Repo.Alias}}</a> | |||
| <a href="{{.Repo.Link}}/datasets" style="font-size: 12px;color: #3291F8;height: 24px;">{{.Repo.OwnerName}} / {{.Repo.Alias}}{{if .Recommend}}<img src="/img/jian.svg" style="margin-left: 0.5rem;">{{end}}</a> | |||
| {{if $.IsSigned}} | |||
| <span style="display: flex;align-items: center;justify-content: flex-end;cursor: pointer;" @click.stop="postSquareStar({{.ID}},'{{.Repo.Link}}/datasets',{{$k}})"> | |||
| <div style="line-height: 1;margin-right: 4px;margin-bottom: -2px;"> | |||
| @@ -179,7 +179,26 @@ | |||
| </div> | |||
| </div> | |||
| <input id="store_category" type="hidden" name="get_benchmark_category"> | |||
| <div class="inline required field"> | |||
| <label>{{.i18n.Tr "repo.modelarts.code_version"}}</label> | |||
| <select class="ui dropdown width80 left2" id="code_version" name="branch_name"> | |||
| {{if .branch_name}} | |||
| <option name="branch_name" value="{{.branch_name}}">{{.branch_name}}</option> | |||
| {{range $k, $v :=.Branches}} | |||
| {{ if ne $v $.branch_name }} | |||
| <option name="branch_name" value="{{$v}}">{{$v}}</option> | |||
| {{end}} | |||
| {{end}} | |||
| {{else}} | |||
| <option name="branch_name" value="{{.branchName}}">{{.branchName}}</option> | |||
| {{range $k, $v :=.Branches}} | |||
| {{ if ne $v $.branchName }} | |||
| <option name="branch_name" value="{{$v}}">{{$v}}</option> | |||
| {{end}} | |||
| {{end}} | |||
| {{end}} | |||
| </select> | |||
| </div> | |||
| <div class="inline required field"> | |||
| <label>{{.i18n.Tr "cloudbrain.gpu_type"}}</label> | |||
| <select id="cloudbrain_gpu_type" class="ui search dropdown" placeholder="选择GPU类型" style='width:385px' name="gpu_type"> | |||
| @@ -187,7 +187,7 @@ td, th { | |||
| {{.i18n.Tr "repo.cloudbrain"}} | |||
| </a> | |||
| <div class="divider"> / </div> | |||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}"> | |||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all"> | |||
| {{$.i18n.Tr "repo.modelarts.notebook"}} | |||
| </a> | |||
| <div class="divider"> / </div> | |||
| @@ -281,7 +281,7 @@ | |||
| </span> | |||
| <el-dropdown-menu slot="dropdown"> | |||
| <el-dropdown-item class="clipboard" data-clipboard-text="{{.DownloadURL}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_url"}}</el-dropdown-item> | |||
| <el-dropdown-item class="clipboard" data-clipboard-text="{{.Md5}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_md5"}}</el-dropdown-item> | |||
| <!-- <el-dropdown-item class="clipboard" data-clipboard-text="{{.Md5}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_md5"}}</el-dropdown-item>--> | |||
| {{if and ($.CanWrite) (eq .DecompressState 1) }} | |||
| <el-dropdown-item @click.native="gotoAnnotate('{{$.RepoLink}}','{{.UUID}}',{{.Type}})">{{$.i18n.Tr "dataset.annotation"}}</el-dropdown-item> | |||
| {{end}} | |||
| @@ -193,7 +193,7 @@ td, th { | |||
| {{.i18n.Tr "repo.cloudbrain"}} | |||
| </a> | |||
| <div class="divider"> / </div> | |||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}"> | |||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all"> | |||
| {{$.i18n.Tr "repo.modelarts.notebook"}} | |||
| </a> | |||
| <div class="divider"> / </div> | |||
| @@ -420,7 +420,13 @@ td, th { | |||
| </div> | |||
| </div> | |||
| <div class="ui tab" data-tab="second{{$k}}"> | |||
| <div> | |||
| <div style="position: relative;"> | |||
| <span> | |||
| <a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" id="log_top"><i class="icon-to-top"></i></a> | |||
| </span> | |||
| <span> | |||
| <a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" id="log_bottom"><i class="icon-to-bottom"></i></a> | |||
| </span> | |||
| <div class="ui message message{{.VersionName}}" style="display: none;"> | |||
| <div id="header"></div> | |||
| </div> | |||
| @@ -879,4 +885,57 @@ td, th { | |||
| }); | |||
| } | |||
| } | |||
// Smoothly scroll `dom` from `currentY` to `targetY`, preserving the
// horizontal position `currentX` (defaults to 0 when omitted).
// Each ~1ms frame advances one tenth of the distance measured at the start of
// that frame; once the remaining distance is within 10px it snaps to target.
function scrollAnimation(dom, currentY, targetY, currentX) {
    const needScrollTop = targetY - currentY;
    let _currentY = currentY;
    setTimeout(() => {
        // Move a tenth of the remaining distance this frame.
        const dist = Math.ceil(needScrollTop / 10);
        _currentY += dist;
        dom.scrollTo(currentX || 0, _currentY);
        if (needScrollTop > 10 || needScrollTop < -10) {
            // BUG FIX: pass `currentX` through the recursion — previously it
            // was dropped, so the horizontal position reset to 0 mid-animation.
            scrollAnimation(dom, _currentY, targetY, currentX)
        } else {
            // Close enough: jump straight to the target (keep X consistent
            // with the frames above instead of hard-coding 0).
            dom.scrollTo(currentX || 0, targetY)
        }
    }, 1)
}
$('#log_top').click(function(){
    // Smooth-scroll the log pane back to the top; no-op when no log pane exists.
    const pane = document.querySelector('.log');
    if (!pane) {
        return;
    }
    scrollAnimation(pane, pane.scrollTop, 0);
})
$('#log_bottom').click(function(){
    // Smooth-scroll the log pane to the bottom; when the pane is not yet
    // scrollable, request more log content instead.
    let logContentDom = document.querySelector('.log')
    if(!logContentDom)
        return
    if(logContentDom.scrollHeight > logContentDom.clientHeight){
        // Content overflows: animate down to the current bottom.
        // (BUG FIX: removed leftover `console.log("1111")` debug output.)
        scrollAnimation(logContentDom, logContentDom.scrollTop, logContentDom.scrollHeight - logContentDom.clientHeight);
    }
    else{
        // Not scrollable yet: ask for more log lines for this version, then
        // nudge the scroll position so a subsequent scroll event can fire.
        logScroll(version_name)
        logContentDom.scrollTo(0,logContentDom.scrollTop-1);
    }
})
| </script> | |||
| @@ -71,7 +71,7 @@ | |||
| {{ $index := index .GetIssueInfos 0}} | |||
| {{$.i18n.Tr "action.comment_pull" .GetRepoLink $index .ShortRepoPath | Str2html}} | |||
| {{else if eq .GetOpType 24}} | |||
| {{$.i18n.Tr "action.upload_dataset" .GetRepoLink .Content .RefName | Str2html}} | |||
| {{$.i18n.Tr "action.upload_dataset" .GetRepoLink .RefName | Str2html}} | |||
| {{else if eq .GetOpType 25}} | |||
| {{$.i18n.Tr "action.task_gpudebugjob" .GetRepoLink .Content .RefName | Str2html}} | |||
| {{else if eq .GetOpType 26}} | |||
| @@ -27,6 +27,7 @@ import createDropzone from '../features/dropzone.js'; | |||
| const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config; | |||
| const chunkSize = 1024 * 1024 * 64; | |||
| const md5ChunkSize = 1024 * 1024 * 1; | |||
| export default { | |||
| props:{ | |||
| @@ -190,10 +191,12 @@ export default { | |||
| let currentChunk = 0; | |||
| const time = new Date().getTime(); | |||
| // console.log('计算MD5...') | |||
| this.status = this.dropzoneParams.data('md5-computing'); | |||
| file.totalChunkCounts = chunks; | |||
| loadNext(); | |||
| if (file.size==0) { | |||
| file.totalChunkCounts = 1 | |||
| } | |||
| loadMd5Next(); | |||
| fileReader.onload = (e) => { | |||
| fileLoaded.call(this, e); | |||
| @@ -207,13 +210,12 @@ export default { | |||
| spark.append(e.target.result); // Append array buffer | |||
| currentChunk++; | |||
| if (currentChunk < chunks) { | |||
| // console.log(`第${currentChunk}分片解析完成, 开始第${currentChunk +1}/${chunks}分片解析`); | |||
| this.status = `${this.dropzoneParams.data('loading-file')} ${( | |||
| (currentChunk / chunks) * | |||
| 100 | |||
| ).toFixed(2)}% (${currentChunk}/${chunks})`; | |||
| this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2)); | |||
| loadNext(); | |||
| loadMd5Next(); | |||
| return; | |||
| } | |||
| @@ -235,6 +237,13 @@ export default { | |||
| start + chunkSize >= file.size ? file.size : start + chunkSize; | |||
| fileReader.readAsArrayBuffer(blobSlice.call(file, start, end)); | |||
| } | |||
function loadMd5Next() {
    // Read only the first md5ChunkSize bytes of the current chunkSize-wide
    // chunk into the FileReader. NOTE(review): the stride (chunkSize) is
    // larger than the read length (md5ChunkSize), so the MD5 is computed over
    // a sample of the file — appears intentional for speed; confirm.
    const offset = currentChunk * chunkSize;
    const limit = offset + md5ChunkSize >= file.size ? file.size : offset + md5ChunkSize;
    fileReader.readAsArrayBuffer(blobSlice.call(file, offset, limit));
}
| }, | |||
| async computeMD5Success(md5edFile) { | |||
| @@ -1,484 +0,0 @@ | |||
| <template> | |||
| <div class="dropzone-wrapper dataset-files"> | |||
| <div | |||
| id="dataset" | |||
| class="dropzone" | |||
| /> | |||
| <p class="upload-info"> | |||
| {{ file_status_text }} | |||
| <strong class="success text red">{{ status }}</strong> | |||
| </p> | |||
| <p>说明:<br> | |||
| - 只有zip格式的数据集才能发起云脑任务;<br> | |||
| - 云脑1提供 <span class="text blue">CPU / GPU</span> 资源,云脑2提供 <span class="text blue">Ascend NPU</span> 资源;调试使用的数据集也需要上传到对应的环境。 | |||
| </p> | |||
| </div> | |||
| </template> | |||
| <script> | |||
| /* eslint-disable eqeqeq */ | |||
| // import Dropzone from 'dropzone/dist/dropzone.js'; | |||
| // import 'dropzone/dist/dropzone.css' | |||
| import SparkMD5 from 'spark-md5'; | |||
| import axios from 'axios'; | |||
| import qs from 'qs'; | |||
| import createDropzone from '../features/dropzone.js'; | |||
| const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config; | |||
| const CloudBrainType = 1; | |||
| export default { | |||
| data() { | |||
| return { | |||
| dropzoneUploader: null, | |||
| maxFiles: 1, | |||
| maxFilesize: 1 * 1024 * 1024 * 1024 * 1024, | |||
| acceptedFiles: '*/*', | |||
| progress: 0, | |||
| status: '', | |||
| dropzoneParams: {}, | |||
| file_status_text: '' | |||
| }; | |||
| }, | |||
| async mounted() { | |||
| this.dropzoneParams = $('div#minioUploader-params'); | |||
| this.file_status_text = this.dropzoneParams.data('file-status'); | |||
| this.status = this.dropzoneParams.data('file-init-status'); | |||
| let previewTemplate = ''; | |||
| previewTemplate += '<div class="dz-preview dz-file-preview">\n '; | |||
| previewTemplate += ' <div class="dz-details">\n '; | |||
| previewTemplate += ' <div class="dz-filename">'; | |||
| previewTemplate += | |||
| ' <span data-dz-name data-dz-thumbnail></span>'; | |||
| previewTemplate += ' </div>\n '; | |||
| previewTemplate += ' <div class="dz-size" data-dz-size style="white-space: nowrap"></div>\n '; | |||
| previewTemplate += ' </div>\n '; | |||
| previewTemplate += ' <div class="dz-progress ui active progress">'; | |||
| previewTemplate += | |||
| ' <div class="dz-upload bar" data-dz-uploadprogress><div class="progress"></div></div>\n '; | |||
| previewTemplate += ' </div>\n '; | |||
| previewTemplate += ' <div class="dz-success-mark">'; | |||
| previewTemplate += ' <span>上传成功</span>'; | |||
| previewTemplate += ' </div>\n '; | |||
| previewTemplate += ' <div class="dz-error-mark">'; | |||
| previewTemplate += ' <span>上传失败</span>'; | |||
| previewTemplate += ' </div>\n '; | |||
| previewTemplate += ' <div class="dz-error-message">'; | |||
| previewTemplate += ' <span data-dz-errormessage></span>'; | |||
| previewTemplate += ' </div>\n'; | |||
| previewTemplate += '</div>'; | |||
| const $dropzone = $('div#dataset'); | |||
| console.log('createDropzone'); | |||
| const dropzoneUploader = await createDropzone($dropzone[0], { | |||
| url: '/todouploader', | |||
| maxFiles: this.maxFiles, | |||
| maxFilesize: this.maxFileSize, | |||
| timeout: 0, | |||
| autoQueue: false, | |||
| dictDefaultMessage: this.dropzoneParams.data('default-message'), | |||
| dictInvalidFileType: this.dropzoneParams.data('invalid-input-type'), | |||
| dictFileTooBig: this.dropzoneParams.data('file-too-big'), | |||
| dictRemoveFile: this.dropzoneParams.data('remove-file'), | |||
| previewTemplate | |||
| }); | |||
| dropzoneUploader.on('addedfile', (file) => { | |||
| setTimeout(() => { | |||
| // eslint-disable-next-line no-unused-expressions | |||
| file.accepted && this.onFileAdded(file); | |||
| }, 200); | |||
| }); | |||
| dropzoneUploader.on('maxfilesexceeded', function (file) { | |||
| if (this.files[0].status !== 'success') { | |||
| alert(this.dropzoneParams.data('waitting-uploading')); | |||
| this.removeFile(file); | |||
| return; | |||
| } | |||
| this.removeAllFiles(); | |||
| this.addFile(file); | |||
| }); | |||
| this.dropzoneUploader = dropzoneUploader; | |||
| }, | |||
| methods: { | |||
| resetStatus() { | |||
| this.progress = 0; | |||
| this.status = ''; | |||
| }, | |||
| updateProgress(file, progress) { | |||
| file.previewTemplate.querySelector( | |||
| '.dz-upload' | |||
| ).style.width = `${progress}%`; | |||
| }, | |||
| emitDropzoneSuccess(file) { | |||
| file.status = 'success'; | |||
| this.dropzoneUploader.emit('success', file); | |||
| this.dropzoneUploader.emit('complete', file); | |||
| }, | |||
| emitDropzoneFailed(file) { | |||
| this.status = this.dropzoneParams.data('falied'); | |||
| file.status = 'error'; | |||
| this.dropzoneUploader.emit('error', file); | |||
| // this.dropzoneUploader.emit('complete', file); | |||
| }, | |||
| onFileAdded(file) { | |||
| file.datasetId = document | |||
| .getElementById('datasetId') | |||
| .getAttribute('datasetId'); | |||
| this.resetStatus(); | |||
| this.computeMD5(file); | |||
| }, | |||
| finishUpload(file) { | |||
| this.emitDropzoneSuccess(file); | |||
| setTimeout(() => { | |||
| window.location.reload(); | |||
| }, 1000); | |||
| }, | |||
    computeMD5(file) {
      // Hash the file with SparkMD5 by streaming it through a FileReader in
      // 64 MiB slices; when done, hand the digest to computeMD5Success(),
      // which decides how to upload.
      this.resetStatus();
      const blobSlice =
        File.prototype.slice ||
        File.prototype.mozSlice ||
        File.prototype.webkitSlice,
        chunkSize = 1024 * 1024 * 64, // 64 MiB per read
        chunks = Math.ceil(file.size / chunkSize),
        spark = new SparkMD5.ArrayBuffer(),
        fileReader = new FileReader();
      let currentChunk = 0;
      const time = new Date().getTime();
      // console.log('computing MD5...')
      this.status = this.dropzoneParams.data('md5-computing');
      file.totalChunkCounts = chunks;
      // Kick off the first read. readAsArrayBuffer is asynchronous, so the
      // onload handler assigned just below is in place before it fires.
      loadNext();
      fileReader.onload = (e) => {
        // Bind the component as `this` for the plain function below.
        fileLoaded.call(this, e);
      };
      fileReader.onerror = (err) => {
        console.warn('oops, something went wrong.', err);
        file.cancel();
      };
      // One slice has been read: fold it into the hash and continue or finish.
      // Always invoked with `this` bound to the component (see onload above).
      function fileLoaded(e) {
        spark.append(e.target.result); // Append array buffer
        currentChunk++;
        if (currentChunk < chunks) {
          // console.log(`slice ${currentChunk} parsed, starting slice ${currentChunk + 1}/${chunks}`);
          this.status = `${this.dropzoneParams.data('loading-file')} ${(
            (currentChunk / chunks) *
            100
          ).toFixed(2)}% (${currentChunk}/${chunks})`;
          this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
          loadNext();
          return;
        }
        // All slices hashed: finalize the digest.
        const md5 = spark.end();
        console.log(
          `MD5计算完成:${file.name} \nMD5:${md5} \n分片:${chunks} 大小:${
            file.size
          } 用时:${(new Date().getTime() - time) / 1000} s`
        );
        spark.destroy(); // release SparkMD5's internal buffer
        file.uniqueIdentifier = md5; // the MD5 becomes the file's unique identifier
        file.cmd5 = false; // MD5 computation finished
        this.computeMD5Success(file);
      }
      // Read the next slice [start, end) of the file as an ArrayBuffer.
      function loadNext() {
        const start = currentChunk * chunkSize;
        const end =
          start + chunkSize >= file.size ? file.size : start + chunkSize;
        fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
      }
    },
| async computeMD5Success(md5edFile) { | |||
| const file = await this.getSuccessChunks(md5edFile); | |||
| try { | |||
| if (file.uploadID == '' || file.uuid == '') { | |||
| // 未上传过 | |||
| await this.newMultiUpload(file); | |||
| if (file.uploadID != '' && file.uuid != '') { | |||
| file.chunks = ''; | |||
| this.multipartUpload(file); | |||
| } else { | |||
| // 失败如何处理 | |||
| return; | |||
| } | |||
| return; | |||
| } | |||
| if (file.uploaded == '1') { | |||
| // 已上传成功 | |||
| // 秒传 | |||
| if (file.attachID == '0') { | |||
| // 删除数据集记录,未删除文件 | |||
| await addAttachment(file); | |||
| } | |||
| //不同数据集上传同一个文件 | |||
| if (file.datasetID != '' ) { | |||
| if (file.datasetName != "" && file.realName != "") { | |||
| var info = "该文件已上传,对应数据集(" + file.datasetName + ")-文件(" + file.realName + ")"; | |||
| window.alert(info); | |||
| window.location.reload(); | |||
| } | |||
| } | |||
| console.log('文件已上传完成'); | |||
| this.progress = 100; | |||
| this.status = this.dropzoneParams.data('upload-complete'); | |||
| this.finishUpload(file); | |||
| } else { | |||
| // 断点续传 | |||
| this.multipartUpload(file); | |||
| } | |||
| } catch (error) { | |||
| this.emitDropzoneFailed(file); | |||
| console.log(error); | |||
| } | |||
| async function addAttachment(file) { | |||
| return await axios.post( | |||
| '/attachments/add', | |||
| qs.stringify({ | |||
| uuid: file.uuid, | |||
| file_name: file.name, | |||
| size: file.size, | |||
| dataset_id: file.datasetId, | |||
| type: CloudBrainType, | |||
| _csrf: csrf, | |||
| }) | |||
| ); | |||
| } | |||
| }, | |||
| async getSuccessChunks(file) { | |||
| const params = { | |||
| params: { | |||
| md5: file.uniqueIdentifier, | |||
| type: CloudBrainType, | |||
| file_name: file.name, | |||
| _csrf: csrf | |||
| } | |||
| }; | |||
| try { | |||
| const response = await axios.get('/attachments/get_chunks', params); | |||
| file.uploadID = response.data.uploadID; | |||
| file.uuid = response.data.uuid; | |||
| file.uploaded = response.data.uploaded; | |||
| file.chunks = response.data.chunks; | |||
| file.attachID = response.data.attachID; | |||
| file.datasetID = response.data.datasetID; | |||
| file.datasetName = response.data.datasetName; | |||
| file.realName = response.data.fileName; | |||
| return file; | |||
| } catch (error) { | |||
| this.emitDropzoneFailed(file); | |||
| console.log('getSuccessChunks catch: ', error); | |||
| return null; | |||
| } | |||
| }, | |||
| async newMultiUpload(file) { | |||
| const res = await axios.get('/attachments/new_multipart', { | |||
| params: { | |||
| totalChunkCounts: file.totalChunkCounts, | |||
| md5: file.uniqueIdentifier, | |||
| size: file.size, | |||
| fileType: file.type, | |||
| type: CloudBrainType, | |||
| file_name: file.name, | |||
| _csrf: csrf | |||
| } | |||
| }); | |||
| file.uploadID = res.data.uploadID; | |||
| file.uuid = res.data.uuid; | |||
| }, | |||
| multipartUpload(file) { | |||
| const blobSlice = | |||
| File.prototype.slice || | |||
| File.prototype.mozSlice || | |||
| File.prototype.webkitSlice, | |||
| chunkSize = 1024 * 1024 * 64, | |||
| chunks = Math.ceil(file.size / chunkSize), | |||
| fileReader = new FileReader(), | |||
| time = new Date().getTime(); | |||
| let currentChunk = 0; | |||
| function loadNext() { | |||
| const start = currentChunk * chunkSize; | |||
| const end = | |||
| start + chunkSize >= file.size ? file.size : start + chunkSize; | |||
| fileReader.readAsArrayBuffer(blobSlice.call(file, start, end)); | |||
| } | |||
| function checkSuccessChunks() { | |||
| const index = successChunks.indexOf((currentChunk + 1).toString()); | |||
| if (index == -1) { | |||
| return false; | |||
| } | |||
| return true; | |||
| } | |||
| async function getUploadChunkUrl(currentChunk, partSize) { | |||
| const res = await axios.get('/attachments/get_multipart_url', { | |||
| params: { | |||
| uuid: file.uuid, | |||
| uploadID: file.uploadID, | |||
| size: partSize, | |||
| chunkNumber: currentChunk + 1, | |||
| type: CloudBrainType, | |||
| file_name: file.name, | |||
| _csrf: csrf | |||
| } | |||
| }); | |||
| urls[currentChunk] = res.data.url; | |||
| } | |||
| async function uploadMinio(url, e) { | |||
| let urls = []; | |||
| const res = await axios.put(url, e.target.result, { | |||
| headers: { | |||
| 'Content-Type': '' | |||
| }}); | |||
| etags[currentChunk] = res.headers.etag; | |||
| } | |||
| async function uploadMinioNewMethod(url,e){ | |||
| var xhr = new XMLHttpRequest(); | |||
| xhr.open('PUT', url, false); | |||
| xhr.setRequestHeader('Content-Type', '') | |||
| xhr.send(e.target.result); | |||
| var etagValue = xhr.getResponseHeader('ETag'); | |||
| //console.log(etagValue); | |||
| etags[currentChunk] = etagValue; | |||
| } | |||
| async function updateChunk(currentChunk) { | |||
| await axios.post( | |||
| '/attachments/update_chunk', | |||
| qs.stringify({ | |||
| uuid: file.uuid, | |||
| chunkNumber: currentChunk + 1, | |||
| etag: etags[currentChunk], | |||
| type: CloudBrainType, | |||
| _csrf: csrf | |||
| }) | |||
| ); | |||
| } | |||
| async function uploadChunk(e) { | |||
| try { | |||
| if (!checkSuccessChunks()) { | |||
| const start = currentChunk * chunkSize; | |||
| const partSize = | |||
| start + chunkSize >= file.size ? file.size - start : chunkSize; | |||
| // 获取分片上传url | |||
| await getUploadChunkUrl(currentChunk, partSize); | |||
| if (urls[currentChunk] != '') { | |||
| // 上传到minio | |||
| await uploadMinioNewMethod(urls[currentChunk], e); | |||
| if (etags[currentChunk] != '') { | |||
| // 更新数据库:分片上传结果 | |||
| //await updateChunk(currentChunk); | |||
| } else { | |||
| console.log("上传到minio uploadChunk etags[currentChunk] == ''");// TODO | |||
| } | |||
| } else { | |||
| console.log("uploadChunk urls[currentChunk] != ''");// TODO | |||
| } | |||
| } | |||
| } catch (error) { | |||
| this.emitDropzoneFailed(file); | |||
| console.log(error); | |||
| } | |||
| } | |||
| async function completeUpload() { | |||
| return await axios.post( | |||
| '/attachments/complete_multipart', | |||
| qs.stringify({ | |||
| uuid: file.uuid, | |||
| uploadID: file.uploadID, | |||
| file_name: file.name, | |||
| size: file.size, | |||
| dataset_id: file.datasetId, | |||
| type: CloudBrainType, | |||
| _csrf: csrf | |||
| }) | |||
| ); | |||
| } | |||
| const successChunks = []; | |||
| let successParts = []; | |||
| successParts = file.chunks.split(','); | |||
| for (let i = 0; i < successParts.length; i++) { | |||
| successChunks[i] = successParts[i].split('-')[0]; | |||
| } | |||
| const urls = []; // TODO const ? | |||
| const etags = []; | |||
| console.log('上传分片...'); | |||
| this.status = this.dropzoneParams.data('uploading'); | |||
| loadNext(); | |||
| fileReader.onload = async (e) => { | |||
| await uploadChunk(e); | |||
| fileReader.abort(); | |||
| currentChunk++; | |||
| if (currentChunk < chunks) { | |||
| console.log( | |||
| `第${currentChunk}个分片上传完成, 开始第${currentChunk + | |||
| 1}/${chunks}个分片上传` | |||
| ); | |||
| this.progress = Math.ceil((currentChunk / chunks) * 100); | |||
| this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2)); | |||
| this.status = `${this.dropzoneParams.data('uploading')} ${( | |||
| (currentChunk / chunks) * | |||
| 100 | |||
| ).toFixed(2)}%`; | |||
| await loadNext(); | |||
| } else { | |||
| await completeUpload(); | |||
| console.log( | |||
| `文件上传完成:${file.name} \n分片:${chunks} 大小:${ | |||
| file.size | |||
| } 用时:${(new Date().getTime() - time) / 1000} s` | |||
| ); | |||
| this.progress = 100; | |||
| this.status = this.dropzoneParams.data('upload-complete'); | |||
| this.finishUpload(file); | |||
| } | |||
| }; | |||
| } | |||
| } | |||
| }; | |||
| </script> | |||
| <style> | |||
/* Remove default spacing around the Dropzone wrapper. */
.dropzone-wrapper {
  margin: 0;
}
/* Dashed blue drop target, flattened to match the surrounding Semantic UI. */
.ui .dropzone {
  border: 2px dashed #0087f5;
  box-shadow: none !important;
  padding: 0;
  min-height: 5rem;
  border-radius: 4px;
}
/* Lay each file preview out as a single vertically-centered row. */
.dataset .dataset-files #dataset .dz-preview.dz-file-preview,
.dataset .dataset-files #dataset .dz-preview.dz-processing {
  display: flex;
  align-items: center;
}
/* Hairline divider between stacked previews. */
.dataset .dataset-files #dataset .dz-preview {
  border-bottom: 1px solid #dadce0;
  min-height: 0;
}
.upload-info{
  margin-top: 0.2em;
}
| </style> | |||
| @@ -62,11 +62,6 @@ | |||
| <a :href="AppSubUrl +'../../../'+ scope.row.Name">{{scope.row.Name}} </a> | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="CodeMergeCount" | |||
| label="PR数" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="UserIndex" | |||
| label="用户指数" | |||
| @@ -76,6 +71,11 @@ | |||
| {{scope.row.UserIndex | rounding}} | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="CodeMergeCount" | |||
| label="PR数" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="CommitCount" | |||
| label="commit数" | |||
| @@ -1,6 +1,7 @@ | |||
| <template> | |||
| <div> | |||
| <div > | |||
| <div class="ui container" style="width: 80%;"> | |||
| <div class="ui grid"> | |||
| <div class="row" style="border: 1px solid #d4d4d5;margin-top: 15px;padding-top: 0;"> | |||
| <div class="ui attached segment"> | |||
| @@ -11,12 +12,10 @@ | |||
| </div> | |||
| </div> | |||
| </div> | |||
| <el-row style="padding: 1rem;"> | |||
| <el-col :span="2" style="margin-right: 1rem;"> | |||
| <el-checkbox v-model="checked" style="padding: 0.5rem 1rem;">仅显示平台推荐</el-checkbox> | |||
| </el-col> | |||
| <el-col :span="6"> | |||
| <el-dropdown @command="handleCommand" trigger="click" style="border: 1px solid rgba(34,36,38,.15);border-radius: 4px;padding: 0.5rem 1rem;"> | |||
| <div class="ui ten wide column" style="margin: 1rem 0;"> | |||
| <el-checkbox v-model="checked" style="padding: 0.5rem 1rem;">仅显示平台推荐</el-checkbox> | |||
| <el-dropdown @command="handleCommand" trigger="click" style="border: 1px solid rgba(34,36,38,.15);border-radius: 4px;padding: 0.5rem 1rem;"> | |||
| <span class="el-dropdown-link"> | |||
| {{dropdownPrivate}}<i class="el-icon-caret-bottom el-icon--right"></i> | |||
| </span> | |||
| @@ -25,10 +24,12 @@ | |||
| <el-dropdown-item :command="{label:'公开',private:false}">公开</el-dropdown-item> | |||
| <el-dropdown-item :command="{label:'公开',private:true}">私有</el-dropdown-item> | |||
| </el-dropdown-menu> | |||
| </el-dropdown> | |||
| </el-col> | |||
| </el-row> | |||
| <el-row> | |||
| </el-dropdown> | |||
| </div> | |||
| <div class="ui six wide column right aligned" style="margin: 1rem 0;"> | |||
| <a class="ui blue small button" href="/admin/images/commit_image?from=imageAdmin">创建云脑镜像</a> | |||
| </div> | |||
| <div class="ui sixteen wide column" style="padding: 0;"> | |||
| <el-table | |||
| :data="tableDataCustom" | |||
| style="width: 100%" | |||
| @@ -138,7 +139,7 @@ | |||
| </template> | |||
| </el-table-column> | |||
| </el-table> | |||
| </el-row> | |||
| </div> | |||
| <div class="ui container" style="padding:2rem 0;text-align:center"> | |||
| <el-pagination | |||
| background | |||
| @@ -152,6 +153,7 @@ | |||
| </el-pagination> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| @@ -26,6 +26,15 @@ export default async function initImage(){ | |||
| } | |||
| ] | |||
| }, | |||
| place:{ | |||
| identifier : 'place', | |||
| rules: [ | |||
| { | |||
| type: 'empty', | |||
| } | |||
| ] | |||
| }, | |||
| } | |||
| }) | |||
| } | |||
| @@ -75,8 +84,9 @@ export default async function initImage(){ | |||
| type:'POST', | |||
| data:formData, | |||
| success:function(res){ | |||
| console.log(res) | |||
| if(res.Code===1){ | |||
| $('.ui.info.message').text(res.Message).show().delay(1500).fadeOut(); | |||
| $('.ui.negative.message').text(res.Message).show().delay(2500).fadeOut(); | |||
| }else if(res.Code==0){ | |||
| if(location.href.indexOf('imageAdmin')!==-1){ | |||
| location.href = `${window.config.AppSubUrl}/admin/images` | |||
| @@ -105,6 +115,11 @@ export default async function initImage(){ | |||
| $("textarea[name='description']").parent().addClass('error') | |||
| return false | |||
| } | |||
| if($("input[name='place']").length>0&&!$("input[name='place']").val()){ | |||
| console.log("1111111",$("input[name='place']")) | |||
| $("input[name='place']").parent().addClass('error') | |||
| return false | |||
| } | |||
| const postData = { | |||
| _csrf:$("input[name='_csrf']").val(), | |||
| @@ -115,6 +130,10 @@ export default async function initImage(){ | |||
| topics:$("input[name='topics']").val(), | |||
| id:$("input[name='id']").val() | |||
| } | |||
| if($("input[name='place']").val()&&$("input[name='isRecommend']:checked").val()){ | |||
| postData.isRecommend = $("input[name='isRecommend']:checked").val() | |||
| postData.place = $("input[name='place']").val() | |||
| } | |||
| let formData = $params(postData) | |||
| if($("input[name='edit']").val()=="edit"){ | |||
| postImage(formData) | |||
| @@ -143,7 +162,7 @@ export default async function initImage(){ | |||
| } | |||
| }) | |||
| } | |||
| return false | |||
| }) | |||
| $('#cancel_submit_image').click(()=>{ | |||
| if(link.includes('cloudbrain')){ | |||
| @@ -34,7 +34,6 @@ import { | |||
| } from './features/notification.js'; | |||
| import {createCodeEditor} from './features/codeeditor.js'; | |||
| import MinioUploader from './components/MinioUploader.vue'; | |||
| import ObsUploader from './components/ObsUploader.vue'; | |||
| import EditAboutInfo from './components/EditAboutInfo.vue'; | |||
| // import Images from './components/Images.vue'; | |||
| import EditTopics from './components/EditTopics.vue'; | |||
| @@ -2960,7 +2959,6 @@ $(document).ready(async () => { | |||
| initCodeView(); | |||
| initVueApp(); | |||
| initVueUploader(); | |||
| initObsUploader(); | |||
| initVueDataset(); | |||
| initVueEditAbout(); | |||
| initVueEditTopic(); | |||
| @@ -3704,6 +3702,32 @@ function initVueEditAbout() { | |||
| } | |||
| function initVueDataset() { | |||
| $('.set_dataset').on('click', function(){ | |||
| const $this = $(this); | |||
| let link = $this.data('url') | |||
| $.ajax({ | |||
| url:link, | |||
| type:'PUT', | |||
| success:function(res){ | |||
| console.log(res) | |||
| if(res.Code==0){ | |||
| window.location.href = '/admin/datasets' | |||
| }else{ | |||
| $('.ui.negative.message').text(res.Message).show().delay(1500).fadeOut(); | |||
| } | |||
| }, | |||
| error: function(xhr){ | |||
            // Hide the loading indicator.
            // Runs only when the request fails (status code is not 200).
| $('.ui.negative.message').html(xhr.responseText).show().delay(1500).fadeOut(); | |||
| console.log(xhr) | |||
| }, | |||
| complete:function(xhr){ | |||
| // $("#mask").css({"display":"none","z-index":"1"}) | |||
| } | |||
| }) | |||
| }); | |||
| const el = document.getElementById('dataset-base'); | |||
| if (!el) { | |||
| return; | |||
| @@ -3856,8 +3880,7 @@ function initVueDataset() { | |||
| }, | |||
| }, | |||
| components: { | |||
| MinioUploader, | |||
| ObsUploader | |||
| MinioUploader | |||
| }, | |||
| mounted(){ | |||
| // if(document.getElementById('postPath')){ | |||
| @@ -4382,19 +4405,6 @@ function initVueDataAnalysis() { | |||
| render: h => h(DataAnalysis) | |||
| }); | |||
| } | |||
// New: bootstrap for the OBS uploader component.
| function initObsUploader() { | |||
| const el = document.getElementById('obsUploader'); | |||
| if (!el) { | |||
| return; | |||
| } | |||
| new Vue({ | |||
| el: '#obsUploader', | |||
| components: {ObsUploader}, | |||
| template: '<ObsUploader />' | |||
| }); | |||
| } | |||
| function initVueWxAutorize() { | |||
| const el = document.getElementById('WxAutorize'); | |||
| if (!el) { | |||