diff --git a/README.md b/README.md index 061ece70c..99f6a6e8c 100644 --- a/README.md +++ b/README.md @@ -41,6 +41,7 @@ ## 授权许可 本项目采用 MIT 开源授权许可证,完整的授权说明已放置在 [LICENSE](https://git.openi.org.cn/OpenI/aiforge/src/branch/develop/LICENSE) 文件中。 + ## 需要帮助? 如果您在使用或者开发过程中遇到问题,可以在以下渠道咨询: - 点击[这里](https://git.openi.org.cn/OpenI/aiforge/issues)在线提交问题(点击页面右上角绿色按钮**创建任务**) @@ -49,3 +50,8 @@ ## 启智社区小白训练营: - 结合案例给大家详细讲解如何使用社区平台,帮助无技术背景的小白成长为启智社区达人 (https://git.openi.org.cn/zeizei/OpenI_Learning) + +## 平台引用 +如果本平台对您的科研工作提供了帮助,可在论文致谢中加入: +英文版:```Thanks for the support provided by OpenI Community (https://git.openi.org.cn).``` +中文版:```感谢启智社区提供的技术支持(https://git.openi.org.cn)。``` \ No newline at end of file diff --git a/models/attachment.go b/models/attachment.go index c322d391b..a3fc6fa01 100755 --- a/models/attachment.go +++ b/models/attachment.go @@ -9,6 +9,7 @@ import ( "fmt" "io" "path" + "strings" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/obs" @@ -18,6 +19,7 @@ import ( "code.gitea.io/gitea/modules/timeutil" gouuid "github.com/satori/go.uuid" + "xorm.io/builder" "xorm.io/xorm" ) @@ -38,6 +40,7 @@ type Attachment struct { UploaderID int64 `xorm:"INDEX DEFAULT 0"` // Notice: will be zero before this column added CommentID int64 Name string + Description string `xorm:"TEXT"` DownloadCount int64 `xorm:"DEFAULT 0"` Size int64 `xorm:"DEFAULT 0"` IsPrivate bool `xorm:"DEFAULT false"` @@ -47,6 +50,7 @@ type Attachment struct { FileChunk *FileChunk `xorm:"-"` CanDel bool `xorm:"-"` + Uploader *User `xorm:"-"` } type AttachmentUsername struct { @@ -54,6 +58,27 @@ type AttachmentUsername struct { Name string } +type AttachmentInfo struct { + Attachment `xorm:"extends"` + Repo *Repository `xorm:"extends"` + RelAvatarLink string `xorm:"extends"` + UserName string `xorm:"extends"` +} + +type AttachmentsOptions struct { + ListOptions + DatasetIDs []int64 + DecompressState int + Type int + UploaderID int64 + NeedDatasetIDs bool + NeedIsPrivate bool + IsPrivate bool + 
JustNeedZipFile bool + NeedRepoInfo bool + Keyword string +} + func (a *Attachment) AfterUpdate() { if a.DatasetID > 0 { datasetIsPublicCount, err := x.Where("dataset_id = ? AND is_private = ?", a.DatasetID, false).Count(new(Attachment)) @@ -326,6 +351,18 @@ func DeleteAttachmentsByComment(commentID int64, remove bool) (int, error) { func UpdateAttachment(atta *Attachment) error { return updateAttachment(x, atta) } +func UpdateAttachmentDescription(atta *Attachment) error { + return updateAttachmentDescription(x, atta) +} + +func updateAttachmentDescription(e Engine, atta *Attachment) error { + var sess *xorm.Session + + sess = e.ID(atta.ID) + + _, err := sess.Cols("description").Update(atta) + return err +} func updateAttachment(e Engine, atta *Attachment) error { var sess *xorm.Session @@ -503,3 +540,98 @@ func GetAttachmentSizeByDatasetID(datasetID int64) (int64, error) { func GetAllAttachmentSize() (int64, error) { return x.SumInt(&Attachment{}, "size") } + +func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) { + sess := x.NewSession() + defer sess.Close() + + var cond = builder.NewCond() + if opts.NeedDatasetIDs { + cond = cond.And( + builder.In("attachment.dataset_id", opts.DatasetIDs), + ) + } + + if opts.UploaderID > 0 { + cond = cond.And( + builder.Eq{"attachment.uploader_id": opts.UploaderID}, + ) + } + + if (opts.Type) >= 0 { + cond = cond.And( + builder.Eq{"attachment.type": opts.Type}, + ) + } + + if opts.NeedIsPrivate { + cond = cond.And( + builder.Eq{"attachment.is_private": opts.IsPrivate}, + ) + } + + if opts.JustNeedZipFile { + var DecompressState []int32 + DecompressState = append(DecompressState, DecompressStateDone, DecompressStateIng, DecompressStateFailed) + cond = cond.And( + builder.In("attachment.decompress_state", DecompressState), + ) + } + + var count int64 + var err error + if len(opts.Keyword) == 0 { + count, err = sess.Where(cond).Count(new(Attachment)) + } else { + lowerKeyWord := 
strings.ToLower(opts.Keyword) + + cond = cond.And(builder.Or(builder.Like{"LOWER(attachment.name)", lowerKeyWord}, builder.Like{"LOWER(attachment.description)", lowerKeyWord})) + count, err = sess.Table(&Attachment{}).Where(cond).Count(new(AttachmentInfo)) + + } + + if err != nil { + return nil, 0, fmt.Errorf("Count: %v", err) + } + + if opts.Page >= 0 && opts.PageSize > 0 { + var start int + if opts.Page == 0 { + start = 0 + } else { + start = (opts.Page - 1) * opts.PageSize + } + sess.Limit(opts.PageSize, start) + } + + sess.OrderBy("attachment.created_unix DESC") + attachments := make([]*AttachmentInfo, 0, setting.UI.DatasetPagingNum) + if err := sess.Table(&Attachment{}).Where(cond). + Find(&attachments); err != nil { + return nil, 0, fmt.Errorf("Find: %v", err) + } + + if opts.NeedRepoInfo { + for _, attachment := range attachments { + dataset, err := GetDatasetByID(attachment.DatasetID) + if err != nil { + return nil, 0, fmt.Errorf("GetDatasetByID failed error: %v", err) + } + repo, err := GetRepositoryByID(dataset.RepoID) + if err == nil { + attachment.Repo = repo + } else { + return nil, 0, fmt.Errorf("GetRepositoryByID failed error: %v", err) + } + user, err := GetUserByID(attachment.UploaderID) + if err == nil { + attachment.RelAvatarLink = user.RelAvatarLink() + attachment.UserName = user.Name + } else { + return nil, 0, fmt.Errorf("GetUserByID failed error: %v", err) + } + } + } + + return attachments, count, nil +} diff --git a/models/base_message.go b/models/base_message.go new file mode 100644 index 000000000..37f7668ad --- /dev/null +++ b/models/base_message.go @@ -0,0 +1,16 @@ +package models + +type BaseMessage struct { + Code int + Message string +} + +var BaseOKMessage = BaseMessage{ + 0, "", +} + +func BaseErrorMessage(message string) BaseMessage { + return BaseMessage{ + 1, message, + } +} diff --git a/models/dataset.go b/models/dataset.go index 2b3de752b..af47c53fe 100755 --- a/models/dataset.go +++ b/models/dataset.go @@ -22,6 +22,7 @@ type 
Dataset struct { Category string Description string `xorm:"TEXT"` DownloadTimes int64 + NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"` License string Task string ReleaseID int64 `xorm:"INDEX"` @@ -35,6 +36,11 @@ type Dataset struct { Attachments []*Attachment `xorm:"-"` } +type DatasetWithStar struct { + Dataset + IsStaring bool +} + func (d *Dataset) IsPrivate() bool { switch d.Status { case DatasetStatusPrivate: @@ -91,33 +97,37 @@ type SearchDatasetOptions struct { OwnerID int64 RepoID int64 IncludePublic bool + Category string + Task string + License string ListOptions SearchOrderBy IsOwner bool } func CreateDataset(dataset *Dataset) (err error) { - if _, err = x.Insert(dataset); err != nil { + + sess := x.NewSession() + defer sess.Close() + + if err := sess.Begin(); err != nil { return err } - return nil -} - -func CreateDefaultDatasetToRepo(repo *Repository) (err error) { - dataset := &Dataset{RepoID: repo.ID} - has, err := x.Get(dataset) + datasetByRepoId := &Dataset{RepoID: dataset.RepoID} + has, err := sess.Get(datasetByRepoId) if err != nil { return err } - if !has { - dataset.Status = DatasetStatusPrivate - dataset.Title = repo.Name - if err = CreateDataset(dataset); err != nil { - return err - } + if has { + return fmt.Errorf("The dataset already exists.") } - return nil + + if _, err = sess.Insert(dataset); err != nil { + return err + } + return sess.Commit() + } func SearchDataset(opts *SearchDatasetOptions) (DatasetList, int64, error) { @@ -130,7 +140,18 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { cond = cond.And(builder.Neq{"dataset.status": DatasetStatusDeleted}) if len(opts.Keyword) > 0 { - cond = cond.And(builder.Like{"dataset.title", opts.Keyword}) + cond = cond.And(builder.Or(builder.Like{"dataset.title", opts.Keyword}, builder.Like{"dataset.description", opts.Keyword})) + } + + if len(opts.Category) > 0 { + cond = cond.And(builder.Eq{"dataset.category": opts.Category}) + } + + if len(opts.Task) > 0 { + cond = 
cond.And(builder.Eq{"dataset.task": opts.Task}) + } + if len(opts.License) > 0 { + cond = cond.And(builder.Eq{"dataset.license": opts.License}) } if opts.RepoID > 0 { @@ -139,12 +160,13 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { if opts.IncludePublic { cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) + cond = cond.And(builder.Eq{"attachment.is_private": false}) if opts.OwnerID > 0 { if len(opts.Keyword) == 0 { cond = cond.Or(builder.Eq{"repository.owner_id": opts.OwnerID}) } else { subCon := builder.NewCond() - subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID}, builder.Like{"dataset.title", opts.Keyword}) + subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID}, builder.Or(builder.Like{"dataset.title", opts.Keyword}, builder.Like{"dataset.description", opts.Keyword})) cond = cond.Or(subCon) } @@ -153,6 +175,7 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { cond = cond.And(builder.Eq{"repository.owner_id": opts.OwnerID}) if !opts.IsOwner { cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) + cond = cond.And(builder.Eq{"attachment.is_private": false}) } } @@ -169,14 +192,20 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da defer sess.Close() datasets := make(DatasetList, 0, opts.PageSize) + selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars" - count, err := sess.Join("INNER", "repository", "repository.id = dataset.repo_id").Where(cond).Count(new(Dataset)) + count, err := sess.Distinct("dataset.id").Join("INNER", "repository", "repository.id = dataset.repo_id"). + Join("INNER", "attachment", "attachment.dataset_id=dataset.id"). 
+ Where(cond).Count(new(Dataset)) if err != nil { return nil, 0, fmt.Errorf("Count: %v", err) } - sess.Select("dataset.*").Join("INNER", "repository", "repository.id = dataset.repo_id").Where(cond).OrderBy(opts.SearchOrderBy.String()) + sess.Select(selectColumnsSql).Join("INNER", "repository", "repository.id = dataset.repo_id"). + Join("INNER", "attachment", "attachment.dataset_id=dataset.id"). + Where(cond).OrderBy(opts.SearchOrderBy.String()) + if opts.PageSize > 0 { sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) } @@ -231,13 +260,23 @@ func getDatasetAttachments(e Engine, typeCloudBrain int, isSigned bool, user *Us sort.Sort(sortedRels) // Select attachments - err = e. - Asc("dataset_id"). - In("dataset_id", sortedRels.ID). - And("type = ?", typeCloudBrain). - Find(&attachments, Attachment{}) - if err != nil { - return err + if typeCloudBrain == -1 { + err = e. + Asc("dataset_id"). + In("dataset_id", sortedRels.ID). + Find(&attachments, Attachment{}) + if err != nil { + return err + } + } else { + err = e. + Asc("dataset_id"). + In("dataset_id", sortedRels.ID). + And("type = ?", typeCloudBrain). 
+ Find(&attachments, Attachment{}) + if err != nil { + return err + } } // merge join @@ -301,9 +340,6 @@ func GetDatasetByID(id int64) (*Dataset, error) { } func GetDatasetByRepo(repo *Repository) (*Dataset, error) { - if err := CreateDefaultDatasetToRepo(repo); err != nil { - return nil, err - } dataset := &Dataset{RepoID: repo.ID} has, err := x.Get(dataset) if err != nil { @@ -316,6 +352,12 @@ func GetDatasetByRepo(repo *Repository) (*Dataset, error) { } } +func GetDatasetStarByUser(user *User) ([]*DatasetStar, error) { + datasetStars := make([]*DatasetStar, 0) + err := x.Cols("id", "uid", "dataset_id", "created_unix").Where("uid=?", user.ID).Find(&datasetStars) + return datasetStars, err +} + func DeleteDataset(datasetID int64, uid int64) error { var err error sess := x.NewSession() diff --git a/models/dataset_star.go b/models/dataset_star.go new file mode 100644 index 000000000..4b22c2855 --- /dev/null +++ b/models/dataset_star.go @@ -0,0 +1,70 @@ +package models + +import "code.gitea.io/gitea/modules/timeutil" + +type DatasetStar struct { + ID int64 `xorm:"pk autoincr"` + UID int64 `xorm:"UNIQUE(s)"` + DatasetID int64 `xorm:"UNIQUE(s)"` + CreatedUnix timeutil.TimeStamp `xorm:"created"` +} + +// StarDataset stars or unstars the dataset for the user. 
+func StarDataset(userID, datasetID int64, star bool) error { + sess := x.NewSession() + defer sess.Close() + + if err := sess.Begin(); err != nil { + return err + } + + if star { + if isDatasetStaring(sess, userID, datasetID) { + return nil + } + + if _, err := sess.Insert(&DatasetStar{UID: userID, DatasetID: datasetID}); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `dataset` SET num_stars = num_stars + 1 WHERE id = ?", datasetID); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `user` SET num_dataset_stars = num_dataset_stars + 1 WHERE id = ?", userID); err != nil { + return err + } + } else { + if !isDatasetStaring(sess, userID, datasetID) { + return nil + } + + if _, err := sess.Delete(&DatasetStar{0, userID, datasetID, 0}); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `dataset` SET num_stars = num_stars - 1 WHERE id = ?", datasetID); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `user` SET num_dataset_stars = num_dataset_stars - 1 WHERE id = ?", userID); err != nil { + return err + } + } + + return sess.Commit() +} + +func IsDatasetStaringByRepoId(userID, repoID int64) bool { + dataset, _ := GetDatasetByRepo(&Repository{ID: repoID}) + if dataset == nil { + return false + } + return isDatasetStaring(x, userID, dataset.ID) +} + +func IsDatasetStaring(userID, datasetID int64) bool { + return isDatasetStaring(x, userID, datasetID) + +} + +func isDatasetStaring(e Engine, userID, datasetID int64) bool { + has, _ := e.Get(&DatasetStar{0, userID, datasetID, 0}) + return has +} diff --git a/models/models.go b/models/models.go index 0f4679b4f..36527f78d 100755 --- a/models/models.go +++ b/models/models.go @@ -129,6 +129,7 @@ func init() { new(LanguageStat), new(EmailHash), new(Dataset), + new(DatasetStar), new(Cloudbrain), new(FileChunk), new(BlockChain), diff --git a/models/repo.go b/models/repo.go index 2d1fdacfb..42e350fbe 100755 --- a/models/repo.go +++ b/models/repo.go @@ -1280,10 +1280,6 @@ func 
CreateRepository(ctx DBContext, doer, u *User, repo *Repository, opts ...Cr return fmt.Errorf("copyDefaultWebhooksToRepo: %v", err) } - if err = CreateDefaultDatasetToRepo(repo); err != nil { - return fmt.Errorf("models.CreateDefaultDatasetToRepo: %v", err) - } - return nil } @@ -1601,6 +1597,34 @@ func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err e if err != nil { return err } + //If repo has become private, we need set dataset and dataset_file to private + _, err = e.Where("repo_id = ? and status <> 2", repo.ID).Cols("status").Update(&Dataset{ + Status: 0, + }) + if err != nil { + return err + } + + dataset, err := GetDatasetByRepo(repo) + if err != nil { + return err + } + _, err = e.Where("dataset_id = ?", dataset.ID).Cols("is_private").Update(&Attachment{ + IsPrivate: true, + }) + if err != nil { + return err + } + + } else { + //If repo has become public, we need set dataset to public + _, err = e.Where("repo_id = ? and status <> 2", repo.ID).Cols("status").Update(&Dataset{ + Status: 1, + }) + if err != nil { + return err + } + } // Create/Remove git-daemon-export-ok for git-daemon... 
@@ -2691,7 +2715,7 @@ func ReadLatestFileInRepo(userName, repoName, refName, treePath string) (*RepoFi log.Error("ReadLatestFileInRepo error when OpenRepository,error=%v", err) return nil, err } - commitID, err := gitRepo.GetBranchCommitID(refName) + _, err = gitRepo.GetBranchCommitID(refName) if err != nil { log.Error("ReadLatestFileInRepo error when GetBranchCommitID,error=%v", err) return nil, err @@ -2723,5 +2747,9 @@ func ReadLatestFileInRepo(userName, repoName, refName, treePath string) (*RepoFi if n >= 0 { buf = buf[:n] } - return &RepoFile{CommitId: commitID, Content: buf}, nil + commitId := "" + if blob != nil { + commitId = fmt.Sprint(blob.ID) + } + return &RepoFile{CommitId: commitId, Content: buf}, nil } diff --git a/models/user.go b/models/user.go index f7857248b..f72462051 100755 --- a/models/user.go +++ b/models/user.go @@ -153,10 +153,11 @@ type User struct { UseCustomAvatar bool // Counters - NumFollowers int - NumFollowing int `xorm:"NOT NULL DEFAULT 0"` - NumStars int - NumRepos int + NumFollowers int + NumFollowing int `xorm:"NOT NULL DEFAULT 0"` + NumStars int + NumDatasetStars int `xorm:"NOT NULL DEFAULT 0"` + NumRepos int // For organization NumTeams int diff --git a/modules/auth/dataset.go b/modules/auth/dataset.go index 577637273..71b5ac938 100755 --- a/modules/auth/dataset.go +++ b/modules/auth/dataset.go @@ -9,11 +9,10 @@ import ( type CreateDatasetForm struct { Title string `binding:"Required"` Category string `binding:"Required"` - Description string `binding:"Required;MaxSize(254)"` + Description string `binding:"Required"` License string `binding:"Required;MaxSize(64)"` Task string `binding:"Required;MaxSize(64)"` ReleaseID int64 `xorm:"INDEX"` - Private bool Files []string } @@ -25,11 +24,23 @@ type EditDatasetForm struct { ID int64 `binding:"Required"` Title string `binding:"Required"` Category string `binding:"Required"` - Description string `binding:"Required;MaxSize(254)"` + Description string `binding:"Required"` License string 
`binding:"Required;MaxSize(64)"` Task string `binding:"Required;MaxSize(64)"` - Private bool - ReleaseID int64 `xorm:"INDEX"` + ReleaseID int64 `xorm:"INDEX"` Files []string - Type string `binding:"Required"` + Type string `binding:"Required"` +} + +func (f *EditDatasetForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { + return validate(errs, ctx.Data, f, ctx.Locale) +} + +type EditAttachmentForm struct { + ID int64 `binding:"Required"` + Description string +} + +func (f *EditAttachmentForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { + return validate(errs, ctx.Data, f, ctx.Locale) } diff --git a/modules/context/repo.go b/modules/context/repo.go index 64f02c921..7c425c8c0 100755 --- a/modules/context/repo.go +++ b/modules/context/repo.go @@ -475,6 +475,8 @@ func RepoAssignment() macaron.Handler { if ctx.IsSigned { ctx.Data["IsWatchingRepo"] = models.IsWatching(ctx.User.ID, repo.ID) ctx.Data["IsStaringRepo"] = models.IsStaring(ctx.User.ID, repo.ID) + + ctx.Data["IsStaringDataset"] = models.IsDatasetStaringByRepoId(ctx.User.ID, repo.ID) } if repo.IsFork { diff --git a/modules/dataset/dataset.go b/modules/dataset/dataset.go new file mode 100644 index 000000000..a180af184 --- /dev/null +++ b/modules/dataset/dataset.go @@ -0,0 +1,17 @@ +package dataset + +func GetResourceType(cloudbrainType int) string { + if cloudbrainType == 0 { + return "CPU/GPU" + } else { + return "NPU" + } +} + +func GetStatusText(isPrivate bool) string { + if isPrivate { + return "dataset.private" + } else { + return "dataset.public" + } +} diff --git a/modules/setting/setting.go b/modules/setting/setting.go index 7ae2263f7..3fc66c426 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -165,6 +165,7 @@ var ( ExplorePagingNum int ContributorPagingNum int IssuePagingNum int + DatasetPagingNum int RepoSearchPagingNum int MembersPagingNum int FeedMaxCommitNum int @@ -207,6 +208,7 @@ var ( ExplorePagingNum: 20, 
ContributorPagingNum: 50, IssuePagingNum: 10, + DatasetPagingNum: 5, RepoSearchPagingNum: 10, MembersPagingNum: 20, FeedMaxCommitNum: 5, diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 3d31b611c..77c6fca8d 100755 --- a/modules/templates/helper.go +++ b/modules/templates/helper.go @@ -23,6 +23,8 @@ import ( "time" "unicode" + "code.gitea.io/gitea/modules/dataset" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/emoji" @@ -86,20 +88,22 @@ func NewFuncMap() []template.FuncMap { "AllowedReactions": func() []string { return setting.UI.Reactions }, - "AvatarLink": models.AvatarLink, - "Safe": Safe, - "SafeJS": SafeJS, - "Str2html": Str2html, - "TimeSince": timeutil.TimeSince, - "TimeSinceUnix": timeutil.TimeSinceUnix, - "TimeSinceUnix1": timeutil.TimeSinceUnix1, - "TimeSinceUnixShort": timeutil.TimeSinceUnixShort, - "RawTimeSince": timeutil.RawTimeSince, - "FileSize": base.FileSize, - "PrettyNumber": base.PrettyNumber, - "Subtract": base.Subtract, - "EntryIcon": base.EntryIcon, - "MigrationIcon": MigrationIcon, + "AvatarLink": models.AvatarLink, + "Safe": Safe, + "SafeJS": SafeJS, + "Str2html": Str2html, + "TimeSince": timeutil.TimeSince, + "TimeSinceUnix": timeutil.TimeSinceUnix, + "TimeSinceUnix1": timeutil.TimeSinceUnix1, + "AttachmentResourceType": dataset.GetResourceType, + "AttachmentStatus": dataset.GetStatusText, + "TimeSinceUnixShort": timeutil.TimeSinceUnixShort, + "RawTimeSince": timeutil.RawTimeSince, + "FileSize": base.FileSize, + "PrettyNumber": base.PrettyNumber, + "Subtract": base.Subtract, + "EntryIcon": base.EntryIcon, + "MigrationIcon": MigrationIcon, "Add": func(a, b int) int { return a + b }, @@ -340,11 +344,13 @@ func NewTextFuncMap() []texttmpl.FuncMap { "AppDomain": func() string { return setting.Domain }, - "TimeSince": timeutil.TimeSince, - "TimeSinceUnix": timeutil.TimeSinceUnix, - "TimeSinceUnix1": timeutil.TimeSinceUnix1, - "TimeSinceUnixShort": 
timeutil.TimeSinceUnixShort, - "RawTimeSince": timeutil.RawTimeSince, + "TimeSince": timeutil.TimeSince, + "TimeSinceUnix": timeutil.TimeSinceUnix, + "TimeSinceUnix1": timeutil.TimeSinceUnix1, + "TimeSinceUnixShort": timeutil.TimeSinceUnixShort, + "RawTimeSince": timeutil.RawTimeSince, + "AttachmentResourceType": dataset.GetResourceType, + "AttachmentStatus": dataset.GetStatusText, "DateFmtLong": func(t time.Time) string { return t.Format(time.RFC1123Z) }, @@ -746,5 +752,5 @@ func licenses() []string { // Dataset tasks func tasks() []string { - return []string{"machine_translation", "question_answering_system", "information_retrieval", "knowledge_graph", "text_annotation", "text_categorization", "emotion_analysis", "language_modeling", "speech_recognition", "automatic_digest", "information_extraction", "description_generation", "image_classification", "face_recognition", "image_search", "target_detection", "image_description_generation", "vehicle_license_plate_recognition", "medical_image_analysis", "unmanned", "unmanned_security", "drone", "vr_ar", "2_d_vision", "2.5_d_vision", "3_d_reconstruction", "image_processing", "video_processing", "visual_input_system", "speech_coding", "speech_enhancement", "speech_recognition", "speech_synthesis"} + return []string{"machine_translation", "question_answering_system", "information_retrieval", "knowledge_graph", "text_annotation", "text_categorization", "emotion_analysis", "language_modeling", "speech_recognition", "automatic_digest", "information_extraction", "description_generation", "image_classification", "face_recognition", "image_search", "target_detection", "image_description_generation", "vehicle_license_plate_recognition", "medical_image_analysis", "unmanned", "unmanned_security", "drone", "vr_ar", "2_d_vision", "2.5_d_vision", "3_d_reconstruction", "image_processing", "video_processing", "visual_input_system", "speech_coding", "speech_enhancement", "speech_synthesis"} } diff --git a/options/locale/locale_en-US.ini 
b/options/locale/locale_en-US.ini index 34f8ec3c7..f36cac3c4 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -723,8 +723,13 @@ alert = To initiate a cloud brain task, please upload the dataset in zip format. dataset = Dataset dataset_setting= Dataset Setting title = Name +title_format_err=Name can only contain number,letter,'-','_' or '.', and can be up to 100 characters long. description = Description +description_format_err=Description's length can be up to 1024 characters long. create_dataset = Create Dataset +create_dataset_fail=Failed to create dataset. +query_dataset_fail=Failed to query dataset. +edit_attachment_fail=Failed to update description. show_dataset= Dataset edit_dataset= Edit Dataset update_dataset= Update Dataset @@ -743,7 +748,8 @@ private = private public = public dir = directory back = back -copy_url=copy download url +copy_url=Copy Download Url +copy_md5 = Copy MD5 directory=preview of the datasets create_label_task=create label task visibility = visibility @@ -794,12 +800,49 @@ category.computer_vision= computer vision category.natural_language_processing= natural language processing category.speech_processing= speech processing category.computer_vision_natural_language_processing= computer vision and natural language processing -attachment.delete= delete this version of dataset +attachment.delete= Delete this version of dataset attachment.delete_desc= Are you sure you will delete this version of dataset, once deleted can not be recovery public= public private= private -delete= delete - +delete= Delete +select_dataset=Select Dataset +current_project=Current Project +owner_dataset=Owner Dataset +public_dataset=Public Dataset +I_liked = I Liked +use = Use +create_new_dataset = Create New Dataset +dataset_name = Dataset Name +dataset_description = Dataset Description +select_category = Select Category +select_task = Select Research Direction/Application Area +dataset_name_tooltips = Please enter letters, 
numbers, _ and - up to 100 characters. +dataset_no_create = No dataset has been created yet +dataset_explain = Dataset: CloudBrain I provides CPU/GPU resources, Cloudbrain II provides Ascend NPU resources, and the data set used for debugging also needs to be uploaded to the corresponding environment; +dataset_instructions_for_use = Instructions for use: You can refer to Qizhi AI Collaboration Platform +dataset_camp_course = Newcomer Training Camp Course; +dataset_upload = Upload +dataset_file_name = File Name +dataset_available_clusters = Available Clusters +dataset_upload_time = Upload Time +download = Download +modify_description = Modify Description +set_public = Set Public +set_private = Set Private +annotation = Annotation +upload_dataset_file = Upload Dataset File +file_description = File Description +data_upload = Dataset Upload +illustrate = Illustrate +illustrate.only = Only Datasets In +illustrate.zip = zip/tar.gz Format +illustrate.fisrt_end = Can Initiate Cloudbrain Tasks +modify_dataset = Modify Dataset +modify_dataset_description = Modify Dataset Description +search_dataset = Search Dataset Files +unzip_tooltips = If it has not been decompressed for a long time, please check whether the compressed package has encrypted files or file errors +zip_failed = Decompression failed, please check whether the compressed package is encrypted or contact technical support +dataset_desc = The description should not exceed 1024 characters [repo] owner = Owner repo_name = Repository Name @@ -829,7 +872,7 @@ repo_label_helpe = Press Enter to complete issue_labels = Issue Labels issue_labels_helper = Select an issue label set. license = License -license_helper = Select a license file. +license_helper = Select a license file readme = README readme_helper = Select a README file template. 
auto_init = Initialize Repository (Adds .gitignore, License and README) @@ -885,7 +928,7 @@ cloudbrain1 = cloudbrain1 cloudbrain2 = cloudbrain2 cloudbrain_selection = select cloudbrain cloudbrain_platform_selection = Select the cloudbrain platform you want to use: -confirm_choice = confirm +confirm_choice = Confirm cloudbran1_tips = Only data in zip format can create cloudbrain tasks cloudbrain_creator=Creator cloudbrain_task = Task Name @@ -1096,6 +1139,7 @@ unstar = Unstar star = Star fork = Fork download_archive = Download Repository +star_fail=Failed to %s the dataset. no_desc = No Description no_label = No labels diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index d0f246b4a..b47a6bafa 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -726,8 +726,14 @@ alert=如果要发起云脑任务,请上传zip格式的数据集 dataset=数据集 dataset_setting=数据集设置 title=名称 +title_format_err=名称最多允许输入100个字符,只允许字母,数字,中划线 (‘-’),下划线 (‘_’) 和点 (‘.’) 。 description=描述 +description_format_err=描述最多允许输入1024个字符。 create_dataset=创建数据集 +create_dataset_fail=创建数据集失败。 +query_dataset_fail=查询数据集失败。 +edit_attachment_fail=修改描述失败。 + show_dataset=数据集 edit_dataset=编辑数据集 update_dataset=更新数据集 @@ -803,6 +809,44 @@ attachment.delete_desc= 你确定要删除该版本的数据集么?一旦删 public=公有 private=私有 delete=删除 +select_dataset=选择数据集 +current_project=当前项目 +owner_dataset=我的数据集 +public_dataset=公开数据集 +I_liked=我收藏的 +use=使用 +create_new_dataset = 新建数据集 +dataset_name=数据集名称 +dataset_description = 数据集描述 +select_category = 选择分类 +select_task = 选择研究方向/应用领域 +dataset_name_tooltips = 请输入字母、数字、_和-,最长100个字符。 +dataset_no_create = 还未创建过数据集 +dataset_explain = 数据集:云脑1提供 CPU / GPU 资源,云脑2提供 Ascend NPU 资源,调试使用的数据集也需要上传到对应的环境; +dataset_instructions_for_use = 使用说明:可以参考启智AI协作平台 +dataset_camp_course = 小白训练营课程 +dataset_upload = 上传 +dataset_file_name = 文件名称 +dataset_available_clusters = 可用集群 +dataset_upload_time = 上传时间 +download = 下载 +modify_description = 修改描述 +set_public = 设为公开 +set_private = 设为私有 +annotation = 标注 
+upload_dataset_file = 上传数据集文件 +file_description = 文件描述 +data_upload = 数据上传 +illustrate = 说明 +illustrate.only = 只有 +illustrate.zip = zip/tar.gz格式 +illustrate.fisrt_end = 的数据集才能发起云脑任务 +modify_dataset = 修改数据集 +modify_dataset_description = 修改数据集文件描述 +search_dataset = 搜索数据集文件 +unzip_tooltips = 如果长时间未解压,请检查压缩包是否有加密文件或者文件错误 +zip_failed = 解压失败,请检查压缩包是否有加密或者联系技术支持人员。 +dataset_desc = 描述字数不超过1024个字符 [repo] owner=拥有者 @@ -833,7 +877,7 @@ repo_label_helpe=输入完成后回车键完成标签确定。 issue_labels=任务标签 issue_labels_helper=选择一个任务标签集 license=授权许可 -license_helper=选择授权许可文件。 +license_helper=选择授权许可文件 readme=自述 readme_helper=选择自述文件模板。 auto_init=初始化存储库 (添加. gitignore、许可证和自述文件) @@ -1101,6 +1145,8 @@ unstar=取消点赞 star=点赞 fork=派生 download_archive=下载此项目 +star_fail=%s失败。 + no_desc=暂无描述 no_label = 暂无标签 diff --git a/routers/home.go b/routers/home.go index 2db8d2112..c33d7a049 100755 --- a/routers/home.go +++ b/routers/home.go @@ -274,10 +274,11 @@ func ExploreDatasets(ctx *context.Context) { // ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled var ( - datasets []*models.Dataset - count int64 - err error - orderBy models.SearchOrderBy + datasets []*models.Dataset + datasetsWithStar []*models.DatasetWithStar + count int64 + err error + orderBy models.SearchOrderBy ) page := ctx.QueryInt("page") if page <= 0 { @@ -301,6 +302,10 @@ func ExploreDatasets(ctx *context.Context) { orderBy = models.SearchOrderBySizeReverse case "downloadtimes": orderBy = models.SearchOrderByDownloadTimes + case "moststars": + orderBy = models.SearchOrderByStarsReverse + case "feweststars": + orderBy = models.SearchOrderByStars default: ctx.Data["SortType"] = "recentupdate" orderBy = models.SearchOrderByRecentUpdated @@ -308,6 +313,9 @@ func ExploreDatasets(ctx *context.Context) { keyword := strings.Trim(ctx.Query("q"), " ") + category := ctx.Query("category") + task := ctx.Query("task") + license := ctx.Query("license") var ownerID int64 if ctx.User != nil && !ctx.User.IsAdmin { ownerID = ctx.User.ID @@ -316,25 
+324,40 @@ func ExploreDatasets(ctx *context.Context) { Keyword: keyword, IncludePublic: true, SearchOrderBy: orderBy, + Category: category, + Task: task, + License: license, OwnerID: ownerID, ListOptions: models.ListOptions{ Page: page, - PageSize: setting.UI.ExplorePagingNum, + PageSize: 30, }, } datasets, count, err = models.SearchDataset(opts) + if err != nil { ctx.ServerError("SearchDatasets", err) return } + for _, dataset := range datasets { + if !ctx.IsSigned { + datasetsWithStar = append(datasetsWithStar, &models.DatasetWithStar{Dataset: *dataset, IsStaring: false}) + } else { + datasetsWithStar = append(datasetsWithStar, &models.DatasetWithStar{Dataset: *dataset, IsStaring: models.IsDatasetStaring(ctx.User.ID, dataset.ID)}) + } + + } pager := context.NewPagination(int(count), opts.PageSize, page, 5) ctx.Data["Keyword"] = opts.Keyword + ctx.Data["Category"] = category + ctx.Data["Task"] = task + ctx.Data["License"] = license pager.SetDefaultParams(ctx) ctx.Data["Page"] = pager - ctx.Data["Datasets"] = datasets + ctx.Data["Datasets"] = datasetsWithStar ctx.Data["Total"] = count ctx.Data["PageIsDatasets"] = true ctx.HTML(200, tplExploreDataset) diff --git a/routers/repo/attachment.go b/routers/repo/attachment.go index a60bed2a6..96f17b74b 100755 --- a/routers/repo/attachment.go +++ b/routers/repo/attachment.go @@ -15,6 +15,10 @@ import ( "strconv" "strings" + "code.gitea.io/gitea/modules/auth" + + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/labelmsg" @@ -30,8 +34,10 @@ import ( const ( //result of decompress - DecompressSuccess = "0" - DecompressFailed = "1" + DecompressSuccess = "0" + DecompressFailed = "1" + tplAttachmentUpload base.TplName = "repo/attachment/upload" + tplAttachmentEdit base.TplName = "repo/attachment/edit" ) type CloudBrainDataset struct { @@ -63,6 +69,40 @@ func renderAttachmentSettings(ctx *context.Context) { ctx.Data["AttachmentMaxFiles"] = 
setting.Attachment.MaxFiles } +func UploadAttachmentUI(ctx *context.Context) { + ctx.Data["datasetId"] = ctx.Query("datasetId") + ctx.Data["PageIsDataset"] = true + + ctx.HTML(200, tplAttachmentUpload) + +} + +func EditAttachmentUI(ctx *context.Context) { + + id, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64) + ctx.Data["PageIsDataset"] = true + attachment, _ := models.GetAttachmentByID(id) + if attachment == nil { + ctx.Error(404, "The attachment does not exits.") + } + ctx.Data["Attachment"] = attachment + ctx.HTML(200, tplAttachmentEdit) + +} + +func EditAttachment(ctx *context.Context, form auth.EditAttachmentForm) { + + err := models.UpdateAttachmentDescription(&models.Attachment{ + ID: form.ID, + Description: form.Description, + }) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.edit_attachment_fail"))) + } + ctx.JSON(http.StatusOK, models.BaseOKMessage) + +} + // UploadAttachment response for uploading issue's attachment func UploadAttachment(ctx *context.Context) { if !setting.Attachment.Enabled { @@ -393,11 +433,17 @@ func AddAttachment(ctx *context.Context) { ctx.Error(404, "attachment has not been uploaded") return } + datasetId := ctx.QueryInt64("dataset_id") + dataset, err := models.GetDatasetByID(datasetId) + if err != nil { + ctx.Error(404, "dataset does not exist.") + return + } attachment, err := models.InsertAttachment(&models.Attachment{ UUID: uuid, UploaderID: ctx.User.ID, - IsPrivate: true, + IsPrivate: dataset.IsPrivate(), Name: fileName, Size: ctx.QueryInt64("size"), DatasetID: ctx.QueryInt64("dataset_id"), @@ -804,6 +850,9 @@ func CompleteMultipart(ctx *context.Context) { typeCloudBrain := ctx.QueryInt("type") fileName := ctx.Query("file_name") + log.Warn("uuid:" + uuid) + log.Warn("typeCloudBrain:" + strconv.Itoa(typeCloudBrain)) + err := checkTypeCloudBrain(typeCloudBrain) if err != nil { ctx.ServerError("checkTypeCloudBrain failed", err) @@ -841,22 +890,24 @@ func CompleteMultipart(ctx 
*context.Context) { ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err)) return } - + dataset, _ := models.GetDatasetByID(ctx.QueryInt64("dataset_id")) + log.Warn("insert attachment to datasetId:" + strconv.FormatInt(dataset.ID, 10)) attachment, err := models.InsertAttachment(&models.Attachment{ - UUID: uuid, - UploaderID: ctx.User.ID, - IsPrivate: true, - Name: fileName, - Size: ctx.QueryInt64("size"), - DatasetID: ctx.QueryInt64("dataset_id"), - Type: typeCloudBrain, + UUID: uuid, + UploaderID: ctx.User.ID, + IsPrivate: dataset.IsPrivate(), + Name: fileName, + Size: ctx.QueryInt64("size"), + DatasetID: ctx.QueryInt64("dataset_id"), + Description: ctx.Query("description"), + Type: typeCloudBrain, }) if err != nil { ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err)) return } - dataset, _ := models.GetDatasetByID(attachment.DatasetID) + repository, _ := models.GetRepositoryByID(dataset.RepoID) notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(attachment.Type), attachment.Name, models.ActionUploadAttachment) diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 5e5a403ea..d4c4fdda0 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -180,6 +180,8 @@ func cloudBrainNewDataPrepare(ctx *context.Context) error { ctx.Data["brainscore_path"] = cloudbrain.BrainScoreMountPath ctx.Data["is_brainscore_enabled"] = setting.IsBrainScoreEnabled + ctx.Data["cloudbraintype"] = models.TypeCloudBrainOne + return nil } diff --git a/routers/repo/dataset.go b/routers/repo/dataset.go index 7d59ab486..d23722372 100755 --- a/routers/repo/dataset.go +++ b/routers/repo/dataset.go @@ -1,7 +1,14 @@ package repo import ( + "encoding/json" + "fmt" + "net/http" + "regexp" "sort" + "strconv" + "strings" + "unicode/utf8" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/auth" @@ -12,9 +19,14 @@ import ( ) const ( - tplIndex base.TplName = "repo/datasets/index" + tplIndex base.TplName = "repo/datasets/index" + tplDatasetCreate 
base.TplName = "repo/datasets/create" + tplDatasetEdit base.TplName = "repo/datasets/edit" + taskstplIndex base.TplName = "repo/datasets/tasks/index" ) +var titlePattern = regexp.MustCompile(`^[A-Za-z0-9-_\\.]{1,100}$`) + // MustEnableDataset check if repository enable internal dataset func MustEnableDataset(ctx *context.Context) { if !ctx.Repo.CanRead(models.UnitTypeDatasets) { @@ -84,43 +96,34 @@ func QueryDataSet(ctx *context.Context) []*models.Attachment { attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo) ctx.Data["SortType"] = ctx.Query("sort") - switch ctx.Query("sort") { - case "newest": - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix > attachments[j].CreatedUnix - }) - case "oldest": - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix < attachments[j].CreatedUnix - }) - default: - ctx.Data["SortType"] = "newest" - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix > attachments[j].CreatedUnix - }) - } + + sort.Slice(attachments, func(i, j int) bool { + return attachments[i].CreatedUnix > attachments[j].CreatedUnix + }) + return attachments } func DatasetIndex(ctx *context.Context) { log.Info("dataset index 1") MustEnableDataset(ctx) + ctx.Data["PageIsDataset"] = true repo := ctx.Repo.Repository dataset, err := models.GetDatasetByRepo(repo) + ctx.Data["CanWrite"] = ctx.Repo.CanWrite(models.UnitTypeDatasets) if err != nil { - log.Error("query dataset, not found repo.") - ctx.NotFound("GetDatasetByRepo", err) + log.Warn("query dataset, not found.") + ctx.HTML(200, tplIndex) return } + cloudbrainType := -1 + if ctx.Query("type") != "" { - if ctx.Query("type") == "" { - log.Error("query dataset, not found param type") - ctx.NotFound("type error", nil) - return + cloudbrainType = ctx.QueryInt("type") } - err = models.GetDatasetAttachments(ctx.QueryInt("type"), ctx.IsSigned, ctx.User, dataset) + err = 
models.GetDatasetAttachments(cloudbrainType, ctx.IsSigned, ctx.User, dataset) if err != nil { ctx.ServerError("GetDatasetAttachments", err) return @@ -128,53 +131,138 @@ func DatasetIndex(ctx *context.Context) { attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo) - ctx.Data["SortType"] = ctx.Query("sort") - switch ctx.Query("sort") { - case "newest": - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix > attachments[j].CreatedUnix - }) - case "oldest": - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix < attachments[j].CreatedUnix - }) - default: - ctx.Data["SortType"] = "newest" - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix > attachments[j].CreatedUnix - }) + sort.Slice(attachments, func(i, j int) bool { + return attachments[i].CreatedUnix > attachments[j].CreatedUnix + }) + + page := ctx.QueryInt("page") + if page <= 0 { + page = 1 } + pagesize := ctx.QueryInt("pagesize") + if pagesize <= 0 { + pagesize = 10 + } + pager := context.NewPagination(len(attachments), pagesize, page, 5) + + pageAttachments := getPageAttachments(attachments, page, pagesize) + + //load attachment creator + for _, attachment := range pageAttachments { + uploader, _ := models.GetUserByID(attachment.UploaderID) + attachment.Uploader = uploader + } + + ctx.Data["Page"] = pager - ctx.Data["PageIsDataset"] = true ctx.Data["Title"] = ctx.Tr("dataset.show_dataset") ctx.Data["Link"] = ctx.Repo.RepoLink + "/datasets" ctx.Data["dataset"] = dataset - ctx.Data["Attachments"] = attachments + ctx.Data["Attachments"] = pageAttachments ctx.Data["IsOwner"] = true ctx.Data["StoreType"] = setting.Attachment.StoreType - ctx.Data["Type"] = ctx.QueryInt("type") + ctx.Data["Type"] = cloudbrainType renderAttachmentSettings(ctx) ctx.HTML(200, tplIndex) } +func getPageAttachments(attachments []*models.Attachment, page int, pagesize int) []*models.Attachment { + begin := (page - 1) * pagesize 
+ end := (page) * pagesize + + if begin > len(attachments)-1 { + return nil + } + if end > len(attachments)-1 { + return attachments[begin:] + } else { + return attachments[begin:end] + } + +} + +func CreateDataset(ctx *context.Context) { + + MustEnableDataset(ctx) + ctx.Data["PageIsDataset"] = true + + ctx.HTML(200, tplDatasetCreate) +} + +func EditDataset(ctx *context.Context) { + + MustEnableDataset(ctx) + ctx.Data["PageIsDataset"] = true + datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64) + + dataset, _ := models.GetDatasetByID(datasetId) + if dataset == nil { + ctx.Error(http.StatusNotFound, "") + return + } + ctx.Data["Dataset"] = dataset + + ctx.HTML(200, tplDatasetEdit) +} + +func CreateDatasetPost(ctx *context.Context, form auth.CreateDatasetForm) { + + dataset := &models.Dataset{} + + if !titlePattern.MatchString(form.Title) { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err"))) + return + } + if utf8.RuneCountInString(form.Description) > 1024 { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err"))) + return + } + + dataset.RepoID = ctx.Repo.Repository.ID + dataset.UserID = ctx.User.ID + dataset.Category = form.Category + dataset.Task = form.Task + dataset.Title = form.Title + dataset.License = form.License + dataset.Description = form.Description + dataset.DownloadTimes = 0 + if ctx.Repo.Repository.IsPrivate { + dataset.Status = 0 + } else { + dataset.Status = 1 + } + err := models.CreateDataset(dataset) + if err != nil { + log.Error("fail to create dataset", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.create_dataset_fail"))) + } else { + ctx.JSON(http.StatusOK, models.BaseOKMessage) + } + +} + func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) { ctx.Data["PageIsDataset"] = true ctx.Data["Title"] = ctx.Tr("dataset.edit_dataset") + if !titlePattern.MatchString(form.Title) { + ctx.JSON(http.StatusOK, 
models.BaseErrorMessage(ctx.Tr("dataset.title_format_err"))) + return + } + if utf8.RuneCountInString(form.Description) > 1024 { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err"))) + return + } + rel, err := models.GetDatasetByID(form.ID) ctx.Data["dataset"] = rel if err != nil { - ctx.ServerError("GetDataset", err) - return - } - - if ctx.HasError() { - ctx.Data["Error"] = true - ctx.HTML(200, tplIndex) + log.Error("failed to query dataset", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail"))) return } @@ -184,9 +272,236 @@ func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) { rel.Task = form.Task rel.License = form.License if err = models.UpdateDataset(models.DefaultDBContext(), rel); err != nil { - ctx.Data["Error"] = true - ctx.HTML(200, tplIndex) - log.Error("%v", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail"))) } - ctx.Redirect(ctx.Repo.RepoLink + "/datasets?type=" + form.Type) + ctx.JSON(http.StatusOK, models.BaseOKMessage) +} + +func DatasetAction(ctx *context.Context) { + var err error + datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64) + switch ctx.Params(":action") { + case "star": + err = models.StarDataset(ctx.User.ID, datasetId, true) + case "unstar": + err = models.StarDataset(ctx.User.ID, datasetId, false) + + } + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action")))) + } else { + ctx.JSON(http.StatusOK, models.BaseOKMessage) + } + +} + +func CurrentRepoDataset(ctx *context.Context) { + page := ctx.QueryInt("page") + cloudbrainType := ctx.QueryInt("type") + keyword := strings.Trim(ctx.Query("q"), " ") + + repo := ctx.Repo.Repository + var datasetIDs []int64 + dataset, err := models.GetDatasetByRepo(repo) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err))) + return + } + datasetIDs 
= append(datasetIDs, dataset.ID) + datasets, count, err := models.Attachments(&models.AttachmentsOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: setting.UI.DatasetPagingNum, + }, + Keyword: keyword, + NeedDatasetIDs: true, + DatasetIDs: datasetIDs, + Type: cloudbrainType, + NeedIsPrivate: false, + JustNeedZipFile: true, + NeedRepoInfo: true, + }) + if err != nil { + ctx.ServerError("datasets", err) + return + } + + data, err := json.Marshal(datasets) + if err != nil { + log.Error("json.Marshal failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + ctx.JSON(200, map[string]string{ + "result_code": "0", + "data": string(data), + "count": strconv.FormatInt(count, 10), + }) +} + +func MyDatasets(ctx *context.Context) { + page := ctx.QueryInt("page") + cloudbrainType := ctx.QueryInt("type") + keyword := strings.Trim(ctx.Query("q"), " ") + + uploaderID := ctx.User.ID + datasets, count, err := models.Attachments(&models.AttachmentsOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: setting.UI.DatasetPagingNum, + }, + Keyword: keyword, + NeedDatasetIDs: false, + UploaderID: uploaderID, + Type: cloudbrainType, + NeedIsPrivate: false, + JustNeedZipFile: true, + NeedRepoInfo: true, + }) + if err != nil { + ctx.ServerError("datasets", err) + return + } + + data, err := json.Marshal(datasets) + if err != nil { + log.Error("json.Marshal failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + ctx.JSON(200, map[string]string{ + "result_code": "0", + "data": string(data), + "count": strconv.FormatInt(count, 10), + }) +} + +func PublicDataset(ctx *context.Context) { + page := ctx.QueryInt("page") + cloudbrainType := ctx.QueryInt("type") + keyword := strings.Trim(ctx.Query("q"), " ") + + datasets, count, err := models.Attachments(&models.AttachmentsOptions{ + ListOptions: 
models.ListOptions{ + Page: page, + PageSize: setting.UI.DatasetPagingNum, + }, + Keyword: keyword, + NeedDatasetIDs: false, + NeedIsPrivate: true, + IsPrivate: false, + Type: cloudbrainType, + JustNeedZipFile: true, + NeedRepoInfo: true, + }) + if err != nil { + ctx.ServerError("datasets", err) + return + } + + data, err := json.Marshal(datasets) + if err != nil { + log.Error("json.Marshal failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + ctx.JSON(200, map[string]string{ + "result_code": "0", + "data": string(data), + "count": strconv.FormatInt(count, 10), + }) +} + +func MyFavoriteDataset(ctx *context.Context) { + page := ctx.QueryInt("page") + cloudbrainType := ctx.QueryInt("type") + keyword := strings.Trim(ctx.Query("q"), " ") + var datasetIDs []int64 + datasetStars, err := models.GetDatasetStarByUser(ctx.User) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetStarByUser failed", err))) + log.Error("GetDatasetStarByUser failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + for i, _ := range datasetStars { + datasetIDs = append(datasetIDs, datasetStars[i].DatasetID) + } + + datasets, count, err := models.Attachments(&models.AttachmentsOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: setting.UI.DatasetPagingNum, + }, + Keyword: keyword, + NeedDatasetIDs: true, + DatasetIDs: datasetIDs, + NeedIsPrivate: true, + IsPrivate: false, + Type: cloudbrainType, + JustNeedZipFile: true, + NeedRepoInfo: true, + }) + if err != nil { + ctx.ServerError("datasets", err) + return + } + + data, err := json.Marshal(datasets) + if err != nil { + log.Error("json.Marshal failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + ctx.JSON(200, map[string]string{ + 
"result_code": "0", + "data": string(data), + "count": strconv.FormatInt(count, 10), + }) + +} + +func GetDatasetStatus(ctx *context.Context) { + + var ( + err error + ) + + UUID := ctx.Params(":uuid") + attachment, err := models.GetAttachmentByUUID(UUID) + if err != nil { + log.Error("GetDatasetStarByUser failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + + ctx.JSON(200, map[string]string{ + "result_code": "0", + "UUID": UUID, + "AttachmentStatus": fmt.Sprint(attachment.DecompressState), + }) } diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index ed7cfbe98..df86dd4f7 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -140,6 +140,8 @@ func notebookNewDataPrepare(ctx *context.Context) error { } ctx.Data["flavors"] = modelarts.FlavorInfos.FlavorInfo + ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo + return nil } @@ -616,6 +618,7 @@ func trainJobNewDataPrepare(ctx *context.Context) error { return err } ctx.Data["config_list"] = configList.ParaConfigs + ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo return nil } @@ -782,6 +785,7 @@ func trainJobNewVersionDataPrepare(ctx *context.Context) error { ctx.Data["uuid"] = task.Uuid ctx.Data["flavor_code"] = task.FlavorCode ctx.Data["engine_id"] = task.EngineID + ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo configList, err := getConfigList(modelarts.PerPage, 1, modelarts.SortByCreateTime, "desc", "", modelarts.ConfigTypeCustom) if err != nil { @@ -2015,6 +2019,7 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error { New: MODEL_LATEST, }) ctx.Data["MODEL_COUNT"] = model_count + ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo return nil } diff --git a/routers/repo/setting.go b/routers/repo/setting.go index 5b057dbe5..af28f3290 100644 --- a/routers/repo/setting.go +++ b/routers/repo/setting.go @@ -245,10 +245,6 @@ func SettingsPost(ctx *context.Context, form 
auth.RepoSettingForm) { // This section doesn't require repo_name/RepoName to be set in the form, don't show it // as an error on the UI for this action ctx.Data["Err_RepoName"] = nil - if err := models.CreateDefaultDatasetToRepo(repo); err != nil { - ctx.ServerError("CreateDefaultDatasetToRepo", err) - return - } if form.EnableDataset && !models.UnitTypeDatasets.UnitGlobalDisabled() { units = append(units, models.RepoUnit{ diff --git a/routers/routes/routes.go b/routers/routes/routes.go index eae7f159a..b15941dfc 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -587,6 +587,8 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/delete", repo.DeleteAttachment) m.Get("/get_pre_url", repo.GetPresignedPutObjectURL) m.Post("/add", repo.AddAttachment) + + m.Post("/edit", bindIgnErr(auth.EditAttachmentForm{}), repo.EditAttachment) m.Post("/private", repo.UpdatePublicAttachment) m.Get("/get_chunks", repo.GetSuccessChunks) m.Get("/new_multipart", repo.NewMultipart) @@ -979,7 +981,24 @@ func RegisterRoutes(m *macaron.Macaron) { m.Group("/datasets", func() { m.Get("", reqRepoDatasetReader, repo.DatasetIndex) - m.Post("", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost) + m.Put("/:id/:action", reqRepoDatasetReader, repo.DatasetAction) + m.Get("/create", reqRepoDatasetWriter, repo.CreateDataset) + m.Post("/create", reqRepoDatasetWriter, bindIgnErr(auth.CreateDatasetForm{}), repo.CreateDatasetPost) + m.Get("/edit/:id", reqRepoDatasetWriter, repo.EditDataset) + m.Post("/edit", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost) + m.Get("/current_repo", repo.CurrentRepoDataset) + m.Get("/my_datasets", repo.MyDatasets) + m.Get("/public_datasets", repo.PublicDataset) + m.Get("/my_favorite", repo.MyFavoriteDataset) + + m.Group("/status", func() { + m.Get("/:uuid", repo.GetDatasetStatus) + }) + + m.Group("/attachments", func() { + m.Get("/upload", repo.UploadAttachmentUI) + m.Get("/edit/:id", 
repo.EditAttachmentUI) + }, reqSignIn) m.Group("/dirs", func() { m.Get("/:uuid", reqRepoDatasetReader, repo.DirIndex) diff --git a/templates/base/head.tmpl b/templates/base/head.tmpl index 2cecee52b..937abd588 100755 --- a/templates/base/head.tmpl +++ b/templates/base/head.tmpl @@ -215,10 +215,10 @@ var _hmt = _hmt || []; localStorage.setItem("isCloseNotice",true) } function isShowNotice(){ - var current_notice = localStorage.getItem("notice") + var current_notice = localStorage.getItem("notices") - if (current_notice != "{{.notice.CommitId}}"){ - localStorage.setItem('notice',"{{.notice.CommitId}}"); + if (current_notice != "{{.notices.CommitId}}"){ + localStorage.setItem('notices',"{{.notices.CommitId}}"); isNewNotice=true; localStorage.setItem("isCloseNotice",false) }else{ diff --git a/templates/base/head_fluid.tmpl b/templates/base/head_fluid.tmpl index 59e542b0b..5340c7cb8 100644 --- a/templates/base/head_fluid.tmpl +++ b/templates/base/head_fluid.tmpl @@ -216,10 +216,10 @@ var _hmt = _hmt || []; localStorage.setItem("isCloseNotice",true) } function isShowNotice(){ - var current_notice = localStorage.getItem("notice") + var current_notice = localStorage.getItem("notices") - if (current_notice != "{{.notice.CommitId}}"){ - localStorage.setItem('notice',"{{.notice.CommitId}}"); + if (current_notice != "{{.notices.CommitId}}"){ + localStorage.setItem('notices',"{{.notices.CommitId}}"); isNewNotice=true; localStorage.setItem("isCloseNotice",false) }else{ diff --git a/templates/base/head_home.tmpl b/templates/base/head_home.tmpl index 561edd5ce..25d7a92ec 100644 --- a/templates/base/head_home.tmpl +++ b/templates/base/head_home.tmpl @@ -220,10 +220,10 @@ var _hmt = _hmt || []; localStorage.setItem("isCloseNotice",true) } function isShowNotice(){ - var current_notice = localStorage.getItem("notice") + var current_notice = localStorage.getItem("notices") - if (current_notice != "{{.notice.CommitId}}"){ - localStorage.setItem('notice',"{{.notice.CommitId}}"); + if 
(current_notice != "{{.notices.CommitId}}"){ + localStorage.setItem('notices',"{{.notices.CommitId}}"); isNewNotice=true; localStorage.setItem("isCloseNotice",false) }else{ diff --git a/templates/base/head_pro.tmpl b/templates/base/head_pro.tmpl index 82543ac61..75292b6fc 100644 --- a/templates/base/head_pro.tmpl +++ b/templates/base/head_pro.tmpl @@ -217,10 +217,10 @@ var _hmt = _hmt || []; localStorage.setItem("isCloseNotice",true) } function isShowNotice(){ - var current_notice = localStorage.getItem("notice") + var current_notice = localStorage.getItem("notices") - if (current_notice != "{{.notice.CommitId}}"){ - localStorage.setItem('notice',"{{.notice.CommitId}}"); + if (current_notice != "{{.notices.CommitId}}"){ + localStorage.setItem('notices',"{{.notices.CommitId}}"); isNewNotice=true; localStorage.setItem("isCloseNotice",false) }else{ diff --git a/templates/custom/select_dataset.tmpl b/templates/custom/select_dataset.tmpl new file mode 100644 index 000000000..17e4eee42 --- /dev/null +++ b/templates/custom/select_dataset.tmpl @@ -0,0 +1,134 @@ + + +
+ + + + {{.i18n.Tr "dataset.select_dataset"}} + +
+ + +
+ + + +
+
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias} ${dataset.Name}
+
+ + + + ${dataset.Description} +
+
+
+ + + + 解压中 + + + + 解压失败 + +
+
+ + +
+ +
+
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
+
+ + + + ${dataset.Description} +
+
+
+ + + + 解压中 + + + + 解压失败 + +
+
+ +
+ +
+
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
+
+ + + + ${dataset.Description} +
+
+
+ + + + 解压中 + + + + 解压失败 + +
+
+ +
+ +
+
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
+
+ + + + ${dataset.Description} +
+
+
+ + + + 解压中 + + + + 解压失败 + +
+
+ +
+
+
+ + +
+
+ + +
\ No newline at end of file diff --git a/templates/explore/dataset_left.tmpl b/templates/explore/dataset_left.tmpl new file mode 100644 index 000000000..2f9faf5df --- /dev/null +++ b/templates/explore/dataset_left.tmpl @@ -0,0 +1,69 @@ +
+
+
+ + + + +
+
+

+ {{.i18n.Tr "dataset.category"}} + {{if $.Category}} + Clear + {{end}} +

+
+
+ {{range $category := categories}} + {{$Cate := $.i18n.Tr (printf "dataset.category.%s" $category)}} + {{$Cate}} + {{end}} + +
+
+
+
+

+ {{.i18n.Tr "dataset.task"}} + {{if $.Task}} + Clear + {{end}} +

+
+
+ {{range $task := tasks}} + {{$Task := $.i18n.Tr (printf "dataset.task.%s" $task)}} + {{$Task}} + {{end}} +
+
+
+
+

+ {{.i18n.Tr "repo.license"}} + {{if $.License}} + Clear + {{end}} +

+
+
+ {{range $license := licenses}} + {{$license}} + {{end}} +
+
+ +
+
+
diff --git a/templates/explore/dataset_list.tmpl b/templates/explore/dataset_list.tmpl index 242f99dc9..1d22b62ec 100755 --- a/templates/explore/dataset_list.tmpl +++ b/templates/explore/dataset_list.tmpl @@ -25,7 +25,7 @@ {{range .Datasets}}
- + {{.Repo.OwnerName}} / {{.Repo.Alias}}
diff --git a/templates/explore/datasets.tmpl b/templates/explore/datasets.tmpl index 748d51ba8..ff18a3da3 100644 --- a/templates/explore/datasets.tmpl +++ b/templates/explore/datasets.tmpl @@ -1,15 +1,216 @@ {{template "base/head" .}} +
{{template "explore/dataset_search" .}} -
-
- {{template "explore/navbar" .}} -
- {{template "explore/dataset_list" .}} - {{template "base/paginate" .}} -
-
- {{template "explore/repo_right" .}} +
+
+
+ + {{template "explore/dataset_left" .}} + +
+ + {{if .Datasets}} + + {{end}} + +
+
+ {{range $k, $v :=.Datasets}} +
+
+
+ {{.Repo.OwnerName}} / {{.Repo.Alias}} + {{if $.IsSigned}} + +
+ +
+ ${starItems[{{$k}}]} +
+ {{else}} + +
+ +
+ ${starItems[{{$k}}]} +
+ {{end}} +
+
{{.Title}}
+ {{if or (.Category) (.Task) (.License)}} +
+ {{if .Category}} + {{$category := .Category}} + {{$.i18n.Tr (printf "dataset.category.%s" $category)}} + {{end}} + {{if .Task}} + {{$task := .Task}} + {{$.i18n.Tr (printf "dataset.task.%s" $task)}} + {{end}} + {{if .License}} + {{.License}} + {{end}} +
+ {{end}} +
+

{{.Description}}

+
+ +
+
+
+ + + + 创建于:{{TimeSinceUnix1 .CreatedUnix}} +
+
+
+ {{end}} + + +
+
+ +
+
+ + +
+
+
diff --git a/templates/repo/attachment/edit.tmpl b/templates/repo/attachment/edit.tmpl new file mode 100644 index 000000000..c1331fa3a --- /dev/null +++ b/templates/repo/attachment/edit.tmpl @@ -0,0 +1,45 @@ +
+
+
+
+
+
+
+
+
+{{template "base/head" .}} +
+{{template "repo/header" .}} +
+ +
+

+ {{$.i18n.Tr "dataset.modify_dataset_description"}} +

+
+
+ + {{.CsrfTokenHtml}} + + {{.Attachment.Type | AttachmentResourceType}} + + + + {{.Attachment.Name}} + + + + + + 确定 + 取消 + + + +
+
+
+ +
+
+{{template "base/footer" .}} diff --git a/templates/repo/attachment/upload.tmpl b/templates/repo/attachment/upload.tmpl new file mode 100644 index 000000000..42aac99df --- /dev/null +++ b/templates/repo/attachment/upload.tmpl @@ -0,0 +1,73 @@ + +{{template "base/head" .}} +
+{{template "repo/header" .}} +
+ +
+

+ {{$.i18n.Tr "dataset.upload_dataset_file"}} +

+
+
+ + {{.CsrfTokenHtml}} + + CPU/GPU + NPU + + + + + + + + + + +
+
+
+
+
+
+ +

{{$.i18n.Tr "dataset.illustrate"}}:

+

{{$.i18n.Tr "dataset.illustrate.only"}} {{$.i18n.Tr "dataset.illustrate.zip"}} {{$.i18n.Tr "dataset.illustrate.fisrt_end"}};
+ {{$.i18n.Tr "dataset.dataset_explain"}}

+ +
+
+
+{{template "base/footer" .}} diff --git a/templates/repo/cloudbrain/new.tmpl b/templates/repo/cloudbrain/new.tmpl index 7ef6d25e8..5e84857bd 100755 --- a/templates/repo/cloudbrain/new.tmpl +++ b/templates/repo/cloudbrain/new.tmpl @@ -101,6 +101,7 @@ } +
@@ -116,6 +117,7 @@
{{template "repo/header" .}}
+
{{template "base/alert" .}}
@@ -200,17 +202,8 @@ {{end}}
- -
- - -
- + + {{template "custom/select_dataset" .}}
+
+

+ {{.i18n.Tr "dataset.create_new_dataset"}} +

+
+
+ + {{.CsrfTokenHtml}} + + + {{.i18n.Tr "dataset.dataset_name_tooltips"}} + + + + + + + + {{range $category := categories}} + + {{end}} + + + + + + {{range $task := tasks}} + + {{end}} + + + + + {{range $license := licenses}} + + {{end}} + + + + {{.i18n.Tr "repo.confirm_choice"}} + {{.i18n.Tr "cancel"}} + + +
+
+
+
+
+{{template "base/footer" .}} diff --git a/templates/repo/datasets/edit.tmpl b/templates/repo/datasets/edit.tmpl new file mode 100644 index 000000000..2c0577eb6 --- /dev/null +++ b/templates/repo/datasets/edit.tmpl @@ -0,0 +1,72 @@ + +
+
+
+
+
+
+
+
+
+{{template "base/head" .}} +
+ {{template "repo/header" .}} +
+ +
+

+ {{.i18n.Tr "dataset.modify_dataset"}} +

+ + +
+
+ + {{.CsrfTokenHtml}} + + + {{.i18n.Tr "dataset.dataset_name_tooltips"}} + + + + + + + + {{range $category := categories}} + + {{end}} + + + + + {{range $task := tasks}} + + {{end}} + + + + + {{range $license := licenses}} + + {{end}} + + + + {{.i18n.Tr "repo.confirm_choice"}} + {{.i18n.Tr "cancel"}} + + +
+
+
+
+
+{{template "base/footer" .}} diff --git a/templates/repo/datasets/index.tmpl b/templates/repo/datasets/index.tmpl index 65ba2bb6e..a38e86525 100755 --- a/templates/repo/datasets/index.tmpl +++ b/templates/repo/datasets/index.tmpl @@ -6,145 +6,341 @@ margin: -1px; background: #FFF !important; } + +.dataset_title{ + font-size: 14px; + max-width: 80%; + display: inline-block !important; + margin-left: 6px !important; + padding-right: 0 !important; +} +.wrapper { + display: flex; + overflow: hidden; + padding: 0 1rem; + } + .exp{ + display: none; + } + .exp:checked+.text{ + max-height: none; + } + .exp:checked+.text::after{ + visibility: hidden; + } + .exp:checked+.text .btn::before{ + visibility: hidden; + } + .exp:checked+.text .btn::after{ + content:'{{$.i18n.Tr "org.fold"}}' + } + + .wrapper>.text { + font-family: SourceHanSansSC-regular; + font-size: 14px; + color: #101010; + overflow: hidden; + text-overflow: ellipsis; + text-align: justify; + position: relative; + line-height: 1.5; + max-height: 3em; + transition: .3s max-height; + word-wrap: break-word; + word-break: break-all; + } + .wrapper>.text::before { + content: ''; + height: calc(100% - 20px); + float: right; + } + .wrapper>.text::after { + content: ''; + width: 999vw; + height: 999vw; + position: absolute; + box-shadow: inset calc(100px - 999vw) calc(30px - 999vw) 0 0 #fff; + margin-left: -100px; + } + .btn{ + position: relative; + float: right; + clear: both; + margin-left: 20px; + font-size: 14px; + padding: 0 8px; + background: #3F51B5; + line-height: 20px; + border-radius: 4px; + color: #fff; + cursor: pointer; + /* margin-top: -30px; */ + } + .btn::after{ + content:'{{$.i18n.Tr "org.unfold"}}' + } + .btn::before{ + content: '...'; + position: absolute; + left: -5px; + color: #333; + transform: translateX(-100%) + } + + .el-button--text{color:#0366d6 ;} + .heart-stroke{ + stroke: #666; + stroke-width: 2; + fill: #fff + } + .stars_active{ + fill: #FA8C16 !important; + stroke:#FA8C16 !important + } 
+ .diy-popper{ + max-width: 400px; + } -
+
{{template "repo/header" .}} - -
- - -
- {{.CsrfTokenHtml}} - {{template "base/alert" .}} -
-
-
-

{{.dataset.Title}}

-
- {{if .Permission.CanWrite $.UnitTypeDatasets}} - - {{end}} -
-
-
- {{if .dataset.Description }} - {{.dataset.Description}} - {{else}} - {{.Repository.DescriptionHTML}} + {{if .dataset}} + + +
+
+
+
+

{{.dataset.Title}}

+
+ + {{if $.IsSigned}} +
+ +
+ ${num_stars} + {{else}} +
+ +
+ ${num_stars} + {{end}} + {{.i18n.Tr "repo.modelarts.modify"}} +
+
+ {{if and (.dataset.Category) (.dataset.Task) (.dataset.License)}} +
+ {{if .dataset.Category}} + {{$category := .dataset.Category}} + {{$.i18n.Tr (printf "dataset.category.%s" $category)}} + {{end}} + {{if .dataset.Task}} + {{$task := .dataset.Task}} + {{$.i18n.Tr (printf "dataset.task.%s" $task)}} + {{end}} + {{if .dataset.License}} + {{.dataset.License}} + {{end}} +
{{end}}
-
-
- -
- -
- -
- -
- -
- -
- {{.i18n.Tr "cancel"}} - -
-
- - - -
-
-
-
-
- {{if eq .Type 0}}{{.i18n.Tr "repo.cloudbrain1"}}{{else}}{{.i18n.Tr "repo.cloudbrain2"}}{{end}}-{{.i18n.Tr "datasets"}} -
-
- -
- {{template "repo/datasets/dataset_list" .}} + {{end}} +
+
+
+ + + + + + + {{$.i18n.Tr "dataset.dataset_upload"}} +
-
-
- {{if .Permission.CanWrite $.UnitTypeDatasets}} - -
- {{end}} +
+
+
+
+ +
+ {{$.i18n.Tr "dataset.dataset_file_name"}} +
+
+ {{$.i18n.Tr "repo.model.manage.size"}} +
+
+ {{$.i18n.Tr "dataset.dataset_available_clusters"}} +
+
+ {{$.i18n.Tr "repo.modelarts.status"}} +
+
+ {{$.i18n.Tr "repo.cloudbrain_creator"}} +
+
+ {{$.i18n.Tr "dataset.dataset_upload_time"}} +
+
+ {{$.i18n.Tr "repo.cloudbrain_operate"}} +
+
+
+ {{range $k, $v :=.Attachments}} +
+
+ + +
+ {{if .Description}} + +
{{.Description}}
{{$.i18n.Tr "dataset.download"}}:{{.DownloadCount}}
+ + {{.Name}} + +
+ {{else}} + +
{{$.i18n.Tr "dataset.download"}}:{{.DownloadCount}}
+ + {{.Name}} + +
+ {{end}} + + +
+
+ {{.Size | FileSize}} +
+
+ {{.Type | AttachmentResourceType}} +
+
+ {{$x:=.IsPrivate | AttachmentStatus}} + {{$.i18n.Tr "home.show_private"}} + {{$.i18n.Tr "org.settings.visibility.public"}} +
+
+ {{if .Uploader.Name}} + + {{else}} + + {{end}} +
+
+ {{.CreatedUnix | TimeSinceUnix1}} +
+ +
+
+ {{end}} +
+
+
-
- {{template "repo/datasets/right_side" .}} -
- +
+
+
+ + +
+
+ {{else}} +
+
+
{{.i18n.Tr "dataset.dataset_no_create"}}
+ {{if $.CanWrite}} + {{.i18n.Tr "dataset.create_new_dataset"}} + {{end}} +
+
{{.i18n.Tr "dataset.dataset_explain"}}
+
{{.i18n.Tr "dataset.dataset_instructions_for_use"}}{{.i18n.Tr "dataset.dataset_camp_course"}}
+
+
+ {{end}}
+ {{template "base/footer" .}} diff --git a/templates/repo/editor/upload.tmpl b/templates/repo/editor/upload.tmpl index 7794c2a2d..8466a6e9a 100644 --- a/templates/repo/editor/upload.tmpl +++ b/templates/repo/editor/upload.tmpl @@ -27,7 +27,7 @@
-
+
{{template "repo/editor/commit_form" .}} diff --git a/templates/repo/header.tmpl b/templates/repo/header.tmpl index be3df93b8..3cb6954ad 100755 --- a/templates/repo/header.tmpl +++ b/templates/repo/header.tmpl @@ -138,7 +138,7 @@ {{end}} {{if .Permission.CanRead $.UnitTypeDatasets}} - + {{.i18n.Tr "datasets"}} diff --git a/templates/repo/modelarts/notebook/new.tmpl b/templates/repo/modelarts/notebook/new.tmpl index a91d98af1..a66690f43 100755 --- a/templates/repo/modelarts/notebook/new.tmpl +++ b/templates/repo/modelarts/notebook/new.tmpl @@ -59,6 +59,9 @@ {{end}}
+<<<<<<< HEAD + {{template "custom/select_dataset" .}} +=======
@@ -70,6 +73,7 @@
+>>>>>>> V20220328
@@ -24,9 +26,19 @@ import qs from 'qs'; import createDropzone from '../features/dropzone.js'; const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config; -const CloudBrainType = 0; +// const uploadtype = 0; export default { + props:{ + uploadtype:{ + type:Number, + required:true + }, + desc:{ + type:String, + default:'' + } + }, data() { return { dropzoneUploader: null, @@ -36,7 +48,12 @@ export default { progress: 0, status: '', dropzoneParams: {}, - file_status_text: '' + file_status_text: '', + file:{}, + repoPath:'', + btnFlag:false, + cancel:'', + upload:'', }; }, @@ -44,33 +61,47 @@ export default { this.dropzoneParams = $('div#minioUploader-params'); this.file_status_text = this.dropzoneParams.data('file-status'); this.status = this.dropzoneParams.data('file-init-status'); - - let previewTemplate = ''; - previewTemplate += '
\n '; - previewTemplate += '
\n '; - previewTemplate += '
'; - previewTemplate += - ' '; - previewTemplate += '
\n '; - previewTemplate += '
\n '; - previewTemplate += '
\n '; - previewTemplate += '
'; - previewTemplate += - '
\n '; - previewTemplate += '
\n '; - previewTemplate += '
'; - previewTemplate += ' 上传成功'; - previewTemplate += '
\n '; - previewTemplate += '
'; - previewTemplate += ' 上传失败'; - previewTemplate += '
\n '; - previewTemplate += '
'; - previewTemplate += ' '; - previewTemplate += '
\n'; - previewTemplate += '
'; + this.repoPath = this.dropzoneParams.data('repopath'); + this.cancel = this.dropzoneParams.data('cancel'); + this.upload = this.dropzoneParams.data('upload'); + // let previewTemplate = ''; + // previewTemplate += '
\n '; + // previewTemplate += '
\n '; + // previewTemplate += '
'; + // previewTemplate += + // ' '; + // previewTemplate += '
\n '; + // previewTemplate += '
\n '; + // previewTemplate += '
\n '; + // previewTemplate += '
'; + // previewTemplate += + // '
\n '; + // previewTemplate += '
\n '; + // previewTemplate += '
'; + // previewTemplate += ' 上传成功'; + // previewTemplate += '
\n '; + // previewTemplate += '
'; + // previewTemplate += ' 上传失败'; + // previewTemplate += '
\n '; + // previewTemplate += '
'; + // previewTemplate += ' '; + // previewTemplate += '
\n'; + // previewTemplate += '
'; + let previewTemplate = '' + previewTemplate += '
' + previewTemplate += '
' + previewTemplate += '
' + previewTemplate += '
' + previewTemplate += '
' + // previewTemplate += '' + previewTemplate += '
' + + previewTemplate += '
' + previewTemplate += '
' + previewTemplate += '
' + previewTemplate += '
' const $dropzone = $('div#dataset'); - console.log('createDropzone'); const dropzoneUploader = await createDropzone($dropzone[0], { url: '/todouploader', maxFiles: this.maxFiles, @@ -84,10 +115,7 @@ export default { previewTemplate }); dropzoneUploader.on('addedfile', (file) => { - setTimeout(() => { - // eslint-disable-next-line no-unused-expressions - file.accepted && this.onFileAdded(file); - }, 200); + this.file = file }); dropzoneUploader.on('maxfilesexceeded', function (file) { if (this.files[0].status !== 'success') { @@ -102,14 +130,23 @@ export default { this.dropzoneUploader = dropzoneUploader; }, methods: { + cancelDataset(){ + location.href = this.repoPath + this.dropzoneUploader.removeAllFiles(true) + }, resetStatus() { this.progress = 0; this.status = ''; + console.log(this.uploadtype) }, updateProgress(file, progress) { + console.log("progress---",progress) file.previewTemplate.querySelector( '.dz-upload' - ).style.width = `${progress}%`; + ).style.width = `${progress}%` + file.previewTemplate.querySelector( + '.dz-upload' + ).style.background = '#409eff'; }, emitDropzoneSuccess(file) { file.status = 'success'; @@ -122,18 +159,24 @@ export default { this.dropzoneUploader.emit('error', file); // this.dropzoneUploader.emit('complete', file); }, - onFileAdded(file) { - file.datasetId = document + onFileAdded() { + this.btnFlag = true + this.file.datasetId = document .getElementById('datasetId') .getAttribute('datasetId'); this.resetStatus(); - this.computeMD5(file); + console.log(this.file,!this.file?.upload) + if(!this.file?.upload){ + this.btnFlag = false + return + } + this.computeMD5(this.file); }, finishUpload(file) { this.emitDropzoneSuccess(file); setTimeout(() => { - window.location.reload(); + window.location.href = this.repoPath }, 1000); }, @@ -249,7 +292,7 @@ export default { file_name: file.name, size: file.size, dataset_id: file.datasetId, - type: CloudBrainType, + type: this.uploadtype, _csrf: csrf }) ); @@ -260,6 +303,8 @@ export 
default { const params = { params: { md5: file.uniqueIdentifier, + type: this.uploadtype, + file_name: file.name, _csrf: csrf } }; @@ -282,13 +327,15 @@ export default { }, async newMultiUpload(file) { + console.log(this.uploadtype,this) const res = await axios.get('/attachments/new_multipart', { params: { totalChunkCounts: file.totalChunkCounts, md5: file.uniqueIdentifier, size: file.size, fileType: file.type, - type: CloudBrainType, + type: this.uploadtype, + file_name: file.name, _csrf: csrf } }); @@ -306,6 +353,7 @@ export default { fileReader = new FileReader(), time = new Date().getTime(); let currentChunk = 0; + let _this = this function loadNext() { const start = currentChunk * chunkSize; @@ -329,7 +377,8 @@ export default { uploadID: file.uploadID, size: partSize, chunkNumber: currentChunk + 1, - type: CloudBrainType, + type: _this.uploadtype, + file_name: file.name, _csrf: csrf } }); @@ -343,14 +392,27 @@ export default { } - async function uploadMinioNewMethod(url,e){ + async function uploadMinioNewMethod(url,e){ + var xhr = new XMLHttpRequest(); - xhr.open('PUT', url, false); - xhr.setRequestHeader('Content-Type', 'text/plain') - xhr.send(e.target.result); - var etagValue = xhr.getResponseHeader('etag'); - //console.log(etagValue); - etags[currentChunk] = etagValue; + xhr.open('PUT', url, false); + if(_this.uploadtype===0){ + xhr.setRequestHeader('Content-Type', 'text/plain') + xhr.send(e.target.result); + + var etagValue = xhr.getResponseHeader('etag'); + etags[currentChunk] = etagValue; + } + else if(_this.uploadtype===1){ + xhr.setRequestHeader('Content-Type', '') + xhr.send(e.target.result); + var etagValue = xhr.getResponseHeader('ETag'); + //console.log(etagValue); + etags[currentChunk] = etagValue; + } + + + } async function updateChunk(currentChunk) { @@ -395,6 +457,7 @@ export default { } async function completeUpload() { + console.log(_this.uploadtype) return await axios.post( '/attachments/complete_multipart', qs.stringify({ @@ -403,8 +466,9 
@@ export default { file_name: file.name, size: file.size, dataset_id: file.datasetId, - type: CloudBrainType, - _csrf: csrf + type: _this.uploadtype, + _csrf: csrf, + description:_this.desc }) ); } @@ -430,6 +494,7 @@ export default { 1}/${chunks}个分片上传` ); this.progress = Math.ceil((currentChunk / chunks) * 100); + console.log("((currentChunk / chunks) * 100).toFixed(2)",((currentChunk / chunks) * 100).toFixed(2)) this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2)); this.status = `${this.dropzoneParams.data('uploading')} ${( (currentChunk / chunks) * @@ -443,6 +508,7 @@ export default { file.size } 用时:${(new Date().getTime() - time) / 1000} s` ); + this.updateProgress(file, 100); this.progress = 100; this.status = this.dropzoneParams.data('upload-complete'); this.finishUpload(file); @@ -455,7 +521,7 @@ export default { \ No newline at end of file diff --git a/web_src/js/components/ObsUploader.vue b/web_src/js/components/ObsUploader.vue index b20594b86..7ceb91a27 100755 --- a/web_src/js/components/ObsUploader.vue +++ b/web_src/js/components/ObsUploader.vue @@ -460,7 +460,7 @@ export default { \ No newline at end of file diff --git a/web_src/js/index.js b/web_src/js/index.js index 902750c5b..5602dad90 100755 --- a/web_src/js/index.js +++ b/web_src/js/index.js @@ -43,7 +43,7 @@ import Contributors from './components/Contributors.vue' import Model from './components/Model.vue'; import WxAutorize from './components/WxAutorize.vue' import initCloudrain from './features/cloudrbanin.js' - +// import $ from 'jquery.js' Vue.use(ElementUI); Vue.prototype.$axios = axios; @@ -2918,6 +2918,7 @@ $(document).ready(async () => { initVueApp(); initVueUploader(); initObsUploader(); + initVueDataset(); initVueEditAbout(); initVueEditTopic(); initVueContributors(); @@ -3658,6 +3659,594 @@ function initVueEditAbout() { }); } +function initVueDataset() { + const el = document.getElementById('dataset-base'); + if (!el) { + return; + } + let 
link=$('#square-link').data('link') + let repolink = $('.dataset-repolink').data('repolink') + let cloudbrainType = $('.dataset-repolink').data('cloudranin-type') + const clearBtn = document.getElementsByClassName("clear_dataset_value"); + const params = new URLSearchParams(location.search) + for (let i = 0; i < clearBtn.length; i++) { + clearBtn[i].addEventListener('click',function(e){ + let searchType=e.target.getAttribute("data-clear-value") + if(params.has(searchType)){ + params.delete(searchType) + let clearSearch = params.toString() + location.href = link + '?' + clearSearch + } + }) + } + const items = [] + const zipStatus = [] + $('#dataset-range-value').find('.item').each(function(){ + items.push($(this).data('private')) + zipStatus.push($(this).data('decompress-state')) + }) + let num_stars = $('#dataset-range-value').data('num-stars') + let star_active = $('#dataset-range-value').data('star-active') + const ruleForm = {} + if(document.getElementById('dataset-edit-value')){ + let $this = $('#dataset-edit-value') + ruleForm.title = $this.data('edit-title') || '' + ruleForm.description = $this.data('edit-description') || '' + ruleForm.category = $this.data('edit-category') || '' + ruleForm.task = $this.data('edit-task') || '' + ruleForm.license = $this.data('edit-license') || '' + ruleForm.id = $this.data('edit-id')|| '' + ruleForm._csrf = csrf + } + + const starItems = [] + const starActives = [] + $('#datasets-square-range-value').find('.item').each(function(){ + starItems.push($(this).data('num-stars')) + starActives.push($(this).data('star-active')) + }) + const taskLists = [] + const licenseLists = [] + $('#task-square-range-value').find('.item').each(function(){ + taskLists.push($(this).data('task')) + }) + $('#task-square-range-value').find('.item').each(function(){ + licenseLists.push($(this).data('license')) + }) + let dataset_file_desc + if(document.getElementById('dataset-file-desc')){ + dataset_file_desc = 
document.getElementById('dataset-file-desc').value + } + + // getEditInit(){ + // if($('#dataset-edit-value')){ + // $this = $('#dataset-edit-value') + // this.ruleForm.title = $this.data('edit-title') || '' + // this.ruleForm.description = $this.data('edit-description') || '' + // this.ruleForm.category = $this.data('edit-category') || '' + // this.ruleForm.task = $this.data('edit-task') || '' + // this.ruleForm.license = $this.data('edit-license') || '' + // this.ruleForm.id = $this.data('edit-id')|| '' + // } + // }, + new Vue({ + delimiters: ['${', '}'], + el, + data: { + suburl: AppSubUrl, + url:'', + type:0, + desc:'', + descfile:'', + datasetType:'', + privates:[], + zipStatus:[], + starItems:[], + starActives:[], + taskLists:[], + taskShow:[], + licenseLists:[], + licenseShow:[], + hasMoreBthHis: false, + showMoreHis:false, + star_active:false, + num_stars:0, + dialogVisible:false, + activeName: 'first', + searchDataItem:'', + currentRepoDataset:[], + myDataset:[], + publicDataset:[], + myFavoriteDataset:[], + page:1, + totalnums:0, + repolink:'', + cloudbrainType:0, + dataset_uuid:'', + dataset_name:'', + loadingDataIndex:true, + timer:null, + ruleForm:{ + title:'', + description:'', + category:'', + task:'', + license:'', + _csrf:csrf, + + }, + ruleForm1:{ + title:'', + description:'', + category:'', + task:'', + license:'', + _csrf:'', + id:'' + }, + rules: { + title: [ + { required: true, message: '请输入数据集名称', trigger: 'blur' }, + { min: 1, max: 100, message: '长度在 1 到 100 个字符', trigger: 'blur' }, + // {required:true,message:'test',pattern:'/^[a-zA-Z0-9-_]{1,100}[^-]$/',trigger:'blur'}, + { validator: (rule, value, callback) => { + if (/^[a-zA-Z0-9-_.]{0,100}$/.test(value) == false) { + callback(new Error("输入不符合数据集名称规则")); + } else { + callback(); + } + }, trigger: 'blur'} + ], + description: [ + { required: true, message: '请输入数据集描述详情', trigger: 'blur' } + ], + category: [ + { required: true, message: '请选择分类', trigger: 'change' } + ], + task: [ + { 
required: true, message: '请选择研究方向/应用领域', trigger: 'change' } + ], + // license: [ + // { required: true, message: '请选择活动区域', trigger: 'change' } + // ] + }, + }, + components: { + MinioUploader, + ObsUploader + }, + mounted(){ + // if(document.getElementById('postPath')){ + // this.url = document.getElementById('postPath').value + // } + // this.privates = items + // this.num_stars = num_stars + // this.star_active = star_active + // this.ruleForm1 = ruleForm + + // // this.getEditInit() + // this.getTypeList() + this.getTypeList() + + if(!!document.getElementById('dataset-repolink-init')){ + this.getCurrentRepoDataset(this.repolink,this.cloudbrainType) + } + + }, + created(){ + if(document.getElementById('postPath')){ + this.url = document.getElementById('postPath').value + } + this.privates = items + this.zipStatus = zipStatus + this.num_stars = num_stars + this.star_active = star_active + this.ruleForm1 = ruleForm + // this.getEditInit() + + this.starItems = starItems + this.starActives = starActives + this.taskLists = taskLists + this.licenseLists = licenseLists + this.descfile = dataset_file_desc + this.repolink = repolink + this.cloudbrainType = cloudbrainType + }, + methods:{ + handleCurrentChange(val) { + this.page = val + switch(this.activeName){ + case 'first': + this.getCurrentRepoDataset(this.repolink,this.cloudbrainType) + + break + case 'second': + this.getMyDataset(this.repolink,this.cloudbrainType) + break + case 'third': + this.getPublicDataset(this.repolink,this.cloudbrainType) + break + case 'fourth': + this.getStarDataset(this.repolink,this.cloudbrainType) + break + } + + }, + createDataset(formName){ + let _this = this + this.$refs[formName].validate((valid)=>{ + if(valid){ + document.getElementById("mask").style.display = "block" + _this.$axios.post(_this.url,_this.qs.stringify(_this.ruleForm)).then((res)=>{ + if(res.data.Code===0){ + document.getElementById("mask").style.display = "none" + location.href = 
_this.url.split('/create')[0]+'?type=-1' + }else{ + console.log(res.data.Message) + } + document.getElementById("mask").style.display = "none" + }).catch(error=>{ + console.log(error) + }) + } + else{ + return false + } + }) + }, + cancelDataset(getpage,attachment){ + if(getpage && !attachment){ + if(getpage==='create'){ + location.href = this.url.split('/create')[0]+'?type=-1' + }else if(getpage==='edit'){ + location.href = this.url.split('/edit')[0]+'?type=-1' + }else{ + location.href='/' + } + } + else{ + location.href = `${AppSubUrl}${attachment}/datasets` + + } + + }, + gotoUpload(repolink,datsetId){ + location.href = `${AppSubUrl}${repolink}/datasets/attachments/upload?datasetId=${datsetId}` + }, + gotoDataset(datsetUrl){ + location.href = datsetUrl + }, + gotoAnnotate(repolink,uuid,type){ + location.href = `${AppSubUrl}${repolink}/datasets/label/${uuid}?type=${type}` + }, + uploadGpu(){ + this.type=0 + }, + uploadNpu(){ + this.type=1 + }, + setPrivate(uuid,privateFlag,index){ + const params = {_csrf:csrf,file:uuid,is_private:privateFlag} + this.$axios.post('/attachments/private',this.qs.stringify(params)).then((res)=>{ + this.$set(this.privates,index,privateFlag) + }).catch(error=>{ + console.log(error) + }) + }, + delDataset(uuid){ + let _this = this + const params = {_csrf:csrf,file:uuid} + $('#data-dataset-delete-modal') + .modal({ + closable: false, + onApprove() { + _this.$axios.post('/attachments/delete',_this.qs.stringify(params)).then((res)=>{ + // $('#'+uuid).hide() + location.reload() + }).catch(error=>{ + console.log(error) + }) + } + }) + .modal('show'); + }, + // getEditInit(){ + // if($('#dataset-edit-value')){ + // $this = $('#dataset-edit-value') + // this.ruleForm.title = $this.data('edit-title') || '' + // this.ruleForm.description = $this.data('edit-description') || '' + // this.ruleForm.category = $this.data('edit-category') || '' + // this.ruleForm.task = $this.data('edit-task') || '' + // this.ruleForm.license = 
$this.data('edit-license') || '' + // this.ruleForm.id = $this.data('edit-id')|| '' + // } + // }, + editDataset(formName,id){ + let _this = this + this.url = this.url.split(`/${id}`)[0] + this.$refs[formName].validate((valid)=>{ + if(valid){ + document.getElementById("mask").style.display = "block" + _this.$axios.post(_this.url,_this.qs.stringify(_this.ruleForm1)).then((res)=>{ + if(res.data.Code===0){ + document.getElementById("mask").style.display = "none" + location.href = _this.url.split('/edit')[0]+'?type=-1' + }else{ + console.log(res.data.Message) + } + document.getElementById("mask").style.display = "none" + }).catch((err)=>{ + console.log(err) + }) + } + else{ + return false + } + }) + + }, + editDatasetFile(id,backurl){ + let url = '/attachments/edit' + const params={id:id,description:this.descfile,_csrf:csrf} + // document.getElementById("mask").style.display = "block" + this.$axios.post(url,this.qs.stringify(params)).then((res)=>{ + if(res.data.Code===0){ + location.href = `${AppSubUrl}${backurl}/datasets` + }else{ + console.log(res.data.Message) + } + }).catch((err)=>{ + console.log(err) + }) + }, + postStar(id,link){ + if(this.star_active){ + let url = link+'/'+ id + '/unstar' + this.$axios.put(url).then((res)=>{ + if(res.data.Code===0){ + this.star_active = false + this.num_stars = this.num_stars -1 + } + }) + }else{ + let url = link+'/'+ id + '/star' + this.$axios.put(url).then((res)=>{ + if(res.data.Code===0){ + this.star_active = true + this.num_stars = this.num_stars + 1 + } + }) + } + }, + postSquareStar(id,link,index){ + if(this.starActives[index]){ + let url = link+'/'+ id + '/unstar' + this.$axios.put(url).then((res)=>{ + if(res.data.Code===0){ + this.$set(this.starActives,index,false) + this.$set(this.starItems,index,this.starItems[index]-1) + } + }) + }else{ + let url = link+'/'+ id + '/star' + this.$axios.put(url).then((res)=>{ + if(res.data.Code===0){ + this.$set(this.starActives,index,true) + 
this.$set(this.starItems,index,this.starItems[index]+1) + + } + }) + } + }, + getTypeList(){ + const params = new URLSearchParams(window.location.search) + if( window.location.search && params.has('type')){ + if(params.get('type')==0){ + this.datasetType = '0' + } + if(params.get('type')==1){ + this.datasetType = '1' + } + if(params.get('type')==-1){ + this.datasetType = '-1' + } + }else { + this.datasetType = '-1' + } + }, + changeDatasetType(val){ + const searchParams = new URLSearchParams(window.location.search) + if (!window.location.search) { + window.location.href = window.location.href + '?type='+val + } else if (searchParams.has('type')) { + window.location.href = window.location.href.replace(/type=([0-9]|-[0-9])/g,'type='+val) + } else { + window.location.href=window.location.href+'&type='+val + } + + + }, + gotoDatasetEidt(repolink,id){ + location.href = `${repolink}/datasets/attachments/edit/${id}` + + }, + handleClick(repoLink, tabName,type) { + if(tabName=="first"){ + this.page=1 + this.searchDataItem='' + this.getCurrentRepoDataset(repoLink,type) + + } + if(tabName=="second"){ + this.page=1 + this.searchDataItem='' + this.getMyDataset(repoLink,type) + } + if(tabName=="third"){ + this.page=1 + this.searchDataItem='' + this.getPublicDataset(repoLink,type) + } + if(tabName=="fourth"){ + this.page=1 + this.searchDataItem='' + this.getStarDataset(repoLink,type) + } + }, + polling (checkStatuDataset,repoLink) { + this.timer = window.setInterval(() => { + setTimeout(() => { + this.getDatasetStatus(checkStatuDataset,repoLink) + },0) + },15000) + + }, + + getDatasetStatus(checkStatuDataset,repoLink){ + const getmap = checkStatuDataset.map((item)=>{ + let url = `${AppSubUrl}${repolink}/datasets/status/${item.UUID}` + return this.$axios.get(url) + }) + this.$axios.all(getmap) + .then((res)=>{ + let flag = res.some((item)=>{ + return item.data.AttachmentStatus == 1 + }) + flag && clearInterval(this.timer) + flag && this.refreshStatusDataset() + + } + ) + + }, + 
refreshStatusDataset(){ + switch(this.activeName){ + case 'first': + this.getCurrentRepoDataset(this.repolink,this.cloudbrainType) + break + case 'second': + this.getMyDataset(this.repolink,this.cloudbrainType) + break + case 'third': + this.getPublicDataset(this.repolink,this.cloudbrainType) + break + case 'fourth': + this.getStarDataset(this.repolink,this.cloudbrainType) + break + } + }, + getCurrentRepoDataset(repoLink,type){ + clearInterval(this.timer) + this.loadingDataIndex = true + let url = repoLink + '/datasets/current_repo' + this.$axios.get(url,{ + params:{ + type:type, + page:this.page, + q:this.searchDataItem + } + }).then((res)=>{ + this.currentRepoDataset = JSON.parse(res.data.data) + const checkStatuDataset = this.currentRepoDataset.filter(item=>item.DecompressState===2) + if(checkStatuDataset.length>0){ + this.polling(checkStatuDataset,repoLink) + } + this.totalnums = parseInt(res.data.count) + this.loadingDataIndex = false + }) + }, + getMyDataset(repoLink,type){ + clearInterval(this.timer) + this.loadingDataIndex = true + let url = repoLink + '/datasets/my_datasets' + this.$axios.get(url,{ + params:{ + type:type, + page:this.page, + q:this.searchDataItem + } + }).then((res)=>{ + this.myDataset = JSON.parse(res.data.data) + const checkStatuDataset = this.myDataset.filter(item=>item.DecompressState===2) + if(checkStatuDataset.length>0){ + this.polling(checkStatuDataset,repoLink) + } + this.totalnums = parseInt(res.data.count) + this.loadingDataIndex = false + }) + + }, + getPublicDataset(repoLink,type){ + clearInterval(this.timer) + this.loadingDataIndex = true + let url = repoLink + '/datasets/public_datasets' + this.$axios.get(url,{ + params:{ + type:type, + page:this.page, + q:this.searchDataItem + } + }).then((res)=>{ + this.publicDataset = JSON.parse(res.data.data) + const checkStatuDataset = this.publicDataset.filter(item=>item.DecompressState===2) + if(checkStatuDataset.length>0){ + this.polling(checkStatuDataset,repoLink) + } + 
this.totalnums = parseInt(res.data.count) + this.loadingDataIndex = false + }) + + }, + getStarDataset(repoLink,type){ + clearInterval(this.timer) + this.loadingDataIndex = true + let url = repoLink + '/datasets/my_favorite' + this.$axios.get(url,{ + params:{ + type:type, + page:this.page, + q:this.searchDataItem + } + }).then((res)=>{ + this.myFavoriteDataset = JSON.parse(res.data.data) + const checkStatuDataset = this.myFavoriteDataset.filter(item=>item.DecompressState===2) + if(checkStatuDataset.length>0){ + this.polling(checkStatuDataset,repoLink) + } + this.totalnums= parseInt(res.data.count) + this.loadingDataIndex = false + }) + + }, + selectDataset(uuid,name){ + this.dataset_uuid = uuid + this.dataset_name = name + this.dialogVisible = false + }, + searchDataset(){ + switch(this.activeName){ + case 'first': + this.page = 1 + this.getCurrentRepoDataset(this.repolink,this.cloudbrainType) + break + case 'second': + this.page = 1 + this.getMyDataset(this.repolink,this.cloudbrainType) + break + case 'third': + this.page = 1 + this.getPublicDataset(this.repolink,this.cloudbrainType) + break + case 'fourth': + this.page = 1 + this.getStarDataset(this.repolink,this.cloudbrainType) + break + } + } + }, + }); + +} function initVueEditTopic() { const el = document.getElementById('topic_edit1'); diff --git a/web_src/less/_dataset.less b/web_src/less/_dataset.less index 3bc19ef63..35a87f89f 100644 --- a/web_src/less/_dataset.less +++ b/web_src/less/_dataset.less @@ -222,3 +222,38 @@ } } } +.panel_creator_reponam{ + display: inline-block; + border-radius: 4px; + padding: 4px; + font-size: 12px; + text-align: center; + background-color: rgba(161, 220, 255, 0.2); + color: #101010; +} +.panel_dataset_name{ + font-size: 15px; + color: #0366D6; + text-align: center; + margin-left: 1rem; +} +.panel_datset_desc{ + white-space: nowrap; + display: inline-block; + overflow: hidden; + width: 90%; + + text-overflow: ellipsis; +} +.el-dialog__body{ + padding-top:0 +} +#dataset-base{ 
+ .active{ + color: #0087f5!important; + border: 1px solid #0087f5!important; + /* margin: -1px!important; */ + background: #fff!important; + + } +} \ No newline at end of file diff --git a/web_src/less/openi.less b/web_src/less/openi.less index 7871d8148..942ec4b08 100644 --- a/web_src/less/openi.less +++ b/web_src/less/openi.less @@ -375,6 +375,10 @@ display: block; font-size: 18px; margin-bottom: 1rem; } +.bgtask-content-button{ + margin-top: 1em; + margin-bottom: 1em; +} .selectcloudbrain .active.item{ color: #0087f5 !important;