Browse Source

更新代码

tags/v1.22.11.2^2
ychao_1983 3 years ago
parent
commit
b5ac11d533
7 changed files with 53 additions and 331 deletions
  1. +42
    -0
      modules/structs/cloudbrain.go
  2. +2
    -2
      routers/repo/aisafety.go
  3. +4
    -4
      routers/repo/cloudbrain.go
  4. +2
    -318
      routers/repo/dataset.go
  5. +2
    -2
      routers/repo/grampus.go
  6. +1
    -1
      routers/repo/modelarts.go
  7. +0
    -4
      routers/routes/routes.go

+ 42
- 0
modules/structs/cloudbrain.go View File

@@ -0,0 +1,42 @@
package structs

// CreateGrampusTrainJobOption is the JSON request body for creating a
// Grampus (C2Net) training job through the API.
//
// Fields tagged binding:"Required" are enforced as mandatory by the
// form-binding middleware; the remaining fields are optional.
type CreateGrampusTrainJobOption struct {
	DisplayJobName   string `json:"display_job_name" binding:"Required"`
	JobName          string `json:"job_name" binding:"Required"` // fixed: stray trailing space inside the tag
	Attachment       string `json:"attachment" binding:"Required"`
	BootFile         string `json:"boot_file" binding:"Required"`
	ImageID          string `json:"image_id" binding:"Required"`
	Params           string `json:"run_para_list" binding:"Required"` // run-parameter list, serialized as a string
	Description      string `json:"description"`
	BranchName       string `json:"branch_name" binding:"Required"`
	EngineName       string `json:"engine_name" binding:"Required"`
	WorkServerNumber int    `json:"work_server_number" binding:"Required"`
	Image            string `json:"image" binding:"Required"`
	DatasetName      string `json:"dataset_name" binding:"Required"`
	ModelName        string `json:"model_name"`
	ModelVersion     string `json:"model_version"`
	CkptName         string `json:"ckpt_name"`
	LabelName        string `json:"label_names"`
	PreTrainModelUrl string `json:"pre_train_model_url"`
	SpecId           int64  `json:"spec_id" binding:"Required"`
}

// CreateTrainJobOption is the JSON request body for creating a training job
// through the API. Type selects the compute backend (cloudbrain type).
//
// Fields tagged binding:"Required" are enforced as mandatory by the
// form-binding middleware; the remaining fields are optional.
type CreateTrainJobOption struct {
	Type             int    `json:"type" binding:"Required"`
	DisplayJobName   string `json:"display_job_name" binding:"Required"`
	ImageID          string `json:"image_id"`
	Image            string `json:"image" binding:"Required"`
	Attachment       string `json:"attachment" binding:"Required"`
	DatasetName      string `json:"dataset_name" binding:"Required"`
	Description      string `json:"description"` // fixed: stray trailing space inside the tag
	BootFile         string `json:"boot_file" binding:"Required"`
	BranchName       string `json:"branch_name" binding:"Required"`
	Params           string `json:"run_para_list" binding:"Required"` // run-parameter list, serialized as a string
	WorkServerNumber int    `json:"work_server_number" binding:"Required"`
	ModelName        string `json:"model_name"`
	ModelVersion     string `json:"model_version"`
	CkptName         string `json:"ckpt_name"`
	LabelName        string `json:"label_names"`
	PreTrainModelUrl string `json:"pre_train_model_url"`
	SpecId           int64  `json:"spec_id" binding:"Required"`
}

+ 2
- 2
routers/repo/aisafety.go View File

@@ -827,7 +827,7 @@ func createForNPU(ctx *context.Context, jobName string) error {
JobType: string(models.JobTypeModelSafety),
}

err = modelarts.GenerateInferenceJob(ctx, req)
_, err = modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
return err
@@ -924,7 +924,7 @@ func createForGPU(ctx *context.Context, jobName string) error {
LabelName: evaluationIndex,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
return err
}


+ 4
- 4
routers/repo/cloudbrain.go View File

@@ -394,7 +394,7 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {

}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(err.Error(), tpl, &form)
@@ -580,7 +580,7 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(err.Error(), tpl, &form)
@@ -2498,7 +2498,7 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(err.Error(), tplCloudBrainBenchmarkNew, &form)
@@ -2652,7 +2652,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(err.Error(), tpl, &form)


+ 2
- 318
routers/repo/dataset.go View File

@@ -47,8 +47,8 @@ func newFilterPrivateAttachments(ctx *context.Context, list []*models.Attachment
permission := false
if !permission && ctx.User != nil {
isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
isInRepoTeam,_:=repo.IsInRepoTeam(ctx.User.ID)
if isCollaborator ||isInRepoTeam {
isInRepoTeam, _ := repo.IsInRepoTeam(ctx.User.ID)
if isCollaborator || isInRepoTeam {
log.Info("Collaborator user may visit the attach.")
permission = true
}
@@ -349,96 +349,6 @@ func DatasetAction(ctx *context.Context) {

}

// CurrentRepoDataset lists the zip attachments of the current repository's
// dataset as JSON: {"result_code": "0"|"-1", "data": <json array>, "count": N}.
// Results are filtered by compute type and optional keyword, and paginated
// by setting.UI.DatasetPagingNum.
func CurrentRepoDataset(ctx *context.Context) {
	pageNum := ctx.QueryInt("page")
	jobType := ctx.QueryInt("type")
	searchTerm := strings.Trim(ctx.Query("q"), " ")

	dataset, err := models.GetDatasetByRepo(ctx.Repo.Repository)
	if err != nil {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err)))
		return
	}

	attachments, total, err := models.Attachments(&models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     pageNum,
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         searchTerm,
		NeedDatasetIDs:  true,
		DatasetIDs:      []int64{dataset.ID},
		Type:            jobType,
		NeedIsPrivate:   false,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	payload, err := json.Marshal(attachments)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(payload),
		"count":       strconv.FormatInt(total, 10),
	})
}

// MyDatasets lists, as JSON, the zip attachments uploaded by the signed-in
// user: {"result_code": "0"|"-1", "data": <json array>, "count": N}.
// Results are filtered by compute type, optional keyword, and the
// "recommend" flag, paginated by setting.UI.DatasetPagingNum.
func MyDatasets(ctx *context.Context) {
	pageNum := ctx.QueryInt("page")
	jobType := ctx.QueryInt("type")
	searchTerm := strings.Trim(ctx.Query("q"), " ")

	attachments, total, err := models.Attachments(&models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     pageNum,
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         searchTerm,
		NeedDatasetIDs:  false,
		UploaderID:      ctx.User.ID,
		Type:            jobType,
		NeedIsPrivate:   false,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	payload, err := json.Marshal(attachments)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(payload),
		"count":       strconv.FormatInt(total, 10),
	})
}

func datasetMultiple(ctx *context.Context, opts *models.SearchDatasetOptions) {
page := ctx.QueryInt("page")
keyword := strings.Trim(ctx.Query("q"), " ")
@@ -591,180 +501,6 @@ func ReferenceDatasetData(ctx *context.Context) {

}

// PublicDataset lists, as JSON, the zip attachments of public datasets
// (IsPrivate=false): {"result_code": "0"|"-1", "data": <json array>,
// "count": N}. Results are filtered by compute type, optional keyword, and
// the "recommend" flag, paginated by setting.UI.DatasetPagingNum.
func PublicDataset(ctx *context.Context) {
	pageNum := ctx.QueryInt("page")
	jobType := ctx.QueryInt("type")
	searchTerm := strings.Trim(ctx.Query("q"), " ")

	attachments, total, err := models.Attachments(&models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     pageNum,
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         searchTerm,
		NeedDatasetIDs:  false,
		NeedIsPrivate:   true,
		IsPrivate:       false,
		Type:            jobType,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	payload, err := json.Marshal(attachments)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(payload),
		"count":       strconv.FormatInt(total, 10),
	})
}

// MyFavoriteDataset lists, as JSON, the zip attachments of the datasets the
// signed-in user has starred. Starred datasets are queried in two groups —
// those the user owns or collaborates on (no privacy filter) and the rest
// (public only) — then merged, sorted by attachment creation time descending,
// and paginated in memory via getPageDatasets.
//
// Response shape: {"result_code": "0"|"-1", "data": <json array>, "count": N}.
func MyFavoriteDataset(ctx *context.Context) {
	UserId := ctx.User.ID
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")
	// Starred dataset IDs are split by access level, because the two
	// Attachments queries below apply different privacy filters.
	var NotColDatasetIDs []int64
	var IsColDatasetIDs []int64
	datasetStars, err := models.GetDatasetStarByUser(ctx.User)
	if err != nil {
		// NOTE(review): this error path writes the response twice — first
		// BaseErrorMessage, then the result_code map. Confirm whether the
		// second ctx.JSON is intentional; it looks like a leftover.
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetStarByUser failed", err)))
		log.Error("GetDatasetStarByUser failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	//If the dataset has been deleted, it will not be counted
	for _, datasetStar := range datasetStars {
		IsExist, repo, dataset, err := IsDatasetStarExist(datasetStar)
		if err != nil {
			// Lookup failures are only logged; the star is skipped because
			// IsExist is false on error.
			log.Error("IsDatasetStarExist error:", err.Error())
		}
		if IsExist {
			DatasetIsCollaborator := DatasetIsCollaborator(ctx, dataset)
			if repo.OwnerID == ctx.User.ID || DatasetIsCollaborator {
				IsColDatasetIDs = append(IsColDatasetIDs, datasetStar.DatasetID)
			} else {
				NotColDatasetIDs = append(NotColDatasetIDs, datasetStar.DatasetID)
			}
		}
	}

	// Datasets the user cannot collaborate on: only public attachments.
	NotColDatasets, NotColcount, err := models.Attachments(&models.AttachmentsOptions{
		Keyword:         keyword,
		NeedDatasetIDs:  true,
		DatasetIDs:      NotColDatasetIDs,
		NeedIsPrivate:   true,
		IsPrivate:       false,
		Type:            cloudbrainType,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
		UserId:          UserId,
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}
	//If is collaborator, there is no need to determine whether the dataset is private or public
	IsColDatasets, IsColcount, err := models.Attachments(&models.AttachmentsOptions{
		Keyword:         keyword,
		NeedDatasetIDs:  true,
		DatasetIDs:      IsColDatasetIDs,
		NeedIsPrivate:   false,
		Type:            cloudbrainType,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
		UserId:          UserId,
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}
	// Merge both result sets, then sort newest-first by upload time.
	for _, NotColDataset := range NotColDatasets {
		IsColDatasets = append(IsColDatasets, NotColDataset)
	}
	datasets := IsColDatasets
	count := NotColcount + IsColcount
	sort.Slice(datasets, func(i, j int) bool {
		return datasets[i].Attachment.CreatedUnix > datasets[j].Attachment.CreatedUnix
	})

	// In-memory pagination; defaults are page 1, 5 items per page.
	page := ctx.QueryInt("page")
	if page <= 0 {
		page = 1
	}
	pagesize := ctx.QueryInt("pagesize")
	if pagesize <= 0 {
		pagesize = 5
	}
	pageDatasetsInfo := getPageDatasets(datasets, page, pagesize)
	if pageDatasetsInfo == nil {
		// Requested page is past the end of the merged list: empty data array.
		ctx.JSON(200, map[string]string{
			"result_code": "0",
			"data":        "[]",
			"count":       strconv.FormatInt(count, 10),
		})
		return
	}
	data, err := json.Marshal(pageDatasetsInfo)
	log.Info("data:", data)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})

}
// getPageDatasets slices one page out of the full attachment list.
// Pages are 1-based; it returns nil when the requested page starts past the
// end of the list, and a short final page when fewer than pagesize items
// remain.
func getPageDatasets(attachments []*models.AttachmentInfo, page int, pagesize int) []*models.AttachmentInfo {
	start := (page - 1) * pagesize
	if start >= len(attachments) {
		return nil
	}
	stop := start + pagesize
	if stop > len(attachments) {
		stop = len(attachments)
	}
	return attachments[start:stop]
}
// getTotalPage converts a total row count into a page count for the given
// page size, rounding up so that a partial final page still counts as one.
func getTotalPage(total int64, pageSize int) int {
	pages := int(total) / pageSize
	if int(total)%pageSize != 0 {
		pages++
	}
	return pages
}

func GetDatasetStatus(ctx *context.Context) {

var (
@@ -789,55 +525,3 @@ func GetDatasetStatus(ctx *context.Context) {
"AttachmentStatus": fmt.Sprint(attachment.DecompressState),
})
}
// DatasetIsCollaborator reports whether the signed-in user (ctx.User) has
// collaborator-level access to the repository that owns the given dataset.
//
// For organization-owned repositories it returns true when the user belongs
// to an org team that includes the repository, or is an org owner; any other
// org member (or a team-lookup failure) yields false. For user-owned
// repositories it falls back to the repository's collaborator list.
// All lookup errors are logged and treated as "no access".
func DatasetIsCollaborator(ctx *context.Context, dataset *models.Dataset) bool {
	repo, err := models.GetRepositoryByID(dataset.RepoID)
	if err != nil {
		log.Error("query repo error:", err.Error())
	} else {
		// Populate repo.Owner so the IsOrganization check below can run.
		repo.GetOwner()
		if ctx.User != nil {
			if repo.Owner.IsOrganization() {
				org := repo.Owner
				org.Teams, err = org.GetUserTeams(ctx.User.ID)
				if err != nil {
					log.Error("GetUserTeams error:", err.Error())
					return false
				}
				if org.IsUserPartOfOrg(ctx.User.ID) {
					for _, t := range org.Teams {
						if t.IsMember(ctx.User.ID) && t.HasRepository(repo.ID) {
							return true
						}
					}
					isOwner, _ := models.IsOrganizationOwner(repo.OwnerID, ctx.User.ID)
					if isOwner {
						return isOwner
					}
					// Org member with no qualifying team and not an org owner.
					return false
				}
				// NOTE(review): a user who is NOT part of the org falls through
				// to the plain collaborator check below — confirm intended.
			}

			isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
			if isCollaborator {
				return true
			}
		}
	}

	return false
}
// IsDatasetStarExist checks whether the dataset referenced by a star record
// still exists, resolving both the dataset and its owning repository.
// It returns (true, repo, dataset, nil) when both lookups succeed, and
// (false, nil, nil, err) when either lookup fails (the error is also logged).
func IsDatasetStarExist(datasetStar *models.DatasetStar) (bool, *models.Repository, *models.Dataset, error) {
	dataset, err := models.GetDatasetByID(datasetStar.DatasetID)
	if err != nil {
		log.Error("query dataset error:", err.Error())
		return false, nil, nil, err
	}
	// Idiom fix: early return above removes the needless else-block nesting.
	repo, err := models.GetRepositoryByID(dataset.RepoID)
	if err != nil {
		log.Error("GetRepositoryByID error:", err.Error())
		return false, nil, nil, err
	}
	return true, repo, dataset, nil
}

+ 2
- 2
routers/repo/grampus.go View File

@@ -465,7 +465,7 @@ func grampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain

}

err = grampus.GenerateTrainJob(ctx, req)
_, err = grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error(), ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
@@ -724,7 +724,7 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
req.PreTrainModelPath = preTrainModelPath
}

err = grampus.GenerateTrainJob(ctx, req)
_, err = grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)


+ 1
- 1
routers/repo/modelarts.go View File

@@ -2203,7 +2203,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
JobType: string(models.JobTypeInference),
}

err = modelarts.GenerateInferenceJob(ctx, req)
_, err = modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
inferenceJobErrorNewDataPrepare(ctx, form)


+ 0
- 4
routers/routes/routes.go View File

@@ -1096,10 +1096,6 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/edit/:id", reqRepoDatasetWriter, repo.EditDataset)
m.Post("/reference_datasets", reqRepoDatasetWriterJson, bindIgnErr(auth.ReferenceDatasetForm{}), repo.ReferenceDatasetPost)
m.Post("/edit", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost)
m.Get("/current_repo", repo.CurrentRepoDataset)
m.Get("/my_datasets", repo.MyDatasets)
m.Get("/public_datasets", repo.PublicDataset)
m.Get("/my_favorite", repo.MyFavoriteDataset)

m.Get("/current_repo_m", repo.CurrentRepoDatasetMultiple)
m.Get("/my_datasets_m", repo.MyDatasetsMultiple)


Loading…
Cancel
Save