Browse Source

Merge branch 'multi-dataset' of https://git.openi.org.cn/OpenI/aiforge into multi-dataset

tags/v1.22.6.2^2
zhoupzh 3 years ago
parent
commit
e1e91e7270
1 changed file with 49 additions and 68 deletions
  1. +49
    -68
      routers/repo/modelarts.go

+ 49
- 68
routers/repo/modelarts.go View File

@@ -991,19 +991,6 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
FlavorName := form.FlavorName
VersionCount := modelarts.VersionCount
EngineName := form.EngineName
if IsDatasetUseCountExceed(uuid) {
log.Error("DatasetUseCount is Exceed:%v")
trainJobErrorNewDataPrepare(ctx, form)
ctx.RenderWithErr("DatasetUseCount is Exceed", tplModelArtsTrainJobNew, &form)
return
}
_, datasetNames, err := models.GetDatasetInfo(uuid)
if err != nil {
log.Error("GetDatasetInfo failed:%v", err, ctx.Data["MsgID"])
trainJobErrorNewDataPrepare(ctx, form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tplModelArtsTrainJobNew, &form)
return
}

count, err := models.GetCloudbrainTrainJobCountByUserID(ctx.User.ID)
if err != nil {
@@ -1114,15 +1101,15 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
Value: modelarts.Ascend,
})
}
DatasUrlList, dataUrl, isMultiDataset, err := GetDatasUrlListByUUIDS(uuid)
datasUrlList, dataUrl, datasetNames, isMultiDataset, err := getDatasUrlListByUUIDS(uuid)
if err != nil {
log.Error("Failed to GetDatasUrlListByUUIDS: %v", err)
log.Error("Failed to getDatasUrlListByUUIDS: %v", err)
trainJobErrorNewDataPrepare(ctx, form)
ctx.RenderWithErr("Failed to GetDatasUrlListByUUIDS:"+err.Error(), tplModelArtsTrainJobNew, &form)
ctx.RenderWithErr("Failed to getDatasUrlListByUUIDS:"+err.Error(), tplModelArtsTrainJobNew, &form)
return
}
dataPath := dataUrl
jsondatas, err := json.Marshal(DatasUrlList)
jsondatas, err := json.Marshal(datasUrlList)
if err != nil {
log.Error("Failed to Marshal: %v", err)
trainJobErrorNewDataPrepare(ctx, form)
@@ -1270,20 +1257,6 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
EngineName := form.EngineName
isLatestVersion := modelarts.IsLatestVersion

if IsDatasetUseCountExceed(uuid) {
log.Error("DatasetUseCount is Exceed:%v")
versionErrorDataPrepare(ctx, form)
ctx.RenderWithErr("DatasetUseCount is Exceed", tplModelArtsTrainJobVersionNew, &form)
return
}
_, datasetNames, err := models.GetDatasetInfo(uuid)
if err != nil {
log.Error("GetDatasetInfo failed:%v", err, ctx.Data["MsgID"])
versionErrorDataPrepare(ctx, form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tplModelArtsTrainJobVersionNew, &form)
return
}

canNewJob, _ := canUserCreateTrainJobVersion(ctx, latestTask.UserID)
if !canNewJob {
ctx.RenderWithErr("user cann't new trainjob", tplModelArtsTrainJobVersionNew, &form)
@@ -1368,15 +1341,16 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
Value: modelarts.Ascend,
})
}
DatasUrlList, dataUrl, isMultiDataset, err := GetDatasUrlListByUUIDS(uuid)

datasUrlList, dataUrl, datasetNames, isMultiDataset, err := getDatasUrlListByUUIDS(uuid)
if err != nil {
log.Error("Failed to GetDatasUrlListByUUIDS: %v", err)
log.Error("Failed to getDatasUrlListByUUIDS: %v", err)
versionErrorDataPrepare(ctx, form)
ctx.RenderWithErr("Failed to GetDatasUrlListByUUIDS:"+err.Error(), tplModelArtsTrainJobVersionNew, &form)
ctx.RenderWithErr("Failed to getDatasUrlListByUUIDS:"+err.Error(), tplModelArtsTrainJobVersionNew, &form)
return
}
dataPath := dataUrl
jsondatas, err := json.Marshal(DatasUrlList)
jsondatas, err := json.Marshal(datasUrlList)
if err != nil {
log.Error("Failed to Marshal: %v", err)
versionErrorDataPrepare(ctx, form)
@@ -2508,43 +2482,50 @@ func TrainJobDownloadLogFile(ctx *context.Context) {
ctx.Resp.Header().Set("Cache-Control", "max-age=0")
http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently)
}
func GetDatasUrlListByUUIDS(uuidStr string) ([]models.Datasurl, string, bool, error) {
func getDatasUrlListByUUIDS(uuidStr string) ([]models.Datasurl, string, string, bool, error) {
var isMultiDataset bool
var dataUrl string
var DatasUrlList []models.Datasurl
uuidList := strings.Split(uuidStr, ";")
isMultiDataset := false

if len(uuidList) >= 1 {
for _, uuid := range uuidList {
attach, err := models.GetAttachmentByUUID(uuid)
if err != nil {
log.Error("GetAttachmentByUUID failed: %v", err)
return nil, "", isMultiDataset, err
var datasetNames string
var datasUrlList []models.Datasurl
uuids := strings.Split(uuidStr, ";")
if len(uuids) > setting.MaxDatasetNum {
log.Error("the dataset count(%d) exceed the limit", len(uuids))
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset count exceed the limit")
}

datasetInfos := make(map[string]models.DatasetInfo)
attachs, err := models.GetAttachmentsByUUIDs(uuids)
if err != nil {
log.Error("GetAttachmentsByUUIDs failed: %v", err)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("GetAttachmentsByUUIDs failed")
}
for i, attach := range attachs {
fileName := strings.TrimSuffix(strings.TrimSuffix(strings.TrimSuffix(attach.Name, ".zip"), ".tar.gz"), ".tgz")
for _, datasetInfo := range datasetInfos {
if fileName == datasetInfo.Name {
log.Error("the dataset name is same: %v", attach.Name)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset name is same")
}
datasetName := attach.Name
index := strings.LastIndex(datasetName, ".")
if index <= 0 {
index = 0
}
datasetNameHead := datasetName[:index]
datasetUrl := "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/"
DatasUrlList = append(DatasUrlList, models.Datasurl{
}
if len(attachs) <= 1 {
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/"
isMultiDataset = false
} else {
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attachs[0].UUID[0:1], attachs[0].UUID[1:2]) + "/" + attachs[0].UUID + attachs[0].UUID + "/"
datasetUrl := "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/"
datasUrlList = append(datasUrlList, models.Datasurl{
DatasetUrl: datasetUrl,
DatasetName: datasetNameHead,
DatasetName: fileName,
})
isMultiDataset = true
}

if i == 0 {
datasetNames = attach.Name
} else {
datasetNames += ";" + attach.Name
}
firstDataset := uuidList[0]
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(firstDataset[0:1], firstDataset[1:2]) + "/" + firstDataset + firstDataset + "/"
isMultiDataset = true
return DatasUrlList, dataUrl, isMultiDataset, nil
}
return nil, "", isMultiDataset, nil
}
func IsDatasetUseCountExceed(uuid string) bool {
uuidList := strings.Split(uuid, ";")
if len(uuidList) > setting.MaxDatasetNum {
return true
} else {
return false
}

return datasUrlList, dataUrl, datasetNames, isMultiDataset, nil
}

Loading…
Cancel
Save