Browse Source

fix-bug

tags/v1.22.6.2^2
liuzx 3 years ago
parent
commit
b9650e6396
1 changed file with 74 additions and 54 deletions
  1. +74
    -54
      routers/repo/modelarts.go

+ 74
- 54
routers/repo/modelarts.go View File

@@ -991,19 +991,6 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
FlavorName := form.FlavorName
VersionCount := modelarts.VersionCount
EngineName := form.EngineName
if IsDatasetUseCountExceed(uuid) {
log.Error("DatasetUseCount is Exceed:%v")
trainJobErrorNewDataPrepare(ctx, form)
ctx.RenderWithErr("DatasetUseCount is Exceed", tplModelArtsTrainJobNew, &form)
return
}
_, datasetNames, err := models.GetDatasetInfo(uuid)
if err != nil {
log.Error("GetDatasetInfo failed:%v", err, ctx.Data["MsgID"])
trainJobErrorNewDataPrepare(ctx, form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tplModelArtsTrainJobNew, &form)
return
}

count, err := models.GetCloudbrainTrainJobCountByUserID(ctx.User.ID)
if err != nil {
@@ -1114,7 +1101,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
Value: modelarts.Ascend,
})
}
DatasUrlList, dataUrl, isMultiDataset, err := GetDatasUrlListByUUIDS(uuid)
DatasUrlList, dataUrl, datasetNames, isMultiDataset, err := GetDatasUrlListByUUIDS(uuid)
if err != nil {
log.Error("Failed to GetDatasUrlListByUUIDS: %v", err)
trainJobErrorNewDataPrepare(ctx, form)
@@ -1270,20 +1257,6 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
EngineName := form.EngineName
isLatestVersion := modelarts.IsLatestVersion

if IsDatasetUseCountExceed(uuid) {
log.Error("DatasetUseCount is Exceed:%v")
versionErrorDataPrepare(ctx, form)
ctx.RenderWithErr("DatasetUseCount is Exceed", tplModelArtsTrainJobVersionNew, &form)
return
}
_, datasetNames, err := models.GetDatasetInfo(uuid)
if err != nil {
log.Error("GetDatasetInfo failed:%v", err, ctx.Data["MsgID"])
versionErrorDataPrepare(ctx, form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tplModelArtsTrainJobVersionNew, &form)
return
}

canNewJob, _ := canUserCreateTrainJobVersion(ctx, latestTask.UserID)
if !canNewJob {
ctx.RenderWithErr("user cann't new trainjob", tplModelArtsTrainJobVersionNew, &form)
@@ -1368,7 +1341,8 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
Value: modelarts.Ascend,
})
}
DatasUrlList, dataUrl, isMultiDataset, err := GetDatasUrlListByUUIDS(uuid)

DatasUrlList, dataUrl, datasetNames, isMultiDataset, err := GetDatasUrlListByUUIDS(uuid)
if err != nil {
log.Error("Failed to GetDatasUrlListByUUIDS: %v", err)
versionErrorDataPrepare(ctx, form)
@@ -2508,37 +2482,83 @@ func TrainJobDownloadLogFile(ctx *context.Context) {
ctx.Resp.Header().Set("Cache-Control", "max-age=0")
http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently)
}
func GetDatasUrlListByUUIDS(uuidStr string) ([]models.Datasurl, string, bool, error) {
func GetDatasUrlListByUUIDS(uuidStr string) ([]models.Datasurl, string, string, bool, error) {
var isMultiDataset bool
var dataUrl string
var DatasUrlList []models.Datasurl
uuidList := strings.Split(uuidStr, ";")
isMultiDataset := false

if len(uuidList) >= 1 {
for _, uuid := range uuidList {
attach, err := models.GetAttachmentByUUID(uuid)
if err != nil {
log.Error("GetAttachmentByUUID failed: %v", err)
return nil, "", isMultiDataset, err
}
datasetName := attach.Name
index := strings.LastIndex(datasetName, ".")
if index <= 0 {
index = 0
var datasetNames string
var datasUrlList []models.Datasurl
uuids := strings.Split(uuidStr, ";")
if len(uuids) > setting.MaxDatasetNum {
log.Error("the dataset count(%d) exceed the limit", len(uuids))
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset count exceed the limit")
}

datasetInfos := make(map[string]models.DatasetInfo)
attachs, err := models.GetAttachmentsByUUIDs(uuids)
if err != nil {
log.Error("GetAttachmentsByUUIDs failed: %v", err)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("GetAttachmentsByUUIDs failed")
}
for i, attach := range attachs {
fileName := strings.TrimSuffix(strings.TrimSuffix(strings.TrimSuffix(attach.Name, ".zip"), ".tar.gz"), ".tgz")
for _, datasetInfo := range datasetInfos {
if fileName == datasetInfo.Name {
log.Error("the dataset name is same: %v", attach.Name)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset name is same")
}
datasetNameHead := datasetName[:index]
datasetUrl := "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/"
DatasUrlList = append(DatasUrlList, models.Datasurl{
}
if len(attachs) <= 1 {
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/"
isMultiDataset = false
} else {
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attachs[0].UUID[0:1], attachs[0].UUID[1:2]) + "/" + attachs[0].UUID + attachs[0].UUID + "/"
datasetUrl := "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/"
datasUrlList = append(datasUrlList, models.Datasurl{
DatasetUrl: datasetUrl,
DatasetName: datasetNameHead,
DatasetName: fileName,
})
isMultiDataset = true
}

if i == 0 {
datasetNames = attach.Name
} else {
datasetNames += ";" + attach.Name
}
firstDataset := uuidList[0]
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(firstDataset[0:1], firstDataset[1:2]) + "/" + firstDataset + firstDataset + "/"
isMultiDataset = true
return DatasUrlList, dataUrl, isMultiDataset, nil
}
return nil, "", isMultiDataset, nil

return datasUrlList, dataUrl, datasetNames, isMultiDataset, nil

// var dataUrl string
// var DatasUrlList []models.Datasurl
// uuidList := strings.Split(uuidStr, ";")
// isMultiDataset := false

// if len(uuidList) >= 1 {
// for _, uuid := range uuidList {
// attach, err := models.GetAttachmentByUUID(uuid)
// if err != nil {
// log.Error("GetAttachmentByUUID failed: %v", err)
// return nil, "", isMultiDataset, err
// }
// datasetName := attach.Name
// index := strings.LastIndex(datasetName, ".")
// if index <= 0 {
// index = 0
// }
// datasetNameHead := datasetName[:index]
// datasetUrl := "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/"
// DatasUrlList = append(DatasUrlList, models.Datasurl{
// DatasetUrl: datasetUrl,
// DatasetName: datasetNameHead,
// })
// }
// firstDataset := uuidList[0]
// dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(firstDataset[0:1], firstDataset[1:2]) + "/" + firstDataset + firstDataset + "/"
// isMultiDataset = true
// return DatasUrlList, dataUrl, isMultiDataset, nil
// }
// return nil, "", isMultiDataset, nil
}
func IsDatasetUseCountExceed(uuid string) bool {
uuidList := strings.Split(uuid, ";")


Loading…
Cancel
Save