|
|
|
@@ -59,6 +59,7 @@ var ( |
|
|
|
benchmarkGpuInfos *models.GpuInfos |
|
|
|
benchmarkResourceSpecs *models.ResourceSpecs |
|
|
|
trainGpuInfos *models.GpuInfos |
|
|
|
inferenceGpuInfos *models.GpuInfos |
|
|
|
) |
|
|
|
|
|
|
|
const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types" |
|
|
|
@@ -130,6 +131,11 @@ func cloudBrainNewDataPrepare(ctx *context.Context) error { |
|
|
|
} |
|
|
|
ctx.Data["train_gpu_types"] = trainGpuInfos.GpuInfo |
|
|
|
|
|
|
|
if inferenceGpuInfos == nil { |
|
|
|
json.Unmarshal([]byte(setting.InferenceGpuTypes), &inferenceGpuInfos) |
|
|
|
} |
|
|
|
ctx.Data["inference_gpu_types"] = inferenceGpuInfos.GpuInfo |
|
|
|
|
|
|
|
if benchmarkGpuInfos == nil { |
|
|
|
json.Unmarshal([]byte(setting.BenchmarkGpuTypes), &benchmarkGpuInfos) |
|
|
|
} |
|
|
|
@@ -150,6 +156,11 @@ func cloudBrainNewDataPrepare(ctx *context.Context) error { |
|
|
|
} |
|
|
|
ctx.Data["train_resource_specs"] = cloudbrain.TrainResourceSpecs.ResourceSpec |
|
|
|
|
|
|
|
if cloudbrain.InferenceResourceSpecs == nil { |
|
|
|
json.Unmarshal([]byte(setting.InferenceResourceSpecs), &cloudbrain.InferenceResourceSpecs) |
|
|
|
} |
|
|
|
ctx.Data["inference_resource_specs"] = cloudbrain.InferenceResourceSpecs.ResourceSpec |
|
|
|
|
|
|
|
if cloudbrain.SpecialPools != nil { |
|
|
|
var debugGpuTypes []*models.GpuInfo |
|
|
|
var trainGpuTypes []*models.GpuInfo |
|
|
|
@@ -610,7 +621,7 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo |
|
|
|
|
|
|
|
var task *models.Cloudbrain |
|
|
|
var err error |
|
|
|
if jobType == models.JobTypeTrain { |
|
|
|
if jobType == models.JobTypeTrain || jobType == models.JobTypeInference { |
|
|
|
task, err = models.GetCloudbrainByJobID(ctx.Params(":jobid")) |
|
|
|
} else { |
|
|
|
task, err = models.GetCloudbrainByIDWithDeleted(ctx.Params(":id")) |
|
|
|
@@ -641,6 +652,18 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo |
|
|
|
ctx.Data["ShareMemMiB"] = tmp.ShareMemMiB |
|
|
|
} |
|
|
|
} |
|
|
|
} else if task.JobType == string(models.JobTypeInference) { |
|
|
|
if cloudbrain.InferenceResourceSpecs == nil { |
|
|
|
json.Unmarshal([]byte(setting.InferenceResourceSpecs), &cloudbrain.InferenceResourceSpecs) |
|
|
|
} |
|
|
|
for _, tmp := range cloudbrain.InferenceResourceSpecs.ResourceSpec { |
|
|
|
if tmp.Id == task.ResourceSpecId { |
|
|
|
ctx.Data["GpuNum"] = tmp.GpuNum |
|
|
|
ctx.Data["CpuNum"] = tmp.CpuNum |
|
|
|
ctx.Data["MemMiB"] = tmp.MemMiB |
|
|
|
ctx.Data["ShareMemMiB"] = tmp.ShareMemMiB |
|
|
|
} |
|
|
|
} |
|
|
|
} else { |
|
|
|
if cloudbrain.ResourceSpecs == nil { |
|
|
|
json.Unmarshal([]byte(setting.ResourceSpecs), &cloudbrain.ResourceSpecs) |
|
|
|
@@ -669,6 +692,15 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo |
|
|
|
ctx.Data["resource_type"] = resourceType.Value |
|
|
|
} |
|
|
|
} |
|
|
|
} else if task.JobType == string(models.JobTypeInference) { |
|
|
|
if inferenceGpuInfos == nil { |
|
|
|
json.Unmarshal([]byte(setting.InferenceGpuTypes), &inferenceGpuInfos) |
|
|
|
} |
|
|
|
for _, resourceType := range inferenceGpuInfos.GpuInfo { |
|
|
|
if resourceType.Queue == jobRes.Config.GpuType { |
|
|
|
ctx.Data["resource_type"] = resourceType.Value |
|
|
|
} |
|
|
|
} |
|
|
|
} else if cloudbrain.IsBenchmarkJob(task.JobType) { |
|
|
|
if benchmarkGpuInfos == nil { |
|
|
|
json.Unmarshal([]byte(setting.BenchmarkGpuTypes), &benchmarkGpuInfos) |
|
|
|
@@ -2472,7 +2504,7 @@ func InferenceCloudBrainJobNew(ctx *context.Context) { |
|
|
|
} |
|
|
|
|
|
|
|
func InferenceCloudBrainJobShow(ctx *context.Context) { |
|
|
|
cloudBrainShow(ctx, tplCloudBrainInferenceJobShow, models.JobTypeTrain) |
|
|
|
cloudBrainShow(ctx, tplCloudBrainInferenceJobShow, models.JobTypeInference) |
|
|
|
} |
|
|
|
|
|
|
|
func DownloadInferenceResultFile(ctx *context.Context) { |
|
|
|
|