diff --git a/models/action.go b/models/action.go
index 4b6f1dbad..869acb762 100755
--- a/models/action.go
+++ b/models/action.go
@@ -65,6 +65,8 @@ const (
ActionCreateImage //36
ActionImageRecommend //37
ActionChangeUserAvatar //38
+ ActionCreateGrampusNPUDebugTask //39
+ ActionCreateGrampusGPUDebugTask //40
)
// Action represents user operation type and other information to
@@ -375,6 +377,8 @@ func (a *Action) IsCloudbrainAction() bool {
ActionCreateInferenceTask,
ActionCreateBenchMarkTask,
ActionCreateGPUTrainTask,
+ ActionCreateGrampusGPUDebugTask,
+ ActionCreateGrampusNPUDebugTask,
ActionCreateGrampusNPUTrainTask,
ActionCreateGrampusGPUTrainTask:
return true
diff --git a/models/ai_model_manage.go b/models/ai_model_manage.go
index 4fe74e555..d55370ea1 100644
--- a/models/ai_model_manage.go
+++ b/models/ai_model_manage.go
@@ -221,6 +221,19 @@ func SaveModelToDb(model *AiModelManage) error {
return nil
}
+func QueryModelConvertByName(name string, repoId int64) ([]*AiModelConvert, error) {
+ sess := x.NewSession()
+ defer sess.Close()
+	sess.Select("*").Table(new(AiModelConvert)).
+		Where("name=? and repo_id=?", name, repoId).OrderBy("created_unix desc")
+ aiModelManageConvertList := make([]*AiModelConvert, 0)
+ err := sess.Find(&aiModelManageConvertList)
+ if err == nil {
+ return aiModelManageConvertList, nil
+ }
+ return nil, err
+}
+
func QueryModelConvertById(id string) (*AiModelConvert, error) {
sess := x.NewSession()
defer sess.Close()
@@ -390,6 +403,18 @@ func QueryModelByName(name string, repoId int64) []*AiModelManage {
return aiModelManageList
}
+func QueryModelByPath(path string) (*AiModelManage, error) {
+ modelManage := new(AiModelManage)
+ has, err := x.Where("path=?", path).Get(modelManage)
+ if err != nil {
+ return nil, err
+ }
+ if !has {
+ return nil, ErrNotExist{}
+ }
+ return modelManage, nil
+}
+
func QueryModel(opts *AiModelQueryOptions) ([]*AiModelManage, int64, error) {
sess := x.NewSession()
defer sess.Close()
@@ -460,6 +485,12 @@ func QueryModel(opts *AiModelQueryOptions) ([]*AiModelManage, int64, error) {
return aiModelManages, count, nil
}
+func QueryModelConvertCountByRepoID(repoId int64) int64 {
+ convert := new(AiModelConvert)
+ total, _ := x.Where("repo_id =?", repoId).Count(convert)
+ return total
+}
+
func QueryModelConvertByRepoID(repoId int64) ([]*AiModelConvert, error) {
sess := x.NewSession()
defer sess.Close()
diff --git a/models/attachment.go b/models/attachment.go
index 2b747db21..2788ef559 100755
--- a/models/attachment.go
+++ b/models/attachment.go
@@ -259,6 +259,17 @@ func GetAttachmentsByCommentID(commentID int64) ([]*Attachment, error) {
return getAttachmentsByCommentID(x, commentID)
}
+func GetAttachmentByDatasetIdFileName(fileName string, datasetId int64) (*Attachment, error) {
+ attach := &Attachment{DatasetID: datasetId, Name: fileName}
+ has, err := x.Get(attach)
+ if err != nil {
+ return nil, err
+ } else if !has {
+		return nil, ErrNotExist{}
+ }
+ return attach, nil
+}
+
func getAttachmentsByCommentID(e Engine, commentID int64) ([]*Attachment, error) {
attachments := make([]*Attachment, 0, 10)
return attachments, e.Where("comment_id=?", commentID).Find(&attachments)
diff --git a/models/cloudbrain.go b/models/cloudbrain.go
index 366358638..e12103be3 100755
--- a/models/cloudbrain.go
+++ b/models/cloudbrain.go
@@ -60,6 +60,7 @@ const (
JobTypeModelSafety JobType = "MODELSAFETY"
JobTypeSnn4imagenet JobType = "SNN4IMAGENET"
JobTypeBrainScore JobType = "BRAINSCORE"
+ JobTypeSnn4Ecoset JobType = "SNN4ECOSET"
JobTypeTrain JobType = "TRAIN"
JobTypeInference JobType = "INFERENCE"
@@ -114,6 +115,7 @@ const (
GrampusStatusFailed = "FAILED"
GrampusStatusSucceeded = "SUCCEEDED"
GrampusStatusStopped = "STOPPED"
+ GrampusStatusStopping = "STOPPING"
GrampusStatusUnknown = "UNKNOWN"
GrampusStatusWaiting = "WAITING"
@@ -181,7 +183,7 @@ type Cloudbrain struct {
BranchName string //分支名称
Parameters string //传给modelarts的param参数
BootFile string //启动文件
- DataUrl string //数据集的obs路径
+ DataUrl string `xorm:"varchar(3500)"` //数据集的obs路径
LogUrl string //日志输出的obs路径
PreVersionId int64 //父版本的版本id
FlavorCode string //modelarts上的规格id
@@ -298,6 +300,12 @@ func (task *Cloudbrain) IsUserHasRight(user *User) bool {
}
return user.IsAdmin || user.ID == task.UserID
}
+func (task *Cloudbrain) IsGPUTask() bool {
+ return task.ComputeResource == GPUResource
+}
+func (task *Cloudbrain) IsNPUTask() bool {
+ return task.ComputeResource == NPUResource
+}
func ConvertDurationToStr(duration int64) string {
if duration <= 0 {
@@ -328,6 +336,9 @@ func IsModelArtsDebugJobTerminal(status string) bool {
func IsCloudBrainOneDebugJobTerminal(status string) bool {
return status == string(JobStopped) || status == string(JobFailed) || status == string(JobSucceeded)
}
+func IsModelBenchMarkJobType(jobType string) bool {
+ return jobType == string(JobTypeSnn4imagenet) || jobType == string(JobTypeBrainScore) || jobType == string(JobTypeSnn4Ecoset)
+}
func ParseAndSetDurationFromCloudBrainOne(result JobResultPayload, task *Cloudbrain) {
isActivated := result.JobStatus.CreatedTime > 0
@@ -1064,6 +1075,9 @@ type UserImageConfig struct {
CreateVersion bool `json:"create_version"`
Flavor Flavor `json:"flavor"`
PoolID string `json:"pool_id"`
+ ShareAddr string `json:"nas_share_addr"`
+ MountPath string `json:"nas_mount_path"`
+ NasType string `json:"nas_type"`
}
type CreateTrainJobParams struct {
@@ -1087,13 +1101,18 @@ type Config struct {
CreateVersion bool `json:"create_version"`
Flavor Flavor `json:"flavor"`
PoolID string `json:"pool_id"`
+ ShareAddr string `json:"nas_share_addr"`
+ MountPath string `json:"nas_mount_path"`
+ NasType string `json:"nas_type"`
}
+
type CreateInferenceJobParams struct {
JobName string `json:"job_name"`
Description string `json:"job_desc"`
InfConfig InfConfig `json:"config"`
WorkspaceID string `json:"workspace_id"`
}
+
type CreateInfUserImageParams struct {
JobName string `json:"job_name"`
Description string `json:"job_desc"`
@@ -1151,6 +1170,9 @@ type TrainJobVersionConfig struct {
Flavor Flavor `json:"flavor"`
PoolID string `json:"pool_id"`
PreVersionId int64 `json:"pre_version_id"`
+ ShareAddr string `json:"nas_share_addr"`
+ MountPath string `json:"nas_mount_path"`
+ NasType string `json:"nas_type"`
}
type TrainJobVersionUserImageConfig struct {
@@ -1166,6 +1188,9 @@ type TrainJobVersionUserImageConfig struct {
PreVersionId int64 `json:"pre_version_id"`
UserImageUrl string `json:"user_image_url"`
UserCommand string `json:"user_command"`
+ ShareAddr string `json:"nas_share_addr"`
+ MountPath string `json:"nas_mount_path"`
+ NasType string `json:"nas_type"`
}
type CreateConfigParams struct {
@@ -1181,6 +1206,7 @@ type CreateConfigParams struct {
LogUrl string `json:"log_url"`
Flavor Flavor `json:"flavor"`
PoolID string `json:"pool_id"`
+ Volumes []Volumes `json:"volumes"`
}
type Parameter struct {
@@ -1203,6 +1229,13 @@ type DatasetDownload struct {
IsDelete bool `json:"is_delete"`
}
+type ModelDownload struct {
+ Name string `json:"name"`
+ DownloadLink string `json:"download_link"`
+ RepositoryLink string `json:"repository_link"`
+ IsDelete bool `json:"is_delete"`
+}
+
type DataSource struct {
DatasetID string `json:"dataset_id"`
DatasetVersion string `json:"dataset_version"`
@@ -1446,6 +1479,20 @@ type GrampusJobInfo struct {
UserID string `json:"userId"`
Tasks []GrampusTasks `json:"tasks"`
}
+
+type GrampusNotebookInfo struct {
+ StartedAt int64 `json:"startedAt"`
+ RunSec int64 `json:"runSec"`
+ CompletedAt int64 `json:"completedAt"`
+ CreatedAt int64 `json:"createdAt"`
+ UpdatedAt int64 `json:"updatedAt"`
+ Desc string `json:"desc"`
+ JobID string `json:"id"`
+ Name string `json:"name"`
+ Status string `json:"status"`
+ UserID string `json:"userId"`
+ Tasks []GrampusNotebookTask `json:"tasks"`
+}
type Center struct {
ID string `json:"id"`
Name string `json:"name"`
@@ -1522,9 +1569,22 @@ type GetGrampusJobResponse struct {
JobInfo GrampusJobInfo `json:"otJob"`
}
+type GrampusNotebookResponse struct {
+ GrampusResult
+ JobInfo GrampusNotebookInfo `json:"otJob"`
+}
+
+type GrampusNotebookRestartResponse struct {
+ GrampusResult
+ NewId string `json:"newId"`
+ Status string `json:"status"`
+}
+
type GrampusStopJobResponse struct {
GrampusResult
- StoppedAt int64 `json:"stoppedAt"`
+ StoppedAt int64 `json:"stoppedAt"`
+ ID string `json:"id"`
+ Status string `json:"status"`
}
type GrampusTasks struct {
@@ -1541,12 +1601,32 @@ type GrampusTasks struct {
Code GrampusDataset `json:"code"`
BootFile string `json:"bootFile"`
}
+type GrampusNotebookTask struct {
+ AutoStopDuration int `json:"autoStopDuration"`
+ Name string `json:"name"`
+ Capacity int `json:"capacity"`
+ CenterID []string `json:"centerID"`
+ CenterName []string `json:"centerName"`
+ Code GrampusDataset `json:"code"`
+ Datasets []GrampusDataset `json:"datasets"`
+ CodeUrl string `json:"codeUrl"`
+ DataUrl string `json:"dataUrl"`
+ ImageId string `json:"imageId"`
+ ImageUrl string `json:"imageUrl"`
+ ResourceSpecId string `json:"resourceSpecId"`
+ Token string `json:"token"`
+ Url string `json:"url"`
+ Status string `json:"status"`
+ Command string `json:"command"`
+}
type GrampusDataset struct {
- Name string `json:"name"`
- Bucket string `json:"bucket"`
- EndPoint string `json:"endPoint"`
- ObjectKey string `json:"objectKey"`
+ Name string `json:"name"`
+ Bucket string `json:"bucket"`
+ EndPoint string `json:"endPoint"`
+ ObjectKey string `json:"objectKey"`
+ ContainerPath string `json:"containerPath"`
+ ReadOnly bool `json:"readOnly"`
}
type CreateGrampusJobRequest struct {
@@ -1554,6 +1634,11 @@ type CreateGrampusJobRequest struct {
Tasks []GrampusTasks `json:"tasks"`
}
+type CreateGrampusNotebookRequest struct {
+ Name string `json:"name"`
+ Tasks []GrampusNotebookTask `json:"tasks"`
+}
+
type GetTrainJobMetricStatisticResult struct {
TrainJobResult
Interval int `json:"interval"` //查询的时间间隔,单位为分钟
@@ -1745,7 +1830,7 @@ func QueryModelTrainJobVersionList(jobId string) ([]*Cloudbrain, int, error) {
return cloudbrains, int(len(cloudbrains)), nil
}
-func QueryModelTrainJobList(repoId int64) ([]*CloudbrainInfo, int, error) {
+func QueryModelTrainJobList(repoId int64) ([]*Cloudbrain, int, error) {
sess := x.NewSession()
defer sess.Close()
var cond = builder.NewCond()
@@ -1762,14 +1847,14 @@ func QueryModelTrainJobList(repoId int64) ([]*CloudbrainInfo, int, error) {
// builder.In("type", 0, 1),
// )
- cloudbrains := make([]*CloudbrainInfo, 0)
+ cloudbrains := make([]*Cloudbrain, 0)
if err := sess.Select("job_id,display_job_name").Table(&Cloudbrain{}).Where(cond).OrderBy("created_unix DESC").
Find(&cloudbrains); err != nil {
return nil, 0, fmt.Errorf("Find: %v", err)
}
keys := make(map[string]string)
- uniqueElements := make([]*CloudbrainInfo, 0)
+ uniqueElements := make([]*Cloudbrain, 0)
for _, entry := range cloudbrains {
if _, value := keys[entry.JobID]; !value {
keys[entry.JobID] = entry.DisplayJobName
@@ -1865,6 +1950,7 @@ func CreateCloudbrain(cloudbrain *Cloudbrain) (err error) {
session.Commit()
go IncreaseDatasetUseCount(cloudbrain.Uuid)
+ go OperateRepoAITaskNum(cloudbrain.RepoID, 1)
return nil
}
@@ -2020,13 +2106,42 @@ func DeleteJob(job *Cloudbrain) error {
func deleteJob(e Engine, job *Cloudbrain) error {
_, err := e.ID(job.ID).Delete(job)
+ if err == nil {
+ go updateAITaskNumWhenDeleteJob(job)
+ }
return err
}
+func updateAITaskNumWhenDeleteJob(job *Cloudbrain) {
+ repoId := job.RepoID
+ if repoId == 0 {
+ t := &Cloudbrain{}
+ _, tempErr := x.ID(job.ID).Unscoped().Get(t)
+ if tempErr != nil {
+ log.Error("updateAITaskNumWhenDeleteJob error.%v", tempErr)
+ return
+ }
+ repoId = t.RepoID
+ }
+
+ if repoId > 0 {
+ go OperateRepoAITaskNum(repoId, -1)
+ }
+}
+
func GetCloudbrainByName(jobName string) (*Cloudbrain, error) {
cb := &Cloudbrain{JobName: jobName}
return getRepoCloudBrain(cb)
}
+func GetWaitOrRunFileNotebookByRepo(repoId int64, cloudbrainType int) (*Cloudbrain, error) {
+ cloudBrain := new(Cloudbrain)
+ has, err := x.In("status", JobWaiting, JobRunning, ModelArtsCreateQueue, ModelArtsCreating, ModelArtsStarting,
+ ModelArtsReadyToStart, ModelArtsResizing, ModelArtsStartQueuing, ModelArtsRunning, ModelArtsDeleting, ModelArtsRestarting).Where("repo_id=? and type=? and boot_file!=''", repoId, cloudbrainType).Get(cloudBrain)
+ if has {
+ return cloudBrain, err
+ }
+ return nil, err
+}
func CanDelJob(isSigned bool, user *User, job *CloudbrainInfo) bool {
if !isSigned || (job.Status != string(JobStopped) && job.Status != string(JobFailed) && job.Status != string(ModelArtsStartFailed) && job.Status != string(ModelArtsCreateFailed)) {
@@ -2060,7 +2175,7 @@ func GetCloudBrainUnStoppedJob() ([]*Cloudbrain, error) {
Find(&cloudbrains)
}
-func GetCloudBrainOneStoppedNotDebugJobDaysAgo(days int, limit int) ([]*Cloudbrain, error) {
+func GetGPUStoppedNotDebugJobDaysAgo(days int, limit int) ([]*Cloudbrain, error) {
cloudbrains := make([]*Cloudbrain, 0, 10)
endTimeBefore := time.Now().Unix() - int64(days)*24*3600
missEndTimeBefore := endTimeBefore - 24*3600
@@ -2069,7 +2184,7 @@ func GetCloudBrainOneStoppedNotDebugJobDaysAgo(days int, limit int) ([]*Cloudbra
JobStopped, JobSucceeded, JobFailed, ModelArtsCreateFailed, ModelArtsStartFailed, ModelArtsUnavailable, ModelArtsResizFailed, ModelArtsDeleted,
ModelArtsStopped, ModelArtsTrainJobCanceled, ModelArtsTrainJobCheckFailed, ModelArtsTrainJobCompleted, ModelArtsTrainJobDeleteFailed, ModelArtsTrainJobDeployServiceFailed,
ModelArtsTrainJobFailed, ModelArtsTrainJobImageFailed, ModelArtsTrainJobKilled, ModelArtsTrainJobLost, ModelArtsTrainJobSubmitFailed, ModelArtsTrainJobSubmitModelFailed).
- Where("(((end_time is null or end_time=0) and updated_unix and updated_unix != 0 ) or (end_time and end_time != 0)) and cleared=false and type=0 and job_type != 'DEBUG'", missEndTimeBefore, endTimeBefore).
+ Where("(((end_time is null or end_time=0) and updated_unix and updated_unix != 0 ) or (end_time and end_time != 0)) and cleared=false and (type=0 or (type =2 and compute_resource='CPU/GPU')) and job_type != 'DEBUG'", missEndTimeBefore, endTimeBefore).
Limit(limit).
Find(&cloudbrains)
}
@@ -2077,14 +2192,14 @@ func GetCloudBrainOneStoppedNotDebugJobDaysAgo(days int, limit int) ([]*Cloudbra
/**
本方法考虑了再次调试的情况,多次调试取最后一次的任务的结束时间
*/
-func GetCloudBrainOneStoppedDebugJobDaysAgo(days int, limit int) ([]*Cloudbrain, error) {
+func GetGPUStoppedDebugJobDaysAgo(days int, limit int) ([]*Cloudbrain, error) {
cloudbrains := make([]*Cloudbrain, 0, 10)
endTimeBefore := time.Now().Unix() - int64(days)*24*3600
missEndTimeBefore := endTimeBefore - 24*3600
sql := `SELECT id,job_name,job_id from (SELECT DISTINCT ON (job_name)
id, job_name, job_id,status,end_time,updated_unix,cleared
FROM cloudbrain
- where type=0 and job_type='DEBUG'
+ where (type=0 or (type =2 and compute_resource='CPU/GPU')) and job_type='DEBUG'
ORDER BY job_name, updated_unix DESC) a
where status in ('STOPPED','SUCCEEDED','FAILED') and (((end_time is null or end_time=0) and updated_unix and updated_unix != 0 ) or (end_time and end_time != 0)) and cleared=false`
@@ -2225,7 +2340,6 @@ func RestartCloudbrain(old *Cloudbrain, new *Cloudbrain) (err error) {
}
go IncreaseDatasetUseCount(new.Uuid)
-
return nil
}
func CloudbrainAll(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) {
@@ -2538,6 +2652,7 @@ type DatasetInfo struct {
DataLocalPath string
Name string
FullName string
+ Size int
}
func GetDatasetInfo(uuidStr string, grampusType ...string) (map[string]DatasetInfo, string, error) {
@@ -2593,6 +2708,7 @@ func GetDatasetInfo(uuidStr string, grampusType ...string) (map[string]DatasetIn
DataLocalPath: dataLocalPath,
Name: fileName,
FullName: attach.Name,
+ Size: int(attach.Size),
}
if i == 0 {
datasetNames = attach.Name
diff --git a/models/cloudbrain_static.go b/models/cloudbrain_static.go
index 40d7a2a2e..beb1ceee5 100644
--- a/models/cloudbrain_static.go
+++ b/models/cloudbrain_static.go
@@ -92,6 +92,17 @@ type HourTimeStatistic struct {
HourTimeTotalDuration map[string]int `json:"hourTimeTotalDuration"`
HourTimeUsageRate map[string]float64 `json:"hourTimeUsageRate"`
}
+type CloudbrainTypeDuration []struct {
+ Type int `xorm:"type"`
+ DurationSum int `xorm:"durationSum"`
+ CardDurationSum int `xorm:"cardDurationSum"`
+ Count int `xorm:"count"`
+}
+type CloudbrainAllDuration struct {
+ DurationSum int `xorm:"durationSum"`
+ CardDurationSum int `xorm:"cardDurationSum"`
+ Count int `xorm:"count"`
+}
func GetTodayCreatorCount(beginTime time.Time, endTime time.Time) (int64, error) {
countSql := "SELECT count(distinct user_id) FROM " +
@@ -303,7 +314,7 @@ func GetCloudbrainByTime(beginTime int64, endTime int64) ([]*CloudbrainInfo, err
builder.And(builder.Gte{"cloudbrain.start_time": beginTime}, builder.Lte{"cloudbrain.start_time": endTime}, builder.Gt{"cloudbrain.start_time": 0}),
)
cond = cond.Or(
- builder.And(builder.Eq{"cloudbrain.status": string(JobRunning)}),
+ builder.And(builder.Eq{"cloudbrain.status": string(JobRunning)}, builder.Lte{"cloudbrain.start_time": beginTime}),
)
sess.OrderBy("cloudbrain.id ASC")
cloudbrains := make([]*CloudbrainInfo, 0, 10)
@@ -425,3 +436,55 @@ func DeleteCloudbrainDurationStatistic(beginTime timeutil.TimeStamp, endTime tim
}
return nil
}
+
+func GetCloudbrainTypeCardDuration() (CloudbrainTypeDuration, error) {
+ query := `
+ SELECT
+ cloudbrain.type,
+ SUM(cloudbrain.duration) as durationSum,
+ SUM(
+ COALESCE(cloudbrain.duration *
+ CASE
+ WHEN cloudbrain.work_server_number = 0 THEN 1
+ ELSE COALESCE(cloudbrain.work_server_number, 1)
+ END *
+ COALESCE(cloudbrain_spec.acc_cards_num, 1), 0)
+ ) as cardDurationSum,
+ COUNT(*) as count
+ FROM cloudbrain
+ LEFT JOIN cloudbrain_spec
+ ON cloudbrain.id = cloudbrain_spec.cloudbrain_id
+ GROUP BY cloudbrain.type
+ `
+	// Execute the aggregation query
+ var results CloudbrainTypeDuration
+ if err := x.SQL(query).Find(&results); err != nil {
+ panic(err)
+ }
+ return results, nil
+}
+
+func GetCloudbrainAllCardDuration() (CloudbrainAllDuration, error) {
+ query := `
+ SELECT
+ SUM(cloudbrain.duration) as durationSum,
+ SUM(
+ COALESCE(cloudbrain.duration *
+ CASE
+ WHEN cloudbrain.work_server_number = 0 THEN 1
+ ELSE COALESCE(cloudbrain.work_server_number, 1)
+ END *
+ COALESCE(cloudbrain_spec.acc_cards_num, 1), 0)
+ ) as cardDurationSum,
+ COUNT(*) as count
+ FROM cloudbrain
+ LEFT JOIN cloudbrain_spec
+ ON cloudbrain.id = cloudbrain_spec.cloudbrain_id
+ `
+	// Execute the aggregation query
+ var result CloudbrainAllDuration
+ if _, err := x.SQL(query).Get(&result); err != nil {
+		return CloudbrainAllDuration{}, err
+ }
+ return result, nil
+}
diff --git a/models/list_options.go b/models/list_options.go
index 0946917fe..d6d1dcf0d 100644
--- a/models/list_options.go
+++ b/models/list_options.go
@@ -10,6 +10,26 @@ import (
"xorm.io/xorm"
)
+type AvailablePageSize int
+
+const (
+ PageSize15 AvailablePageSize = 15
+ PageSize30 AvailablePageSize = 30
+ PageSize50 AvailablePageSize = 50
+)
+
+func (s AvailablePageSize) IsLegal() bool {
+ switch s {
+ case PageSize30, PageSize50, PageSize15:
+ return true
+ }
+ return false
+}
+
+func (s AvailablePageSize) Int() int {
+ return int(s)
+}
+
// ListOptions options to paginate results
type ListOptions struct {
PageSize int
diff --git a/models/repo.go b/models/repo.go
index 832e3fc37..e390ef70d 100755
--- a/models/repo.go
+++ b/models/repo.go
@@ -231,10 +231,43 @@ type Repository struct {
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
- Hot int64 `xorm:"-"`
- Active int64 `xorm:"-"`
- Alias string `xorm:"INDEX"`
- LowerAlias string `xorm:"INDEX"`
+ Hot int64 `xorm:"-"`
+ Active int64 `xorm:"-"`
+ Alias string `xorm:"INDEX"`
+ LowerAlias string `xorm:"INDEX"`
+ AiTaskCnt int64 `xorm:"NOT NULL DEFAULT 0"`
+ ModelCnt int64 `xorm:"NOT NULL DEFAULT 0"`
+ DatasetCnt int64 `xorm:"NOT NULL DEFAULT 0"`
+ LastMonthVisits int64 `xorm:"NOT NULL DEFAULT 0"`
+ LastFourMonthCommits int64 `xorm:"NOT NULL DEFAULT 0"`
+}
+
+// Repository4Card format for front display
+type Repository4Card struct {
+ ID int64
+ OwnerID int64
+ OwnerName string
+ LowerName string
+ Name string
+ Alias string
+ NumWatches int
+ NumStars int
+ NumForks int
+ Description string
+ Topics []string
+ AiTaskCnt int64
+ ModelCnt int64
+ DatasetCnt int64
+ CreatedUnix timeutil.TimeStamp
+ UpdatedUnix timeutil.TimeStamp
+ PrimaryLanguage *LanguageStat
+ RelAvatarLink string
+ Contributors []*ContributorInfo
+ IsPrivate bool
+ IsFork bool
+ IsMirror bool
+ IsOwnerPrivate bool
+ IsArchived bool
}
type RepositoryShow struct {
@@ -243,6 +276,47 @@ type RepositoryShow struct {
Alias string
}
+func (repo *Repository) ToCardFormat() *Repository4Card {
+ link := repo.RelAvatarLink()
+ var isOwnerPrivate bool
+ if repo.Owner != nil && repo.Owner.Visibility.IsPrivate() {
+ isOwnerPrivate = true
+ }
+ result := &Repository4Card{
+ ID: repo.ID,
+ OwnerID: repo.OwnerID,
+ OwnerName: repo.OwnerName,
+ LowerName: repo.LowerName,
+ Name: repo.Name,
+ NumWatches: repo.NumWatches,
+ NumStars: repo.NumStars,
+ NumForks: repo.NumForks,
+ Description: repo.Description,
+ Topics: repo.Topics,
+ AiTaskCnt: repo.AiTaskCnt,
+ ModelCnt: repo.ModelCnt,
+ DatasetCnt: repo.DatasetCnt,
+ CreatedUnix: repo.CreatedUnix,
+ UpdatedUnix: repo.UpdatedUnix,
+ PrimaryLanguage: repo.PrimaryLanguage,
+ RelAvatarLink: link,
+ Alias: repo.Alias,
+ IsPrivate: repo.IsPrivate,
+ IsFork: repo.IsFork,
+ IsMirror: repo.IsMirror,
+ IsOwnerPrivate: isOwnerPrivate,
+ IsArchived: repo.IsArchived,
+ }
+ return result
+}
+
+type ContributorInfo struct {
+ RelAvatarLink string
+ UserName string
+ Email string
+ CommitCnt int
+}
+
// SanitizedOriginalURL returns a sanitized OriginalURL
func (repo *Repository) SanitizedOriginalURL() string {
if repo.OriginalURL == "" {
@@ -2379,6 +2453,75 @@ func CheckRepoStats(ctx context.Context) error {
}
}
// ***** END: Repository.NumForks *****
+
+ // ***** START: Repository.DatasetCnt *****
+ desc = "repository count 'dataset_cnt'"
+ results, err = x.Query("SELECT repository.id FROM `repository` WHERE repository.dataset_cnt!=(select count(1) from attachment inner join dataset on attachment.dataset_id = dataset.id where dataset.repo_id = repository.id)")
+ if err != nil {
+ log.Error("Select %s: %v", desc, err)
+ } else {
+ for _, result := range results {
+ id := com.StrTo(result["id"]).MustInt64()
+ select {
+ case <-ctx.Done():
+ log.Warn("CheckRepoStats: Cancelled")
+ return ErrCancelledf("during %s for repo ID %d", desc, id)
+ default:
+ }
+ log.Trace("Updating %s: %d", desc, id)
+ err = ResetRepoDatasetNum(id)
+ if err != nil {
+ log.Error("Update %s[%d]: %v", desc, id, err)
+ }
+ }
+ }
+ // ***** END: Repository.DatasetCnt *****
+
+ // ***** START: Repository.ModelCnt *****
+ desc = "repository count 'model_cnt'"
+ results, err = x.Query("SELECT repository.id FROM `repository` WHERE repository.model_cnt!=(select count(1) from ai_model_manage where repository.id = ai_model_manage.repo_id and ai_model_manage.size > 0 )")
+ if err != nil {
+ log.Error("Select %s: %v", desc, err)
+ } else {
+ for _, result := range results {
+ id := com.StrTo(result["id"]).MustInt64()
+ select {
+ case <-ctx.Done():
+ log.Warn("CheckRepoStats: Cancelled")
+ return ErrCancelledf("during %s for repo ID %d", desc, id)
+ default:
+ }
+ log.Trace("Updating %s: %d", desc, id)
+ err = ResetRepoModelNum(id)
+ if err != nil {
+ log.Error("Update %s[%d]: %v", desc, id, err)
+ }
+ }
+ }
+ // ***** END: Repository.ModelCnt *****
+
+ // ***** START: Repository.AiTaskCnt *****
+ desc = "repository count 'ai_task_cnt'"
+ results, err = x.Query("SELECT repository.id FROM `repository` WHERE repository.ai_task_cnt!=(select count(1) from cloudbrain where repository.id = cloudbrain.repo_id and (cloudbrain.deleted_at is null or cloudbrain.deleted_at = '0001-01-01 00:00:00') )")
+ if err != nil {
+ log.Error("Select %s: %v", desc, err)
+ } else {
+ for _, result := range results {
+ id := com.StrTo(result["id"]).MustInt64()
+ select {
+ case <-ctx.Done():
+ log.Warn("CheckRepoStats: Cancelled")
+ return ErrCancelledf("during %s for repo ID %d", desc, id)
+ default:
+ }
+ log.Trace("Updating %s: %d", desc, id)
+ err = ResetRepoAITaskNum(id)
+ if err != nil {
+ log.Error("Update %s[%d]: %v", desc, id, err)
+ }
+ }
+ }
+ // ***** END: Repository.AiTaskCnt *****
return nil
}
@@ -2775,3 +2918,85 @@ func ReadLatestFileInRepo(userName, repoName, refName, treePath string) (*RepoFi
}
return &RepoFile{CommitId: commitId, Content: d}, nil
}
+
+func ResetRepoAITaskNum(repoId int64) error {
+ n, err := x.Where("repo_id = ? ", repoId).Count(&Cloudbrain{})
+ if err != nil {
+ return err
+ }
+ r := Repository{
+ AiTaskCnt: n,
+ }
+ _, err = x.Cols("ai_task_cnt").Where("id = ?", repoId).Update(&r)
+ return err
+}
+
+func ResetRepoDatasetNum(repoId int64) error {
+ n, err := x.Table("attachment").Join("inner", "dataset", "attachment.dataset_id = dataset.id").Where("dataset.repo_id = ?", repoId).Count()
+ if err != nil {
+ return err
+ }
+ r := Repository{
+ DatasetCnt: n,
+ }
+ _, err = x.Cols("dataset_cnt").Where("id = ?", repoId).Update(&r)
+ return err
+}
+
+func ResetRepoModelNum(repoId int64) error {
+ _, err := x.Exec("update repository set model_cnt = (select count(1) from ai_model_manage where ai_model_manage.repo_id = ? and size > 0) where id = ?", repoId, repoId)
+ return err
+}
+
+func operateRepoCol(repoId int64, colName string, amount int64, engines ...*xorm.Engine) error {
+ var err error
+
+ if amount == 0 {
+ return nil
+ }
+ var ee *xorm.Engine
+ if len(engines) == 0 {
+ ee = x
+ } else {
+ ee = engines[0]
+ }
+ if amount > 0 {
+ _, err = ee.Exec(fmt.Sprintf("update repository set %s = %s + ? where id = ?", colName, colName), amount, repoId)
+ } else {
+ _, err = ee.Exec(fmt.Sprintf("update repository set %s = %s - ? where id = ?", colName, colName), -1*amount, repoId)
+ }
+
+ return err
+}
+
+func OperateRepoDatasetNum(repoId int64, amount int64, engines ...*xorm.Engine) error {
+ return operateRepoCol(repoId, "dataset_cnt", amount, engines...)
+}
+
+func OperateRepoModelNum(repoId int64, amount int64, engines ...*xorm.Engine) error {
+ return operateRepoCol(repoId, "model_cnt", amount, engines...)
+}
+
+func OperateRepoAITaskNum(repoId int64, amount int64, engines ...*xorm.Engine) error {
+ return operateRepoCol(repoId, "ai_task_cnt", amount, engines...)
+}
+
+func UpdateRepositoryLastFourMonthCommits(repoID int64, amount int64) error {
+ _, err := x.Exec("update repository set last_four_month_commits = ? where id = ?", amount, repoID)
+ return err
+}
+func UpdateRepositoryLastMonthVisits(repoID int64, amount int64) error {
+ _, err := x.Exec("update repository set last_month_visits = ? where id = ?", amount, repoID)
+ return err
+}
+
+func SyncStatDataToRepo(repo *Repository) {
+ //Save the visit number of repository in the last month
+ if lv, err := SumLastMonthNumVisits(repo.ID); err == nil {
+ UpdateRepositoryLastMonthVisits(repo.ID, lv)
+ }
+ //Save the commits number of repository in the last four month
+ if lc, err := SumLastFourMonthNumCommits(repo.ID); err == nil {
+ UpdateRepositoryLastFourMonthCommits(repo.ID, lc)
+ }
+}
diff --git a/models/repo_list.go b/models/repo_list.go
index 92654c11c..3c655fbd9 100755
--- a/models/repo_list.go
+++ b/models/repo_list.go
@@ -201,29 +201,41 @@ func (s SearchOrderBy) String() string {
return string(s)
}
+type FindReposResponse struct {
+ Repos []*Repository4Card
+ Page int
+ PageSize int
+ Total int64
+}
+
// Strings for sorting result
const (
- SearchOrderByAlphabetically SearchOrderBy = "name ASC"
- SearchOrderByAlphabeticallyReverse SearchOrderBy = "name DESC"
- SearchOrderByLeastUpdated SearchOrderBy = "updated_unix ASC"
- SearchOrderByRecentUpdated SearchOrderBy = "updated_unix DESC"
- SearchOrderByOldest SearchOrderBy = "created_unix ASC"
- SearchOrderByNewest SearchOrderBy = "created_unix DESC"
- SearchOrderBySize SearchOrderBy = "size ASC"
- SearchOrderBySizeReverse SearchOrderBy = "size DESC"
- SearchOrderByID SearchOrderBy = "id ASC"
- SearchOrderByIDReverse SearchOrderBy = "id DESC"
- SearchOrderByStars SearchOrderBy = "num_stars ASC"
- SearchOrderByStarsReverse SearchOrderBy = "num_stars DESC"
- SearchOrderByForks SearchOrderBy = "num_forks ASC"
- SearchOrderByForksReverse SearchOrderBy = "num_forks DESC"
- SearchOrderByDownloadTimes SearchOrderBy = "download_times DESC"
- SearchOrderByUseCount SearchOrderBy = "use_count ASC"
- SearchOrderByUseCountReverse SearchOrderBy = "use_count DESC"
- SearchOrderByHot SearchOrderBy = "(num_watches + num_stars + num_forks + clone_cnt) DESC"
- SearchOrderByActive SearchOrderBy = "(num_issues + num_pulls + num_commit) DESC"
- SearchOrderByWatches SearchOrderBy = "num_watches DESC"
- SearchOrderByDefault SearchOrderBy = "recommend desc,num_stars DESC,updated_unix DESC"
+ SearchOrderByAlphabetically SearchOrderBy = "name ASC"
+ SearchOrderByAlphabeticallyReverse SearchOrderBy = "name DESC"
+ SearchOrderByLeastUpdated SearchOrderBy = "updated_unix ASC"
+ SearchOrderByRecentUpdated SearchOrderBy = "updated_unix DESC"
+ SearchOrderByOldest SearchOrderBy = "created_unix ASC"
+ SearchOrderByNewest SearchOrderBy = "created_unix DESC"
+ SearchOrderBySize SearchOrderBy = "size ASC"
+ SearchOrderBySizeReverse SearchOrderBy = "size DESC"
+ SearchOrderByID SearchOrderBy = "id ASC"
+ SearchOrderByIDReverse SearchOrderBy = "id DESC"
+ SearchOrderByStars SearchOrderBy = "num_stars ASC"
+ SearchOrderByStarsReverse SearchOrderBy = "num_stars DESC"
+ SearchOrderByForks SearchOrderBy = "num_forks ASC"
+ SearchOrderByForksReverse SearchOrderBy = "num_forks DESC"
+ SearchOrderByDownloadTimes SearchOrderBy = "download_times DESC"
+ SearchOrderByUseCount SearchOrderBy = "use_count ASC"
+ SearchOrderByUseCountReverse SearchOrderBy = "use_count DESC"
+ SearchOrderByHot SearchOrderBy = "(num_watches + num_stars + num_forks + clone_cnt) DESC"
+ SearchOrderByActive SearchOrderBy = "(num_issues + num_pulls + num_commit) DESC"
+ SearchOrderByWatches SearchOrderBy = "num_watches DESC"
+ SearchOrderByDefault SearchOrderBy = "recommend desc,num_stars DESC,updated_unix DESC"
+ SearchOrderByAiTaskCntReverse SearchOrderBy = "ai_task_cnt desc"
+ SearchOrderByModelCntReverse SearchOrderBy = "model_cnt desc"
+ SearchOrderByDatasetCntReverse SearchOrderBy = "dataset_cnt desc"
+ SearchOrderByLastMonthVisitsReverse SearchOrderBy = "last_month_visits desc"
+ SearchOrderByLastFourMonthCommitsReverse SearchOrderBy = "last_four_month_commits desc"
)
// SearchRepositoryCondition creates a query condition according search repository options
diff --git a/models/repo_statistic.go b/models/repo_statistic.go
index 4f8f13ed7..b99b7c259 100755
--- a/models/repo_statistic.go
+++ b/models/repo_statistic.go
@@ -36,7 +36,7 @@ type RepoStatistic struct {
NumDevMonths int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
RepoSize int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
DatasetSize int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
- NumModels int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
+ NumModels int64 `xorm:"NOT NULL DEFAULT 0" json:"model"`
NumWikiViews int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
NumCommits int64 `xorm:"NOT NULL DEFAULT 0" json:"commit"`
NumCommitsAdded int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
@@ -55,6 +55,15 @@ type RepoStatistic struct {
NumIssuesGrowth int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
NumCommentsGrowth int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
+ NumDatasetFile int64 `xorm:"NOT NULL DEFAULT 0" json:"datasetFiles"`
+ NumCloudbrain int64 `xorm:"NOT NULL DEFAULT 0" json:"cloudbrains"`
+ NumModelConvert int64 `xorm:"NOT NULL DEFAULT 0" json:"modelConverts"`
+
+ NumDatasetFileAdded int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
+ NumCloudbrainAdded int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
+	NumModelConvertAdded int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
+	NumModelsAdded       int64 `xorm:"NOT NULL DEFAULT 0" json:"-"`
+
Impact float64 `xorm:"NOT NULL DEFAULT 0" json:"impact"`
Completeness float64 `xorm:"NOT NULL DEFAULT 0" json:"completeness"`
Liveness float64 `xorm:"NOT NULL DEFAULT 0" json:"liveness"`
@@ -200,3 +209,23 @@ func UpdateRepoStatVisits(repoStat *RepoStatistic) error {
_, err := xStatistic.Exec(sql, repoStat.NumVisits, repoStat.RepoID, repoStat.Date)
return err
}
+
+func SumRepoStatColumn(begin, end time.Time, repoId int64, columnName string) (int64, error) {
+ res, err := xStatistic.Where("created_unix <= ? and created_unix >= ? and repo_id = ? ", end.Unix(), begin.Unix(), repoId).Sum(&RepoStatistic{}, columnName)
+ if err != nil {
+ return 0, err
+ }
+ return int64(res), nil
+}
+
+func SumLastMonthNumVisits(repoId int64) (int64, error) {
+ end := time.Now()
+ begin := end.AddDate(0, 0, -30)
+ return SumRepoStatColumn(begin, end, repoId, "num_visits")
+}
+
+func SumLastFourMonthNumCommits(repoId int64) (int64, error) {
+ end := time.Now()
+ begin := end.AddDate(0, 0, -120)
+ return SumRepoStatColumn(begin, end, repoId, "num_commits_added")
+}
diff --git a/models/repo_tag.go b/models/repo_tag.go
index 730eb3f2a..4585a95b6 100644
--- a/models/repo_tag.go
+++ b/models/repo_tag.go
@@ -4,6 +4,7 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
"fmt"
+ "xorm.io/builder"
)
type OfficialTag struct {
@@ -166,3 +167,33 @@ func GetAllOfficialTags() ([]OfficialTag, error) {
}
return o, nil
}
+
// FindSelectedReposOpts are the options for querying repositories that carry
// the official "selected" tag.
type FindSelectedReposOpts struct {
    ListOptions
    OrgId      int64 // restrict to this organization when > 0
    OnlyPublic bool  // exclude private repositories when true
}
+
+func GetSelectedRepos(opts FindSelectedReposOpts) ([]*Repository, error) {
+ if opts.Page < 1 {
+ opts.Page = 1
+ }
+ var cond = builder.NewCond()
+ cond = cond.And(builder.Eq{"official_tag.code": "selected"})
+ if opts.OrgId > 0 {
+ cond = cond.And(builder.Eq{"official_tag_repos.org_id": opts.OrgId})
+ }
+ if opts.OnlyPublic {
+ cond = cond.And(builder.Eq{"repository.is_private": false})
+ }
+ t := make([]*Repository, 0)
+ err := x.Join("inner", "official_tag_repos", "repository.id = official_tag_repos.repo_id").
+ Join("inner", "official_tag", "official_tag.id = official_tag_repos.tag_id").
+ Where(cond).OrderBy("repository.updated_unix desc").Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).Find(&t)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return t, nil
+}
diff --git a/models/task_config.go b/models/task_config.go
index 0d9d21187..f86032fc9 100644
--- a/models/task_config.go
+++ b/models/task_config.go
@@ -36,6 +36,8 @@ func GetTaskTypeFromAction(a ActionType) TaskType {
ActionCreateInferenceTask,
ActionCreateBenchMarkTask,
ActionCreateGPUTrainTask,
+ ActionCreateGrampusGPUDebugTask,
+ ActionCreateGrampusNPUDebugTask,
ActionCreateGrampusNPUTrainTask,
ActionCreateGrampusGPUTrainTask:
return TaskCreateCloudbrainTask
diff --git a/models/topic.go b/models/topic.go
index 0b19bc1f0..ea5698f4c 100644
--- a/models/topic.go
+++ b/models/topic.go
@@ -9,6 +9,7 @@ import (
"regexp"
"strings"
"unicode/utf8"
+ "xorm.io/xorm"
"code.gitea.io/gitea/modules/timeutil"
@@ -337,3 +338,16 @@ func GetOrgTopics(orgId int64) ([]Topic, error) {
return result, nil
}
+
+func UpdateRepoTopics(repoID int64, topicNames []string, sess ...*xorm.Engine) error {
+ e := x
+ if len(sess) > 0 {
+ e = sess[0]
+ }
+ if _, err := e.ID(repoID).Cols("topics").Update(&Repository{
+ Topics: topicNames,
+ }); err != nil {
+ return err
+ }
+ return nil
+}
diff --git a/models/user.go b/models/user.go
index c421455bc..d34860f2d 100755
--- a/models/user.go
+++ b/models/user.go
@@ -16,6 +16,7 @@ import (
"fmt"
_ "image/jpeg" // Needed for jpeg support
"image/png"
+ "math/rand"
"os"
"path/filepath"
"regexp"
@@ -198,6 +199,40 @@ type SearchOrganizationsOptions struct {
All bool
}
// User4Front is a trimmed-down user/organization view intended for the
// frontend: it carries only display-safe fields and omits credentials and
// other internal state.
type User4Front struct {
    ID            int64
    LowerName     string `xorm:"UNIQUE NOT NULL"`
    Name          string `xorm:"UNIQUE NOT NULL"`
    FullName      string
    Email         string `xorm:"NOT NULL"`
    Language      string `xorm:"VARCHAR(5)"`
    Description   string
    RelAvatarLink string // relative avatar URL, filled by ToFrontFormat
    NumMembers    int
    CreatedUnix   timeutil.TimeStamp `xorm:"INDEX created"`
    UpdatedUnix   timeutil.TimeStamp `xorm:"INDEX updated"`
}
+
+func (u *User) ToFrontFormat() *User4Front {
+ uf := &User4Front{
+ ID: u.ID,
+ LowerName: u.LowerName,
+ Name: u.Name,
+ FullName: u.FullName,
+ Email: u.Email,
+ Language: u.Language,
+ Description: u.Description,
+ CreatedUnix: u.CreatedUnix,
+ UpdatedUnix: u.UpdatedUnix,
+ NumMembers: u.NumMembers,
+ }
+ if !u.KeepEmailPrivate {
+ uf.Email = u.Email
+ }
+ uf.RelAvatarLink = u.RelAvatarLink()
+ return uf
+}
+
// IsBindWechat reports whether the user has bound a WeChat account
// (i.e. has a non-empty WechatOpenId).
func (u *User) IsBindWechat() bool {
    return u.WechatOpenId != ""
}
@@ -461,7 +496,11 @@ func (u *User) RealSizedAvatarLink(size int) string {
// may either be a sub-URL to this site, or a full URL to an external avatar
// service.
func (u *User) RelAvatarLink() string {
- return u.SizedRelAvatarLink(base.DefaultAvatarSize)
+ append := ""
+ if u.UseCustomAvatar {
+ append = "?" + fmt.Sprint(rand.Intn(100))
+ }
+ return u.SizedRelAvatarLink(base.DefaultAvatarSize) + append
}
// AvatarLink returns user avatar absolute link.
diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go
index ca1eb4c7b..d5ab871ce 100644
--- a/models/user_business_analysis.go
+++ b/models/user_business_analysis.go
@@ -355,6 +355,7 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)
+ AiModelConvertMap := queryUserModelConvert(start_unix, end_unix)
CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
@@ -427,6 +428,7 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi
dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap)
+ dateRecord.ModelConvertCount = getMapValue(dateRecord.ID, AiModelConvertMap)
dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset)
dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset)
@@ -546,6 +548,7 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus
resultMap[userRecord.ID].CommitDatasetSize += userRecord.CommitDatasetSize
resultMap[userRecord.ID].CommitDatasetNum += userRecord.CommitDatasetNum
resultMap[userRecord.ID].CommitModelCount += userRecord.CommitModelCount
+ resultMap[userRecord.ID].ModelConvertCount += userRecord.ModelConvertCount
resultMap[userRecord.ID].SolveIssueCount += userRecord.SolveIssueCount
resultMap[userRecord.ID].EncyclopediasCount += userRecord.EncyclopediasCount
resultMap[userRecord.ID].CreateRepoCount += userRecord.CreateRepoCount
@@ -583,7 +586,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
startTime := currentTimeNow.AddDate(0, 0, -1)
CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
- CommitCountMap, mostActiveMap := queryCommitAction(start_unix, end_unix, 5)
+ CommitCountMap, _ := queryCommitAction(start_unix, end_unix, 5)
IssueCountMap := queryCreateIssue(start_unix, end_unix)
CommentCountMap := queryComment(start_unix, end_unix)
@@ -599,29 +602,25 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
//log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
}
//CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
- CommitDatasetSizeMap, CommitDatasetNumMap, dataSetDownloadMap := queryDatasetSize(start_unix, end_unix)
+ CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
- CreateRepoCountMap, DetailInfoMap, MostDownloadMap := queryUserCreateRepo(start_unix, end_unix)
+ CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)
OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)
+ AiModelConvertMap := queryUserModelConvert(start_unix, end_unix)
CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
- RecommendDataset, CreatedDataset := queryRecommedDataSet(start_unix, end_unix)
+ RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)
InvitationMap := queryUserInvitationCount(start_unix, end_unix)
DataDate := currentTimeNow.Format("2006-01-02") + " 00:01"
- bonusMap := make(map[string]map[string]int)
- if isUserYearData(tableName) {
- bonusMap = getBonusMap()
- log.Info("truncate all data from table:user_summary_current_year ")
- statictisSess.Exec("TRUNCATE TABLE user_summary_current_year")
- }
+
cond := "type != 1 and is_active=true"
count, err := sess.Where(cond).Count(new(User))
if err != nil {
@@ -687,6 +686,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
dateRecordAll.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap)
+ dateRecordAll.ModelConvertCount = getMapValue(dateRecordAll.ID, AiModelConvertMap)
dateRecordAll.CollectDataset = getMapValue(dateRecordAll.ID, CollectDataset)
dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset)
dateRecordAll.RecommendDataset = getMapValue(dateRecordAll.ID, RecommendDataset)
@@ -719,37 +719,6 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
userMetrics["TotalHasActivityUser"] = getMapKeyStringValue("TotalHasActivityUser", userMetrics) + 1
}
}
- if isUserYearData(tableName) {
- //年度数据
- subTime := time.Now().UTC().Sub(dateRecordAll.RegistDate.AsTime().UTC())
- mostActiveDay := ""
- if userInfo, ok := mostActiveMap[dateRecordAll.ID]; ok {
- mostActiveDay = getMostActiveJson(userInfo)
- }
- scoreMap := make(map[string]float64)
- repoInfo := getRepoDetailInfo(DetailInfoMap, dateRecordAll.ID, MostDownloadMap)
- dataSetInfo, datasetscore := getDataSetInfo(dateRecordAll.ID, CreatedDataset, dataSetDownloadMap, CommitDatasetNumMap, CollectedDataset)
- scoreMap["datasetscore"] = datasetscore
- codeInfo, codescore := getCodeInfo(dateRecordAll)
- scoreMap["codescore"] = codescore
- cloudBrainInfo := getCloudBrainInfo(dateRecordAll, CloudBrainTaskItemMap, scoreMap)
- playARoll := getPlayARoll(bonusMap, dateRecordAll.Name, scoreMap)
- re := &UserSummaryCurrentYear{
- ID: dateRecordAll.ID,
- Name: dateRecordAll.Name,
- Email: dateRecordAll.Email,
- Phone: dateRecordAll.Phone,
- RegistDate: dateRecordAll.RegistDate,
- DateCount: int(subTime.Hours()) / 24,
- MostActiveDay: mostActiveDay,
- RepoInfo: repoInfo,
- DataSetInfo: dataSetInfo,
- CodeInfo: codeInfo,
- CloudBrainInfo: cloudBrainInfo,
- PlayARoll: playARoll,
- }
- statictisSess.Insert(re)
- }
}
if len(dateRecordBatch) > 0 {
err := insertTable(dateRecordBatch, tableName, statictisSess)
@@ -779,14 +748,136 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
log.Info("refresh data finished.tableName=" + tableName + " total record:" + fmt.Sprint(insertCount))
}
+func RefreshUserYearTable(pageStartTime time.Time, pageEndTime time.Time) {
+ sess := x.NewSession()
+ defer sess.Close()
+ log.Info("RefreshUserYearTable start....")
+ statictisSess := xStatistic.NewSession()
+ defer statictisSess.Close()
+
+ log.Info("UserYear StartTime:" + pageStartTime.Format("2006-01-02 15:04:05"))
+ log.Info("UserYear EndTime time:" + pageEndTime.Format("2006-01-02 15:04:05"))
+
+ start_unix := pageStartTime.Unix()
+ end_unix := pageEndTime.Unix()
+
+ CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
+ CommitCountMap, mostActiveMap := queryCommitAction(start_unix, end_unix, 5)
+ IssueCountMap := queryCreateIssue(start_unix, end_unix)
+
+ CommentCountMap := queryComment(start_unix, end_unix)
+
+ CommitCodeSizeMap, err := GetAllUserKPIStats(pageStartTime, pageEndTime)
+ if err != nil {
+ log.Info("query commit code errr.")
+ } else {
+ log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
+ }
+ CommitDatasetSizeMap, CommitDatasetNumMap, dataSetDownloadMap := queryDatasetSize(start_unix, end_unix)
+ SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
+ CreateRepoCountMap, DetailInfoMap, MostDownloadMap := queryUserCreateRepo(start_unix, end_unix)
+
+ CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
+
+ _, CollectedDataset := queryDatasetStars(start_unix, end_unix)
+ _, CreatedDataset := queryRecommedDataSet(start_unix, end_unix)
+
+ bonusMap := getBonusMap()
+ log.Info("truncate all data from table:user_summary_current_year ")
+ statictisSess.Exec("TRUNCATE TABLE user_summary_current_year")
+
+ cond := "type != 1 and is_active=true"
+ count, err := sess.Where(cond).Count(new(User))
+ if err != nil {
+ log.Info("query user error. return.")
+ return
+ }
+ var indexTotal int64
+ indexTotal = 0
+ for {
+ sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+ userList := make([]*User, 0)
+ sess.Find(&userList)
+ for _, userRecord := range userList {
+ var dateRecordAll UserBusinessAnalysisAll
+ dateRecordAll.ID = userRecord.ID
+ dateRecordAll.Email = userRecord.Email
+ dateRecordAll.Phone = userRecord.PhoneNumber
+ dateRecordAll.RegistDate = userRecord.CreatedUnix
+ dateRecordAll.Name = userRecord.Name
+
+ dateRecordAll.CodeMergeCount = getMapValue(dateRecordAll.ID, CodeMergeCountMap)
+ dateRecordAll.CommitCount = getMapValue(dateRecordAll.ID, CommitCountMap)
+ dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap)
+ dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap)
+
+ if _, ok := CommitCodeSizeMap[dateRecordAll.Email]; !ok {
+ dateRecordAll.CommitCodeSize = 0
+ } else {
+ dateRecordAll.CommitCodeSize = int(CommitCodeSizeMap[dateRecordAll.Email].CommitLines)
+ }
+ //dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap)
+ dateRecordAll.CommitDatasetSize = getMapValue(dateRecordAll.ID, CommitDatasetSizeMap)
+ dateRecordAll.CommitDatasetNum = getMapValue(dateRecordAll.ID, CommitDatasetNumMap)
+ dateRecordAll.SolveIssueCount = getMapValue(dateRecordAll.ID, SolveIssueCountMap)
+ dateRecordAll.CreateRepoCount = getMapValue(dateRecordAll.ID, CreateRepoCountMap)
+
+ dateRecordAll.CloudBrainTaskNum = getMapValue(dateRecordAll.ID, CloudBrainTaskMap)
+ dateRecordAll.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuDebugJob", CloudBrainTaskItemMap)
+ dateRecordAll.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuDebugJob", CloudBrainTaskItemMap)
+ dateRecordAll.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuTrainJob", CloudBrainTaskItemMap)
+ dateRecordAll.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
+ dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
+ dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
+ dateRecordAll.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
+
+ //年度数据
+ subTime := time.Now().UTC().Sub(dateRecordAll.RegistDate.AsTime().UTC())
+ mostActiveDay := ""
+ if userInfo, ok := mostActiveMap[dateRecordAll.ID]; ok {
+ mostActiveDay = getMostActiveJson(userInfo)
+ }
+ scoreMap := make(map[string]float64)
+ repoInfo := getRepoDetailInfo(DetailInfoMap, dateRecordAll.ID, MostDownloadMap)
+ dataSetInfo, datasetscore := getDataSetInfo(dateRecordAll.ID, CreatedDataset, dataSetDownloadMap, CommitDatasetNumMap, CollectedDataset)
+ scoreMap["datasetscore"] = datasetscore
+ codeInfo, codescore := getCodeInfo(dateRecordAll)
+ scoreMap["codescore"] = codescore
+ cloudBrainInfo := getCloudBrainInfo(dateRecordAll, CloudBrainTaskItemMap, scoreMap)
+ playARoll := getPlayARoll(bonusMap, dateRecordAll.Name, scoreMap)
+ re := &UserSummaryCurrentYear{
+ ID: dateRecordAll.ID,
+ Name: dateRecordAll.Name,
+ Email: dateRecordAll.Email,
+ Phone: dateRecordAll.Phone,
+ RegistDate: dateRecordAll.RegistDate,
+ DateCount: int(subTime.Hours()) / 24,
+ MostActiveDay: mostActiveDay,
+ RepoInfo: repoInfo,
+ DataSetInfo: dataSetInfo,
+ CodeInfo: codeInfo,
+ CloudBrainInfo: cloudBrainInfo,
+ PlayARoll: playARoll,
+ }
+ statictisSess.Insert(re)
+ }
+ indexTotal += PAGE_SIZE
+ if indexTotal >= count {
+ break
+ }
+ }
+ log.Info("update user year data finished. ")
+}
+
// isUserYearData reports whether tableName refers to the current-year user
// analysis table AND the yearly-summary feature is still active; the feature
// was retired for years >= 2023. The two nested conditionals collapse to a
// single boolean expression.
func isUserYearData(tableName string) bool {
    return tableName == "user_business_analysis_current_year" && time.Now().Year() < 2023
}
func getBonusMap() map[string]map[string]int {
@@ -811,6 +902,7 @@ func getBonusMap() map[string]map[string]int {
record, ok := bonusMap[userName]
if !ok {
record = make(map[string]int)
+ bonusMap[userName] = record
}
record["times"] = getMapKeyStringValue("times", record) + getIntValue(aLine[3])
record["total_bonus"] = getMapKeyStringValue("total_bonus", record) + getIntValue(aLine[4])
@@ -996,7 +1088,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static
insertBatchSql := "INSERT INTO public." + tableName +
"(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " +
- "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive,phone,invitation_user_num) " +
+ "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive,phone,invitation_user_num,model_convert_count) " +
"VALUES"
for i, record := range dateRecords {
@@ -1005,7 +1097,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static
", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) +
", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) +
", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "'," +
- fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ",'" + record.Phone + "'" + "," + fmt.Sprint(record.InvitationUserNum) + ")"
+ fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ",'" + record.Phone + "'" + "," + fmt.Sprint(record.InvitationUserNum) + "," + fmt.Sprint(record.ModelConvertCount) + ")"
if i < (len(dateRecords) - 1) {
insertBatchSql += ","
}
@@ -1096,6 +1188,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)
+ AiModelConvertMap := queryUserModelConvert(start_unix, end_unix)
CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
@@ -1177,7 +1270,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap)
-
+ dateRecord.ModelConvertCount = getMapValue(dateRecord.ID, AiModelConvertMap)
dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset)
dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset)
dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset)
@@ -1366,6 +1459,7 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight
result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
result += float64(dateRecord.CloudBrainTaskNum) * getParaWeightValue("CloudBrainTaskNum", ParaWeight, 0.3)
result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
+ result += float64(dateRecord.ModelConvertCount) * getParaWeightValue("ModelConvertCount", ParaWeight, 0.2)
result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)
result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1)
@@ -1391,6 +1485,7 @@ func getUserActivateAll(dateRecord UserBusinessAnalysisAll) int {
result += dateRecord.CreateRepoCount
result += dateRecord.CloudBrainTaskNum
result += dateRecord.CommitModelCount
+ result += dateRecord.ModelConvertCount
result += dateRecord.CommitDatasetNum
result += dateRecord.FocusOtherUser
result += dateRecord.CollectDataset
@@ -1412,6 +1507,7 @@ func getUserActivate(dateRecord UserBusinessAnalysis) int {
result += dateRecord.CreateRepoCount
result += dateRecord.CloudBrainTaskNum
result += dateRecord.CommitModelCount
+ result += dateRecord.ModelConvertCount
result += dateRecord.CommitDatasetNum
result += dateRecord.FocusOtherUser
result += dateRecord.CollectDataset
@@ -1448,6 +1544,7 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64
result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
result += float64(dateRecord.CloudBrainTaskNum) * getParaWeightValue("CloudBrainTaskNum", ParaWeight, 0.3)
result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
+ result += float64(dateRecord.ModelConvertCount) * getParaWeightValue("ModelConvertCount", ParaWeight, 0.2)
result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)
result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1)
@@ -1492,10 +1589,6 @@ func getInt(str string) int {
return int(re)
}
-func CounDataByDate(wikiCountMap map[string]int, startTime time.Time, endTime time.Time) {
- CounDataByDateAndReCount(wikiCountMap, startTime, endTime, false)
-}
-
func querySolveIssue(start_unix int64, end_unix int64) map[int64]int {
sess := x.NewSession()
defer sess.Close()
@@ -2276,6 +2369,38 @@ func queryUserModel(start_unix int64, end_unix int64) map[int64]int {
return resultMap
}
+func queryUserModelConvert(start_unix int64, end_unix int64) map[int64]int {
+ sess := x.NewSession()
+ defer sess.Close()
+ resultMap := make(map[int64]int)
+ cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
+ count, err := sess.Where(cond).Count(new(AiModelConvert))
+ if err != nil {
+ log.Info("query AiModelConvert error. return.")
+ return resultMap
+ }
+ var indexTotal int64
+ indexTotal = 0
+ for {
+ sess.Select("id,user_id").Table("ai_model_convert").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+ aiModelList := make([]*AiModelConvert, 0)
+ sess.Find(&aiModelList)
+ log.Info("query AiModelConvert size=" + fmt.Sprint(len(aiModelList)))
+ for _, aiModelRecord := range aiModelList {
+ if _, ok := resultMap[aiModelRecord.UserId]; !ok {
+ resultMap[aiModelRecord.UserId] = 1
+ } else {
+ resultMap[aiModelRecord.UserId] += 1
+ }
+ }
+ indexTotal += PAGE_SIZE
+ if indexTotal >= count {
+ break
+ }
+ }
+ return resultMap
+}
+
func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[string]int) {
sess := x.NewSession()
defer sess.Close()
@@ -2441,3 +2566,9 @@ func GetContentFromPromote(url string) (string, error) {
allLineStr := string(bytes)
return allLineStr, nil
}
+
+func QueryLast30DaysHighestIndexUsers(size int) ([]int64, error) {
+ userIds := make([]int64, 0)
+ err := xStatistic.Table("user_business_analysis_last30_day").Cols("id").OrderBy("user_index desc").Limit(size).Find(&userIds)
+ return userIds, err
+}
diff --git a/models/user_business_struct.go b/models/user_business_struct.go
index 9dcc12342..00c7f6176 100644
--- a/models/user_business_struct.go
+++ b/models/user_business_struct.go
@@ -89,6 +89,7 @@ type UserBusinessAnalysisCurrentYear struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisLast30Day struct {
@@ -157,6 +158,7 @@ type UserBusinessAnalysisLast30Day struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisLastMonth struct {
@@ -225,6 +227,7 @@ type UserBusinessAnalysisLastMonth struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisCurrentMonth struct {
@@ -293,6 +296,7 @@ type UserBusinessAnalysisCurrentMonth struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisCurrentWeek struct {
@@ -362,6 +366,7 @@ type UserBusinessAnalysisCurrentWeek struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisYesterday struct {
@@ -431,6 +436,7 @@ type UserBusinessAnalysisYesterday struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisLastWeek struct {
@@ -500,6 +506,7 @@ type UserBusinessAnalysisLastWeek struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserAnalysisPara struct {
@@ -616,6 +623,7 @@ type UserBusinessAnalysisAll struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysis struct {
@@ -704,4 +712,5 @@ type UserBusinessAnalysis struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
diff --git a/modules/auth/grampus.go b/modules/auth/grampus.go
index 414a7c25d..f8a238124 100755
--- a/modules/auth/grampus.go
+++ b/modules/auth/grampus.go
@@ -29,3 +29,24 @@ type CreateGrampusTrainJobForm struct {
func (f *CreateGrampusTrainJobForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
return validate(errs, ctx.Data, f, ctx.Locale)
}
+
// CreateGrampusNotebookForm holds the form fields submitted when creating a
// Grampus (C2Net) notebook/debug task.
type CreateGrampusNotebookForm struct {
    Type             int    `form:"type"` // compute type selector — presumably GPU vs NPU; verify against the handler
    DisplayJobName   string `form:"display_job_name" binding:"Required"`
    Attachment       string `form:"attachment"`
    ImageID          string `form:"image_id" binding:"Required"`
    Description      string `form:"description"`
    BranchName       string `form:"branch_name" binding:"Required"`
    Image            string `form:"image" binding:"Required"`
    DatasetName      string `form:"dataset_name"`
    ModelName        string `form:"model_name"`
    ModelVersion     string `form:"model_version"`
    CkptName         string `form:"ckpt_name"`
    LabelName        string `form:"label_names"`
    PreTrainModelUrl string `form:"pre_train_model_url"`
    SpecId           int64  `form:"spec_id" binding:"Required"`
}

// Validate runs the shared form validation over the bound fields and returns
// the accumulated binding errors.
func (f *CreateGrampusNotebookForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
    return validate(errs, ctx.Data, f, ctx.Locale)
}
diff --git a/modules/auth/modelarts.go b/modules/auth/modelarts.go
index 0221c51d8..874bbb0a4 100755
--- a/modules/auth/modelarts.go
+++ b/modules/auth/modelarts.go
@@ -16,13 +16,19 @@ func (f *CreateModelArtsForm) Validate(ctx *macaron.Context, errs binding.Errors
}
// CreateModelArtsNotebookForm holds the form fields submitted when creating
// a ModelArts notebook (debug) task, including the optional pre-trained
// model selection fields.
type CreateModelArtsNotebookForm struct {
    DisplayJobName   string `form:"display_job_name" binding:"Required"`
    JobName          string `form:"job_name" binding:"Required"`
    Attachment       string `form:"attachment"`
    Description      string `form:"description"`
    Flavor           string `form:"flavor" binding:"Required"`
    ImageId          string `form:"image_id" binding:"Required"`
    ModelName        string `form:"model_name"`
    ModelVersion     string `form:"model_version"`
    CkptName         string `form:"ckpt_name"`
    LabelName        string `form:"label_names"`
    PreTrainModelUrl string `form:"pre_train_model_url"`
    SpecId           int64  `form:"spec_id" binding:"Required"`
    DatasetName      string `form:"dataset_name"`
}
func (f *CreateModelArtsNotebookForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go
index 6111cf460..1929c58a6 100755
--- a/modules/cloudbrain/cloudbrain.go
+++ b/modules/cloudbrain/cloudbrain.go
@@ -5,6 +5,7 @@ import (
"errors"
"os"
"strconv"
+ "strings"
"code.gitea.io/gitea/modules/timeutil"
@@ -31,10 +32,10 @@ const (
Snn4imagenetMountPath = "/snn4imagenet"
BrainScoreMountPath = "/brainscore"
TaskInfoName = "/taskInfo"
- Snn4imagenetCommand = `/opt/conda/bin/python /snn4imagenet/testSNN_script.py --modelname '%s' --modelpath '/dataset' --modeldescription '%s' >/model/benchmark-log.txt`
- BrainScoreCommand = `bash /brainscore/brainscore_test_par4shSrcipt.sh -b '%s' -n '%s' -p '/dataset' -d '%s' >/model/benchmark-log.txt`
-
- SubTaskName = "task1"
+ Snn4imagenetCommand = `/opt/conda/bin/python /benchmark/testSNN_script.py --modelname '%s' --modelpath '/pretrainmodel/%s' --modeldescription '%s' >/model/benchmark-log.txt`
+ BrainScoreCommand = `bash /benchmark/brainscore_test_par4shSrcipt.sh -b '%s' -n '%s' -p '/pretrainmodel/%s' -d '%s' >/model/benchmark-log.txt`
+ Snn4EcosetCommand = `/opt/conda/bin/python /benchmark/testSNN_script.py --datapath '/dataset' --modelname '%s' --modelpath '/pretrainmodel/%s' --modeldescription '%s' >/model/benchmark-log.txt`
+ SubTaskName = "task1"
Success = "S000"
@@ -145,7 +146,7 @@ func isAdminOrImageCreater(ctx *context.Context, image *models.Image, err error)
func AdminOrOwnerOrJobCreaterRight(ctx *context.Context) {
var id = ctx.Params(":id")
- job, err := GetCloudBrainByIdOrJobId(id)
+ job, err := GetCloudBrainByIdOrJobId(id, "id")
if err != nil {
log.Error("GetCloudbrainByID failed:%v", err.Error())
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
@@ -161,7 +162,7 @@ func AdminOrOwnerOrJobCreaterRight(ctx *context.Context) {
func AdminOrJobCreaterRight(ctx *context.Context) {
var id = ctx.Params(":id")
- job, err := GetCloudBrainByIdOrJobId(id)
+ job, err := GetCloudBrainByIdOrJobId(id, "id")
if err != nil {
log.Error("GetCloudbrainByID failed:%v", err.Error())
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
@@ -177,7 +178,7 @@ func AdminOrJobCreaterRight(ctx *context.Context) {
func AdminOrOwnerOrJobCreaterRightForTrain(ctx *context.Context) {
var jobID = ctx.Params(":jobid")
- job, err := GetCloudBrainByIdOrJobId(jobID)
+ job, err := GetCloudBrainByIdOrJobId(jobID, "jobid")
if err != nil {
log.Error("GetCloudbrainByJobID failed:%v", err.Error())
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
@@ -193,7 +194,7 @@ func AdminOrOwnerOrJobCreaterRightForTrain(ctx *context.Context) {
func AdminOrJobCreaterRightForTrain(ctx *context.Context) {
var jobID = ctx.Params(":jobid")
- job, err := GetCloudBrainByIdOrJobId(jobID)
+ job, err := GetCloudBrainByIdOrJobId(jobID, "jobid")
if err != nil {
log.Error("GetCloudbrainByJobID failed:%v", err.Error())
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
@@ -256,20 +257,6 @@ func GenerateTask(req GenerateCloudBrainTaskReq) (string, error) {
ReadOnly: true,
},
},
- {
- HostPath: models.StHostPath{
- Path: req.Snn4ImageNetPath,
- MountPath: Snn4imagenetMountPath,
- ReadOnly: true,
- },
- },
- {
- HostPath: models.StHostPath{
- Path: req.BrainScorePath,
- MountPath: BrainScoreMountPath,
- ReadOnly: true,
- },
- },
{
HostPath: models.StHostPath{
Path: req.ResultPath,
@@ -405,7 +392,7 @@ func GenerateTask(req GenerateCloudBrainTaskReq) (string, error) {
}
func IsBenchmarkJob(jobType string) bool {
- return string(models.JobTypeModelSafety) == jobType || string(models.JobTypeBenchmark) == jobType || string(models.JobTypeBrainScore) == jobType || string(models.JobTypeSnn4imagenet) == jobType
+ return string(models.JobTypeModelSafety) == jobType || string(models.JobTypeBenchmark) == jobType || string(models.JobTypeBrainScore) == jobType || string(models.JobTypeSnn4imagenet) == jobType || string(models.JobTypeSnn4Ecoset) == jobType
}
func GetWaitingCloudbrainCount(cloudbrainType int, computeResource string, jobTypes ...models.JobType) int64 {
@@ -490,6 +477,21 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e
}
}
+ if task.PreTrainModelUrl != "" { //预训练
+ _, err := models.QueryModelByPath(task.PreTrainModelUrl)
+ if err != nil {
+			log.Warn("The model may be deleted:%v", err)
+ } else {
+ volumes = append(volumes, models.Volume{
+ HostPath: models.StHostPath{
+ Path: setting.Attachment.Minio.RealPath + task.PreTrainModelUrl,
+ MountPath: PretrainModelMountPath,
+ ReadOnly: true,
+ },
+ })
+ }
+ }
+
createTime := timeutil.TimeStampNow()
jobResult, err := CreateJob(jobName, models.CreateJobParams{
JobName: jobName,
@@ -540,10 +542,16 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e
GpuQueue: task.GpuQueue,
ResourceSpecId: task.ResourceSpecId,
ComputeResource: task.ComputeResource,
- CreatedUnix: createTime,
- UpdatedUnix: createTime,
- BranchName: task.BranchName,
- Spec: spec,
+
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ BranchName: task.BranchName,
+ Spec: spec,
+ ModelName: task.ModelName,
+ ModelVersion: task.ModelVersion,
+ LabelName: task.LabelName,
+ PreTrainModelUrl: task.PreTrainModelUrl,
+ CkptName: task.CkptName,
}
err = models.RestartCloudbrain(task, newTask)
@@ -653,18 +661,47 @@ func IsElementExist(s []string, str string) bool {
return false
}
-func GetCloudBrainByIdOrJobId(id string) (*models.Cloudbrain,error) {
+
+func GetCloudBrainByIdOrJobId(id string, initialQuery string) (*models.Cloudbrain, error) {
_, err := strconv.ParseInt(id, 10, 64)
var job *models.Cloudbrain
if err != nil {
job, err = models.GetCloudbrainByJobID(id)
} else {
- job, err = models.GetCloudbrainByID(id)
- if err!=nil{
+
+ if strings.EqualFold(initialQuery, "id") {
+ job, err = models.GetCloudbrainByID(id)
+ if err != nil {
+ job, err = models.GetCloudbrainByJobID(id)
+ }
+ } else {
job, err = models.GetCloudbrainByJobID(id)
+ if err != nil {
+ job, err = models.GetCloudbrainByID(id)
+ }
}
}
- return job,err
+ return job, err
+}
+
+type GenerateModelArtsNotebookReq struct {
+ JobName string
+ DisplayJobName string
+ Uuid string
+ Description string
+
+ BootFile string
+
+ ImageId string
+ AutoStopDurationMs int64
+ BranchName string
+
+ Spec *models.Specification
+ ModelName string
+ LabelName string
+ CkptName string
+ ModelVersion string
+ PreTrainModelUrl string
}
diff --git a/modules/convert/cloudbrain.go b/modules/convert/cloudbrain.go
index 599da4800..73e37b1ea 100644
--- a/modules/convert/cloudbrain.go
+++ b/modules/convert/cloudbrain.go
@@ -28,14 +28,13 @@ func ToCloudBrain(task *models.Cloudbrain) *api.Cloudbrain {
BootFile: task.BootFile,
Description: task.Description,
ModelName: task.ModelName,
-
- ModelVersion: task.ModelVersion,
- CkptName: task.CkptName,
+ VersionName: task.VersionName,
+ ModelVersion: task.ModelVersion,
+ CkptName: task.CkptName,
StartTime: int64(task.StartTime),
EndTime: int64(task.EndTime),
-
- Spec: ToSpecification(task.Spec),
+ Spec: ToSpecification(task.Spec),
}
}
func ToAttachment(attachment *models.Attachment) *api.AttachmentShow {
@@ -89,6 +88,9 @@ func ToDataset(dataset *models.Dataset) *api.Dataset {
}
func ToSpecification(s *models.Specification) *api.SpecificationShow {
+ if s == nil {
+ return nil
+ }
return &api.SpecificationShow{
ID: s.ID,
AccCardsNum: s.AccCardsNum,
diff --git a/modules/cron/tasks_basic.go b/modules/cron/tasks_basic.go
index 6a1fc6e39..5907a3418 100755
--- a/modules/cron/tasks_basic.go
+++ b/modules/cron/tasks_basic.go
@@ -5,10 +5,11 @@
package cron
import (
- "code.gitea.io/gitea/modules/setting"
"context"
"time"
+ "code.gitea.io/gitea/modules/setting"
+
"code.gitea.io/gitea/modules/urfs_client/urchin"
cloudbrainService "code.gitea.io/gitea/services/cloudbrain"
@@ -296,7 +297,7 @@ func registerHandleCloudbrainDurationStatistic() {
RegisterTaskFatal("handle_cloudbrain_duration_statistic", &BaseConfig{
Enabled: true,
RunAtStart: false,
- Schedule: "1 0 * * * ?",
+ Schedule: "1 1 * * * ?",
}, func(ctx context.Context, _ *models.User, _ Config) error {
repo.CloudbrainDurationStatisticHour()
return nil
diff --git a/modules/grampus/grampus.go b/modules/grampus/grampus.go
index 34d7d3fe0..37e6fc1bf 100755
--- a/modules/grampus/grampus.go
+++ b/modules/grampus/grampus.go
@@ -1,7 +1,8 @@
package grampus
import (
- "encoding/json"
+ "fmt"
+ "strconv"
"strings"
"code.gitea.io/gitea/models"
@@ -26,8 +27,10 @@ const (
CodeArchiveName = "master.zip"
- BucketRemote = "grampus"
- RemoteModelPath = "/output/" + models.ModelSuffix
+ BucketRemote = "grampus"
+ RemoteModelPath = "/output/" + models.ModelSuffix
+ autoStopDurationMs = 4 * 60 * 60 * 1000
+ CommandGpuDebug = "mkdir -p /dataset;%s! [ -x \"$(command -v jupyter)\" ] && pip install jupyterlab==3 -i https://pypi.tuna.tsinghua.edu.cn/simple;jupyter lab --ServerApp.shutdown_no_activity_timeout=%s --TerminalManager.cull_inactive_timeout=%s --TerminalManager.cull_interval=%s --MappingKernelManager.cull_idle_timeout=%s --MappingKernelManager.cull_interval=%s --MappingKernelManager.cull_connected=True --MappingKernelManager.cull_busy=True --no-browser --ip=0.0.0.0 --allow-root --notebook-dir='/code' --port=$OCTOPUS_NOTEBOOK_PORT --LabApp.token='' --LabApp.allow_origin='*' --LabApp.base_url=$OCTOPUS_NOTEBOOK_BASE_URL;"
)
var (
@@ -37,7 +40,7 @@ var (
SpecialPools *models.SpecialPools
- CommandPrepareScriptGpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://openi.pcl.ac.cn/OpenIOSSG/%s/archive/master.zip;" +
+ CommandPrepareScriptGpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/%s/archive/master.zip;" +
"echo \"finish loading script\";unzip -q master.zip;cd %s;chmod 777 downloader_for_obs uploader_for_npu downloader_for_minio uploader_for_gpu;"
)
@@ -81,6 +84,32 @@ type GenerateTrainJobReq struct {
CodeName string
}
+type GenerateNotebookJobReq struct {
+ JobName string
+ Command string
+ ImageUrl string
+ ImageId string
+ DisplayJobName string
+ Uuid string
+ Description string
+ CodeStoragePath string
+ CommitID string
+ BranchName string
+ ComputeResource string
+ ProcessType string
+ DatasetNames string
+ DatasetInfos map[string]models.DatasetInfo
+ ModelName string
+ LabelName string
+ CkptName string
+ ModelVersion string
+ PreTrainModelPath string
+ PreTrainModelUrl string
+ Spec *models.Specification
+ CodeName string
+ ModelPath string //参考启智GPU调试, 挂载/model目录用户的模型可以输出到这个目录
+}
+
func getEndPoint() string {
index := strings.Index(setting.Endpoint, "//")
endpoint := setting.Endpoint[index+2:]
@@ -101,6 +130,151 @@ func getDatasetGrampus(datasetInfos map[string]models.DatasetInfo) []models.Gram
}
return datasetGrampus
}
+func getDatasetGPUGrampus(datasetInfos map[string]models.DatasetInfo) ([]models.GrampusDataset, string) {
+ var datasetGrampus []models.GrampusDataset
+ var command = ""
+ for uuid, datasetInfo := range datasetInfos {
+ datasetGrampus = append(datasetGrampus, models.GrampusDataset{
+ Name: datasetInfo.FullName,
+ Bucket: setting.Attachment.Minio.Bucket,
+ EndPoint: setting.Attachment.Minio.Endpoint,
+ ObjectKey: datasetInfo.DataLocalPath,
+ ReadOnly: true,
+ ContainerPath: "/dataset1/" + datasetInfo.Name,
+ })
+
+ command += "cp /dataset1/'" + datasetInfo.Name + "'/" + uuid + " /dataset/'" + datasetInfo.FullName + "';"
+
+ }
+ return datasetGrampus, command
+}
+
+func GenerateNotebookJob(ctx *context.Context, req *GenerateNotebookJobReq) (jobId string, err error) {
+ createTime := timeutil.TimeStampNow()
+
+ var datasetGrampus []models.GrampusDataset
+ var codeGrampus models.GrampusDataset
+ var cpCommand string
+ imageUrl := req.ImageUrl
+ if ProcessorTypeNPU == req.ProcessType {
+ datasetGrampus = getDatasetGrampus(req.DatasetInfos)
+ if len(req.ModelName) != 0 {
+ datasetGrampus = append(datasetGrampus, models.GrampusDataset{
+ Name: req.ModelName,
+ Bucket: setting.Bucket,
+ EndPoint: getEndPoint(),
+ ReadOnly: true,
+ ObjectKey: req.PreTrainModelPath,
+ })
+ }
+
+ codeGrampus = models.GrampusDataset{
+ Name: req.CodeName,
+ Bucket: setting.Bucket,
+ EndPoint: getEndPoint(),
+ ObjectKey: req.CodeStoragePath + cloudbrain.DefaultBranchName + ".zip",
+ ReadOnly: false,
+ }
+ imageUrl = ""
+ req.Command = ""
+ } else {
+ datasetGrampus, cpCommand = getDatasetGPUGrampus(req.DatasetInfos)
+ if len(req.ModelName) != 0 {
+ datasetGrampus = append(datasetGrampus, models.GrampusDataset{
+ Name: req.ModelName,
+ Bucket: setting.Attachment.Minio.Bucket,
+ EndPoint: setting.Attachment.Minio.Endpoint,
+ ObjectKey: req.PreTrainModelPath,
+ ReadOnly: true,
+ ContainerPath: cloudbrain.PretrainModelMountPath,
+ })
+ }
+ codeGrampus = models.GrampusDataset{
+ Name: req.CodeName,
+ Bucket: setting.Attachment.Minio.Bucket,
+ EndPoint: setting.Attachment.Minio.Endpoint,
+ ObjectKey: req.CodeStoragePath + cloudbrain.DefaultBranchName + ".zip",
+ ReadOnly: false,
+ ContainerPath: cloudbrain.CodeMountPath,
+ }
+ req.Command = fmt.Sprintf(CommandGpuDebug, cpCommand, setting.CullIdleTimeout, setting.CullIdleTimeout, setting.CullInterval, setting.CullIdleTimeout, setting.CullInterval)
+ log.Info("debug command:" + req.Command)
+
+ }
+
+ jobResult, err := createNotebookJob(models.CreateGrampusNotebookRequest{
+ Name: req.JobName,
+ Tasks: []models.GrampusNotebookTask{
+ {
+ Name: req.JobName,
+ ResourceSpecId: req.Spec.SourceSpecId,
+ ImageId: req.ImageId,
+ ImageUrl: imageUrl,
+ Datasets: datasetGrampus,
+ Code: codeGrampus,
+ AutoStopDuration: autoStopDurationMs,
+ Capacity: setting.Capacity,
+ Command: req.Command,
+ },
+ },
+ })
+ if err != nil {
+ log.Error("createNotebookJob failed: %v", err.Error())
+ return "", err
+ }
+
+ jobID := jobResult.JobInfo.JobID
+ err = models.CreateCloudbrain(&models.Cloudbrain{
+ Status: TransTrainJobStatus(jobResult.JobInfo.Status),
+ UserID: ctx.User.ID,
+ RepoID: ctx.Repo.Repository.ID,
+ JobID: jobID,
+ JobName: req.JobName,
+ DisplayJobName: req.DisplayJobName,
+ JobType: string(models.JobTypeDebug),
+ Type: models.TypeC2Net,
+ Uuid: req.Uuid,
+ DatasetName: req.DatasetNames,
+ CommitID: req.CommitID,
+ IsLatestVersion: "1",
+ ComputeResource: req.ComputeResource,
+ ImageID: req.ImageId,
+ BranchName: req.BranchName,
+ Description: req.Description,
+ WorkServerNumber: 1,
+ EngineName: req.ImageUrl,
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ Spec: req.Spec,
+ ModelName: req.ModelName,
+ ModelVersion: req.ModelVersion,
+ LabelName: req.LabelName,
+ PreTrainModelUrl: req.PreTrainModelUrl,
+ CkptName: req.CkptName,
+ })
+
+ if err != nil {
+ log.Error("CreateCloudbrain(%s) failed:%v", req.DisplayJobName, err.Error())
+ return "", err
+ }
+
+ var actionType models.ActionType
+ if req.ComputeResource == models.NPUResource {
+ actionType = models.ActionCreateGrampusNPUDebugTask
+ } else if req.ComputeResource == models.GPUResource {
+ actionType = models.ActionCreateGrampusGPUDebugTask
+ }
+ task, err := models.GetCloudbrainByJobID(jobID)
+ if err != nil {
+ log.Error("GetCloudbrainByJobID failed: %v", err.Error())
+ return "", err
+ }
+
+ stringId := strconv.FormatInt(task.ID, 10)
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, req.DisplayJobName, actionType)
+
+ return jobID, nil
+}
func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) {
createTime := timeutil.TimeStampNow()
@@ -269,11 +443,6 @@ func TransTrainJobStatus(status string) string {
return strings.ToUpper(status)
}
-func InitSpecialPool() {
- if SpecialPools == nil && setting.Grampus.SpecialPools != "" {
- json.Unmarshal([]byte(setting.Grampus.SpecialPools), &SpecialPools)
- }
-}
func GetNpuModelRemoteObsUrl(jobName string) string {
return "s3:///" + BucketRemote + "/" + GetNpuModelObjectKey(jobName)
diff --git a/modules/grampus/resty.go b/modules/grampus/resty.go
index a9e1aed5c..a0d5384e2 100755
--- a/modules/grampus/resty.go
+++ b/modules/grampus/resty.go
@@ -26,6 +26,7 @@ const (
urlGetResourceSpecs = urlOpenApiV1 + "resourcespec"
urlGetAiCenter = urlOpenApiV1 + "sharescreen/aicenter"
urlGetImages = urlOpenApiV1 + "image"
+ urlNotebookJob = urlOpenApiV1 + "notebook"
errorIllegalToken = 1005
)
@@ -87,6 +88,39 @@ func getToken() error {
return nil
}
+func createNotebookJob(req models.CreateGrampusNotebookRequest) (*models.GrampusNotebookResponse, error) {
+ checkSetting()
+ client := getRestyClient()
+ var result models.GrampusNotebookResponse
+
+ retry := 0
+
+sendjob:
+ _, err := client.R().
+ SetHeader("Content-Type", "application/json").
+ SetAuthToken(TOKEN).
+ SetBody(req).
+ SetResult(&result).
+ Post(HOST + urlNotebookJob)
+
+ if err != nil {
+ return nil, fmt.Errorf("resty CreateNotebookJob: %s", err)
+ }
+
+ if result.ErrorCode == errorIllegalToken && retry < 1 {
+ retry++
+ _ = getToken()
+ goto sendjob
+ }
+
+ if result.ErrorCode != 0 {
+ log.Error("CreateNotebookJob failed(%d): %s", result.ErrorCode, result.ErrorMsg)
+ return &result, fmt.Errorf("CreateNotebookJob failed(%d): %s", result.ErrorCode, result.ErrorMsg)
+ }
+
+ return &result, nil
+}
+
func createJob(req models.CreateGrampusJobRequest) (*models.CreateGrampusJobResponse, error) {
checkSetting()
client := getRestyClient()
@@ -120,6 +154,38 @@ sendjob:
return &result, nil
}
+func GetNotebookJob(jobID string) (*models.GrampusNotebookResponse, error) {
+ checkSetting()
+ client := getRestyClient()
+ var result models.GrampusNotebookResponse
+
+ retry := 0
+
+sendjob:
+ _, err := client.R().
+ SetAuthToken(TOKEN).
+ SetResult(&result).
+ Get(HOST + urlNotebookJob + "/" + jobID)
+
+ if err != nil {
+ return nil, fmt.Errorf("resty GetNotebookJob: %v", err)
+ }
+
+ if result.ErrorCode == errorIllegalToken && retry < 1 {
+ retry++
+ log.Info("retry get token")
+ _ = getToken()
+ goto sendjob
+ }
+
+ if result.ErrorCode != 0 {
+ log.Error("GetNotebookJob failed(%d): %s", result.ErrorCode, result.ErrorMsg)
+ return nil, fmt.Errorf("GetNotebookJob failed(%d): %s", result.ErrorCode, result.ErrorMsg)
+ }
+
+ return &result, nil
+}
+
func GetJob(jobID string) (*models.GetGrampusJobResponse, error) {
checkSetting()
client := getRestyClient()
@@ -184,18 +250,23 @@ sendjob:
return &result, nil
}
-func GetImages(processorType string) (*models.GetGrampusImagesResult, error) {
+func GetImages(processorType string, jobType string) (*models.GetGrampusImagesResult, error) {
checkSetting()
client := getRestyClient()
var result models.GetGrampusImagesResult
retry := 0
+ queryType := "TrainJob"
+ if jobType == string(models.JobTypeDebug) {
+ queryType = "Notebook"
+ }
+
sendjob:
_, err := client.R().
SetAuthToken(TOKEN).
SetResult(&result).
- Get(HOST + urlGetImages + "?processorType=" + processorType)
+ Get(HOST + urlGetImages + "?processorType=" + processorType + "&trainType=" + queryType)
if err != nil {
return nil, fmt.Errorf("resty GetImages: %v", err)
@@ -271,19 +342,26 @@ func GetGrampusMetrics(jobID string) (models.GetTrainJobMetricStatisticResult, e
return result, nil
}
-func StopJob(jobID string) (*models.GrampusStopJobResponse, error) {
+func StopJob(jobID string, jobType ...string) (*models.GrampusStopJobResponse, error) {
checkSetting()
client := getRestyClient()
var result models.GrampusStopJobResponse
retry := 0
+ url := urlTrainJob
+ if len(jobType) > 0 {
+ if jobType[0] == string(models.JobTypeDebug) {
+ url = urlNotebookJob
+ }
+ }
+
sendjob:
_, err := client.R().
//SetHeader("Content-Type", "application/json").
SetAuthToken(TOKEN).
SetResult(&result).
- Post(HOST + urlTrainJob + "/" + jobID + "/stop")
+ Post(HOST + url + "/" + jobID + "/stop")
if err != nil {
return &result, fmt.Errorf("resty StopTrainJob: %v", err)
@@ -335,3 +413,33 @@ sendjob:
return &result, nil
}
+
+func RestartNotebookJob(jobID string) (*models.GrampusNotebookRestartResponse, error) {
+	checkSetting()
+	client := getRestyClient()
+	var restartResponse models.GrampusNotebookRestartResponse
+	retry := 0
+
+sendjob:
+	res, err := client.R().
+		SetAuthToken(TOKEN).
+		SetResult(&restartResponse).
+		Post(HOST + urlNotebookJob + "/" + jobID + "/start")
+
+	if err != nil {
+		return nil, fmt.Errorf("resty grampus restart notebook job: %v", err)
+	}
+	if restartResponse.ErrorCode == errorIllegalToken && retry < 1 {
+		retry++
+		log.Info("retry get token")
+		_ = getToken()
+		goto sendjob
+	}
+
+	if res.StatusCode() != http.StatusOK {
+		log.Error("resty grampus restart notebook job failed(%s)", res.String())
+		return nil, fmt.Errorf("resty grampus restart notebook job failed: %s", res.String())
+	}
+
+	return &restartResponse, nil
+}
diff --git a/modules/modelarts/modelarts.go b/modules/modelarts/modelarts.go
index b59be307b..bd7f848fc 100755
--- a/modules/modelarts/modelarts.go
+++ b/modules/modelarts/modelarts.go
@@ -1,13 +1,9 @@
package modelarts
import (
- "encoding/base64"
"encoding/json"
"errors"
"fmt"
- "io/ioutil"
- "net/http"
- "path"
"strconv"
"strings"
@@ -20,34 +16,16 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/notification"
"code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/timeutil"
)
const (
//notebook
+
storageTypeOBS = "obs"
autoStopDuration = 4 * 60 * 60
AutoStopDurationMs = 4 * 60 * 60 * 1000
- MORDELART_USER_IMAGE_ENGINE_ID = -1
- DataSetMountPath = "/home/ma-user/work"
- NotebookEnv = "Python3"
- NotebookType = "Ascend"
- FlavorInfo = "Ascend: 1*Ascend 910 CPU: 24 核 96GiB (modelarts.kat1.xlarge)"
-
- //train-job
- // ResourcePools = "{\"resource_pool\":[{\"id\":\"pool1328035d\", \"value\":\"专属资源池\"}]}"
- // Engines = "{\"engine\":[{\"id\":1, \"value\":\"Ascend-Powered-Engine\"}]}"
- // EngineVersions = "{\"version\":[{\"id\":118,\"value\":\"MindSpore-1.0.0-c75-python3.7-euleros2.8-aarch64\"}," +
- // "{\"id\":119,\"value\":\"MindSpore-1.1.1-c76-python3.7-euleros2.8-aarch64\"}," +
- // "{\"id\":120,\"value\":\"MindSpore-1.1.1-c76-tr5-python3.7-euleros2.8-aarch64\"}," +
- // "{\"id\":117,\"value\":\"TF-1.15-c75-python3.7-euleros2.8-aarch64\"}" +
- // "]}"
- // TrainJobFlavorInfo = "{\"flavor\":[{\"code\":\"modelarts.bm.910.arm.public.2\",\"value\":\"Ascend : 2 * Ascend 910 CPU:48 核 512GiB\"}," +
- // "{\"code\":\"modelarts.bm.910.arm.public.8\",\"value\":\"Ascend : 8 * Ascend 910 CPU:192 核 2048GiB\"}," +
- // "{\"code\":\"modelarts.bm.910.arm.public.4\",\"value\":\"Ascend : 4 * Ascend 910 CPU:96 核 1024GiB\"}," +
- // "{\"code\":\"modelarts.bm.910.arm.public.1\",\"value\":\"Ascend : 1 * Ascend 910 CPU:24 核 256GiB\"}" +
- // "]}"
+
CodePath = "/code/"
OutputPath = "/output/"
ResultPath = "/result/"
@@ -190,14 +168,6 @@ type OrgMultiNode struct {
Node []int `json:"node"`
}
-// type Parameter struct {
-// Label string `json:"label"`
-// Value string `json:"value"`
-// }
-
-// type Parameters struct {
-// Parameter []Parameter `json:"parameter"`
-// }
type Parameters struct {
Parameter []struct {
@@ -206,98 +176,23 @@ type Parameters struct {
} `json:"parameter"`
}
-func GenerateTask(ctx *context.Context, jobName, uuid, description, flavor string) error {
- var dataActualPath string
- if uuid != "" {
- dataActualPath = setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + "/"
- } else {
- userPath := setting.UserBasePath + ctx.User.Name + "/"
- isExist, err := storage.ObsHasObject(userPath)
- if err != nil {
- log.Error("ObsHasObject failed:%v", err.Error(), ctx.Data["MsgID"])
- return err
- }
-
- if !isExist {
- if err = storage.ObsCreateObject(userPath); err != nil {
- log.Error("ObsCreateObject failed:%v", err.Error(), ctx.Data["MsgID"])
- return err
- }
- }
-
- dataActualPath = setting.Bucket + "/" + userPath
- }
-
- if poolInfos == nil {
- json.Unmarshal([]byte(setting.PoolInfos), &poolInfos)
- }
- createTime := timeutil.TimeStampNow()
- jobResult, err := CreateJob(models.CreateNotebookParams{
- JobName: jobName,
- Description: description,
- ProfileID: setting.ProfileID,
- Flavor: flavor,
- Pool: models.Pool{
- ID: poolInfos.PoolInfo[0].PoolId,
- Name: poolInfos.PoolInfo[0].PoolName,
- Type: poolInfos.PoolInfo[0].PoolType,
- },
- Spec: models.Spec{
- Storage: models.Storage{
- Type: storageTypeOBS,
- Location: models.Location{
- Path: dataActualPath,
- },
- },
- AutoStop: models.AutoStop{
- Enable: true,
- Duration: autoStopDuration,
- },
- },
- })
- if err != nil {
- log.Error("CreateJob failed: %v", err.Error())
- return err
- }
- err = models.CreateCloudbrain(&models.Cloudbrain{
-
- Status: string(models.JobWaiting),
- UserID: ctx.User.ID,
- RepoID: ctx.Repo.Repository.ID,
- JobID: jobResult.ID,
- JobName: jobName,
- JobType: string(models.JobTypeDebug),
- Type: models.TypeCloudBrainTwo,
- Uuid: uuid,
- ComputeResource: models.NPUResource,
- CreatedUnix: createTime,
- UpdatedUnix: createTime,
- })
-
- if err != nil {
- return err
- }
- notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobResult.ID, jobName, models.ActionCreateDebugNPUTask)
- return nil
-}
-
-func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification, bootFile string,autoStopDurationInMs int64) (string, error) {
+func GenerateNotebook2(ctx *context.Context, req cloudbrain.GenerateModelArtsNotebookReq) (string, error) {
if poolInfos == nil {
json.Unmarshal([]byte(setting.PoolInfos), &poolInfos)
}
- imageName, err := GetNotebookImageName(imageId)
+ imageName, err := GetNotebookImageName(req.ImageId)
if err != nil {
log.Error("GetNotebookImageName failed: %v", err.Error())
return "", err
}
createTime := timeutil.TimeStampNow()
jobResult, err := createNotebook2(models.CreateNotebook2Params{
- JobName: jobName,
- Description: description,
- Flavor: spec.SourceSpecId,
- Duration: autoStopDurationInMs,
- ImageID: imageId,
+ JobName: req.JobName,
+ Description: req.Description,
+ Flavor: req.Spec.SourceSpecId,
+ Duration: req.AutoStopDurationMs,
+ ImageID: req.ImageId,
PoolID: poolInfos.PoolInfo[0].PoolId,
Feature: models.NotebookFeature,
Volume: models.VolumeReq{
@@ -310,13 +205,13 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc
if err != nil {
log.Error("createNotebook2 failed: %v", err.Error())
if strings.HasPrefix(err.Error(), UnknownErrorPrefix) {
- log.Info("(%s)unknown error, set temp status", displayJobName)
+ log.Info("(%s)unknown error, set temp status", req.DisplayJobName)
errTemp := models.InsertCloudbrainTemp(&models.CloudbrainTemp{
JobID: models.TempJobId,
VersionID: models.TempVersionId,
Status: models.TempJobStatus,
Type: models.TypeCloudBrainTwo,
- JobName: jobName,
+ JobName: req.JobName,
JobType: string(models.JobTypeDebug),
})
if errTemp != nil {
@@ -327,23 +222,29 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc
return "", err
}
task := &models.Cloudbrain{
- Status: jobResult.Status,
- UserID: ctx.User.ID,
- RepoID: ctx.Repo.Repository.ID,
- JobID: jobResult.ID,
- JobName: jobName,
- FlavorCode: spec.SourceSpecId,
- DisplayJobName: displayJobName,
- JobType: string(models.JobTypeDebug),
- Type: models.TypeCloudBrainTwo,
- Uuid: uuid,
- ComputeResource: models.NPUResource,
- Image: imageName,
- BootFile: bootFile,
- Description: description,
- CreatedUnix: createTime,
- UpdatedUnix: createTime,
- Spec: spec,
+ Status: jobResult.Status,
+ UserID: ctx.User.ID,
+ RepoID: ctx.Repo.Repository.ID,
+ JobID: jobResult.ID,
+ JobName: req.JobName,
+ FlavorCode: req.Spec.SourceSpecId,
+ DisplayJobName: req.DisplayJobName,
+ JobType: string(models.JobTypeDebug),
+ Type: models.TypeCloudBrainTwo,
+ Uuid: req.Uuid,
+ ComputeResource: models.NPUResource,
+ Image: imageName,
+ BootFile: req.BootFile,
+ BranchName: req.BranchName,
+ Description: req.Description,
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ Spec: req.Spec,
+ ModelName: req.ModelName,
+ ModelVersion: req.ModelVersion,
+ LabelName: req.LabelName,
+ PreTrainModelUrl: req.PreTrainModelUrl,
+ CkptName: req.CkptName,
}
err = models.CreateCloudbrain(task)
@@ -352,7 +253,7 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc
}
stringId := strconv.FormatInt(task.ID, 10)
- notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, displayJobName, models.ActionCreateDebugNPUTask)
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, req.DisplayJobName, models.ActionCreateDebugNPUTask)
return jobResult.ID, nil
}
@@ -379,6 +280,9 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId str
Parameter: req.Parameters,
UserImageUrl: req.UserImageUrl,
UserCommand: req.UserCommand,
+ ShareAddr: setting.ModelArtsShareAddr,
+ MountPath: setting.ModelArtsMountPath,
+ NasType: setting.ModelArtsNasType,
},
})
} else {
@@ -399,6 +303,9 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId str
Code: req.Spec.SourceSpecId,
},
Parameter: req.Parameters,
+ ShareAddr: setting.ModelArtsShareAddr,
+ MountPath: setting.ModelArtsMountPath,
+ NasType: setting.ModelArtsNasType,
},
})
}
@@ -517,6 +424,9 @@ func GenerateTrainJobVersion(ctx *context.Context, req *GenerateTrainJobReq, job
PreVersionId: req.PreVersionId,
UserImageUrl: req.UserImageUrl,
UserCommand: req.UserCommand,
+ ShareAddr: setting.ModelArtsShareAddr,
+ MountPath: setting.ModelArtsMountPath,
+ NasType: setting.ModelArtsNasType,
},
}, jobId)
} else {
@@ -536,6 +446,9 @@ func GenerateTrainJobVersion(ctx *context.Context, req *GenerateTrainJobReq, job
},
Parameter: req.Parameters,
PreVersionId: req.PreVersionId,
+ ShareAddr: setting.ModelArtsShareAddr,
+ MountPath: setting.ModelArtsMountPath,
+ NasType: setting.ModelArtsNasType,
},
}, jobId)
}
@@ -914,10 +827,6 @@ func HandleNotebookInfo(task *models.Cloudbrain) error {
task.FlavorCode = result.Flavor
}
- if oldStatus != task.Status && task.Status == string(models.ModelArtsRunning) && task.BootFile != "" {
- uploadNoteBookFile(task, result)
-
- }
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err)
@@ -928,81 +837,6 @@ func HandleNotebookInfo(task *models.Cloudbrain) error {
return nil
}
-func uploadNoteBookFile(task *models.Cloudbrain, result *models.GetNotebook2Result) {
- jupyterUrl := result.Url + "?token=" + result.Token
-
- cookies, xsrf := getCookiesAndCsrf(jupyterUrl)
- if xsrf == "" {
- log.Error("browser jupyterUrl failed:%v", task.DisplayJobName)
- } else {
-
- codePath := setting.JobPath + task.JobName + cloudbrain.CodeMountPath
- fileContents, err := ioutil.ReadFile(codePath + "/" + task.BootFile)
- if err != nil {
- log.Error("read jupyter file failed:%v", task.DisplayJobName, err)
- }
-
- base64Content := base64.StdEncoding.EncodeToString(fileContents)
- client := getRestyClient()
- uploadUrl := getJupyterBaseUrl(result.Url) + "api/contents/" + path.Base(task.BootFile)
- res, err := client.R().
- SetCookies(cookies).
- SetHeader("X-XSRFToken", xsrf).
- SetBody(map[string]interface{}{
- "type": "file",
- "format": "base64",
- "name": path.Base(task.BootFile),
- "path": path.Base(task.BootFile),
- "content": base64Content}).
- Put(uploadUrl)
- if err != nil {
- log.Error("upload jupyter file failed:%v", task.DisplayJobName, err)
- } else if res.StatusCode() != http.StatusCreated {
- log.Error("upload jupyter file failed:%v", task.DisplayJobName, err)
- }
-
- }
-
-}
-
-func getJupyterBaseUrl(url string) string {
- jupyterUrlLength := len(url)
- baseUrl := url[0 : jupyterUrlLength-len(path.Base(url))]
- return baseUrl
-}
-
-func getCookiesAndCsrf(jupyterUrl string) ([]*http.Cookie, string) {
- log.Info("jupyter url:"+jupyterUrl)
- var cookies []*http.Cookie
- const retryTimes = 10
- for i := 0; i < retryTimes; i++ {
- res, err := http.Get(jupyterUrl)
- if err != nil {
- log.Error("browser jupyterUrl failed.",err)
- if i==retryTimes-1{
- return cookies, ""
- }
-
- } else {
- cookies = res.Cookies()
- xsrf := ""
- for _, cookie := range cookies {
- if cookie.Name == "_xsrf" {
- xsrf = cookie.Value
- break
- }
-
- }
- if xsrf != "" {
- return cookies, xsrf
- }
-
- }
- }
- return cookies, ""
-
-}
-
func SyncTempStatusJob() {
jobs, err := models.GetCloudBrainTempJobs()
if err != nil {
diff --git a/modules/modelarts/resty.go b/modules/modelarts/resty.go
index 3ccba9011..a8981cf9a 100755
--- a/modules/modelarts/resty.go
+++ b/modules/modelarts/resty.go
@@ -497,7 +497,7 @@ sendjob:
}
req, _ := json.Marshal(createJobParams)
- log.Info("%s", req)
+ log.Info("postapi json: %s", req)
if res.StatusCode() == http.StatusUnauthorized && retry < 1 {
retry++
@@ -543,6 +543,8 @@ func createTrainJob(createJobParams models.CreateTrainJobParams) (*models.Create
var result models.CreateTrainJobResult
retry := 0
+ req, _ := json.Marshal(createJobParams)
+ log.Info("postapi json: %s", req)
sendjob:
res, err := client.R().
diff --git a/modules/modelarts_cd/modelarts.go b/modules/modelarts_cd/modelarts.go
index 93032fa89..9d74c0919 100755
--- a/modules/modelarts_cd/modelarts.go
+++ b/modules/modelarts_cd/modelarts.go
@@ -5,6 +5,8 @@ import (
"strconv"
"strings"
+ "code.gitea.io/gitea/modules/cloudbrain"
+
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
@@ -88,19 +90,19 @@ type Parameters struct {
} `json:"parameter"`
}
-func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification, bootFile string,autoStopDurationInMs int64) (string, error) {
- imageName, err := GetNotebookImageName(imageId)
+func GenerateNotebook(ctx *context.Context, req cloudbrain.GenerateModelArtsNotebookReq) (string, error) {
+ imageName, err := GetNotebookImageName(req.ImageId)
if err != nil {
log.Error("GetNotebookImageName failed: %v", err.Error())
return "", err
}
createTime := timeutil.TimeStampNow()
jobResult, err := createNotebook(models.CreateNotebookWithoutPoolParams{
- JobName: jobName,
- Description: description,
- Flavor: spec.SourceSpecId,
- Duration: autoStopDurationInMs,
- ImageID: imageId,
+ JobName: req.JobName,
+ Description: req.Description,
+ Flavor: req.Spec.SourceSpecId,
+ Duration: req.AutoStopDurationMs,
+ ImageID: req.ImageId,
Feature: models.NotebookFeature,
Volume: models.VolumeReq{
Capacity: setting.Capacity,
@@ -112,13 +114,13 @@ func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, descr
if err != nil {
log.Error("createNotebook failed: %v", err.Error())
if strings.HasPrefix(err.Error(), UnknownErrorPrefix) {
- log.Info("(%s)unknown error, set temp status", displayJobName)
+ log.Info("(%s)unknown error, set temp status", req.DisplayJobName)
errTemp := models.InsertCloudbrainTemp(&models.CloudbrainTemp{
JobID: models.TempJobId,
VersionID: models.TempVersionId,
Status: models.TempJobStatus,
Type: models.TypeCDCenter,
- JobName: jobName,
+ JobName: req.JobName,
JobType: string(models.JobTypeDebug),
})
if errTemp != nil {
@@ -129,23 +131,29 @@ func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, descr
return "", err
}
task := &models.Cloudbrain{
- Status: jobResult.Status,
- UserID: ctx.User.ID,
- RepoID: ctx.Repo.Repository.ID,
- JobID: jobResult.ID,
- JobName: jobName,
- FlavorCode: spec.SourceSpecId,
- DisplayJobName: displayJobName,
- JobType: string(models.JobTypeDebug),
- Type: models.TypeCDCenter,
- Uuid: uuid,
- ComputeResource: models.NPUResource,
- Image: imageName,
- Description: description,
- CreatedUnix: createTime,
- UpdatedUnix: createTime,
- Spec: spec,
- BootFile: bootFile,
+ Status: jobResult.Status,
+ UserID: ctx.User.ID,
+ RepoID: ctx.Repo.Repository.ID,
+ JobID: jobResult.ID,
+ JobName: req.JobName,
+ FlavorCode: req.Spec.SourceSpecId,
+ DisplayJobName: req.DisplayJobName,
+ JobType: string(models.JobTypeDebug),
+ Type: models.TypeCDCenter,
+ Uuid: req.Uuid,
+ ComputeResource: models.NPUResource,
+ Image: imageName,
+ Description: req.Description,
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ Spec: req.Spec,
+ BootFile: req.BootFile,
+ BranchName: req.BranchName,
+ ModelName: req.ModelName,
+ ModelVersion: req.ModelVersion,
+ LabelName: req.LabelName,
+ PreTrainModelUrl: req.PreTrainModelUrl,
+ CkptName: req.CkptName,
}
err = models.CreateCloudbrain(task)
@@ -154,7 +162,7 @@ func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, descr
}
stringId := strconv.FormatInt(task.ID, 10)
- notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, displayJobName, models.ActionCreateDebugNPUTask)
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, req.DisplayJobName, models.ActionCreateDebugNPUTask)
return jobResult.ID, nil
}
diff --git a/modules/notebook/contentManager.go b/modules/notebook/contentManager.go
new file mode 100644
index 000000000..823e59665
--- /dev/null
+++ b/modules/notebook/contentManager.go
@@ -0,0 +1,198 @@
+package notebook
+
+import (
+ "crypto/tls"
+ "encoding/base64"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "path"
+ "strings"
+
+ "github.com/go-resty/resty/v2"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/cloudbrain"
+ "code.gitea.io/gitea/modules/setting"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+var restyClient *resty.Client
+
+type NotebookApiResponse struct {
+ Name string `json:"name"`
+ Path string `json:"path"`
+}
+
+type NotebookContent struct {
+ Url string
+ Path string
+ Cookies []*http.Cookie
+ Xsrf string
+ PathType string //file directory
+ Token string
+}
+
+func (c *NotebookContent) IsNotebookFileCanBrowser() bool {
+ if c.Xsrf == "" {
+ c.SetCookiesAndCsrf()
+ }
+ if c.Xsrf == "" {
+ log.Warn("xsrf is empty, can not broswer url:" + c.Url)
+ return false
+ }
+ return c.IsNoteBookContentsExist()
+
+}
+
+func (c *NotebookContent) SetCookiesAndCsrf() {
+ log.Info("jupyter url:" + c.Url)
+ var cookies []*http.Cookie
+ const retryTimes = 10
+ url := c.Url
+ if c.Token != "" {
+ url = c.Url + "?token=" + c.Token
+ }
+ for i := 0; i < retryTimes; i++ {
+ res, err := http.Get(url)
+ if err != nil {
+ log.Error("browser jupyterUrl failed.", err)
+ if i == retryTimes-1 {
+ c.Cookies = cookies
+ }
+
+ } else {
+ cookies = res.Cookies()
+ xsrf := ""
+ for _, cookie := range cookies {
+ if cookie.Name == "_xsrf" {
+ xsrf = cookie.Value
+ if len(cookies) > 1 {
+ break
+ }
+
+ }
+
+ }
+ if xsrf != "" {
+ c.Cookies = cookies
+ c.Xsrf = xsrf
+ }
+
+ }
+ }
+ c.Cookies = cookies
+
+}
+
+func (c *NotebookContent) IsNoteBookContentsExist() bool {
+ client := getRestyClient()
+ uploadUrl := getJupyterBaseUrl(c.Url) + "api/contents/" + c.Path + "?type=" + c.PathType
+ res, err := client.R().
+ SetCookies(c.Cookies).
+ SetHeader("X-XSRFToken", c.Xsrf).
+ Get(uploadUrl)
+ if err != nil {
+ log.Warn("browser url error:"+uploadUrl, err)
+ return false
+ }
+ return res.StatusCode() == http.StatusOK
+}
+
+func (c *NotebookContent) UploadNoteBookFile(task *models.Cloudbrain) error {
+
+ err := c.MakeNoteBookDir()
+ if err != nil {
+ return err
+ }
+
+ codePath := setting.JobPath + task.JobName + cloudbrain.CodeMountPath
+ fileContents, err := ioutil.ReadFile(codePath + "/" + c.Path)
+ if err != nil {
+ log.Error("read jupyter file failed:%v", task.DisplayJobName, err)
+ }
+
+ base64Content := base64.StdEncoding.EncodeToString(fileContents)
+ client := getRestyClient()
+ uploadUrl := getJupyterBaseUrl(c.Url) + "api/contents/" + c.Path
+ res, err := client.R().
+ SetCookies(c.Cookies).
+ SetHeader("X-XSRFToken", c.Xsrf).
+ SetBody(map[string]interface{}{
+ "type": "file",
+ "format": "base64",
+ "name": path.Base(c.Path),
+ "path": c.Path,
+ "content": base64Content}).
+ Put(uploadUrl)
+ if err != nil {
+ log.Error("upload jupyter file failed:%v", task.DisplayJobName, err)
+ return err
+ } else if res.StatusCode() != http.StatusCreated {
+ log.Error("upload jupyter file failed:%v, status is %s", task.DisplayJobName, res.Status(), err)
+ return fmt.Errorf("status:", res.StatusCode())
+ }
+ return nil
+}
+
+// MakeNoteBookDir creates, via the Jupyter contents API, every parent
+// directory of c.Path that does not exist yet. The last path element is
+// treated as the target and is not created itself:
+// if c.Path is a/b/c.txt it makes a/b;
+// if c.Path is a/b/c it likewise makes only a/b.
+// It returns the first error encountered, or nil on success.
+func (c *NotebookContent) MakeNoteBookDir() error {
+ filePaths := strings.Split(c.Path, "/")
+
+ for i := 0; i < len(filePaths)-1; i++ {
+ cTemp := &NotebookContent{
+ Url: c.Url,
+ Cookies: c.Cookies,
+ Path: path.Join(filePaths[0 : i+1]...),
+ PathType: "directory",
+ Xsrf: c.Xsrf,
+ }
+ if !cTemp.IsNoteBookContentsExist() {
+
+ createTempDirUrl := getJupyterBaseUrl(cTemp.Url) + "api/contents/" + cTemp.Path
+ client := getRestyClient()
+ var jobResult NotebookApiResponse
+ res, err := client.R().
+ SetCookies(c.Cookies).
+ SetHeader("X-XSRFToken", c.Xsrf).
+ SetBody(map[string]interface{}{
+ "type": cTemp.PathType,
+ "path": cTemp.Path,
+ }).SetResult(&jobResult).
+ Put(createTempDirUrl)
+ if err != nil {
+ return err
+ }
+ if res.StatusCode() != http.StatusCreated {
+ return fmt.Errorf("status code:" + res.Status())
+ }
+
+ }
+
+ }
+ return nil
+}
+
+func getJupyterBaseUrl(url string) string {
+ jupyterUrlLength := len(url)
+ baseUrl := url
+ if strings.HasSuffix(url, "lab") {
+ baseUrl = url[0 : jupyterUrlLength-len(path.Base(url))]
+ }
+
+ return baseUrl
+}
+
+func getRestyClient() *resty.Client {
+ if restyClient == nil {
+ restyClient = resty.New()
+ restyClient.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: true})
+ }
+ return restyClient
+}
diff --git a/modules/redis/redis_key/repo_redis_key.go b/modules/redis/redis_key/repo_redis_key.go
new file mode 100644
index 000000000..b2b7ccd0a
--- /dev/null
+++ b/modules/redis/redis_key/repo_redis_key.go
@@ -0,0 +1,9 @@
+package redis_key
+
+import "fmt"
+
+const REPO_PREFIX = "repo"
+
+func RepoTopNContributors(repoId int64, N int) string {
+ return KeyJoin(REPO_PREFIX, fmt.Sprint(repoId), fmt.Sprint(N), "top_n_contributor")
+}
diff --git a/modules/setting/setting.go b/modules/setting/setting.go
index bf7eb2352..09e7259f2 100755
--- a/modules/setting/setting.go
+++ b/modules/setting/setting.go
@@ -518,7 +518,7 @@ var (
MaxDatasetNum int
CullIdleTimeout string
CullInterval string
-
+ DebugAttachSize int
//benchmark config
IsBenchmarkEnabled bool
@@ -544,6 +544,12 @@ var (
BrainScoreName string
BrainScoreServerHost string
+ IsSnn4EcosetEnabled bool
+ Snn4EcosetOwner string
+ Snn4EcosetName string
+ Snn4EcosetServerHost string
+ Snn4AttachmentName string
+
//blockchain config
BlockChainHost string
CommitValidDate string
@@ -585,6 +591,9 @@ var (
TrainJobFLAVORINFOS string
ModelArtsSpecialPools string
ModelArtsMultiNode string
+ ModelArtsShareAddr string
+ ModelArtsMountPath string
+ ModelArtsNasType string
//kanban
IsCloudbrainTimingEnabled bool
@@ -614,14 +623,14 @@ var (
UsageRateBeginTime string
}{}
- ClearStrategy= struct {
- Enabled bool
- ResultSaveDays int
- BatchSize int
- DebugJobSize int
- TrashSaveDays int
- Cron string
- RunAtStart bool
+ ClearStrategy = struct {
+ Enabled bool
+ ResultSaveDays int
+ BatchSize int
+ DebugJobSize int
+ TrashSaveDays int
+ Cron string
+ RunAtStart bool
}{}
C2NetInfos *C2NetSqInfos
@@ -677,6 +686,10 @@ var (
CloudbrainStoppedTitle string
CloudbrainStoppedRemark string
+ //repo square config
+ IncubationSourceOrgName string
+ PaperRepoTopicName string
+
//nginx proxy
PROXYURL string
RadarMap = struct {
@@ -704,6 +717,7 @@ var (
ProjectHealth float64
ProjectHealthIssueCompleteRatio float64
+ ProjectHealth0IssueCloseRatio float64
TeamHealth float64
TeamHealthContributors float64
@@ -1490,6 +1504,7 @@ func NewContext() {
MaxDatasetNum = sec.Key("MAX_DATASET_NUM").MustInt(5)
CullIdleTimeout = sec.Key("CULL_IDLE_TIMEOUT").MustString("900")
CullInterval = sec.Key("CULL_INTERVAL").MustString("60")
+ DebugAttachSize = sec.Key("DEBUG_ATTACH_SIZE").MustInt(20)
sec = Cfg.Section("benchmark")
IsBenchmarkEnabled = sec.Key("ENABLED").MustBool(false)
@@ -1515,6 +1530,13 @@ func NewContext() {
BrainScoreName = sec.Key("NAME").MustString("")
BrainScoreServerHost = sec.Key("HOST").MustString("")
+ sec = Cfg.Section("snn4ecoset")
+ IsSnn4EcosetEnabled = sec.Key("ENABLED").MustBool(false)
+ Snn4EcosetOwner = sec.Key("OWNER").MustString("")
+ Snn4EcosetName = sec.Key("NAME").MustString("")
+ Snn4EcosetServerHost = sec.Key("HOST").MustString("")
+ Snn4AttachmentName = sec.Key("DATASET").MustString("")
+
sec = Cfg.Section("blockchain")
BlockChainHost = sec.Key("HOST").MustString("http://192.168.136.66:3302/")
CommitValidDate = sec.Key("COMMIT_VALID_DATE").MustString("2021-01-15")
@@ -1553,6 +1575,9 @@ func NewContext() {
TrainJobFLAVORINFOS = sec.Key("TrainJob_FLAVOR_INFOS").MustString("")
ModelArtsSpecialPools = sec.Key("SPECIAL_POOL").MustString("")
ModelArtsMultiNode = sec.Key("MULTI_NODE").MustString("")
+ ModelArtsShareAddr = sec.Key("ModelArts_Share_Addr").MustString("192.168.0.30:/")
+ ModelArtsMountPath = sec.Key("ModelArts_Mount_Path").MustString("/cache/sfs")
+ ModelArtsNasType = sec.Key("ModelArts_Nas_Type").MustString("nfs")
sec = Cfg.Section("elk")
ElkUrl = sec.Key("ELKURL").MustString("")
@@ -1585,6 +1610,10 @@ func NewContext() {
CloudbrainStoppedTitle = sec.Key("CLOUDBRAIN_STOPPED_TITLE").MustString("您好,您申请的算力资源已结束使用,任务已完成运行,状态为%s,请您关注运行结果")
CloudbrainStoppedRemark = sec.Key("CLOUDBRAIN_STOPPED_REMARK").MustString("感谢您的耐心等待。")
+ sec = Cfg.Section("repo-square")
+ IncubationSourceOrgName = sec.Key("INCUBATION_ORG_NAME").MustString("OpenI")
+ PaperRepoTopicName = sec.Key("PAPER_REPO_TOPIC_NAME").MustString("openi-paper")
+
sec = Cfg.Section("point")
CloudBrainPaySwitch = sec.Key("CLOUDBRAIN_PAY_SWITCH").MustBool(false)
CloudBrainPayDelay = sec.Key("CLOUDBRAIN_PAY_DELAY").MustDuration(30 * time.Minute)
@@ -1691,16 +1720,16 @@ func getModelartsCDConfig() {
getNotebookFlavorInfos()
}
-func getClearStrategy(){
+func getClearStrategy() {
sec := Cfg.Section("clear_strategy")
- ClearStrategy.Enabled=sec.Key("ENABLED").MustBool(false)
- ClearStrategy.ResultSaveDays=sec.Key("RESULT_SAVE_DAYS").MustInt(30)
- ClearStrategy.BatchSize=sec.Key("BATCH_SIZE").MustInt(500)
- ClearStrategy.DebugJobSize=sec.Key("DEBUG_BATCH_SIZE").MustInt(100)
- ClearStrategy.TrashSaveDays=sec.Key("TRASH_SAVE_DAYS").MustInt(90)
- ClearStrategy.Cron=sec.Key("CRON").MustString("* 0,30 2-8 * * ?")
- ClearStrategy.RunAtStart=sec.Key("RUN_AT_START").MustBool(false)
+ ClearStrategy.Enabled = sec.Key("ENABLED").MustBool(false)
+ ClearStrategy.ResultSaveDays = sec.Key("RESULT_SAVE_DAYS").MustInt(30)
+ ClearStrategy.BatchSize = sec.Key("BATCH_SIZE").MustInt(500)
+ ClearStrategy.DebugJobSize = sec.Key("DEBUG_BATCH_SIZE").MustInt(100)
+ ClearStrategy.TrashSaveDays = sec.Key("TRASH_SAVE_DAYS").MustInt(90)
+ ClearStrategy.Cron = sec.Key("CRON").MustString("* 0,30 2-8 * * ?")
+ ClearStrategy.RunAtStart = sec.Key("RUN_AT_START").MustBool(false)
}
func getGrampusConfig() {
@@ -1767,6 +1796,7 @@ func SetRadarMapConfig() {
RadarMap.LivenessRelease = sec.Key("liveness_release").MustFloat64(0.4)
RadarMap.ProjectHealth = sec.Key("project_health").MustFloat64(0.1)
RadarMap.ProjectHealthIssueCompleteRatio = sec.Key("project_health_issue_complete_ratio").MustFloat64(100)
+ RadarMap.ProjectHealth0IssueCloseRatio = sec.Key("project_health_0_issue_close_ratio").MustFloat64(0.0)
RadarMap.TeamHealth = sec.Key("team_health").MustFloat64(0.1)
RadarMap.TeamHealthContributors = sec.Key("team_health_contributors").MustFloat64(0.2)
RadarMap.TeamHealthKeyContributors = sec.Key("team_health_key_contributors").MustFloat64(0.6)
diff --git a/modules/structs/cloudbrain.go b/modules/structs/cloudbrain.go
index 9ea5601c9..cba297dc0 100644
--- a/modules/structs/cloudbrain.go
+++ b/modules/structs/cloudbrain.go
@@ -47,36 +47,37 @@ type CreateFileNotebookJobOption struct {
BranchName string `json:"branch_name" binding:"Required"`
OwnerName string `json:"owner_name" binding:"Required"`
ProjectName string `json:"project_name" binding:"Required"`
+ JobId string `json:"job_id"`
}
type Cloudbrain struct {
- ID int64 `json:"id"`
- JobID string `json:"job_id"`
- JobType string `json:"job_type"`
- Type int `json:"type"`
- DisplayJobName string `json:"display_job_name"`
- Status string `json:"status"`
- CreatedUnix int64 `json:"created_unix"`
- RepoID int64 `json:"repo_id"`
- Duration int64 `json:"duration"` //运行时长 单位秒
- TrainJobDuration string `json:"train_job_duration"`
- ImageID string `json:"image_id"` //grampus image_id
- Image string `json:"image"`
- Uuid string `json:"uuid"` //数据集id
- DatasetName string `json:"dataset_name"`
- ComputeResource string `json:"compute_resource"` //计算资源,例如npu
- AiCenter string `json:"ai_center"` //grampus ai center: center_id+center_name
- BranchName string `json:"branch_name"` //分支名称
- Parameters string `json:"parameters"` //传给modelarts的param参数
- BootFile string `json:"boot_file"` //启动文件
- Description string `json:"description"` //描述
- ModelName string `json:"model_name"` //模型名称
- ModelVersion string `json:"model_version"` //模型版本
- CkptName string `json:"ckpt_name"` //权重文件名称
- StartTime int64 `json:"start_time"`
- EndTime int64 `json:"end_time"`
-
- Spec *SpecificationShow `json:"spec"`
+ ID int64 `json:"id"`
+ JobID string `json:"job_id"`
+ JobType string `json:"job_type"`
+ Type int `json:"type"`
+ DisplayJobName string `json:"display_job_name"`
+ Status string `json:"status"`
+ CreatedUnix int64 `json:"created_unix"`
+ RepoID int64 `json:"repo_id"`
+ Duration int64 `json:"duration"` //运行时长 单位秒
+ TrainJobDuration string `json:"train_job_duration"`
+ ImageID string `json:"image_id"` //grampus image_id
+ Image string `json:"image"`
+ Uuid string `json:"uuid"` //数据集id
+ DatasetName string `json:"dataset_name"`
+ ComputeResource string `json:"compute_resource"` //计算资源,例如npu
+ AiCenter string `json:"ai_center"` //grampus ai center: center_id+center_name
+ BranchName string `json:"branch_name"` //分支名称
+ Parameters string `json:"parameters"` //传给modelarts的param参数
+ BootFile string `json:"boot_file"` //启动文件
+ Description string `json:"description"` //描述
+ ModelName string `json:"model_name"` //模型名称
+ ModelVersion string `json:"model_version"` //模型版本
+ CkptName string `json:"ckpt_name"` //权重文件名称
+ StartTime int64 `json:"start_time"`
+ EndTime int64 `json:"end_time"`
+ VersionName string `json:"version_name"`
+ Spec *SpecificationShow `json:"spec"`
}
type SpecificationShow struct {
diff --git a/modules/structs/pipeline.go b/modules/structs/pipeline.go
new file mode 100644
index 000000000..fd26d1b51
--- /dev/null
+++ b/modules/structs/pipeline.go
@@ -0,0 +1,23 @@
+package structs
+
+type Pipeline struct {
+ ID int64 `json:"id"`
+ Name string `json:"name"`
+ Status string `json:"status"`
+}
+type NodeInfo struct {
+ Name string `json:"name"`
+ Status string `json:"status"`
+ Code string `json:"code"`
+ Message string `json:"message"`
+}
+
+type PipelineNotification struct {
+ Type int `json:"type"`
+ Username string `json:"username"`
+ Reponame string `json:"reponame"`
+ Pipeline Pipeline `json:"pipeline"`
+ PipelineRunId string `json:"pipeline_run_id"`
+ Node NodeInfo `json:"node"`
+ OccurTime int64 `json:"occur_time"`
+}
diff --git a/modules/templates/helper.go b/modules/templates/helper.go
index c314127f1..88c63c8c7 100755
--- a/modules/templates/helper.go
+++ b/modules/templates/helper.go
@@ -47,6 +47,7 @@ const (
REF_TYPE_BRANCH = "branch"
REF_TYPE_TAG = "tag"
REF_TYPE_PATTERN = "(refs/heads/|refs/tags/)"
+ DURATION_STR_ZERO = "00:00:00"
)
// Used from static.go && dynamic.go
@@ -97,11 +98,15 @@ func NewFuncMap() []template.FuncMap {
"AllowedReactions": func() []string {
return setting.UI.Reactions
},
+ "DebugAttachSize": func() int {
+ return setting.DebugAttachSize * 1000 * 1000 * 1000
+ },
"AvatarLink": models.AvatarLink,
"Safe": Safe,
"SafeJS": SafeJS,
"Str2html": Str2html,
"subOne": subOne,
+ "addOne": addOne,
"TimeSince": timeutil.TimeSince,
"TimeSinceUnix": timeutil.TimeSinceUnix,
"TimeSinceUnix1": timeutil.TimeSinceUnix1,
@@ -109,6 +114,7 @@ func NewFuncMap() []template.FuncMap {
"AttachmentStatus": dataset.GetStatusText,
"IsShowDataSetOfCurrentRepo": dataset.IsShowDataSetOfCurrentRepo,
"TimeSinceUnixShort": timeutil.TimeSinceUnixShort,
+ "ConvertDurationToStr": ConvertDurationToStr,
"RawTimeSince": timeutil.RawTimeSince,
"FileSize": base.FileSize,
"PrettyNumber": base.PrettyNumber,
@@ -151,7 +157,7 @@ func NewFuncMap() []template.FuncMap {
"EscapePound": func(str string) string {
return strings.NewReplacer("%", "%25", "#", "%23", " ", "%20", "?", "%3F").Replace(str)
},
- "IpynbBool":func(str string) bool{
+ "IpynbBool": func(str string) bool {
return strings.Contains(str, ".ipynb")
},
"nl2br": func(text string) template.HTML {
@@ -365,6 +371,7 @@ func NewTextFuncMap() []texttmpl.FuncMap {
"TimeSinceUnix": timeutil.TimeSinceUnix,
"TimeSinceUnix1": timeutil.TimeSinceUnix1,
"TimeSinceUnixShort": timeutil.TimeSinceUnixShort,
+ "ConvertDurationToStr": ConvertDurationToStr,
"RawTimeSince": timeutil.RawTimeSince,
"AttachmentResourceType": dataset.GetResourceType,
"AttachmentStatus": dataset.GetStatusText,
@@ -467,7 +474,9 @@ func Str2html(raw string) template.HTML {
func subOne(length int) int {
return length - 1
}
-
+func addOne(length int64) int64 {
+ return length + 1
+}
// Escape escapes a HTML string
func Escape(raw string) string {
return html.EscapeString(raw)
@@ -804,3 +813,9 @@ func MB2GB(size int) string {
}
return s
}
+func ConvertDurationToStr(duration int64) string {
+ if duration <= 0 {
+ return DURATION_STR_ZERO
+ }
+ return util.AddZero(duration/3600) + ":" + util.AddZero(duration%3600/60) + ":" + util.AddZero(duration%60)
+}
diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini
index 082b35ca8..59c8f100b 100755
--- a/options/locale/locale_en-US.ini
+++ b/options/locale/locale_en-US.ini
@@ -577,6 +577,7 @@ static.CloudBrainTaskNum=CloudBrain Task Count
static.CloudBrainRunTime=CloudBrain Run Time
static.CommitDatasetNum=Commit Dataset Count
static.CommitModelCount=Commit Model Count
+static.ModelConvertCount=Model Convert Count
static.UserIndex=Normalized user index
static.UserIndexPrimitive=User Index
static.countdate=Count Date
@@ -852,6 +853,7 @@ description = Description
description_format_err=Description's length can be up to %s characters long.
create_dataset = Create Dataset
download_url=Download Url
+download_model_url=Download Url
download_oper=Operation
download_copy=Copy URL
create_dataset_fail=Failed to create dataset.
@@ -1060,6 +1062,9 @@ model_rename=Duplicate model name, please modify model name.
notebook_file_not_exist=Notebook file does not exist.
notebook_select_wrong=Please select a Notebook(.ipynb) file first.
notebook_file_no_right=You have no right to access the Notebook(.ipynb) file.
+notebook_repo_conflict=Files in different branches of the same repository cannot run at the same time.
+debug_again_fail=Failed to restart the debug task, please try again later.
+debug_again_fail_forever=The task failed to be scheduled last time, so it cannot be restarted.
date=Date
repo_add=Project Increment
@@ -1077,6 +1082,7 @@ delete=Delete
more=More
gpu_type_all=All
model_download=Model Download
+all_result_download=All result download
submit_image=Submit Image
modify_image=Modify Image
image_exist=Image name has been used, please use a new one.
@@ -1089,8 +1095,8 @@ image_delete_fail=Failed to delete image, please try again later.
image_overwrite=You had submitted the same name image before, are you sure to overwrite the original image?
download=Download
score=Score
-wait_count_start = There are currently
-wait_count_end = tasks queued
+wait_count_start = Your current queue position is
+wait_count_end =
file_limit_100 = Display up to 100 files or folders in a single directory
images.name = Image Tag
images.name_placerholder = Please enter the image name
@@ -1342,9 +1348,12 @@ modelconvert.inputshapeerror=Format input error, please input such as: 1,1,32,32
modelconvert.manage.create_error1=A model transformation task with the same name already exists.
modelconvert.manage.create_error2=Only one running model transformation task can be created.
-modelconvert.manage.model_not_exist=The model does not exist.
+modelconvert.manage.model_not_exist=The model in the task does not exist or has been deleted.
modelconvert.manage.no_operate_right=You have no right to do the operation.
+debug.manage.model_not_exist=The model in the task does not exist or has been deleted, please create a new debug job.
+debug.manage.dataset_not_exist=The part of datasets in the task does not exist or has been deleted, please create a new debug job.
+
grampus.train_job.ai_center = AI Center
grampus.dataset_path_rule = The code is storaged in /cache/code;the dataset is storaged in /cache/dataset;and please put your model into /cache/output, then you can download it online。
grampus.gpu_dataset_path_rule = The code is storaged in /tmp/code;the dataset is storaged in /tmp/dataset;and please put your model into /tmp/output, then you can download it online。
@@ -2750,6 +2759,10 @@ repos.pr=PR
repos.commit=Commit
repos.closedIssues=Closed Issue
repos.contributor=Contributor
+repos.numDataset=Dataset File
+repos.numCloudbrain=Cloudbrain Task
+repos.numModel=Model
+repos.numModelConvert=Model Convert Task
repos.yes=Yes
repos.no=No
@@ -3120,6 +3133,8 @@ reject_pull_request = `suggested changes for %s#%[2]s`
upload_dataset=`upload dataset %s`
task_gpudebugjob=`created CPU/GPU type debugging task %s`
task_npudebugjob=`created NPU type debugging task %s`
+task_c2net_gpudebugjob=`created CPU/GPU type debugging task %s`
+task_c2net_npudebugjob=`created NPU type debugging task %s`
task_nputrainjob=`created NPU training task %s`
task_inferencejob=`created reasoning task %s`
task_benchmark=`created profiling task %s`
@@ -3239,6 +3254,7 @@ dataset = Dataset
resource_specification = Resource specification
dataset_storage_path = Dataset storage path
model_storage_path = Model storage path
+output_storage_path = Output storage path
code_storage_path = Code storage path
benchmark_path = Benchmark script path
snn4imagenet_path = Snn4imagenet script path
@@ -3294,8 +3310,11 @@ load_code_failed=Fail to load code, please check if the right branch is selected
error.dataset_select = dataset select error:the count exceed the limit or has same name
new_train_gpu_tooltips = The code is storaged in %s, the dataset is storaged in %s, the pre-trained model is storaged in the run parameter %s, and please put your model into %s then you can download it online
+new_debug_gpu_tooltips = The code is stored in %s, the dataset is stored in %s, the pre-trained model is stored in %s, and please put your model into %s, then you can download it online.
+new_debug_gpu_tooltips1 = The code is stored in %s, the dataset is stored in %s, the pre-trained model is stored in %s.
new_train_npu_tooltips = The code is storaged in %s, the pre-trained model is storaged in the run parameter %s, and please put your model into %s then you can download it online
new_infer_gpu_tooltips = The dataset is stored in %s, the model file is stored in %s, please store the inference output in %s for subsequent downloads.
+code_obs_address = Code OBS address
[points]
points = points
diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini
index f4e8f1aea..3793c7382 100755
--- a/options/locale/locale_zh-CN.ini
+++ b/options/locale/locale_zh-CN.ini
@@ -581,6 +581,7 @@ static.CloudBrainTaskNum=云脑任务数
static.CloudBrainRunTime=云脑运行时间(小时)
static.CommitDatasetNum=上传(提交)数据集文件数
static.CommitModelCount=提交模型数
+static.ModelConvertCount=模型转换数
static.UserIndex=归一化用户指数
static.UserIndexPrimitive=用户指数
static.countdate=系统统计时间
@@ -863,6 +864,7 @@ reference_dataset_fail=关联数据集失败,请稍后再试。
cancel_reference_dataset_fail=取消关联数据集失败,请稍后再试。
download_url=数据集下载地址
+download_model_url=模型文件下载地址
download_copy=复制链接
download_oper=操作
show_dataset=数据集
@@ -1059,6 +1061,9 @@ model_rename=模型名称重复,请修改模型名称
notebook_file_not_exist=Notebook文件不存在。
notebook_select_wrong=请先选择Notebook(.ipynb)文件。
notebook_file_no_right=您没有这个Notebook文件的读权限。
+notebook_repo_conflict=同一个仓库的不同分支文件不能同时运行。
+debug_again_fail=再次调试失败,请稍后再试。
+debug_again_fail_forever=这个任务之前没有调度成功,不能再次调试。
date=日期
repo_add=新增项目
@@ -1076,6 +1081,7 @@ delete=删除
more=更多
gpu_type_all=全部
model_download=结果下载
+all_result_download=全部结果下载
submit_image=提交镜像
modify_image=修改镜像
image_exist=镜像Tag已被使用,请修改镜像Tag。
@@ -1088,8 +1094,8 @@ image_delete_fail=删除镜像失败,请稍后再试。
image_overwrite=您已经提交过相同名称的镜像,您确定要覆盖原来提交的镜像吗?
download=模型下载
score=评分
-wait_count_start = 当前有
-wait_count_end = 个任务正在排队
+wait_count_start = 您当前排队位置是第
+wait_count_end = 位
file_limit_100 = 单目录下最多显示100个文件或文件夹
images.name = 镜像Tag
images.name_placerholder = 请输入镜像Tag
@@ -1356,9 +1362,13 @@ modelconvert.modelfileempty=请选择模型文件。
modelconvert.manage.create_error1=相同的名称模型转换任务已经存在。
modelconvert.manage.create_error2=只能创建一个正在运行的模型转换任务。
-modelconvert.manage.model_not_exist=选择的模型不存在。
+modelconvert.manage.model_not_exist=任务中选择的模型不存在或者已被删除。
modelconvert.manage.no_operate_right=您没有操作权限。
+
+debug.manage.model_not_exist=任务中选择的模型不存在或者已被删除,请新建调试任务。
+debug.manage.dataset_not_exist=任务中选择的部分数据集不存在或者已被删除,请新建调试任务。
+
grampus.train_job.ai_center=智算中心
grampus.dataset_path_rule = 训练脚本存储在/cache/code中,数据集存储在/cache/dataset中,训练输出请存储在/cache/output中以供后续下载。
grampus.gpu_dataset_path_rule = 训练脚本存储在/tmp/code中,数据集存储在/tmp/dataset中,训练输出请存储在/tmp/output中以供后续下载。
@@ -1666,7 +1676,7 @@ issues.action_assignee_no_select=未指派
issues.opened_by=由 %[3]s 于 %[1]s创建
pulls.merged_by=由 %[3]s 于 %[1]s 合并
pulls.merged_by_fake=由 %[2]s 于 %[1]s 合并
-issues.closed_by=按 %[3]s 关闭%[1]s
+issues.closed_by=由 %[3]s 创建,被关闭于 %[1]s
issues.opened_by_fake=由 %[2]s 于 %[1]s创建
issues.closed_by_fake=通过 %[2]s 关闭 %[1]s
issues.previous=上一页
@@ -2767,6 +2777,11 @@ repos.pr=PR数
repos.commit=Commit数
repos.closedIssues=已解决任务数
repos.contributor=贡献者数
+repos.numDataset=数据集文件数
+repos.numCloudbrain=云脑任务数
+repos.numModel=模型数
+repos.numModelConvert=转换任务数
+
repos.yes=是
repos.no=否
@@ -3137,6 +3152,8 @@ reject_pull_request=`建议变更 %s#%[2]s`
upload_dataset=`上传了数据集文件 %s`
task_gpudebugjob=`创建了CPU/GPU类型调试任务 %s`
task_npudebugjob=`创建了NPU类型调试任务 %s`
+task_c2net_gpudebugjob=`创建了CPU/GPU类型调试任务 %s`
+task_c2net_npudebugjob=`创建了NPU类型调试任务 %s`
task_nputrainjob=`创建了NPU类型训练任务 %s`
task_inferencejob=`创建了推理任务 %s`
task_benchmark=`创建了评测任务 %s`
@@ -3257,6 +3274,7 @@ resource_specification = 资源规格
dataset_storage_path = 数据集存放路径
model_storage_path = 模型存放路径
code_storage_path = 代码存放路径
+output_storage_path = 输出存放路径
benchmark_path = benchmark脚本存放路径
snn4imagenet_path = snn4imagenet脚本存放路径
brainscore_path = brainscore脚本存放路径
@@ -3311,11 +3329,14 @@ Stopped_failed=任务停止失败,请稍后再试。
Stopped_success_update_status_fail=任务停止成功,状态及运行时间更新失败。
load_code_failed=代码加载失败,请确认选择了正确的分支。
-
+error.debug_datasetsize = 数据集大小超过限制('%d'GB)
error.dataset_select = 数据集选择错误:数量超过限制或者有同名数据集
new_train_gpu_tooltips = 训练脚本存储在 %s 中,数据集存储在 %s 中,预训练模型存放在运行参数 %s 中,训练输出请存储在 %s 中以供后续下载。
+new_debug_gpu_tooltips = 项目代码存储在 %s 中,数据集存储在 %s 中,选择的模型存储在 %s 中,调试输出请存储在 %s 中以供后续下载。
+new_debug_gpu_tooltips1 = 项目代码存储在 %s 中,数据集存储在 %s 中,选择的模型存储在 %s 中。
new_train_npu_tooltips = 训练脚本存储在 %s 中,预训练模型存放在运行参数 %s 中,训练输出请存储在 %s 中以供后续下载。
new_infer_gpu_tooltips = 数据集存储在 %s 中,模型文件存储在 %s 中,推理输出请存储在 %s 中以供后续下载。
+code_obs_address = 代码obs地址
[points]
points = 积分
diff --git a/package-lock.json b/package-lock.json
index 7dc42d04c..ee922450a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1982,28 +1982,6 @@
"object.assign": "^4.1.0"
}
},
- "babel-polyfill": {
- "version": "6.26.0",
- "resolved": "https://registry.npm.taobao.org/babel-polyfill/download/babel-polyfill-6.26.0.tgz",
- "integrity": "sha1-N5k3q8Z9eJWXCtxiHyhM2WbPIVM=",
- "requires": {
- "babel-runtime": "^6.26.0",
- "core-js": "^2.5.0",
- "regenerator-runtime": "^0.10.5"
- },
- "dependencies": {
- "core-js": {
- "version": "2.6.12",
- "resolved": "https://registry.npm.taobao.org/core-js/download/core-js-2.6.12.tgz?cache=0&sync_timestamp=1611040749668&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Fcore-js%2Fdownload%2Fcore-js-2.6.12.tgz",
- "integrity": "sha1-2TM9+nsGXjR8xWgiGdb2kIWcwuw="
- },
- "regenerator-runtime": {
- "version": "0.10.5",
- "resolved": "https://registry.npm.taobao.org/regenerator-runtime/download/regenerator-runtime-0.10.5.tgz",
- "integrity": "sha1-M2w+/BIgrc7dosn6tntaeVWjNlg="
- }
- }
- },
"babel-runtime": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz",
@@ -2194,9 +2172,9 @@
"integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg=="
},
"blueimp-md5": {
- "version": "2.18.0",
- "resolved": "https://registry.npm.taobao.org/blueimp-md5/download/blueimp-md5-2.18.0.tgz",
- "integrity": "sha1-EVK+EzXwxrORHtnjbbVPPmrFKTU="
+ "version": "2.19.0",
+ "resolved": "https://registry.npmjs.org/blueimp-md5/-/blueimp-md5-2.19.0.tgz",
+ "integrity": "sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w=="
},
"bn.js": {
"version": "5.1.1",
@@ -3536,6 +3514,11 @@
"resolved": "https://registry.npmjs.org/dateformat/-/dateformat-2.2.0.tgz",
"integrity": "sha1-QGXiATz5+5Ft39gu+1Bq1MZ2kGI="
},
+ "dayjs": {
+ "version": "1.10.7",
+ "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.10.7.tgz",
+ "integrity": "sha512-P6twpd70BcPK34K26uJ1KT3wlhpuOAPoMwJzpsIWUxHZ7wpmbdZL/hQqBDfz7hGurYSa5PhzdhDHtt319hL3ig=="
+ },
"de-indent": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/de-indent/-/de-indent-1.0.2.tgz",
@@ -4152,25 +4135,29 @@
"integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ="
},
"esdk-obs-browserjs": {
- "version": "3.20.7",
- "resolved": "https://registry.npm.taobao.org/esdk-obs-browserjs/download/esdk-obs-browserjs-3.20.7.tgz",
- "integrity": "sha1-vhziRlKEhW3PgZPl0DyX68bJI0s=",
+ "version": "3.22.3",
+ "resolved": "https://registry.npmjs.org/esdk-obs-browserjs/-/esdk-obs-browserjs-3.22.3.tgz",
+ "integrity": "sha512-MATZXp0FwjPtKG9tpdfURa3koUarR/ev+tbO0oUKgj0GRt0798ZxmfCvYvRpgNst4w1ht4E79ikD4H40UYLgPA==",
"requires": {
- "axios": "^0.19.0",
- "babel-polyfill": "^6.26.0",
- "blueimp-md5": "^2.10.0",
- "js-base64": "^2.3.2",
- "jssha": "^2.3.1",
- "urijs": "^1.19.1"
+ "axios": "^0.26.1",
+ "blueimp-md5": "^2.18.0",
+ "js-base64": "^3.7.1",
+ "jssha": "^3.2.0",
+ "urijs": "^1.19.7"
},
"dependencies": {
"axios": {
- "version": "0.19.2",
- "resolved": "https://registry.npm.taobao.org/axios/download/axios-0.19.2.tgz?cache=0&sync_timestamp=1608609215811&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Faxios%2Fdownload%2Faxios-0.19.2.tgz",
- "integrity": "sha1-PqNsXYgY0NX4qKl6bTa4bNwAyyc=",
+ "version": "0.26.1",
+ "resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
+ "integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
"requires": {
- "follow-redirects": "1.5.10"
+ "follow-redirects": "^1.14.8"
}
+ },
+ "js-base64": {
+ "version": "3.7.3",
+ "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-3.7.3.tgz",
+ "integrity": "sha512-PAr6Xg2jvd7MCR6Ld9Jg3BmTcjYsHEBx1VlwEwULb/qowPf5VD9kEMagj23Gm7JRnSvE/Da/57nChZjnvL8v6A=="
}
}
},
@@ -5382,27 +5369,9 @@
}
},
"follow-redirects": {
- "version": "1.5.10",
- "resolved": "https://registry.npm.taobao.org/follow-redirects/download/follow-redirects-1.5.10.tgz?cache=0&sync_timestamp=1611606737937&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Ffollow-redirects%2Fdownload%2Ffollow-redirects-1.5.10.tgz",
- "integrity": "sha1-e3qfmuov3/NnhqlP9kPtB/T/Xio=",
- "requires": {
- "debug": "=3.1.0"
- },
- "dependencies": {
- "debug": {
- "version": "3.1.0",
- "resolved": "https://registry.npm.taobao.org/debug/download/debug-3.1.0.tgz?cache=0&sync_timestamp=1607566533140&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Fdebug%2Fdownload%2Fdebug-3.1.0.tgz",
- "integrity": "sha1-W7WgZyYotkFJVmuhaBnmFRjGcmE=",
- "requires": {
- "ms": "2.0.0"
- }
- },
- "ms": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
- "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
- }
- }
+ "version": "1.15.2",
+ "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
+ "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA=="
},
"fomantic-ui": {
"version": "2.8.4",
@@ -7884,9 +7853,9 @@
}
},
"jssha": {
- "version": "2.4.2",
- "resolved": "https://registry.npm.taobao.org/jssha/download/jssha-2.4.2.tgz",
- "integrity": "sha1-2VCwlWNJKL1rK9odQtqaOnYtZek="
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/jssha/-/jssha-3.3.0.tgz",
+ "integrity": "sha512-w9OtT4ALL+fbbwG3gw7erAO0jvS5nfvrukGPMWIAoea359B26ALXGpzy4YJSp9yGnpUvuvOw1nSjSoHDfWSr1w=="
},
"just-debounce": {
"version": "1.0.0",
@@ -14400,9 +14369,9 @@
}
},
"urijs": {
- "version": "1.19.6",
- "resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.6.tgz",
- "integrity": "sha512-eSXsXZ2jLvGWeLYlQA3Gh36BcjF+0amo92+wHPyN1mdR8Nxf75fuEuYTd9c0a+m/vhCjRK0ESlE9YNLW+E1VEw=="
+ "version": "1.19.11",
+ "resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.11.tgz",
+ "integrity": "sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ=="
},
"urix": {
"version": "0.1.0",
diff --git a/package.json b/package.json
index 7748f3de3..fa2c5327b 100644
--- a/package.json
+++ b/package.json
@@ -17,11 +17,12 @@
"core-js": "3.6.5",
"css-loader": "3.5.3",
"cssnano": "4.1.10",
+ "dayjs": "1.10.7",
"domino": "2.1.5",
"dropzone": "5.7.2",
"echarts": "3.8.5",
"element-ui": "2.15.5",
- "esdk-obs-browserjs": "3.20.7",
+ "esdk-obs-browserjs": "3.22.3",
"esdk-obs-nodejs": "3.20.11",
"fast-glob": "3.2.2",
"file-loader": "6.0.0",
diff --git a/public/home/home.js b/public/home/home.js
index aeb51b184..fe843161e 100755
--- a/public/home/home.js
+++ b/public/home/home.js
@@ -247,7 +247,7 @@ document.onreadystatechange = function () {
html += recordPrefix + actionName;
html += " " + record.RefName + ""
}
- else if(record.OpType == "25" || record.OpType == "29"){
+ else if(record.OpType == "25" || record.OpType == "29" || record.OpType == "39" || record.OpType == "40"){
html += recordPrefix + actionName;
html += " " + record.RefName + ""
}
@@ -294,7 +294,10 @@ function getTaskLink(record){
re = re + "/cloudbrain/train-job/" + record.Content;
}else if(record.OpType == 32 || record.OpType == 33){
re = re + "/grampus/train-job/" + record.Content;
+ }else if(record.OpType == 39 || record.OpType == 40){
+ re = re + "/grampus/notebook/" + record.Content;
}
+
re = encodeURI(re);
return re;
}
@@ -450,7 +453,9 @@ var actionNameZH={
"33":"创建了CPU/GPU类型训练任务",
"35":"创建的数据集 {dataset} 被设置为推荐数据集",
"36":"提交了镜像 {image}",
- "37":"提交的镜像 {image} 被设置为推荐镜像",
+ "37": "提交的镜像 {image} 被设置为推荐镜像",
+ "39":"创建了CPU/GPU类型调试任务",
+ "40":"创建了NPU类型调试任务",
};
var actionNameEN={
@@ -481,7 +486,9 @@ var actionNameEN={
"33":" created CPU/GPU type training task",
"35":" created dataset {dataset} was set as recommended dataset",
"36":"committed image {image}",
- "37":"committed image {image} was set as recommended image",
+ "37": "committed image {image} was set as recommended image",
+ "39":" created CPU/GPU type debugging task ",
+ "40":" created NPU type debugging task ",
};
var repoAndOrgZH={
@@ -622,20 +629,12 @@ function displayRepo(json){
for (var i = 0, iLen = repos.length; i < iLen; i++) {
if (i >= 4) break;
var repo = repos[i];
- // ${repo["NumStars"]}${repo["NumForks"]}
html += `
-
- ${repo["Avatar"] ? `

` : `
![]()
`}
+
+ ${repo["Avatar"] ? `

` : `
![]()
`}
${repo["Description"]}
- `;
- // if (repo["Topics"] != null) {
- // for(var j = 0; j < repo["Topics"].length; j++){
- // var topic = repo["Topics"][j];
- // var url = "/explore/repos?q=" + (topic) + "&topic="
- // html += `
${topic}`;
- // }
- // }
+
`;
html += `
`;
diff --git a/routers/admin/cloudbrains.go b/routers/admin/cloudbrains.go
index 91685251b..2002e69f4 100755
--- a/routers/admin/cloudbrains.go
+++ b/routers/admin/cloudbrains.go
@@ -53,7 +53,7 @@ func CloudBrains(ctx *context.Context) {
var jobTypes []string
jobTypeNot := false
if jobType == string(models.JobTypeBenchmark) {
- jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet))
+ jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet), string(models.JobTypeSnn4Ecoset))
} else if jobType != "all" && jobType != "" {
jobTypes = append(jobTypes, jobType)
}
diff --git a/routers/admin/resources.go b/routers/admin/resources.go
index feea7b69b..8db958ef5 100644
--- a/routers/admin/resources.go
+++ b/routers/admin/resources.go
@@ -307,3 +307,37 @@ func RefreshHistorySpec(ctx *context.Context) {
r["total"] = total
ctx.JSON(http.StatusOK, response.SuccessWithData(r))
}
+
+// RefreshReposHistoryCnt is an admin endpoint that triggers a refresh of
+// historical records, driven by two query parameters:
+//   scope: "all" refreshes every record; any other value restricts the
+//          refresh to the ids given in "list".
+//   list:  a "|"-separated list of int64 ids; the first unparsable id
+//          aborts the request with a ServerError response.
+// On success it returns {"total": ..., "success": ...} counts as JSON.
+// NOTE(review): despite the function name, this calls
+// resource.RefreshHistorySpec and logs "RefreshHistorySpec error" — it looks
+// copy-pasted from RefreshHistorySpec above; confirm it should not call a
+// repos-history-count-specific refresh instead.
+func RefreshReposHistoryCnt(ctx *context.Context) {
+ scope := ctx.Query("scope")
+ list := ctx.Query("list")
+
+ var scopeAll = false
+ if scope == "all" {
+ scopeAll = true
+ }
+ var ids = make([]int64, 0)
+ if list != "" {
+ strs := strings.Split(list, "|")
+ for _, s := range strs {
+ i, err := strconv.ParseInt(s, 10, 64)
+ if err != nil {
+ // Invalid id in "list": report it back; HTTP status is still 200,
+ // the error is carried in the response body (project convention).
+ ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
+ return
+ }
+ ids = append(ids, i)
+ }
+
+ }
+
+ total, success, err := resource.RefreshHistorySpec(scopeAll, ids)
+ if err != nil {
+ log.Error("RefreshHistorySpec error. %v", err)
+ ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
+ return
+ }
+ r := make(map[string]interface{}, 0)
+ r["success"] = success
+ r["total"] = total
+ ctx.JSON(http.StatusOK, response.SuccessWithData(r))
+}
diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go
index 309a484ce..ba7346481 100755
--- a/routers/api/v1/api.go
+++ b/routers/api/v1/api.go
@@ -544,6 +544,12 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/complete_multipart", repo.CompleteMultipart)
}, reqToken())
+ m.Group("/pipeline", func() {
+ m.Post("/notification", bind(api.PipelineNotification{}), notify.PipelineNotify)
+
+ }, reqToken())
+
+ m.Get("/compute-nodes", reqToken(), user.GetComputeNodes)
// Notifications
m.Group("/notifications", func() {
@@ -741,7 +747,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/file_notebook", func() {
m.Get("", repo.GetFileNoteBookInfo)
m.Post("/create", reqToken(), reqWeChat(), bind(api.CreateFileNotebookJobOption{}), repo.CreateFileNoteBook)
-
+ m.Post("/status", reqToken(), bind(api.CreateFileNotebookJobOption{}), repo.FileNoteBookStatus)
})
m.Group("/repos", func() {
@@ -759,6 +765,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/:username/:reponame", func() {
m.Get("/right", reqToken(), repo.GetRight)
m.Get("/tagger", reqToken(), repo.ListTagger)
+ m.Get("/cloudBrainJobId", repo.GetCloudBrainJobId)
m.Combo("").Get(reqAnyRepoReader(), repo.Get).
Delete(reqToken(), reqOwner(), repo.Delete).
Patch(reqToken(), reqAdmin(), bind(api.EditRepoOption{}), context.RepoRef(), repo.Edit)
@@ -995,6 +1002,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/detail", reqToken(), reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow)
m.Get("/model_list", repo.CloudBrainModelList)
m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.CloudBrainStop)
+ m.Put("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.GeneralCloudBrainJobStop)
})
})
m.Group("/inference-job", func() {
@@ -1015,12 +1023,17 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Delete("/delete_model", repo.DeleteModel)
m.Get("/downloadall", repo.DownloadModel)
m.Get("/query_model_byId", repo.QueryModelById)
+ m.Get("/query_model_byName", repo.QueryModelByName)
m.Get("/query_model_for_predict", repo.QueryModelListForPredict)
m.Get("/query_modelfile_for_predict", repo.QueryModelFileForPredict)
+ m.Get("/query_train_job", repo.QueryTrainJobList)
+ m.Get("/query_train_job_version", repo.QueryTrainJobVersionList)
m.Get("/query_train_model", repo.QueryTrainModelList)
m.Post("/create_model_convert", repo.CreateModelConvert)
+ m.Post("/convert_stop", repo.StopModelConvert)
m.Get("/show_model_convert_page", repo.ShowModelConvertPage)
m.Get("/query_model_convert_byId", repo.QueryModelConvertById)
+ m.Get("/query_model_convert_byName", repo.QueryModelConvertByName)
m.Get("/:id", repo.GetCloudbrainModelConvertTask)
m.Get("/:id/log", repo.CloudbrainForModelConvertGetLog)
@@ -1053,6 +1066,9 @@ func RegisterRoutes(m *macaron.Macaron) {
})
}, reqRepoReader(models.UnitTypeCloudBrain))
m.Group("/grampus", func() {
+ m.Group("/notebook", func() {
+ m.Get("/:id", repo_ext.GetGrampusNotebook)
+ })
m.Group("/train-job", func() {
m.Group("/:jobid", func() {
m.Get("", repo.GetModelArtsTrainJobVersion)
diff --git a/routers/api/v1/notify/pipeline.go b/routers/api/v1/notify/pipeline.go
new file mode 100644
index 000000000..021af20dc
--- /dev/null
+++ b/routers/api/v1/notify/pipeline.go
@@ -0,0 +1,15 @@
+package notify
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/context"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+// PipelineNotify handles POST /pipeline/notification. The bound
+// PipelineNotification form is currently ignored; the handler only
+// acknowledges receipt with a generic OK payload.
+// NOTE(review): this is effectively a stub — presumably processing of the
+// notification payload is to be added later; confirm before relying on it.
+func PipelineNotify(ctx *context.APIContext, form api.PipelineNotification) {
+
+ ctx.JSON(http.StatusOK, models.BaseOKMessageApi)
+
+}
diff --git a/routers/api/v1/repo/cloudbrain.go b/routers/api/v1/repo/cloudbrain.go
index cd8340c41..250a08423 100755
--- a/routers/api/v1/repo/cloudbrain.go
+++ b/routers/api/v1/repo/cloudbrain.go
@@ -9,6 +9,7 @@ import (
"bufio"
"encoding/json"
"io"
+ "io/ioutil"
"net/http"
"os"
"path"
@@ -17,6 +18,8 @@ import (
"strings"
"time"
+ "code.gitea.io/gitea/modules/grampus"
+
cloudbrainService "code.gitea.io/gitea/services/cloudbrain"
"code.gitea.io/gitea/modules/convert"
@@ -80,9 +83,36 @@ func CloudBrainShow(ctx *context.APIContext) {
ctx.JSON(http.StatusOK, models.BaseMessageWithDataApi{Code: 0, Message: "", Data: convert.ToCloudBrain(task)})
}
+// GeneralCloudBrainJobStop stops the cloudbrain task already resolved into
+// ctx.Cloudbrain, dispatching to the backend that owns it:
+//   TypeCloudBrainOne -> cloudbrain.StopJob
+//   TypeCloudBrainTwo -> modelarts.StopTrainJob (keyed by JobID + VersionID)
+//   anything else     -> grampus.StopJob (catch-all for remaining types)
+// A task that is already in a terminal state is rejected with
+// "cloudbrain.Already_stopped". All responses use HTTP 200; success or
+// failure is carried in the message body (project convention).
+func GeneralCloudBrainJobStop(ctx *context.APIContext) {
+ task := ctx.Cloudbrain
+ if task.IsTerminal() {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("cloudbrain.Already_stopped"))
+ return
+ }
+ var err error
+
+ if ctx.Cloudbrain.Type == models.TypeCloudBrainOne {
+ err = cloudbrain.StopJob(task.JobID)
+ } else if ctx.Cloudbrain.Type == models.TypeCloudBrainTwo {
+ _, err = modelarts.StopTrainJob(task.JobID, strconv.FormatInt(task.VersionID, 10))
+ } else {
+ // Fallback covers every other task type (e.g. C2Net/grampus jobs).
+ _, err = grampus.StopJob(task.JobID)
+ }
+
+ if err != nil {
+ log.Warn("cloud brain stopped failed.", err)
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("cloudbrain.Stopped_failed"))
+ return
+ }
+
+ ctx.JSON(http.StatusOK, models.BaseOKMessageApi)
+}
func CreateFileNoteBook(ctx *context.APIContext, option api.CreateFileNotebookJobOption) {
cloudbrainTask.FileNotebookCreate(ctx.Context, option)
}
+// FileNoteBookStatus reports the status of a file-notebook job; it is a thin
+// API wrapper that delegates to cloudbrainTask.FileNotebookStatus, which
+// writes the response itself via the embedded web context.
+func FileNoteBookStatus(ctx *context.APIContext, option api.CreateFileNotebookJobOption) {
+ cloudbrainTask.FileNotebookStatus(ctx.Context, option)
+}
func GetFileNoteBookInfo(ctx *context.APIContext) {
//image description spec description waiting count
@@ -211,7 +241,7 @@ func GetCloudbrainTask(ctx *context.APIContext) {
ID := ctx.Params(":id")
- job, err := cloudbrain.GetCloudBrainByIdOrJobId(ID)
+ job, err := cloudbrain.GetCloudBrainByIdOrJobId(ID, "id")
if err != nil {
ctx.NotFound(err)
@@ -621,6 +651,19 @@ func CloudbrainDownloadLogFile(ctx *context.Context) {
}
}
+ existStr := ""
+ if job.JobType == string(models.JobTypeTrain) || job.JobType == string(models.JobTypeInference) {
+ if job.Type == models.TypeCloudBrainOne {
+ result, err := cloudbrain.GetJob(job.JobID)
+ if err == nil && result != nil {
+ jobRes, _ := models.ConvertToJobResultPayload(result.Payload)
+ taskRoles := jobRes.TaskRoles
+ taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
+ existStr = taskRes.TaskStatuses[0].ExitDiagnostics
+ }
+ }
+ }
+
logDir := "/model"
if job.JobType == string(models.JobTypeInference) || job.JobType == string(models.JobTypeModelSafety) {
logDir = cloudbrain.ResultPath
@@ -638,17 +681,30 @@ func CloudbrainDownloadLogFile(ctx *context.Context) {
}
}
if fileName != "" {
- prefix := "/" + setting.CBCodePathPrefix + job.JobName + logDir
- url, err := storage.Attachments.PresignedGetURL(prefix+"/"+fileName, fileName)
+ prefix := "/" + setting.CBCodePathPrefix + job.JobName + "/model"
+ filePath := setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + prefix + "/" + fileName
+ // Read the file contents into a byte slice
+ data, err := ioutil.ReadFile(filePath)
if err != nil {
- log.Error("Get minio get SignedUrl failed: %v", err.Error(), ctx.Data["msgID"])
+ ctx.ServerError("ReadFile", err)
+ return
+ }
+
+ // Set the appropriate response headers
+ ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
+ ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+fileName)
+
+ // Write the file contents to the response
+ if _, err := ctx.Resp.Write(data); err != nil {
+ ctx.ServerError("Write", err)
+ return
+ }
+ if _, err := ctx.Resp.Write([]byte(existStr)); err != nil {
+ log.Error("Write failed: %v", err.Error(), ctx.Data["msgID"])
return
}
- log.Info("fileName=" + fileName)
- http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusTemporaryRedirect)
} else {
log.Info("fileName is null.")
-
}
}
@@ -734,8 +790,28 @@ func CloudbrainGetLog(ctx *context.APIContext) {
content = result["Content"].(string)
}
- if ctx.Data["existStr"] != nil && result["Lines"].(int) < 50 {
- content = content + ctx.Data["existStr"].(string)
+ if (job.JobType == string(models.JobTypeTrain) || job.JobType == string(models.JobTypeInference)) && job.Type == models.TypeCloudBrainOne && job.Status == string(models.JobFailed) {
+ if ctx.Data["existStr"] != nil {
+ if baseLine == "" && order == "desc" && result["Lines"].(int) == 0 {
+ result["Lines"] = 1
+ result["EndLine"] = 1
+ content = content + ctx.Data["existStr"].(string)
+ }
+
+ if result["Lines"].(int) == 0 && result["StartLine"] == result["EndLine"] && result["StartLine"].(int) != 0 {
+ content = content + ctx.Data["existStr"].(string)
+ result["Lines"] = 1
+ result["StartLine"] = result["StartLine"].(int) - 1
+ }
+ if result["Lines"].(int) == 1 && result["StartLine"] == result["EndLine"] {
+ result["Lines"] = 0
+ result["StartLine"] = result["StartLine"].(int) + 1
+ }
+ }
+ } else {
+ if ctx.Data["existStr"] != nil && result["Lines"].(int) < 50 {
+ content = content + ctx.Data["existStr"].(string)
+ }
}
logFileName := result["FileName"]
diff --git a/routers/api/v1/repo/cloudbrain_dashboard.go b/routers/api/v1/repo/cloudbrain_dashboard.go
index b9c75e73f..272410110 100755
--- a/routers/api/v1/repo/cloudbrain_dashboard.go
+++ b/routers/api/v1/repo/cloudbrain_dashboard.go
@@ -103,86 +103,62 @@ func GetAllCloudbrainsOverview(ctx *context.Context) {
})
}
func GetOverviewDuration(ctx *context.Context) {
- recordCloudbrain, err := models.GetRecordBeginTime()
- if err != nil {
- log.Error("Can not get recordCloudbrain", err)
- ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err"))
- return
- }
- recordBeginTime := recordCloudbrain[0].Cloudbrain.CreatedUnix
- now := time.Now()
- endTime := now
- var workServerNumber int64
- var cardNum int64
+ durationSum := 0
+ cardDurationSum := 0
- durationAllSum := int64(0)
- cardDuSum := int64(0)
+ cloudBrainOneCardDuSum := 0
+ cloudBrainTwoCardDuSum := 0
+ c2NetCardDuSum := 0
+ cDNetCardDuSum := 0
- cloudBrainOneCardDuSum := int64(0)
- cloudBrainTwoCardDuSum := int64(0)
- c2NetCardDuSum := int64(0)
- cDNetCardDuSum := int64(0)
+ cloudBrainOneDuration := 0
+ cloudBrainTwoDuration := 0
+ c2NetDuration := 0
+ cDCenterDuration := 0
- cloudBrainOneDuration := int64(0)
- cloudBrainTwoDuration := int64(0)
- c2NetDuration := int64(0)
- cDCenterDuration := int64(0)
-
- cloudbrains, _, err := models.CloudbrainAllKanBan(&models.CloudbrainsOptions{
- Type: models.TypeCloudBrainAll,
- BeginTimeUnix: int64(recordBeginTime),
- EndTimeUnix: endTime.Unix(),
- })
+ cloudbrainTypeDuration, err := models.GetCloudbrainTypeCardDuration()
if err != nil {
- ctx.ServerError("Get cloudbrains failed:", err)
+ log.Error("GetCloudbrainTypeCardDuration err!", err)
return
}
- models.LoadSpecs4CloudbrainInfo(cloudbrains)
-
- for _, cloudbrain := range cloudbrains {
- cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain)
- if cloudbrain.Cloudbrain.Spec != nil {
- cardNum = int64(cloudbrain.Cloudbrain.Spec.AccCardsNum)
- } else {
- cardNum = 1
+ for _, result := range cloudbrainTypeDuration {
+ if result.Type == models.TypeCloudBrainOne {
+ cloudBrainOneDuration = result.DurationSum
+ cloudBrainOneCardDuSum = result.CardDurationSum
}
- if cloudbrain.Cloudbrain.WorkServerNumber >= 1 {
- workServerNumber = int64(cloudbrain.Cloudbrain.WorkServerNumber)
- } else {
- workServerNumber = 1
+ if result.Type == models.TypeCloudBrainTwo {
+ cloudBrainTwoDuration = result.DurationSum
+ cloudBrainTwoCardDuSum = result.CardDurationSum
}
- duration := models.ConvertStrToDuration(cloudbrain.TrainJobDuration)
- CardDuration := workServerNumber * int64(cardNum) * duration
-
- if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainOne {
- cloudBrainOneDuration += duration
- cloudBrainOneCardDuSum += CardDuration
- } else if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainTwo {
- cloudBrainTwoDuration += duration
- cloudBrainTwoCardDuSum += CardDuration
- } else if cloudbrain.Cloudbrain.Type == models.TypeC2Net {
- c2NetDuration += duration
- c2NetCardDuSum += CardDuration
- } else if cloudbrain.Cloudbrain.Type == models.TypeCDCenter {
- cDCenterDuration += duration
- cDNetCardDuSum += CardDuration
+ if result.Type == models.TypeC2Net {
+ c2NetDuration = result.DurationSum
+ c2NetCardDuSum = result.CardDurationSum
}
-
- durationAllSum += duration
- cardDuSum += CardDuration
+ if result.Type == models.TypeCDCenter {
+ cDCenterDuration = result.DurationSum
+ cDNetCardDuSum = result.CardDurationSum
+ }
+ }
+ cloudbrainAllDuration, err := models.GetCloudbrainAllCardDuration()
+ if err != nil {
+ log.Error("GetCloudbrainAllCardDuration err!", err)
+ return
}
+ durationSum = cloudbrainAllDuration.DurationSum
+ cardDurationSum = cloudbrainAllDuration.CardDurationSum
+
ctx.JSON(http.StatusOK, map[string]interface{}{
"cloudBrainOneCardDuSum": cloudBrainOneCardDuSum,
"cloudBrainTwoCardDuSum": cloudBrainTwoCardDuSum,
"c2NetCardDuSum": c2NetCardDuSum,
"cDNetCardDuSum": cDNetCardDuSum,
- "cardDuSum": cardDuSum,
+ "cardDuSum": cardDurationSum,
"cloudBrainOneDuration": cloudBrainOneDuration,
"cloudBrainTwoDuration": cloudBrainTwoDuration,
"c2NetDuration": c2NetDuration,
"cDCenterDuration": cDCenterDuration,
- "durationSum": durationAllSum,
+ "durationSum": durationSum,
})
}
@@ -647,7 +623,7 @@ func GetAllCloudbrainsPeriodDistribution(ctx *context.Context) {
}
jobTypeList := []string{string(models.JobTypeDebug), string(models.JobTypeTrain), string(models.JobTypeInference), string(models.JobTypeBenchmark),
- string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet)}
+ string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet), string(models.JobTypeSnn4Ecoset)}
for _, v := range jobTypeList {
if _, ok := cloudOneJobTypeRes[v]; !ok {
cloudOneJobTypeRes[v] = 0
@@ -780,7 +756,7 @@ func GetCloudbrainsDetailData(ctx *context.Context) {
var jobTypes []string
jobTypeNot := false
if jobType == string(models.JobTypeBenchmark) {
- jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet))
+ jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet), string(models.JobTypeSnn4Ecoset))
} else if jobType != "all" && jobType != "" {
jobTypes = append(jobTypes, jobType)
}
diff --git a/routers/api/v1/repo/images.go b/routers/api/v1/repo/images.go
index f0cb62980..e09ca260a 100644
--- a/routers/api/v1/repo/images.go
+++ b/routers/api/v1/repo/images.go
@@ -88,7 +88,7 @@ func getModelArtsImages(ctx *context.APIContext) {
}
func getC2netNpuImages(ctx *context.APIContext) {
- images, err := grampus.GetImages(grampus.ProcessorTypeNPU)
+ images, err := grampus.GetImages(grampus.ProcessorTypeNPU, string(models.JobTypeTrain))
var npuImageInfos []NPUImageINFO
if err != nil {
log.Error("GetImages failed:", err.Error())
diff --git a/routers/api/v1/repo/mlops.go b/routers/api/v1/repo/mlops.go
index 43969330d..322edc3e5 100644
--- a/routers/api/v1/repo/mlops.go
+++ b/routers/api/v1/repo/mlops.go
@@ -69,3 +69,17 @@ func GetRight(ctx *context.APIContext) {
})
}
+
+// GetCloudBrainJobId looks up cloudbrain tasks in the current repository by
+// display name ("name" query param) and job type ("jobType" query param) and
+// returns the JobID of the first match as {"jobId": "..."}.
+// Lookup failures and empty result sets both answer {"jobId": ""} with
+// HTTP 200 — callers cannot distinguish "not found" from "query error"
+// beyond the server-side warning log.
+func GetCloudBrainJobId(ctx *context.APIContext) {
+ cloudbrains, err := models.GetCloudbrainsByDisplayJobName(ctx.Repo.Repository.ID, ctx.Query("jobType"), ctx.Query("name"))
+ if err != nil {
+ log.Warn("get cloudbrain by display name failed", err)
+ ctx.JSON(http.StatusOK, map[string]string{"jobId": ""})
+ return
+ }
+ if len(cloudbrains) > 0 {
+ // Multiple matches are possible; the first row wins.
+ ctx.JSON(http.StatusOK, map[string]string{"jobId": cloudbrains[0].JobID})
+ return
+ }
+ ctx.JSON(http.StatusOK, map[string]string{"jobId": ""})
+}
diff --git a/routers/api/v1/repo/modelarts.go b/routers/api/v1/repo/modelarts.go
index 127ddd835..a0abab38b 100755
--- a/routers/api/v1/repo/modelarts.go
+++ b/routers/api/v1/repo/modelarts.go
@@ -39,7 +39,7 @@ func GetModelArtsNotebook2(ctx *context.APIContext) {
ID := ctx.Params(":id")
- job,err := cloudbrain.GetCloudBrainByIdOrJobId(ID)
+ job, err := cloudbrain.GetCloudBrainByIdOrJobId(ID, "id")
if err != nil {
ctx.NotFound(err)
diff --git a/routers/api/v1/repo/modelmanage.go b/routers/api/v1/repo/modelmanage.go
index 3989ec56c..696231fea 100644
--- a/routers/api/v1/repo/modelmanage.go
+++ b/routers/api/v1/repo/modelmanage.go
@@ -4,8 +4,10 @@ import (
"net/http"
"code.gitea.io/gitea/modules/context"
+ "code.gitea.io/gitea/modules/convert"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/storage"
+ api "code.gitea.io/gitea/modules/structs"
routerRepo "code.gitea.io/gitea/routers/repo"
)
@@ -43,12 +45,32 @@ func QueryModelById(ctx *context.APIContext) {
routerRepo.QueryModelById(ctx.Context)
}
+// QueryModelByName serves GET /modelmanage/query_model_byName.
+// NOTE(review): despite the name it delegates to routerRepo.ShowSingleModel,
+// which writes the response directly — presumably that handler performs the
+// by-name lookup; confirm against its implementation.
+func QueryModelByName(ctx *context.APIContext) {
+ log.Info("QueryModelByName by api.")
+ routerRepo.ShowSingleModel(ctx.Context)
+}
+
func QueryModelListForPredict(ctx *context.APIContext) {
log.Info("QueryModelListForPredict by api.")
ctx.Context.SetParams("isOnlyThisRepo", "true")
routerRepo.QueryModelListForPredict(ctx.Context)
}
+// QueryTrainJobList returns the repository's train jobs as a JSON array of
+// api.Cloudbrain objects. The underlying query is delegated to
+// routerRepo.QueryTrainJobListApi; each resulting task is converted with
+// convert.ToCloudBrain before serialization.
+// On query error the handler logs and responds with HTTP 200 and a null
+// body, so clients should treat null as "no data / error".
+func QueryTrainJobList(ctx *context.APIContext) {
+ result, err := routerRepo.QueryTrainJobListApi(ctx.Context)
+ if err != nil {
+ log.Info("query error." + err.Error())
+ ctx.JSON(http.StatusOK, nil)
+ } else {
+ re := make([]*api.Cloudbrain, 0)
+ for _, task := range result {
+ conRe := convert.ToCloudBrain(task)
+ re = append(re, conRe)
+ }
+ ctx.JSON(http.StatusOK, re)
+ }
+}
+
func QueryTrainModelList(ctx *context.APIContext) {
result, err := routerRepo.QueryTrainModelFileById(ctx.Context)
if err != nil {
@@ -58,6 +80,21 @@ func QueryTrainModelList(ctx *context.APIContext) {
ctx.JSON(http.StatusOK, re)
}
+// QueryTrainJobVersionList returns the versions of a train job as a JSON
+// array of api.Cloudbrain objects, delegating the query to
+// routerRepo.QueryTrainJobVersionListApi.
+// On query error it logs and responds with HTTP 200 and a null body.
+// NOTE(review): structurally identical to QueryTrainJobList except for the
+// delegated query — a shared private helper taking the query func would
+// remove the duplication.
+func QueryTrainJobVersionList(ctx *context.APIContext) {
+ result, err := routerRepo.QueryTrainJobVersionListApi(ctx.Context)
+ if err != nil {
+ log.Info("query error." + err.Error())
+ ctx.JSON(http.StatusOK, nil)
+ } else {
+ re := make([]*api.Cloudbrain, 0)
+ for _, task := range result {
+ conRe := convert.ToCloudBrain(task)
+ re = append(re, conRe)
+ }
+ ctx.JSON(http.StatusOK, re)
+ }
+}
+
func convertFileFormat(result []storage.FileInfo) []FileInfo {
re := make([]FileInfo, 0)
if result != nil {
@@ -89,6 +126,11 @@ func CreateModelConvert(ctx *context.APIContext) {
routerRepo.SaveModelConvert(ctx.Context)
}
+// StopModelConvert serves POST /modelmanage/convert_stop; it is a thin API
+// wrapper delegating to routerRepo.StopModelConvertApi, which writes the
+// response itself.
+func StopModelConvert(ctx *context.APIContext) {
+ log.Info("StopModelConvert by api.")
+ routerRepo.StopModelConvertApi(ctx.Context)
+}
+
func ShowModelConvertPage(ctx *context.APIContext) {
log.Info("ShowModelConvertPage by api.")
modelResult, count, err := routerRepo.GetModelConvertPageData(ctx.Context)
@@ -114,3 +156,12 @@ func QueryModelConvertById(ctx *context.APIContext) {
ctx.JSON(http.StatusOK, nil)
}
}
+
+// QueryModelConvertByName returns the model-convert tasks matching the
+// requested name (lookup delegated to routerRepo.GetModelConvertByName).
+// Errors are swallowed: the handler answers HTTP 200 with a null body, so
+// clients cannot distinguish "not found" from "query failed".
+// NOTE(review): the backing models.QueryModelConvertByName (see diff header)
+// builds its WHERE clause by string concatenation of the name — a SQL
+// injection risk; it should use parameterized conditions.
+func QueryModelConvertByName(ctx *context.APIContext) {
+ modelResult, err := routerRepo.GetModelConvertByName(ctx.Context)
+ if err == nil {
+ ctx.JSON(http.StatusOK, modelResult)
+ } else {
+ ctx.JSON(http.StatusOK, nil)
+ }
+}
diff --git a/routers/api/v1/repo/repo_dashbord.go b/routers/api/v1/repo/repo_dashbord.go
index b3a01cff1..e3a54b4fa 100644
--- a/routers/api/v1/repo/repo_dashbord.go
+++ b/routers/api/v1/repo/repo_dashbord.go
@@ -601,7 +601,7 @@ func getSummaryFileName(ctx *context.Context, beginTime time.Time, endTime time.
func allProjectsPeroidHeader(ctx *context.Context) map[string]string {
return map[string]string{"A1": ctx.Tr("admin.repos.id"), "B1": ctx.Tr("admin.repos.projectName"), "C1": ctx.Tr("repo.owner"), "D1": ctx.Tr("admin.repos.isPrivate"), "E1": ctx.Tr("admin.repos.openi"), "F1": ctx.Tr("admin.repos.visit"), "G1": ctx.Tr("admin.repos.download"), "H1": ctx.Tr("admin.repos.pr"), "I1": ctx.Tr("admin.repos.commit"),
- "J1": ctx.Tr("admin.repos.watches"), "K1": ctx.Tr("admin.repos.stars"), "L1": ctx.Tr("admin.repos.forks"), "M1": ctx.Tr("admin.repos.issues"), "N1": ctx.Tr("admin.repos.closedIssues"), "O1": ctx.Tr("admin.repos.contributor"), "P1": ctx.Tr("admin.repos.isFork"), "Q1": ctx.Tr("admin.repos.isMirror"), "R1": ctx.Tr("admin.repos.create")}
+ "J1": ctx.Tr("admin.repos.watches"), "K1": ctx.Tr("admin.repos.stars"), "L1": ctx.Tr("admin.repos.forks"), "M1": ctx.Tr("admin.repos.issues"), "N1": ctx.Tr("admin.repos.closedIssues"), "O1": ctx.Tr("admin.repos.contributor"), "P1": ctx.Tr("admin.repos.numDataset"), "Q1": ctx.Tr("admin.repos.numCloudbrain"), "R1": ctx.Tr("admin.repos.numModel"), "S1": ctx.Tr("admin.repos.numModelConvert"), "T1": ctx.Tr("admin.repos.isFork"), "U1": ctx.Tr("admin.repos.isMirror"), "V1": ctx.Tr("admin.repos.create")}
}
@@ -619,11 +619,13 @@ func allProjectsPeriodSummaryValues(row int, rs *ProjectSummaryBaseData, ctx *co
}
func allProjectsPeroidValues(row int, rs *models.RepoStatistic, ctx *context.Context) map[string]string {
+
return map[string]string{getCellName("A", row): strconv.FormatInt(rs.RepoID, 10), getCellName("B", row): rs.DisplayName(), getCellName("C", row): rs.OwnerName, getCellName("D", row): getBoolDisplay(rs.IsPrivate, ctx), getCellName("E", row): strconv.FormatFloat(rs.RadarTotal, 'f', 2, 64),
getCellName("F", row): strconv.FormatInt(rs.NumVisits, 10), getCellName("G", row): strconv.FormatInt(rs.NumDownloads, 10), getCellName("H", row): strconv.FormatInt(rs.NumPulls, 10), getCellName("I", row): strconv.FormatInt(rs.NumCommits, 10),
getCellName("J", row): strconv.FormatInt(rs.NumWatches, 10), getCellName("K", row): strconv.FormatInt(rs.NumStars, 10), getCellName("L", row): strconv.FormatInt(rs.NumForks, 10), getCellName("M", row): strconv.FormatInt(rs.NumIssues, 10),
- getCellName("N", row): strconv.FormatInt(rs.NumClosedIssues, 10), getCellName("O", row): strconv.FormatInt(rs.NumContributor, 10), getCellName("P", row): getBoolDisplay(rs.IsFork, ctx), getCellName("Q", row): getBoolDisplay(rs.IsMirror, ctx), getCellName("R", row): time.Unix(int64(rs.RepoCreatedUnix), 0).Format(CREATE_TIME_FORMAT),
+ getCellName("N", row): strconv.FormatInt(rs.NumClosedIssues, 10), getCellName("O", row): strconv.FormatInt(rs.NumContributor, 10), getCellName("P", row): strconv.FormatInt(rs.NumDatasetFile, 10), getCellName("Q", row): strconv.FormatInt(rs.NumCloudbrain, 10), getCellName("R", row): strconv.FormatInt(rs.NumModels, 10), getCellName("S", row): strconv.FormatInt(rs.NumModelConvert, 10), getCellName("T", row): getBoolDisplay(rs.IsFork, ctx), getCellName("U", row): getBoolDisplay(rs.IsMirror, ctx), getCellName("V", row): time.Unix(int64(rs.RepoCreatedUnix), 0).Format(CREATE_TIME_FORMAT),
}
+
}
func allProjectsOpenIHeader() map[string]string {
@@ -804,11 +806,11 @@ func generateOpenICountSql(latestDate string) string {
}
func generateTypeAllSql(beginTime time.Time, endTime time.Time, latestDate string, q string, orderBy string, page int, pageSize int) string {
- sql := "SELECT A.repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_visits,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor FROM " +
+ sql := "SELECT A.repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_visits,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor,num_models,num_model_convert,num_cloudbrain,num_dataset_file FROM " +
"(SELECT repo_id,sum(num_visits) as num_visits " +
" FROM repo_statistic where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) +
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + " group by repo_id) A," +
- "(SELECT repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor from public.repo_statistic where date='" + latestDate + "') B" +
+ "(SELECT repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor,num_models,num_model_convert,num_cloudbrain,num_dataset_file from public.repo_statistic where date='" + latestDate + "') B" +
" where A.repo_id=B.repo_id"
if q != "" {
@@ -828,8 +830,8 @@ func generateTypeAllOpenISql(latestDate string, page int, pageSize int) string {
func generatePageSql(beginTime time.Time, endTime time.Time, latestDate string, q string, orderBy string, page int, pageSize int) string {
- sql := "SELECT A.repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_visits,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor FROM " +
- "(SELECT repo_id,sum(num_watches_added) as num_watches,sum(num_visits) as num_visits, sum(num_downloads_added) as num_downloads,sum(num_pulls_added) as num_pulls,sum(num_commits_added) as num_commits,sum(num_stars_added) as num_stars,sum(num_forks_added) num_forks,sum(num_issues_added) as num_issues,sum(num_closed_issues_added) as num_closed_issues,sum(num_contributor_added) as num_contributor " +
+ sql := "SELECT A.repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_visits,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor,num_models,num_model_convert,num_cloudbrain,num_dataset_file FROM " +
+ "(SELECT repo_id,sum(num_watches_added) as num_watches,sum(num_visits) as num_visits, sum(num_downloads_added) as num_downloads,sum(num_pulls_added) as num_pulls,sum(num_commits_added) as num_commits,sum(num_stars_added) as num_stars,sum(num_forks_added) num_forks,sum(num_issues_added) as num_issues,sum(num_closed_issues_added) as num_closed_issues,sum(num_contributor_added) as num_contributor,sum(num_models_added) as num_models,sum(num_model_convert_added) as num_model_convert,sum(num_dataset_file_added) as num_dataset_file, sum(num_cloudbrain_added) as num_cloudbrain " +
" FROM repo_statistic where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) +
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + " group by repo_id) A," +
"(SELECT repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total from public.repo_statistic where date='" + latestDate + "') B" +
diff --git a/routers/api/v1/repo/topic.go b/routers/api/v1/repo/topic.go
index f4ff7a329..d2522c9ce 100644
--- a/routers/api/v1/repo/topic.go
+++ b/routers/api/v1/repo/topic.go
@@ -177,13 +177,25 @@ func AddTopic(ctx *context.APIContext) {
return
}
- _, err = models.AddTopic(ctx.Repo.Repository.ID, topicName)
+ topic, err := models.AddTopic(ctx.Repo.Repository.ID, topicName)
if err != nil {
log.Error("AddTopic failed: %v", err)
ctx.InternalServerError(err)
return
}
-
+ found := false
+ topicNames := make([]string, len(topics))
+ for i, t := range topics {
+ topicNames[i] = t.Name
+ if strings.EqualFold(topic.Name, t.Name) {
+ found = true
+ }
+ }
+ if !found && topic.Name != "" {
+ topicNames = append(topicNames, topic.Name)
+ }
+ models.UpdateRepoTopics(ctx.Repo.Repository.ID, topicNames)
+ models.UpdateRepoTopics(ctx.Repo.Repository.ID, topicNames)
ctx.Status(http.StatusNoContent)
}
diff --git a/routers/api/v1/user/repo.go b/routers/api/v1/user/repo.go
index 4024bf96c..e9b8c6675 100644
--- a/routers/api/v1/user/repo.go
+++ b/routers/api/v1/user/repo.go
@@ -5,6 +5,7 @@
package user
import (
+ "code.gitea.io/gitea/modules/modelarts"
"net/http"
"code.gitea.io/gitea/models"
@@ -146,3 +147,22 @@ func ListOrgRepos(ctx *context.APIContext) {
listUserRepos(ctx, ctx.Org.Organization, ctx.IsSigned)
}
+
+func GetComputeNodes(ctx *context.APIContext) {
+ taskType := ctx.QueryInt("type")
+ if taskType == 2 {
+ ctx.JSON(http.StatusOK, []int{1})
+ } else {
+ modelarts.InitMultiNode()
+ if modelarts.MultiNodeConfig != nil {
+ for _, info := range modelarts.MultiNodeConfig.Info {
+ if isInOrg, _ := models.IsOrganizationMemberByOrgName(info.Org, ctx.User.ID); isInOrg {
+ ctx.JSON(http.StatusOK, info.Node)
+ return
+ }
+ }
+ }
+ ctx.JSON(http.StatusOK, []int{1})
+ }
+
+}
diff --git a/routers/home.go b/routers/home.go
index 092b30fe3..40a41bd68 100755
--- a/routers/home.go
+++ b/routers/home.go
@@ -7,6 +7,7 @@ package routers
import (
"bytes"
+ "code.gitea.io/gitea/routers/response"
"encoding/json"
"net/http"
"strconv"
@@ -43,6 +44,8 @@ const (
tplHomeTerm base.TplName = "terms"
tplHomePrivacy base.TplName = "privacy"
tplResoruceDesc base.TplName = "resource_desc"
+ tplRepoSquare base.TplName = "explore/repos/square"
+ tplRepoSearch base.TplName = "explore/repos/search"
)
// Home render home page
@@ -296,6 +299,109 @@ func ExploreRepos(ctx *context.Context) {
})
}
+func GetRepoSquarePage(ctx *context.Context) {
+ ctx.Data["SquareBanners"] = repository.GetBanners()
+ ctx.Data["SquareTopics"] = repository.GetTopics()
+ ctx.Data["SquareRecommendRepos"] = repository.GetRecommendRepos()
+
+ repos, _ := repository.GetPreferredRepos()
+ ctx.Data["SquarePreferredRepos"] = repos
+ ctx.HTML(200, tplRepoSquare)
+}
+func GetRepoSearchPage(ctx *context.Context) {
+ ctx.Data["SquareTopics"] = repository.GetTopics()
+ ctx.HTML(200, tplRepoSearch)
+}
+
+func RepoSquare(ctx *context.Context) {
+ var result []*models.Repository4Card
+ var err error
+ switch ctx.Query("type") {
+ case "preferred":
+ result, err = repository.GetPreferredRepos()
+ case "incubation":
+ result, err = repository.GetIncubationRepos()
+ case "hot-paper":
+ result, err = repository.GetHotPaperRepos()
+ default:
+ result, err = repository.GetPreferredRepos()
+ }
+ if err != nil {
+ ctx.JSON(http.StatusOK, response.ResponseError(err))
+ return
+ }
+ resultMap := make(map[string]interface{}, 0)
+ resultMap["Repos"] = result
+ ctx.JSON(http.StatusOK, response.SuccessWithData(resultMap))
+}
+
+func ActiveUser(ctx *context.Context) {
+ var err error
+ var currentUserId int64
+ if ctx.User != nil {
+ currentUserId = ctx.User.ID
+ }
+ result, err := repository.GetActiveUser4Square(currentUserId)
+ if err != nil {
+ log.Error("ActiveUser err. %v", err)
+ ctx.JSON(http.StatusOK, response.Success())
+ return
+ }
+ resultMap := make(map[string]interface{}, 0)
+ resultMap["Users"] = result
+ ctx.JSON(http.StatusOK, response.SuccessWithData(resultMap))
+}
+func ActiveOrg(ctx *context.Context) {
+ result, err := repository.GetActiveOrgs()
+ if err != nil {
+ log.Error("ActiveOrg err. %v", err)
+ ctx.JSON(http.StatusOK, response.Success())
+ return
+ }
+ resultMap := make(map[string]interface{}, 0)
+ resultMap["Orgs"] = result
+ ctx.JSON(http.StatusOK, response.SuccessWithData(resultMap))
+}
+
+func RepoFind(ctx *context.Context) {
+ keyword := strings.Trim(ctx.Query("q"), " ")
+ topic := strings.Trim(ctx.Query("topic"), " ")
+ sort := strings.Trim(ctx.Query("sort"), " ")
+ page := ctx.QueryInt("page")
+ pageSize := ctx.QueryInt("pageSize")
+ if pageSize == 0 {
+ pageSize = 15
+ }
+ if pageSize > 100 {
+ ctx.JSON(http.StatusOK, response.ServerError("pageSize illegal"))
+ return
+ }
+ if page <= 0 {
+ page = 1
+ }
+
+ var ownerID int64
+ if ctx.User != nil && !ctx.User.IsAdmin {
+ ownerID = ctx.User.ID
+ }
+
+ result, err := repository.FindRepos(repository.FindReposOptions{
+ ListOptions: models.ListOptions{Page: page, PageSize: pageSize},
+ Actor: ctx.User,
+ Sort: sort,
+ Keyword: keyword,
+ Topic: topic,
+ Private: ctx.User != nil,
+ OwnerID: ownerID,
+ })
+ if err != nil {
+ log.Error("RepoFind error. %v", err)
+ ctx.JSON(http.StatusOK, response.ResponseError(err))
+ return
+ }
+ ctx.JSON(http.StatusOK, response.SuccessWithData(result))
+}
+
func ExploreDatasets(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("explore")
ctx.Data["PageIsExplore"] = true
diff --git a/routers/private/internal.go b/routers/private/internal.go
index 14b0f05de..0b8ae600a 100755
--- a/routers/private/internal.go
+++ b/routers/private/internal.go
@@ -6,6 +6,7 @@
package private
import (
+ "code.gitea.io/gitea/services/repository"
"strings"
"code.gitea.io/gitea/routers/admin"
@@ -55,7 +56,9 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/task/history_handle/duration", repo.HandleTaskWithNoDuration)
m.Post("/task/history_handle/aicenter", repo.HandleTaskWithAiCenter)
m.Post("/resources/specification/handle_historical_task", admin.RefreshHistorySpec)
+ m.Post("/repos/cnt_stat/handle_historical_task", admin.RefreshHistorySpec)
m.Post("/duration_statisctic/history_handle", repo.CloudbrainUpdateHistoryData)
+ m.Post("/square/repo/stat/refresh", repository.RefreshRepoStatData)
}, CheckInternalToken)
}
diff --git a/routers/repo/ai_model_convert.go b/routers/repo/ai_model_convert.go
index dda410def..36664458e 100644
--- a/routers/repo/ai_model_convert.go
+++ b/routers/repo/ai_model_convert.go
@@ -573,13 +573,10 @@ func deleteCloudBrainTask(task *models.AiModelConvert) {
}
}
-func StopModelConvert(ctx *context.Context) {
- id := ctx.Params(":id")
- log.Info("stop model convert start.id=" + id)
+func stopModelConvert(id string) error {
job, err := models.QueryModelConvertById(id)
if err != nil {
- ctx.ServerError("Not found task.", err)
- return
+ return err
}
if job.IsGpuTrainTask() {
err = cloudbrain.StopJob(job.CloudBrainTaskId)
@@ -600,6 +597,35 @@ func StopModelConvert(ctx *context.Context) {
err = models.UpdateModelConvert(job)
if err != nil {
log.Error("UpdateModelConvert failed:", err)
+ return err
+ }
+ return nil
+}
+
+func StopModelConvertApi(ctx *context.Context) {
+ id := ctx.Query("id")
+ log.Info("stop model convert start.id=" + id)
+ err := stopModelConvert(id)
+ if err == nil {
+ ctx.JSON(200, map[string]string{
+ "code": "0",
+ "msg": "succeed",
+ })
+ } else {
+ ctx.JSON(200, map[string]string{
+ "code": "1",
+ "msg": err.Error(),
+ })
+ }
+}
+
+func StopModelConvert(ctx *context.Context) {
+ id := ctx.Params(":id")
+ log.Info("stop model convert start.id=" + id)
+ err := stopModelConvert(id)
+ if err != nil {
+ ctx.ServerError("Not found task.", err)
+ return
}
ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelmanage/convert_model")
}
@@ -732,6 +758,11 @@ func GetModelConvertById(ctx *context.Context) (*models.AiModelConvert, error) {
return models.QueryModelConvertById(id)
}
+func GetModelConvertByName(ctx *context.Context) ([]*models.AiModelConvert, error) {
+ name := ctx.Query("name")
+ return models.QueryModelConvertByName(name, ctx.Repo.Repository.ID)
+}
+
func GetModelConvertPageData(ctx *context.Context) ([]*models.AiModelConvert, int64, error) {
page := ctx.QueryInt("page")
if page <= 0 {
diff --git a/routers/repo/ai_model_manage.go b/routers/repo/ai_model_manage.go
index 6e6889c32..a074119fc 100644
--- a/routers/repo/ai_model_manage.go
+++ b/routers/repo/ai_model_manage.go
@@ -11,6 +11,8 @@ import (
"regexp"
"strings"
+ "code.gitea.io/gitea/services/repository"
+
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
@@ -170,10 +172,17 @@ func updateStatus(id string, modelSize int64, status int, modelPath string, stat
if len(statusDesc) > 400 {
statusDesc = statusDesc[0:400]
}
+ m, _ := models.QueryModelById(id)
err := models.ModifyModelStatus(id, modelSize, status, modelPath, statusDesc)
if err != nil {
log.Info("update status error." + err.Error())
}
+ if m != nil {
+ if modelSize > 0 && m.Size == 0 {
+ go repository.ResetRepoModelNum(m.RepoId)
+ }
+ }
+
}
func SaveNewNameModel(ctx *context.Context) {
@@ -308,13 +317,14 @@ func getSize(files []storage.FileInfo) int64 {
func UpdateModelSize(modeluuid string) {
model, err := models.QueryModelById(modeluuid)
if err == nil {
+ var size int64
if model.Type == models.TypeCloudBrainOne {
if strings.HasPrefix(model.Path, setting.Attachment.Minio.Bucket+"/"+Model_prefix) {
files, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, model.Path[len(setting.Attachment.Minio.Bucket)+1:])
if err != nil {
log.Info("Failed to query model size from minio. id=" + modeluuid)
}
- size := getSize(files)
+ size = getSize(files)
models.ModifyModelSize(modeluuid, size)
}
} else if model.Type == models.TypeCloudBrainTwo {
@@ -323,10 +333,13 @@ func UpdateModelSize(modeluuid string) {
if err != nil {
log.Info("Failed to query model size from obs. id=" + modeluuid)
}
- size := getSize(files)
+ size = getSize(files)
models.ModifyModelSize(modeluuid, size)
}
}
+ if model.Size == 0 && size > 0 {
+ go repository.ResetRepoModelNum(model.RepoId)
+ }
} else {
log.Info("not found model,uuid=" + modeluuid)
}
@@ -441,13 +454,14 @@ func DeleteModelFile(ctx *context.Context) {
fileName := ctx.Query("fileName")
model, err := models.QueryModelById(id)
if err == nil {
+ var totalSize int64
if model.ModelType == MODEL_LOCAL_TYPE {
if model.Type == models.TypeCloudBrainOne {
bucketName := setting.Attachment.Minio.Bucket
objectName := model.Path[len(bucketName)+1:] + fileName
log.Info("delete bucket=" + bucketName + " path=" + objectName)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
- totalSize := storage.MinioGetFilesSize(bucketName, []string{objectName})
+ totalSize = storage.MinioGetFilesSize(bucketName, []string{objectName})
err := storage.Attachments.DeleteDir(objectName)
if err != nil {
log.Info("Failed to delete model. id=" + id)
@@ -467,7 +481,7 @@ func DeleteModelFile(ctx *context.Context) {
objectName := model.Path[len(setting.Bucket)+1:] + fileName
log.Info("delete bucket=" + setting.Bucket + " path=" + objectName)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
- totalSize := storage.ObsGetFilesSize(bucketName, []string{objectName})
+ totalSize = storage.ObsGetFilesSize(bucketName, []string{objectName})
err := storage.ObsRemoveObject(bucketName, objectName)
if err != nil {
log.Info("Failed to delete model. id=" + id)
@@ -484,6 +498,9 @@ func DeleteModelFile(ctx *context.Context) {
}
}
}
+ if (model.Size - totalSize) <= 0 {
+ go repository.ResetRepoModelNum(model.RepoId)
+ }
}
ctx.JSON(200, map[string]string{
"code": "0",
@@ -552,6 +569,9 @@ func deleteModelByID(ctx *context.Context, id string) error {
}
}
}
+ if model.Size > 0 {
+ go repository.ResetRepoModelNum(model.RepoId)
+ }
}
}
return err
@@ -691,36 +711,42 @@ func downloadFromCloudBrainTwo(path string, task *models.AiModelManage, ctx *con
}
func QueryTrainJobVersionList(ctx *context.Context) {
+
+ VersionListTasks, err := QueryTrainJobVersionListApi(ctx)
+ if err != nil {
+ ctx.JSON(200, nil)
+ } else {
+ ctx.JSON(200, VersionListTasks)
+ }
+}
+
+func QueryTrainJobVersionListApi(ctx *context.Context) ([]*models.Cloudbrain, error) {
log.Info("query train job version list. start.")
JobID := ctx.Query("jobId")
if JobID == "" {
JobID = ctx.Query("JobId")
}
-
VersionListTasks, count, err := models.QueryModelTrainJobVersionList(JobID)
-
log.Info("query return count=" + fmt.Sprint(count))
+ return VersionListTasks, err
+}
+
+func QueryTrainJobList(ctx *context.Context) {
+ VersionListTasks, err := QueryTrainJobListApi(ctx)
if err != nil {
- ctx.ServerError("QueryTrainJobList:", err)
+ ctx.JSON(200, nil)
} else {
ctx.JSON(200, VersionListTasks)
}
}
-func QueryTrainJobList(ctx *context.Context) {
- log.Info("query train job list. start.")
+func QueryTrainJobListApi(ctx *context.Context) ([]*models.Cloudbrain, error) {
repoId := ctx.QueryInt64("repoId")
-
VersionListTasks, count, err := models.QueryModelTrainJobList(repoId)
log.Info("query return count=" + fmt.Sprint(count))
- if err != nil {
- ctx.ServerError("QueryTrainJobList:", err)
- } else {
- ctx.JSON(200, VersionListTasks)
- }
-
+ return VersionListTasks, err
}
func QueryTrainModelFileById(ctx *context.Context) ([]storage.FileInfo, error) {
@@ -1300,19 +1326,25 @@ func QueryModelFileForPredict(ctx *context.Context) {
func QueryModelFileByID(id string) []storage.FileInfo {
model, err := models.QueryModelById(id)
- if err == nil {
- if model.Type == models.TypeCloudBrainTwo {
- prefix := model.Path[len(setting.Bucket)+1:]
- fileinfos, _ := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, prefix)
- return fileinfos
- } else if model.Type == models.TypeCloudBrainOne {
- prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:]
- fileinfos, _ := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, prefix)
- return fileinfos
- }
- } else {
+ if err != nil {
log.Error("no such model!", err.Error())
+ return nil
+ }
+ return QueryModelFileByModel(model)
+}
+
+func QueryModelFileByModel(model *models.AiModelManage) []storage.FileInfo {
+
+ if model.Type == models.TypeCloudBrainTwo {
+ prefix := model.Path[len(setting.Bucket)+1:]
+ fileinfos, _ := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, prefix)
+ return fileinfos
+ } else if model.Type == models.TypeCloudBrainOne {
+ prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:]
+ fileinfos, _ := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, prefix)
+ return fileinfos
}
+
return nil
}
diff --git a/routers/repo/aisafety.go b/routers/repo/aisafety.go
index 6176fcda5..55f25dba6 100644
--- a/routers/repo/aisafety.go
+++ b/routers/repo/aisafety.go
@@ -847,6 +847,9 @@ func createForGPU(ctx *context.Context, jobName string) error {
codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
os.RemoveAll(codePath)
+ gitRepo, _ := git.OpenRepository(repo.RepoPath())
+ commitID, _ := gitRepo.GetBranchCommitID(cloudbrain.DefaultBranchName)
+
if err := downloadCode(repo, codePath, cloudbrain.DefaultBranchName); err != nil {
log.Error("downloadCode failed, %v", err, ctx.Data["MsgID"])
return errors.New("system error")
@@ -891,7 +894,7 @@ func createForGPU(ctx *context.Context, jobName string) error {
BranchName: cloudbrain.DefaultBranchName,
BootFile: BootFile,
Params: Params,
- CommitID: "",
+ CommitID: commitID,
ModelName: modelName,
ModelVersion: modelVersion,
CkptName: CkptName,
diff --git a/routers/repo/attachment.go b/routers/repo/attachment.go
index 240e78acc..e1de71345 100755
--- a/routers/repo/attachment.go
+++ b/routers/repo/attachment.go
@@ -29,6 +29,7 @@ import (
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/upload"
"code.gitea.io/gitea/modules/worker"
+ repo_service "code.gitea.io/gitea/services/repository"
gouuid "github.com/satori/go.uuid"
)
@@ -180,6 +181,7 @@ func DeleteAttachment(ctx *context.Context) {
ctx.Error(500, fmt.Sprintf("DeleteAttachment: %v", err))
return
}
+ go repo_service.DecreaseRepoDatasetNum(attach.DatasetID)
attachjson, _ := json.Marshal(attach)
labelmsg.SendDeleteAttachToLabelSys(string(attachjson))
@@ -894,6 +896,7 @@ func CompleteMultipart(ctx *context.Context) {
return
}
attachment.UpdateDatasetUpdateUnix()
+ go repo_service.IncreaseRepoDatasetNum(dataset.ID)
repository, _ := models.GetRepositoryByID(dataset.RepoID)
notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment)
if attachment.DatasetID != 0 {
diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go
index a23cd5462..cf6df6312 100755
--- a/routers/repo/cloudbrain.go
+++ b/routers/repo/cloudbrain.go
@@ -81,6 +81,7 @@ var (
const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types"
const CLONE_FILE_PREFIX = "file:///"
+const README = "README"
var benchmarkTypesMap = make(map[string]*models.BenchmarkTypes, 0)
@@ -284,7 +285,7 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
}
var datasetInfos map[string]models.DatasetInfo
var datasetNames string
- //var
+ var attachSize int
if uuids != "" {
datasetInfos, datasetNames, err = models.GetDatasetInfo(uuids)
if err != nil {
@@ -293,6 +294,18 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
return
}
+
+ if jobType == string(models.JobTypeDebug) {
+ for _, infos := range datasetInfos {
+ attachSize += infos.Size
+ }
+ if attachSize > int(setting.DebugAttachSize*1000*1000*1000) {
+ log.Error("The DatasetSize exceeds the limit (%d)", int(setting.DebugAttachSize)) // GB
+ cloudBrainNewDataPrepare(ctx, jobType)
+ ctx.RenderWithErr(ctx.Tr("cloudbrain.error.debug_datasetsize", int(setting.DebugAttachSize*1000*1000*1000)), tpl, &form)
+ return
+ }
+ }
}
command := cloudbrain.GetCloudbrainDebugCommand()
@@ -373,13 +386,19 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
}
if form.ModelName != "" { //使用预训练模型训练
+ _, err := models.QueryModelByPath(form.PreTrainModelUrl)
+ if err != nil {
+ log.Error("Can not find model", err)
+ cloudBrainNewDataPrepare(ctx, jobType)
+ ctx.RenderWithErr(ctx.Tr("repo.modelconvert.manage.model_not_exist"), tpl, &form)
+ return
+ }
req.ModelName = form.ModelName
req.LabelName = form.LabelName
req.CkptName = form.CkptName
req.ModelVersion = form.ModelVersion
req.PreTrainModelPath = setting.Attachment.Minio.RealPath + form.PreTrainModelUrl
req.PreTrainModelUrl = form.PreTrainModelUrl
-
}
_, err = cloudbrain.GenerateTask(req)
@@ -411,8 +430,13 @@ func loadCodeAndMakeModelPath(repo *models.Repository, codePath string, branchNa
return "cloudbrain.load_code_failed"
}
+ return initModelPath(jobName, resultPath)
+
+}
+
+func initModelPath(jobName string, resultPath string) string {
modelPath := setting.JobPath + jobName + resultPath + "/"
- err = mkModelPath(modelPath)
+ err := mkModelPath(modelPath)
if err != nil {
return "cloudbrain.load_code_failed"
}
@@ -691,6 +715,17 @@ func CloudBrainRestart(ctx *context.Context) {
break
}
}
+ if !HasModelFile(task) {
+ resultCode = "-1"
+ errorMsg = ctx.Tr("repo.debug.manage.model_not_exist")
+ break
+ }
+
+ if hasDatasetDeleted(task) {
+ resultCode = "-1"
+ errorMsg = ctx.Tr("repo.debug.manage.dataset_not_exist")
+ break
+ }
err = cloudbrain.RestartTask(ctx, task, &ID)
if err != nil {
@@ -712,6 +747,40 @@ func CloudBrainRestart(ctx *context.Context) {
}
+func hasDatasetDeleted(task *models.Cloudbrain) bool {
+ if task.Uuid == "" {
+ return false
+ }
+ uuids := strings.Split(task.Uuid, ";")
+ attachs, _ := models.GetAttachmentsByUUIDs(uuids)
+ return len(attachs) < len(uuids)
+}
+
+func HasModelFile(task *models.Cloudbrain) bool {
+ if task.PreTrainModelUrl == "" {
+ return true
+ }
+
+ model, err := models.QueryModelByPath(task.PreTrainModelUrl)
+ if err != nil {
+ log.Error("Can not find model", err)
+ return false
+ }
+
+ fileInfos := QueryModelFileByModel(model)
+ isFind := false
+ if fileInfos != nil {
+ for _, fileInfo := range fileInfos {
+ if fileInfo.FileName == task.CkptName {
+ isFind = true
+ break
+ }
+ }
+
+ }
+ return isFind
+}
+
func getOldJobPath(task *models.Cloudbrain) string {
return setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + task.JobName
}
@@ -786,7 +855,7 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo
if task.JobType == string(models.JobTypeBenchmark) {
task.BenchmarkType = ctx.Tr("repo.cloudbrain.benchmark.algorithm")
- } else if task.JobType == string(models.JobTypeSnn4imagenet) || task.JobType == string(models.JobTypeBrainScore) {
+ } else if models.IsModelBenchMarkJobType(task.JobType) {
task.BenchmarkType = ctx.Tr("repo.cloudbrain.benchmark.model")
task.BenchmarkTypeName = task.JobType
ctx.Data["BenchmarkTypeName"] = task.JobType
@@ -854,10 +923,13 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo
func CloudBrainDebug(ctx *context.Context) {
task := ctx.Cloudbrain
debugUrl := setting.DebugServerHost + "jpylab_" + task.JobID + "_" + task.SubTaskName
- if task.BootFile!=""{
- ctx.Redirect(getFileUrl(debugUrl,task.BootFile))
- }else{
+ if ctx.QueryTrim("file") != "" {
+ ctx.Redirect(getFileUrl(debugUrl, ctx.QueryTrim("file")))
+ } else {
+ if task.BootFile != "" {
+ go cloudbrainTask.UploadNotebookFiles(task)
+ }
ctx.Redirect(debugUrl)
}
@@ -1581,6 +1653,21 @@ func CloudBrainDownloadModel(ctx *context.Context) {
ctx.Resp.Header().Set("Cache-Control", "max-age=0")
http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently)
}
+
+func CloudBrainDownloadMultiModel(ctx *context.Context) {
+ parentDir := ctx.Query("parentDir")
+ jobName := ctx.Query("jobName")
+ filePath := "jobs/" + jobName + "/model/" + parentDir
+ allFile, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, filePath)
+ if err == nil {
+ returnFileName := jobName + ".zip"
+ MinioDownloadManyFile(filePath, ctx, returnFileName, allFile)
+ } else {
+ log.Info("error,msg=" + err.Error())
+ ctx.ServerError("no file to download.", err)
+ }
+}
+
func CloudBrainDownloadInferenceResult(ctx *context.Context) {
parentDir := ctx.Query("parentDir")
fileName := ctx.Query("fileName")
@@ -1617,6 +1704,8 @@ func GetRate(ctx *context.Context) {
ctx.Redirect(setting.Snn4imagenetServerHost)
} else if job.JobType == string(models.JobTypeBrainScore) {
ctx.Redirect(setting.BrainScoreServerHost)
+ } else if job.JobType == string(models.JobTypeSnn4Ecoset) {
+ ctx.Redirect(setting.Snn4EcosetServerHost)
} else {
log.Error("JobType error:%s", job.JobType, ctx.Data["msgID"])
}
@@ -1758,7 +1847,7 @@ func mkPathAndReadMeFile(path string, text string) error {
return err
}
- fileName := path + "README"
+ fileName := path + README
f, err := os.OpenFile(fileName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
if err != nil {
log.Error("OpenFile failed", err.Error())
@@ -1816,6 +1905,7 @@ func SyncCloudbrainStatus() {
if task.JobType == string(models.JobTypeModelSafety) {
continue
}
+
if task.Type == models.TypeCloudBrainOne {
task, err = cloudbrainTask.SyncCloudBrainOneStatus(task)
@@ -1824,32 +1914,7 @@ func SyncCloudbrainStatus() {
continue
}
- if task.Status != string(models.JobWaiting) {
- if task.Duration >= setting.MaxDuration && task.JobType == string(models.JobTypeDebug) {
- log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName)
- err = cloudbrain.StopJob(task.JobID)
- if err != nil {
- log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err)
- continue
- }
- oldStatus := task.Status
- task.Status = string(models.JobStopped)
- if task.EndTime == 0 {
- task.EndTime = timeutil.TimeStampNow()
- }
- task.ComputeAndSetDuration()
- if oldStatus != task.Status {
- notification.NotifyChangeCloudbrainStatus(task, oldStatus)
- }
- err = models.UpdateJob(task)
- if err != nil {
- log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err)
- continue
- }
- }
-
- }
- } else if task.Type == models.TypeCloudBrainTwo {
+ } else if task.Type == models.TypeCloudBrainTwo || task.Type == models.TypeCDCenter {
if task.JobType == string(models.JobTypeDebug) {
err := modelarts.HandleNotebookInfo(task)
if err != nil {
@@ -1866,48 +1931,77 @@ func SyncCloudbrainStatus() {
log.Error("task.JobType(%s) is error:%s", task.DisplayJobName, task.JobType)
}
} else if task.Type == models.TypeC2Net {
- result, err := grampus.GetJob(task.JobID)
- if err != nil {
- log.Error("GetTrainJob(%s) failed:%v", task.DisplayJobName, err)
- continue
- }
-
- if result != nil {
- if len(result.JobInfo.Tasks[0].CenterID) == 1 && len(result.JobInfo.Tasks[0].CenterName) == 1 {
- task.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
+ if task.JobType == string(models.JobTypeDebug) {
+ cloudbrainTask.SyncGrampusNotebookStatus(task)
+ } else {
+ result, err := grampus.GetJob(task.JobID)
+ if err != nil {
+ log.Error("GetTrainJob(%s) failed:%v", task.DisplayJobName, err)
+ continue
}
- oldStatus := task.Status
- task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
- task.Duration = result.JobInfo.RunSec
- if task.Duration < 0 {
- task.Duration = 0
- }
- task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
+ if result != nil {
+ if len(result.JobInfo.Tasks[0].CenterID) == 1 && len(result.JobInfo.Tasks[0].CenterName) == 1 {
+ task.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
+ }
+ oldStatus := task.Status
+ task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
+ task.Duration = result.JobInfo.RunSec
+
+ if task.Duration < 0 {
+ task.Duration = 0
+ }
+ task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
- if task.StartTime == 0 && result.JobInfo.StartedAt > 0 {
- task.StartTime = timeutil.TimeStamp(result.JobInfo.StartedAt)
+ if task.StartTime == 0 && result.JobInfo.StartedAt > 0 {
+ task.StartTime = timeutil.TimeStamp(result.JobInfo.StartedAt)
+ }
+ if task.EndTime == 0 && models.IsTrainJobTerminal(task.Status) && task.StartTime > 0 {
+ task.EndTime = task.StartTime.Add(task.Duration)
+ }
+ task.CorrectCreateUnix()
+ if oldStatus != task.Status {
+ notification.NotifyChangeCloudbrainStatus(task, oldStatus)
+ if models.IsTrainJobTerminal(task.Status) && task.ComputeResource == models.NPUResource {
+ if len(result.JobInfo.Tasks[0].CenterID) == 1 {
+ urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
+ }
+ }
+ }
+ err = models.UpdateJob(task)
+ if err != nil {
+ log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
+ continue
+ }
}
- if task.EndTime == 0 && models.IsTrainJobTerminal(task.Status) && task.StartTime > 0 {
- task.EndTime = task.StartTime.Add(task.Duration)
+ }
+ } else {
+ log.Error("task.Type(%s) is error:%d", task.JobName, task.Type)
+ }
+ if task.Status != string(models.JobWaiting) {
+ if task.Duration >= setting.MaxDuration && task.JobType == string(models.JobTypeDebug) {
+ log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName)
+ err = cloudbrainTask.StopDebugJob(task)
+ if err != nil {
+ log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err)
+ continue
}
- task.CorrectCreateUnix()
+ oldStatus := task.Status
+ task.Status = string(models.JobStopped)
+ if task.EndTime == 0 {
+ task.EndTime = timeutil.TimeStampNow()
+ }
+ task.ComputeAndSetDuration()
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
- if models.IsTrainJobTerminal(task.Status) && task.ComputeResource == models.NPUResource {
- if len(result.JobInfo.Tasks[0].CenterID) == 1 {
- urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
- }
- }
}
err = models.UpdateJob(task)
if err != nil {
- log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
+ log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
}
- } else {
- log.Error("task.Type(%s) is error:%d", task.JobName, task.Type)
+
}
}
@@ -2084,7 +2178,7 @@ func CloudBrainBenchmarkIndex(ctx *context.Context) {
}
var jobTypes []string
- jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet), string(models.JobTypeModelSafety))
+ jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet), string(models.JobTypeSnn4Ecoset), string(models.JobTypeModelSafety))
ciTasks, count, err := models.Cloudbrains(&models.CloudbrainsOptions{
ListOptions: models.ListOptions{
Page: page,
@@ -2117,14 +2211,16 @@ func CloudBrainBenchmarkIndex(ctx *context.Context) {
ciTasks[i].BenchmarkTypeName = ""
if ciTasks[i].JobType == string(models.JobTypeBenchmark) {
ciTasks[i].BenchmarkType = ctx.Tr("repo.cloudbrain.benchmark.algorithm")
- } else if ciTasks[i].JobType == string(models.JobTypeSnn4imagenet) || ciTasks[i].JobType == string(models.JobTypeBrainScore) {
+ } else if models.IsModelBenchMarkJobType(ciTasks[i].JobType) {
ciTasks[i].BenchmarkType = ctx.Tr("repo.cloudbrain.benchmark.model")
ciTasks[i].BenchmarkTypeName = ciTasks[i].JobType
if ciTasks[i].JobType == string(models.JobTypeSnn4imagenet) {
ciTasks[i].BenchmarkTypeRankLink = setting.Snn4imagenetServerHost
- } else {
+ } else if ciTasks[i].JobType == string(models.JobTypeBrainScore) {
ciTasks[i].BenchmarkTypeRankLink = setting.BrainScoreServerHost
+ } else {
+ ciTasks[i].BenchmarkTypeRankLink = setting.Snn4EcosetServerHost
}
}
@@ -2474,7 +2570,6 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
displayJobName := form.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
image := form.Image
- uuid := form.Attachment
jobType := form.JobType
codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
branchName := cloudbrain.DefaultBranchName
@@ -2516,7 +2611,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
return
}
- if jobType != string(models.JobTypeSnn4imagenet) && jobType != string(models.JobTypeBrainScore) {
+ if !models.IsModelBenchMarkJobType(jobType) {
log.Error("jobtype error:", jobType, ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr("jobtype error", tpl, &form)
@@ -2545,29 +2640,41 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
mkModelPath(modelPath)
uploadCodeToMinio(modelPath, jobName, cloudbrain.ModelMountPath+"/")
- snn4imagenetPath := setting.JobPath + jobName + cloudbrain.Snn4imagenetMountPath
+ benchmarkPath := setting.JobPath + jobName + cloudbrain.BenchMarkMountPath
if setting.IsSnn4imagenetEnabled && jobType == string(models.JobTypeSnn4imagenet) {
- downloadRateCode(repo, jobName, setting.Snn4imagenetOwner, setting.Snn4imagenetName, snn4imagenetPath, "", "", ctx.User.Name)
- uploadCodeToMinio(snn4imagenetPath+"/", jobName, cloudbrain.Snn4imagenetMountPath+"/")
- command = fmt.Sprintf(cloudbrain.Snn4imagenetCommand, displayJobName, trimSpaceNewlineInString(form.Description))
+ downloadRateCode(repo, jobName, setting.Snn4imagenetOwner, setting.Snn4imagenetName, benchmarkPath, "", "", ctx.User.Name)
+ uploadCodeToMinio(benchmarkPath+"/", jobName, cloudbrain.BenchMarkMountPath+"/")
+ command = fmt.Sprintf(cloudbrain.Snn4imagenetCommand, displayJobName, form.CkptName, trimSpaceNewlineInString(form.Description))
}
benchmarkChildTypeID := 0
- brainScorePath := setting.JobPath + jobName + cloudbrain.BrainScoreMountPath
+
if setting.IsBrainScoreEnabled && jobType == string(models.JobTypeBrainScore) {
- downloadRateCode(repo, jobName, setting.BrainScoreOwner, setting.BrainScoreName, brainScorePath, "", "", ctx.User.Name)
- uploadCodeToMinio(brainScorePath+"/", jobName, cloudbrain.BrainScoreMountPath+"/")
+ downloadRateCode(repo, jobName, setting.BrainScoreOwner, setting.BrainScoreName, benchmarkPath, "", "", ctx.User.Name)
+ uploadCodeToMinio(benchmarkPath+"/", jobName, cloudbrain.BenchMarkMountPath+"/")
benchmarkChildTypeID = form.BenchmarkChildTypeID
- command = fmt.Sprintf(cloudbrain.BrainScoreCommand, getBrainRegion(benchmarkChildTypeID), displayJobName, trimSpaceNewlineInString(form.Description))
+ command = fmt.Sprintf(cloudbrain.BrainScoreCommand, getBrainRegion(benchmarkChildTypeID), displayJobName, form.CkptName, trimSpaceNewlineInString(form.Description))
}
+ var uuid string
+ var datasetInfos map[string]models.DatasetInfo
+ var datasetNames string
+ if setting.IsSnn4EcosetEnabled && jobType == string(models.JobTypeSnn4Ecoset) {
+ downloadRateCode(repo, jobName, setting.Snn4EcosetOwner, setting.Snn4EcosetName, benchmarkPath, "", "", ctx.User.Name)
+ uploadCodeToMinio(benchmarkPath+"/", jobName, cloudbrain.BenchMarkMountPath+"/")
+ command = fmt.Sprintf(cloudbrain.Snn4EcosetCommand, displayJobName, form.CkptName, trimSpaceNewlineInString(form.Description))
+
+ attachment, err := getEcosetAttachment()
+ if err != nil {
+ log.Error("getEcosetAttachment failed: %v", err)
+ cloudBrainNewDataPrepare(ctx, jobType)
+ ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
+ return
+ }
+ uuid = attachment.UUID
+ datasetInfos, datasetNames, _ = models.GetDatasetInfo(uuid)
- datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid)
- if err != nil {
- log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
- cloudBrainNewDataPrepare(ctx, jobType)
- ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
- return
}
+
spec, err := resource.GetAndCheckSpec(ctx.User.ID, form.SpecId, models.FindSpecsOptions{
JobType: models.JobTypeBenchmark,
ComputeResource: models.GPU,
@@ -2599,8 +2706,6 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
CodePath: storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"),
ModelPath: storage.GetMinioPath(jobName, cloudbrain.ModelMountPath+"/"),
BenchmarkPath: storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"),
- Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"),
- BrainScorePath: storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"),
JobType: jobType,
Description: form.Description,
BranchName: branchName,
@@ -2612,6 +2717,14 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
ResultPath: storage.GetMinioPath(jobName, cloudbrain.ResultPath+"/"),
Spec: spec,
}
+ if form.ModelName != "" {
+ req.ModelName = form.ModelName
+ req.LabelName = form.LabelName
+ req.CkptName = form.CkptName
+ req.ModelVersion = form.ModelVersion
+ req.PreTrainModelPath = setting.Attachment.Minio.RealPath + form.PreTrainModelUrl
+ req.PreTrainModelUrl = form.PreTrainModelUrl
+ }
_, err = cloudbrain.GenerateTask(req)
if err != nil {
@@ -2623,6 +2736,21 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/cloudbrain/benchmark")
}
+func getEcosetAttachment() (*models.Attachment, error) {
+ ecosetRepo, err := models.GetRepositoryByOwnerAndName(setting.Snn4EcosetOwner, setting.Snn4EcosetName)
+ if err != nil {
+ return nil, err
+ }
+
+ datasetInfo, err := models.GetDatasetByRepo(ecosetRepo)
+ if err != nil {
+ return nil, err
+ }
+
+ return models.GetAttachmentByDatasetIdFileName(setting.Snn4AttachmentName, datasetInfo.ID)
+
+}
+
func getBrainRegion(benchmarkChildTypeID int) string {
values := []string{"V1", "V2", "V4", "IT"}
return values[benchmarkChildTypeID]
@@ -2683,18 +2811,24 @@ func InferenceCloudBrainJobShow(ctx *context.Context) {
cloudBrainShow(ctx, tplCloudBrainInferenceJobShow, models.JobTypeInference)
}
-func DownloadInferenceResultFile(ctx *context.Context) {
+func DownloadGPUInferenceResultFile(ctx *context.Context) {
var jobID = ctx.Params(":jobid")
- var versionName = ctx.Query("version_name")
- task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, versionName)
+ task, err := models.GetCloudbrainByJobID(jobID)
if err != nil {
log.Error("GetCloudbrainByJobID(%s) failed:%v", task.JobName, err.Error())
return
}
-
- allFile, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, task.ResultUrl)
- returnFileName := task.DisplayJobName + ".zip"
- MinioDownloadManyFile(task.ResultUrl, ctx, returnFileName, allFile)
+ parentDir := ctx.Query("parentDir")
+ filePath := "jobs/" + task.JobName + "/result/" + parentDir
+ log.Info("prefix=" + filePath)
+ allFile, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, filePath)
+ if err == nil {
+ returnFileName := task.DisplayJobName + ".zip"
+ MinioDownloadManyFile(filePath, ctx, returnFileName, allFile)
+ } else {
+ log.Error("GetAllObjectByBucketAndPrefixMinio failed: %v", err)
+ ctx.ServerError("no file to download.", err)
+ }
}
func getInferenceJobCommand(form auth.CreateCloudBrainInferencForm) (string, error) {
diff --git a/routers/repo/cloudbrain_statistic.go b/routers/repo/cloudbrain_statistic.go
index 6ff377491..43c1ab1a4 100644
--- a/routers/repo/cloudbrain_statistic.go
+++ b/routers/repo/cloudbrain_statistic.go
@@ -29,17 +29,16 @@ func CloudbrainDurationStatisticHour() {
statisticTime = currentTime
}
- err = models.DeleteCloudbrainDurationStatistic(timeutil.TimeStamp(statisticTime.Add(-1*time.Hour).Unix()), timeutil.TimeStamp(currentTime.Unix()))
+ err = models.DeleteCloudbrainDurationStatistic(timeutil.TimeStamp(statisticTime.Unix()), timeutil.TimeStamp(currentTime.Unix()))
if err != nil {
log.Error("DeleteCloudbrainDurationStatistic failed", err)
}
-
+ statisticTime = statisticTime.Add(+1 * time.Hour)
for statisticTime.Before(currentTime) || statisticTime.Equal(currentTime) {
countEach := summaryDurationStat(statisticTime)
count += countEach
statisticTime = statisticTime.Add(+1 * time.Hour)
}
- log.Info("summaryDurationStat count: %v", count)
}
}
func UpdateDurationStatisticHistoryData(beginTime time.Time, endTime time.Time) int64 {
@@ -65,15 +64,18 @@ func summaryDurationStat(statisticTime time.Time) int64 {
ciTasks, err := models.GetCloudbrainByTime(beginTime, endTime)
if err != nil {
- log.Info("GetCloudbrainByTime err: %v", err)
+ log.Error("GetCloudbrainByTime err: %v", err)
return 0
}
- models.LoadSpecs4CloudbrainInfo(ciTasks)
- cloudBrainCenterCodeAndCardTypeInfo, cloudbrainMap := getcloudBrainCenterCodeAndCardTypeInfo(ciTasks, beginTime, endTime)
+ err = models.LoadSpecs4CloudbrainInfo(ciTasks)
+ if err != nil {
+ log.Error("LoadSpecs4CloudbrainInfo err: %v", err)
+ }
+ cloudBrainCenterCodeAndCardTypeInfo := getcloudBrainCenterCodeAndCardTypeInfo(ciTasks, int(beginTime), int(endTime))
resourceQueues, err := models.GetCanUseCardInfo()
if err != nil {
- log.Info("GetCanUseCardInfo err: %v", err)
+ log.Error("GetCanUseCardInfo err: %v", err)
return 0
}
@@ -85,56 +87,45 @@ func summaryDurationStat(statisticTime time.Time) int64 {
cardsTotalDurationMap[resourceQueue.Cluster+"/"+resourceQueue.AiCenterCode+"/"+resourceQueue.AccCardType] += resourceQueue.CardsTotalNum * 1 * 60 * 60
}
}
-
- for centerCode, CardTypes := range cloudBrainCenterCodeAndCardTypeInfo {
- for cardType, cardDuration := range CardTypes {
- cloudbrainTable := cloudbrainMap[centerCode+"/"+cardType]
- if cloudbrainTable != nil {
- if _, ok := cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType]; !ok {
- cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType] = 0
- }
- cloudbrainDurationStat := models.CloudbrainDurationStatistic{
- DateTimeUnix: dateTimeUnix,
- DayTime: dayTime,
- HourTime: hourTime,
- Cluster: cloudbrainTable.Cluster,
- AiCenterName: GetAiCenterNameByCode(centerCode, "zh-CN"),
- AiCenterCode: centerCode,
- AccCardType: cardType,
- CardsUseDuration: cardDuration,
- CardsTotalDuration: cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType],
- CreatedUnix: timeutil.TimeStampNow(),
- }
- if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil {
- log.Error("Insert cloudbrainDurationStat failed: %v", err.Error())
- }
- count++
- delete(cardsTotalDurationMap, cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType)
- }
- }
- }
-
for key, cardsTotalDuration := range cardsTotalDurationMap {
- cloudbrainDurationStat := models.CloudbrainDurationStatistic{
- DateTimeUnix: dateTimeUnix,
- DayTime: dayTime,
- HourTime: hourTime,
- Cluster: strings.Split(key, "/")[0],
- AiCenterName: GetAiCenterNameByCode(strings.Split(key, "/")[1], "zh-CN"),
- AiCenterCode: strings.Split(key, "/")[1],
- AccCardType: strings.Split(key, "/")[2],
- CardsUseDuration: 0,
- CardsTotalDuration: cardsTotalDuration,
- CardsTotalNum: cardsTotalDuration / 1 / 60 / 60,
- CreatedUnix: timeutil.TimeStampNow(),
- }
- if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil {
- log.Error("Insert cloudbrainDurationStat failed: %v", err.Error())
+ if _, ok := cloudBrainCenterCodeAndCardTypeInfo[strings.Split(key, "/")[0]+"/"+strings.Split(key, "/")[1]][strings.Split(key, "/")[2]]; ok {
+ cloudbrainDurationStat := models.CloudbrainDurationStatistic{
+ DateTimeUnix: dateTimeUnix,
+ DayTime: dayTime,
+ HourTime: hourTime,
+ Cluster: strings.Split(key, "/")[0],
+ AiCenterName: GetAiCenterNameByCode(strings.Split(key, "/")[1], "zh-CN"),
+ AiCenterCode: strings.Split(key, "/")[1],
+ AccCardType: strings.Split(key, "/")[2],
+ CardsUseDuration: cloudBrainCenterCodeAndCardTypeInfo[strings.Split(key, "/")[0]+"/"+strings.Split(key, "/")[1]][strings.Split(key, "/")[2]],
+ CardsTotalDuration: cardsTotalDuration,
+ CardsTotalNum: cardsTotalDuration / 1 / 60 / 60,
+ CreatedUnix: timeutil.TimeStampNow(),
+ }
+ if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil {
+ log.Error("Insert cloudbrainDurationStat failed: %v", err.Error())
+ }
+ count++
+ } else {
+ cloudbrainDurationStat := models.CloudbrainDurationStatistic{
+ DateTimeUnix: dateTimeUnix,
+ DayTime: dayTime,
+ HourTime: hourTime,
+ Cluster: strings.Split(key, "/")[0],
+ AiCenterName: GetAiCenterNameByCode(strings.Split(key, "/")[1], "zh-CN"),
+ AiCenterCode: strings.Split(key, "/")[1],
+ AccCardType: strings.Split(key, "/")[2],
+ CardsUseDuration: 0,
+ CardsTotalDuration: cardsTotalDuration,
+ CardsTotalNum: cardsTotalDuration / 1 / 60 / 60,
+ CreatedUnix: timeutil.TimeStampNow(),
+ }
+ if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil {
+ log.Error("Insert cloudbrainDurationStat failed: %v", err.Error())
+ }
+ count++
}
- count++
}
-
- log.Info("finish summary cloudbrainDurationStat")
return count
}
@@ -153,33 +144,21 @@ func GetAiCenterNameByCode(centerCode string, language string) string {
return aiCenterName
}
-func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, beginTime int64, endTime int64) (map[string]map[string]int, map[string]*models.Cloudbrain) {
+func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, hourBeginTime int, hourEndTime int) map[string]map[string]int {
var WorkServerNumber int
var AccCardsNum int
- cloudbrainMap := make(map[string]*models.Cloudbrain)
cloudBrainCenterCodeAndCardType := make(map[string]map[string]int)
for _, cloudbrain := range ciTasks {
- if cloudbrain.Cloudbrain.StartTime == 0 {
- cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix
- }
- if cloudbrain.Cloudbrain.EndTime == 0 {
- cloudbrain.Cloudbrain.EndTime = timeutil.TimeStamp(time.Now().Unix())
- }
- cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain)
- if cloudbrain.Cloudbrain.Spec != nil {
- if _, ok := cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
- if cloudbrain.Cloudbrain.Spec != nil {
- cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType] = &cloudbrain.Cloudbrain
- }
- }
- }
-
cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain)
if cloudbrain.Cloudbrain.StartTime == 0 {
cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix
}
if cloudbrain.Cloudbrain.EndTime == 0 {
- cloudbrain.Cloudbrain.EndTime = cloudbrain.Cloudbrain.UpdatedUnix
+ if cloudbrain.Cloudbrain.Status == string(models.JobRunning) {
+ cloudbrain.Cloudbrain.EndTime = timeutil.TimeStamp(time.Now().Unix())
+ } else {
+ cloudbrain.Cloudbrain.EndTime = cloudbrain.Cloudbrain.StartTime + timeutil.TimeStamp(cloudbrain.Cloudbrain.Duration)
+ }
}
if cloudbrain.Cloudbrain.WorkServerNumber >= 1 {
WorkServerNumber = cloudbrain.Cloudbrain.WorkServerNumber
@@ -191,55 +170,36 @@ func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, be
} else {
AccCardsNum = cloudbrain.Cloudbrain.Spec.AccCardsNum
}
- if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter]; !ok {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter] = make(map[string]int)
+ if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter]; !ok {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter] = make(map[string]int)
}
+ taskStartTime := int(cloudbrain.Cloudbrain.StartTime)
+ taskEndTime := int(cloudbrain.Cloudbrain.EndTime)
if cloudbrain.Cloudbrain.Spec != nil {
- if cloudbrain.Cloudbrain.Status == string(models.ModelArtsRunning) && cloudbrain.Cloudbrain.DeletedAt.IsZero() {
- if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
- if int64(cloudbrain.Cloudbrain.StartTime) < beginTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) < endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
- } else if int64(cloudbrain.Cloudbrain.StartTime) >= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = 0
- }
- } else {
- if int64(cloudbrain.Cloudbrain.StartTime) < beginTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) < endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
- } else if int64(cloudbrain.Cloudbrain.StartTime) >= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += 0
- }
+ if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
+ if taskStartTime < hourBeginTime && taskEndTime >= hourBeginTime && taskEndTime <= hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (taskEndTime - hourBeginTime)
+ } else if taskStartTime < hourBeginTime && taskEndTime > hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (hourEndTime - hourBeginTime)
+ } else if taskStartTime >= hourBeginTime && taskStartTime <= hourEndTime && taskEndTime >= hourBeginTime && taskEndTime <= hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (taskEndTime - taskStartTime)
+ } else if taskStartTime >= hourBeginTime && taskStartTime <= hourEndTime && taskEndTime > hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (hourEndTime - taskStartTime)
}
} else {
- if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
- if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime))
- } else if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
- }
- } else {
- if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime))
- } else if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
- }
+ if taskStartTime < hourBeginTime && taskEndTime >= hourBeginTime && taskEndTime <= hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (taskEndTime - hourBeginTime)
+ } else if taskStartTime < hourBeginTime && taskEndTime > hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (hourEndTime - hourBeginTime)
+ } else if taskStartTime >= hourBeginTime && taskStartTime <= hourEndTime && taskEndTime >= hourBeginTime && taskEndTime <= hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (taskEndTime - taskStartTime)
+ } else if taskStartTime >= hourBeginTime && taskStartTime <= hourEndTime && taskEndTime > hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (hourEndTime - taskStartTime)
}
}
}
}
-
- return cloudBrainCenterCodeAndCardType, cloudbrainMap
+ return cloudBrainCenterCodeAndCardType
}
func CloudbrainUpdateHistoryData(ctx *context.Context) {
diff --git a/routers/repo/dataset.go b/routers/repo/dataset.go
index 1f445492d..32c884b5e 100755
--- a/routers/repo/dataset.go
+++ b/routers/repo/dataset.go
@@ -373,6 +373,7 @@ func datasetMultiple(ctx *context.Context, opts *models.SearchDatasetOptions) {
}
data, err := json.Marshal(datasets)
+ log.Info("datasets json: %s", string(data))
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
diff --git a/routers/repo/grampus.go b/routers/repo/grampus.go
index 8f3182758..b3f84c169 100755
--- a/routers/repo/grampus.go
+++ b/routers/repo/grampus.go
@@ -44,14 +44,37 @@ import (
const (
tplGrampusTrainJobShow base.TplName = "repo/grampus/trainjob/show"
+ tplGrampusNotebookShow base.TplName = "repo/grampus/notebook/show"
//GPU
+ tplGrampusNotebookGPUNew base.TplName = "repo/grampus/notebook/gpu/new"
tplGrampusTrainJobGPUNew base.TplName = "repo/grampus/trainjob/gpu/new"
//NPU
+ tplGrampusNotebookNPUNew base.TplName = "repo/grampus/notebook/npu/new"
tplGrampusTrainJobNPUNew base.TplName = "repo/grampus/trainjob/npu/new"
)
+func GrampusNotebookNew(ctx *context.Context) {
+ ctx.Data["IsCreate"] = true
+ notebookType := ctx.QueryInt("type")
+ processType := grampus.ProcessorTypeGPU
+ if notebookType == 1 {
+ processType = grampus.ProcessorTypeNPU
+ }
+ err := grampusNotebookNewDataPrepare(ctx, processType)
+ if err != nil {
+ ctx.ServerError("get new notebook-job info failed", err)
+ return
+ }
+ if processType == grampus.ProcessorTypeGPU {
+ ctx.HTML(http.StatusOK, tplGrampusNotebookGPUNew)
+ } else {
+ ctx.HTML(http.StatusOK, tplGrampusNotebookNPUNew)
+ }
+
+}
+
func GrampusTrainJobGPUNew(ctx *context.Context) {
ctx.Data["IsCreate"] = true
err := grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
@@ -72,57 +95,262 @@ func GrampusTrainJobNPUNew(ctx *context.Context) {
}
ctx.HTML(200, tplGrampusTrainJobNPUNew)
}
+func GrampusNotebookCreate(ctx *context.Context, form auth.CreateGrampusNotebookForm) {
+ ctx.Data["IsCreate"] = true
+ displayJobName := form.DisplayJobName
+ jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
+ uuid := form.Attachment
+ description := form.Description
+ repo := ctx.Repo.Repository
+ branchName := form.BranchName
+ image := strings.TrimSpace(form.Image)
-func grampusTrainJobNewDataPrepare(ctx *context.Context, processType string) error {
+ codeStoragePath := setting.CBCodePathPrefix + jobName + cloudbrain.CodeMountPath + "/"
+
+ tpl := tplGrampusNotebookGPUNew
+ processType := grampus.ProcessorTypeGPU
+ computeSource := models.GPUResource
+ computeSourceSimple := models.GPU
+ if form.Type == 1 {
+ tpl = tplGrampusNotebookNPUNew
+ processType = grampus.ProcessorTypeNPU
+ computeSource = models.NPUResource
+ computeSourceSimple = models.NPU
+ codeStoragePath = grampus.JobPath + jobName + modelarts.CodePath
+ }
+
+ lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeDebug), displayJobName))
+ defer lock.UnLock()
+ isOk, err := lock.Lock(models.CloudbrainKeyDuration)
+ if !isOk {
+ log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tpl, &form)
+ return
+ }
+
+ if !jobNamePattern.MatchString(displayJobName) {
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tpl, &form)
+ return
+ }
+
+ //check count limit
+ count, err := cloudbrainTask.GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeC2Net, string(models.JobTypeDebug), computeSource)
+ if err != nil {
+ log.Error("GetGrampusCountByUserID failed:%v", err, ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr("system error", tpl, &form)
+ return
+ } else {
+ if count >= 1 {
+ log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr("you have already a running or waiting task, can not create more", tpl, &form)
+ return
+ }
+ }
+
+ //check whether the task name in the project is duplicated
+ tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeDebug), displayJobName)
+ if err == nil {
+ if len(tasks) != 0 {
+ log.Error("the job name did already exist", ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr("the job name did already exist", tpl, &form)
+ return
+ }
+ } else {
+ if !models.IsErrJobNotExist(err) {
+ log.Error("system error, %v", err, ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr("system error", tpl, &form)
+ return
+ }
+ }
+
+ //check specification
+ spec, err := resource.GetAndCheckSpec(ctx.User.ID, form.SpecId, models.FindSpecsOptions{
+ JobType: models.JobTypeDebug,
+ ComputeResource: computeSourceSimple,
+ Cluster: models.C2NetCluster,
+ })
+ if err != nil || spec == nil {
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr("Resource specification not available", tpl, &form)
+ return
+ }
+
+ if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
+ log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tpl, &form)
+ return
+ }
+
+ var datasetInfos map[string]models.DatasetInfo
+ var datasetNames string
+ //var
+ if uuid != "" {
+ datasetInfos, datasetNames, err = models.GetDatasetInfo(uuid, computeSourceSimple)
+ if err != nil {
+ log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
+ return
+ }
+ }
+
+ //prepare code and out path
+ codeLocalPath := setting.JobPath + jobName + cloudbrain.CodeMountPath + "/"
+ _, err = ioutil.ReadDir(codeLocalPath)
+ if err == nil {
+ os.RemoveAll(codeLocalPath)
+ }
+
+ if err := downloadZipCode(ctx, codeLocalPath, branchName); err != nil {
+ log.Error("downloadZipCode failed, server timed out: %s (%v)", repo.FullName(), err)
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
+ return
+ }
+
+ if processType == grampus.ProcessorTypeGPU {
+ if err := uploadCodeToMinio(codeLocalPath+"/", jobName, cloudbrain.CodeMountPath+"/"); err != nil {
+ log.Error("Failed to uploadCodeToMinio: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
+ return
+ }
+
+ } else {
+
+ if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil {
+ log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err)
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
+ return
+ }
+ }
+
+ commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName)
+
+ req := &grampus.GenerateNotebookJobReq{
+ JobName: jobName,
+ DisplayJobName: displayJobName,
+ ComputeResource: computeSource,
+ ProcessType: processType,
+ ImageUrl: image,
+ ImageId: form.ImageID,
+ Description: description,
+ Uuid: uuid,
+ CommitID: commitID,
+ BranchName: branchName,
+ DatasetNames: datasetNames,
+ DatasetInfos: datasetInfos,
+ Spec: spec,
+ CodeStoragePath: codeStoragePath,
+ CodeName: strings.ToLower(repo.Name),
+ }
+
+ if form.ModelName != "" { //使用预训练模型训练
+
+ _, err := models.QueryModelByPath(form.PreTrainModelUrl)
+ if err != nil {
+ log.Error("Can not find model", err)
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("repo.modelconvert.manage.model_not_exist"), tpl, &form)
+ return
+ }
+ req.ModelName = form.ModelName
+ req.LabelName = form.LabelName
+ req.CkptName = form.CkptName
+ req.ModelVersion = form.ModelVersion
+ req.PreTrainModelUrl = form.PreTrainModelUrl
+ req.PreTrainModelPath = getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName)
+
+ }
+
+ _, err = grampus.GenerateNotebookJob(ctx, req)
+ if err != nil {
+ log.Error("GenerateNotebookJob failed:%v", err.Error(), ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(err.Error(), tpl, &form)
+ return
+ }
+ ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/debugjob?debugListType=all")
+}
+func grampusNotebookNewDataPrepare(ctx *context.Context, processType string) error {
ctx.Data["PageIsCloudBrain"] = true
var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name)
ctx.Data["display_job_name"] = displayJobName
//get valid images
- images, err := grampus.GetImages(processType)
+ if processType == grampus.ProcessorTypeNPU {
+ images, err := grampus.GetImages(processType, string(models.JobTypeDebug))
+ if err != nil {
+ log.Error("GetImages failed:", err.Error())
+ } else {
+ ctx.Data["images"] = images.Infos
+ }
+ }
+ //prepare available specs
+ computeResourceSimple := models.GPU
+ datasetType := models.TypeCloudBrainOne
+ computeResource := models.GPUResource
+ if processType == grampus.ProcessorTypeNPU {
+ computeResourceSimple = models.NPU
+ datasetType = models.TypeCloudBrainTwo
+ computeResource = models.NPUResource
+ }
+
+ prepareGrampusSpecs(ctx, computeResourceSimple, models.JobTypeDebug)
+
+ //get branches
+ branches, _, err := ctx.Repo.GitRepo.GetBranches(0, 0)
if err != nil {
- log.Error("GetImages failed:", err.Error())
+ log.Error("GetBranches error:", err.Error())
} else {
- ctx.Data["images"] = images.Infos
+ ctx.Data["branches"] = branches
}
- grampus.InitSpecialPool()
+ ctx.Data["branchName"] = ctx.Repo.BranchName
- ctx.Data["GPUEnabled"] = true
- ctx.Data["NPUEnabled"] = true
- includeCenters := make(map[string]struct{})
- excludeCenters := make(map[string]struct{})
- if grampus.SpecialPools != nil {
- for _, pool := range grampus.SpecialPools.Pools {
- if pool.IsExclusive {
- if !IsUserInOrgPool(ctx.User.ID, pool) {
- ctx.Data[pool.Type+"Enabled"] = false
- }
- } else {
- if strings.Contains(strings.ToLower(processType), strings.ToLower(pool.Type)) {
- if IsUserInOrgPool(ctx.User.ID, pool) {
- for _, center := range pool.Pool {
- includeCenters[center.Queue] = struct{}{}
- }
- } else {
- for _, center := range pool.Pool {
- excludeCenters[center.Queue] = struct{}{}
- }
+ ctx.Data["datasetType"] = datasetType
+ waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeC2Net, computeResource, models.JobTypeDebug)
+ ctx.Data["WaitCount"] = waitCount
+ NotStopTaskCount, _ := cloudbrainTask.GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeC2Net, string(models.JobTypeDebug), computeResource)
+ ctx.Data["NotStopTaskCount"] = NotStopTaskCount
- }
+ ctx.Data["code_path"] = cloudbrain.CodeMountPath
+ ctx.Data["dataset_path"] = cloudbrain.DataSetMountPath
+ ctx.Data["model_path"] = cloudbrain.ModelMountPath
- }
+ return nil
+}
- }
+func grampusTrainJobNewDataPrepare(ctx *context.Context, processType string) error {
+ ctx.Data["PageIsCloudBrain"] = true
+
+ var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name)
+ ctx.Data["display_job_name"] = displayJobName
+
+ //get valid images
+ if processType == grampus.ProcessorTypeNPU {
+ images, err := grampus.GetImages(processType, string(models.JobTypeTrain))
+ if err != nil {
+ log.Error("GetImages failed:", err.Error())
+ } else {
+ ctx.Data["images"] = images.Infos
}
}
//prepare available specs
if processType == grampus.ProcessorTypeNPU {
- prepareGrampusTrainSpecs(ctx, models.NPU)
+ prepareGrampusSpecs(ctx, models.NPU)
} else if processType == grampus.ProcessorTypeGPU {
- prepareGrampusTrainSpecs(ctx, models.GPU)
+ prepareGrampusSpecs(ctx, models.GPU)
}
//get branches
@@ -201,55 +429,19 @@ func GrampusTrainJobVersionNew(ctx *context.Context) {
}
}
-func prepareGrampusTrainSpecs(ctx *context.Context, computeResource string) {
+func prepareGrampusSpecs(ctx *context.Context, computeResource string, jobType ...models.JobType) {
+ tempJobType := models.JobTypeTrain
+ if len(jobType) > 0 {
+ tempJobType = jobType[0]
+ }
noteBookSpecs, _ := resource.FindAvailableSpecs(ctx.User.ID, models.FindSpecsOptions{
- JobType: models.JobTypeTrain,
+ JobType: tempJobType,
ComputeResource: computeResource,
Cluster: models.C2NetCluster,
})
ctx.Data["Specs"] = noteBookSpecs
}
-func getFilterSpecBySpecialPool(specs *models.GetGrampusResourceSpecsResult, includeCenters map[string]struct{}, excludeCenters map[string]struct{}) []models.GrampusSpec {
- if len(includeCenters) == 0 && len(excludeCenters) == 0 {
- return specs.Infos
- }
- var grampusSpecs []models.GrampusSpec
- for _, info := range specs.Infos {
- if isInIncludeCenters(info, includeCenters) || (len(excludeCenters) != 0 && isNotAllInExcludeCenters(info, excludeCenters)) {
- grampusSpecs = append(grampusSpecs, info)
- }
-
- }
- return grampusSpecs
-}
-
-func isInIncludeCenters(grampusSpec models.GrampusSpec, centers map[string]struct{}) bool {
- for _, center := range grampusSpec.Centers {
- if _, ok := centers[center.ID]; ok {
- return true
- }
- }
- return false
-}
-func isNotAllInExcludeCenters(grampusSpec models.GrampusSpec, centers map[string]struct{}) bool {
- for _, center := range grampusSpec.Centers {
- if _, ok := centers[center.ID]; !ok {
- return true
- }
- }
- return false
-}
-
-func IsUserInOrgPool(userId int64, pool *models.SpecialPool) bool {
- org, _ := models.GetOrgByName(pool.Org)
- if org != nil {
- isOrgMember, _ := models.IsOrganizationMember(org.ID, userId)
- return isOrgMember
- }
- return false
-}
-
func grampusParamCheckCreateTrainJob(form auth.CreateGrampusTrainJobForm) error {
if !strings.HasSuffix(strings.TrimSpace(form.BootFile), ".py") {
log.Error("the boot file(%s) must be a python file", form.BootFile)
@@ -721,30 +913,64 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job")
}
+// GetGrampusNotebook returns the current status of one Grampus notebook task as JSON.
+func GetGrampusNotebook(ctx *context.APIContext) {
+ var (
+ err error
+ )
+
+ ID := ctx.Params(":id")
+ job, err := models.GetCloudbrainByID(ID)
+ if err != nil {
+ ctx.NotFound("", err)
+ log.Error("GetCloudbrainByID failed:", err)
+ return
+ }
+
+ jobAfter, err := cloudbrainTask.SyncGrampusNotebookStatus(job)
+ if err != nil {
+ ctx.NotFound(err)
+ log.Error("Sync cloud brain one status failed:", err)
+ return
+ }
+
+ // Resolve the AI-center display name only after the sync succeeded, so a nil jobAfter is never dereferenced.
+ aiCenterName := cloudbrainService.GetAiCenterShow(jobAfter.AiCenter, ctx.Context)
+ ctx.JSON(http.StatusOK, map[string]interface{}{
+ "ID": ID,
+ "JobName": jobAfter.JobName,
+ "JobStatus": jobAfter.Status,
+ "AiCenter": aiCenterName,
+ "CreatedTime": jobAfter.CreatedUnix.Format("2006-01-02 15:04:05"),
+ "CompletedTime": jobAfter.UpdatedUnix.Format("2006-01-02 15:04:05"),
+ "JobDuration": jobAfter.TrainJobDuration,
+ })
+}
+
func GrampusStopJob(ctx *context.Context) {
- var ID = ctx.Params(":jobid")
+ var ID = ctx.Params(":id")
var resultCode = "0"
var errorMsg = ""
var status = ""
task := ctx.Cloudbrain
for {
- if task.Status == string(models.GrampusStatusStopped) || task.Status == string(models.GrampusStatusFailed) || task.Status == string(models.GrampusStatusSucceeded) {
+ if task.Status == models.GrampusStatusStopped || task.Status == models.GrampusStatusFailed || task.Status == models.GrampusStatusSucceeded {
log.Error("the job(%s) has been stopped", task.JobName, ctx.Data["msgID"])
resultCode = "-1"
- errorMsg = "system error"
+ errorMsg = ctx.Tr("cloudbrain.Already_stopped")
break
}
- res, err := grampus.StopJob(task.JobID)
+ res, err := grampus.StopJob(task.JobID, task.JobType)
if err != nil {
log.Error("StopJob(%s) failed:%v", task.JobName, err, ctx.Data["msgID"])
resultCode = strconv.Itoa(res.ErrorCode)
- errorMsg = res.ErrorMsg
+ errorMsg = ctx.Tr("cloudbrain.Stopped_failed")
break
}
oldStatus := task.Status
- task.Status = string(models.GrampusStatusStopped)
+ task.Status = getStopJobResponseStatus(res)
if task.EndTime == 0 {
task.EndTime = timeutil.TimeStampNow()
}
@@ -773,6 +999,33 @@ func GrampusStopJob(ctx *context.Context) {
})
}
+// getStopJobResponseStatus derives the task status to persist after a stop request: the translated response status when present, otherwise "stopping".
+func getStopJobResponseStatus(res *models.GrampusStopJobResponse) string {
+ if res.Status == "" {
+ return models.GrampusStatusStopping
+ }
+ return grampus.TransTrainJobStatus(res.Status)
+}
+
+func GrampusNotebookDel(ctx *context.Context) { // delete a stopped Grampus notebook task, then redirect based on the originating page
+ var listType = ctx.Query("listType")
+ if err := deleteGrampusJob(ctx); err != nil { // deleteGrampusJob refuses tasks that are not yet in a final status
+ log.Error("deleteGrampusJob failed: %v", err, ctx.Data["msgID"])
+ ctx.ServerError(err.Error(), err)
+ return
+ }
+
+ var isAdminPage = ctx.Query("isadminpage")
+ var isHomePage = ctx.Query("ishomepage")
+ if ctx.IsUserSiteAdmin() && isAdminPage == "true" { // deleted from the site-admin cloudbrain list
+ ctx.Redirect(setting.AppSubURL + "/admin" + "/cloudbrains")
+ } else if isHomePage == "true" { // deleted from the global cloudbrain list
+ ctx.Redirect(setting.AppSubURL + "/cloudbrains")
+ } else { // deleted from the repository's debug-job list
+ ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/debugjob?debugListType=" + listType)
+ }
+}
+
func GrampusTrainJobDel(ctx *context.Context) {
var listType = ctx.Query("listType")
if err := deleteGrampusJob(ctx); err != nil {
@@ -795,9 +1048,9 @@ func GrampusTrainJobDel(ctx *context.Context) {
func deleteGrampusJob(ctx *context.Context) error {
task := ctx.Cloudbrain
- if task.Status != string(models.GrampusStatusStopped) && task.Status != string(models.GrampusStatusSucceeded) && task.Status != string(models.GrampusStatusFailed) {
+ if task.Status != models.GrampusStatusStopped && task.Status != models.GrampusStatusSucceeded && task.Status != models.GrampusStatusFailed {
log.Error("the job(%s) has not been stopped", task.JobName, ctx.Data["msgID"])
- return errors.New("the job has not been stopped")
+ return errors.New(ctx.Tr("cloudbrain.Not_Stopped"))
}
err := models.DeleteJob(task)
@@ -815,6 +1068,166 @@ func deleteGrampusJob(ctx *context.Context) error {
return nil
}
+type NotebookDataset struct { // one entry of the JSON array stored in Cloudbrain.DataUrl
+ DatasetUrl string `json:"dataset_url"` // object-storage URL of a mounted dataset (or model) file
+}
+
+func GrampusNotebookShow(ctx *context.Context) { // render the detail page of a Grampus notebook task, syncing live status first
+ ctx.Data["PageIsCloudBrain"] = true
+
+ var task *models.Cloudbrain
+ task, err := models.GetCloudbrainByIDWithDeleted(ctx.Params(":id"))
+ if err != nil {
+ log.Error("GetCloudbrainByID failed:" + err.Error())
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ task.ContainerIp = "" // never expose the container IP to the page
+
+ if task.DeletedAt.IsZero() && cloudbrainTask.IsTaskNotStop(task) { //normal record
+ result, err := grampus.GetNotebookJob(task.JobID)
+ if err != nil {
+ log.Error("GetJob failed:" + err.Error())
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+
+ if result != nil {
+ if len(result.JobInfo.Tasks) > 0 && len(result.JobInfo.Tasks[0].CenterID) == 1 && len(result.JobInfo.Tasks[0].CenterName) == 1 { // guard Tasks[0] before indexing
+ task.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
+ }
+ oldStatus := task.Status
+ task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
+ if task.Status != oldStatus || task.Status == models.GrampusStatusRunning {
+ task.Duration = result.JobInfo.RunSec
+ if task.Duration < 0 {
+ task.Duration = 0
+ }
+ task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
+
+ if task.StartTime == 0 && result.JobInfo.StartedAt > 0 {
+ task.StartTime = timeutil.TimeStamp(result.JobInfo.StartedAt)
+ }
+ if task.EndTime == 0 && models.IsTrainJobTerminal(task.Status) && task.StartTime > 0 {
+ task.EndTime = task.StartTime.Add(task.Duration)
+ }
+ task.CorrectCreateUnix()
+ if oldStatus != task.Status {
+ notification.NotifyChangeCloudbrainStatus(task, oldStatus)
+ if models.IsTrainJobTerminal(task.Status) && task.ComputeResource == models.NPUResource {
+ if len(result.JobInfo.Tasks) > 0 && len(result.JobInfo.Tasks[0].CenterID) == 1 { // guard Tasks[0] before indexing
+ urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
+ }
+ }
+ }
+ }
+ }
+ err = models.UpdateJob(task)
+ if err != nil {
+ log.Error("UpdateJob failed:" + err.Error())
+ }
+ }
+ }
+
+ if len(task.Parameters) > 0 {
+ var parameters models.Parameters
+ err := json.Unmarshal([]byte(task.Parameters), &parameters) // fixed mojibake: "&parameters" had been corrupted to "¶meters"
+ if err != nil {
+ log.Error("Failed to Unmarshal Parameters: %s (%v)", task.Parameters, err)
+ ctx.ServerError("system error", err)
+ return
+ }
+
+ if len(parameters.Parameter) > 0 {
+ paramTemp := ""
+ for _, Parameter := range parameters.Parameter {
+ param := Parameter.Label + " = " + Parameter.Value + "; "
+ paramTemp = paramTemp + param
+ }
+ task.Parameters = paramTemp[:len(paramTemp)-2] // drop the trailing "; "
+ } else {
+ task.Parameters = ""
+ }
+ }
+ user, err := models.GetUserByID(task.UserID)
+ if err == nil {
+ task.User = user
+ }
+
+ prepareSpec4Show(ctx, task)
+
+ ctx.Data["task"] = task
+ ctx.Data["datasetDownload"] = getDatasetDownloadInfo(ctx, task)
+ ctx.Data["modelDownload"] = getModelDownloadInfo(ctx, task)
+ ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task)
+ ctx.Data["ai_center"] = cloudbrainService.GetAiCenterShow(task.AiCenter, ctx)
+ ctx.Data["code_path"] = cloudbrain.CodeMountPath
+ ctx.Data["dataset_path"] = cloudbrain.DataSetMountPath
+ ctx.Data["model_path"] = cloudbrain.ModelMountPath
+ ctx.HTML(http.StatusOK, tplGrampusNotebookShow)
+}
+
+func getDatasetDownloadInfo(ctx *context.Context, task *models.Cloudbrain) []*models.DatasetDownload { // build dataset download entries for the notebook detail page
+ datasetDownload := make([]*models.DatasetDownload, 0)
+ if ctx.IsSigned {
+ if task.Uuid != "" && task.UserID == ctx.User.ID { // only the task owner may see download links
+ if task.IsGPUTask() {
+ return GetCloudBrainDataSetInfo(task.Uuid, task.DatasetName, false)
+ } else {
+ datasetDownload = GetCloudBrainDataSetInfo(task.Uuid, task.DatasetName, false)
+ datasetObsUrlList := make([]NotebookDataset, 0)
+ _ = json.Unmarshal([]byte(task.DataUrl), &datasetObsUrlList) // best effort: list stays empty when DataUrl is malformed
+
+ for _, datasetInfo := range datasetDownload { // elements are pointers, so the link is updated in place
+
+ for _, datasetObs := range datasetObsUrlList {
+ log.Info("datasetObsUrl:" + datasetObs.DatasetUrl + "datasetName:" + datasetInfo.DatasetName)
+ if strings.Contains(datasetObs.DatasetUrl, datasetInfo.DatasetName) { // match by name substring — NOTE(review): confirm dataset names cannot collide as substrings
+ datasetInfo.DatasetDownloadLink = datasetObs.DatasetUrl
+ break
+ }
+ }
+
+ }
+
+ }
+
+ }
+ }
+
+ return datasetDownload
+}
+
+func getModelDownloadInfo(ctx *context.Context, task *models.Cloudbrain) *models.ModelDownload { // build the pre-trained-model download entry (NPU tasks only) for the notebook detail page
+ var modelDownload models.ModelDownload
+ if ctx.IsSigned {
+ if task.ModelName != "" && task.UserID == ctx.User.ID { // only the task owner, and only when a model was selected
+ if task.IsNPUTask() {
+ modelDownload = models.ModelDownload{
+ Name: task.CkptName,
+ DownloadLink: "", // filled from DataUrl below when a matching entry exists
+ IsDelete: false,
+ }
+ if !HasModelFile(task) { // the checkpoint file no longer exists in storage
+ modelDownload.IsDelete = true
+ }
+ datasetObsUrlList := make([]NotebookDataset, 0)
+ _ = json.Unmarshal([]byte(task.DataUrl), &datasetObsUrlList) // best effort: list stays empty when DataUrl is malformed
+ for _, datasetObs := range datasetObsUrlList {
+ if strings.Contains(datasetObs.DatasetUrl, task.CkptName) { // match by checkpoint-name substring
+ modelDownload.DownloadLink = datasetObs.DatasetUrl
+ break
+ }
+ }
+
+ }
+
+ }
+
+ }
+
+ return &modelDownload
+}
+
func GrampusTrainJobShow(ctx *context.Context) {
ctx.Data["PageIsCloudBrain"] = true
@@ -826,7 +1239,7 @@ func GrampusTrainJobShow(ctx *context.Context) {
return
}
task.ContainerIp = ""
-
+ task.User, _ = models.GetUserByID(task.UserID)
if task.DeletedAt.IsZero() { //normal record
result, err := grampus.GetJob(task.JobID)
if err != nil {
@@ -895,6 +1308,7 @@ func GrampusTrainJobShow(ctx *context.Context) {
taskList := make([]*models.Cloudbrain, 0)
taskList = append(taskList, task)
prepareSpec4Show(ctx, task)
+
ctx.Data["version_list_task"] = taskList
ctx.Data["datasetDownload"] = GetCloudBrainDataSetInfo(task.Uuid, task.DatasetName, false)
ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task)
@@ -1158,3 +1572,176 @@ func HandleTaskWithAiCenter(ctx *context.Context) {
r["updateCounts"] = updateCounts
ctx.JSON(http.StatusOK, response.SuccessWithData(r))
}
+
+func GrampusNotebookDebug(ctx *context.Context) { // redirect the user into the running notebook's web UI
+
+ result, err := grampus.GetNotebookJob(ctx.Cloudbrain.JobID)
+
+ if err != nil {
+ ctx.RenderWithErr(err.Error(), tplDebugJobIndex, nil)
+ return
+ }
+ if len(result.JobInfo.Tasks) > 0 {
+
+ ctx.Redirect(result.JobInfo.Tasks[0].Url + "?token=" + result.JobInfo.Tasks[0].Token) // token-authenticated notebook URL of the first task
+ return
+ }
+ ctx.NotFound("Can not find the job.", nil)
+
+}
+
+func GrampusNotebookRestart(ctx *context.Context) { // restart a stopped Grampus notebook; responds with a JSON result code instead of a page
+ var id = ctx.Params(":id")
+ var resultCode = "-1"
+ var errorMsg = ""
+ var status = ""
+ var spec *models.Specification
+
+ task := ctx.Cloudbrain
+ if ctx.Written() {
+ return
+ }
+
+ for { // single-pass loop: every validation failure breaks out to the common JSON response below
+
+ if task.Status != models.GrampusStatusStopped && task.Status != models.GrampusStatusSucceeded && task.Status != models.GrampusStatusFailed { // only final-status tasks can restart
+ log.Error("the job(%s) is not stopped", task.JobName, ctx.Data["MsgID"])
+ errorMsg = "the job is not stopped"
+ break
+ }
+
+ count, err := cloudbrainTask.GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeC2Net, string(models.JobTypeDebug), task.ComputeResource)
+
+ if err != nil {
+ log.Error("GetCloudbrainNotebookCountByUserID failed:%v", err, ctx.Data["MsgID"])
+ errorMsg = "system error"
+ break
+ } else {
+ if count >= 1 { // one live debug task per user and compute resource
+ log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
+ resultCode = "2"
+ errorMsg = ctx.Tr("repo.cloudbrain.morethanonejob")
+ break
+ }
+ }
+
+ oldSpec, err := resource.GetCloudbrainSpec(task.ID)
+ if err != nil || oldSpec == nil {
+ log.Error("NotebookManage GetCloudbrainSpec error.%v", err)
+ errorMsg = "Resource specification not available"
+ break
+ }
+
+ computeSourceSimple := models.GPU
+ action := models.ActionCreateGrampusGPUDebugTask
+ if task.ComputeResource == models.NPUResource {
+ computeSourceSimple = models.NPU
+ action = models.ActionCreateGrampusNPUDebugTask
+ }
+ spec, err = resource.GetAndCheckSpec(ctx.User.ID, oldSpec.ID, models.FindSpecsOptions{ // the old spec must still be offered for this job type/cluster
+ JobType: models.JobType(task.JobType),
+ ComputeResource: computeSourceSimple,
+ Cluster: models.C2NetCluster,
+ })
+ if err != nil || spec == nil {
+ log.Error("NotebookManage GetAndCheckSpec error.task.id = %d", task.ID)
+ errorMsg = "Resource specification not support any more"
+ break
+ }
+ if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
+ log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
+ errorMsg = ctx.Tr("points.insufficient_points_balance")
+ break
+ }
+ if task.IsGPUTask() {
+ if _, err := os.Stat(getOldJobPath(task)); err != nil { // GPU jobs need their original local working directory
+ log.Error("Can not find job minio path", err)
+ resultCode = "-1"
+ errorMsg = ctx.Tr("cloudbrain.result_cleared")
+ break
+ }
+ }
+
+ if !HasModelFile(task) { // the pre-trained model file (if one was selected) must still exist — NOTE(review): presumably HasModelFile passes when no model is configured; verify
+ errorMsg = ctx.Tr("repo.debug.manage.model_not_exist")
+ break
+ }
+ if hasDatasetDeleted(task) { // every mounted dataset attachment must still exist
+ errorMsg = ctx.Tr("repo.debug.manage.dataset_not_exist")
+ break
+ }
+
+ createTime := timeutil.TimeStampNow()
+
+ res, err := grampus.RestartNotebookJob(task.JobID)
+ if err != nil {
+ log.Error("ManageNotebook2(%s) failed:%v", task.DisplayJobName, err.Error(), ctx.Data["MsgID"])
+ errorMsg = ctx.Tr("repo.debug_again_fail")
+ break
+ }
+
+ if res.GrampusResult.ErrorCode != 0 || res.NewId == "" {
+ log.Error("ManageNotebook2 failed:" + res.GrampusResult.ErrorMsg)
+ errorMsg = ctx.Tr("repo.debug_again_fail")
+ if res.GrampusResult.ErrorCode == 5005 { // NOTE(review): 5005 presumably means the job is permanently unrestartable; confirm against the Grampus API
+ errorMsg = ctx.Tr("repo.debug_again_fail_forever")
+ }
+
+ break
+ }
+
+ newTask := &models.Cloudbrain{ // a restart creates a new record carrying over the old task's configuration
+ Status: res.Status,
+ UserID: task.UserID,
+ RepoID: task.RepoID,
+ JobID: res.NewId,
+ JobName: task.JobName,
+ DisplayJobName: task.DisplayJobName,
+ JobType: task.JobType,
+ Type: task.Type,
+ Uuid: task.Uuid,
+ Image: task.Image,
+ ImageID: task.ImageID,
+ EngineID: task.EngineID,
+ CommitID: task.CommitID,
+ EngineName: task.EngineName,
+ IsLatestVersion: "1",
+ BranchName: task.BranchName,
+ DatasetName: task.DatasetName,
+ ComputeResource: task.ComputeResource,
+ Description: task.Description,
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ Spec: spec,
+ ModelName: task.ModelName,
+ ModelVersion: task.ModelVersion,
+ LabelName: task.LabelName,
+ PreTrainModelUrl: task.PreTrainModelUrl,
+ CkptName: task.CkptName,
+ WorkServerNumber: 1,
+ }
+
+ err = models.RestartCloudbrain(task, newTask)
+ if err != nil {
+ log.Error("RestartCloudbrain(%s) failed:%v", task.JobName, err.Error(), ctx.Data["MsgID"])
+ errorMsg = "system error"
+ break
+ }
+
+ id = strconv.FormatInt(newTask.ID, 10) // respond with the NEW task id so the page can follow it
+
+ status = res.Status
+ resultCode = "0"
+
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, newTask.DisplayJobName, action)
+
+ break
+ }
+
+ ctx.JSON(200, map[string]string{
+ "result_code": resultCode,
+ "error_msg": errorMsg,
+ "status": status,
+ "id": id,
+ })
+}
diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go
index 4e30e625d..e0b9cd1b6 100755
--- a/routers/repo/modelarts.go
+++ b/routers/repo/modelarts.go
@@ -218,6 +218,22 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm
return
}
}
+
+ var datasetInfos map[string]models.DatasetInfo
+ var attachSize int
+ if uuid != "" {
+ datasetInfos, _, err = models.GetDatasetInfo(uuid)
+ for _, infos := range datasetInfos {
+ attachSize += infos.Size
+ }
+ if attachSize > int(setting.DebugAttachSize*1000*1000*1000) {
+ log.Error("The DatasetSize exceeds the limit (%d)", int(setting.DebugAttachSize)) //GB
+ notebookNewDataPrepare(ctx)
+ ctx.RenderWithErr(ctx.Tr("cloudbrain.error.debug_datasetsize", int(setting.DebugAttachSize*1000*1000*1000)), tplModelArtsNotebookNew, &form)
+ return
+ }
+ }
+
var aiCenterCode = models.AICenterOfCloudBrainTwo
if setting.ModelartsCD.Enabled {
aiCenterCode = models.AICenterOfChengdu
@@ -239,10 +255,37 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm
return
}
+ req := cloudbrain.GenerateModelArtsNotebookReq{
+ DisplayJobName: displayJobName,
+ JobName: jobName,
+ Description: description,
+ Uuid: uuid,
+ ImageId: imageId,
+ Spec: spec,
+ BootFile: "",
+ AutoStopDurationMs: modelarts.AutoStopDurationMs,
+ }
+
+ if form.ModelName != "" { //使用预训练模型训练
+ _, err := models.QueryModelByPath(form.PreTrainModelUrl)
+ if err != nil {
+ log.Error("Can not find model", err)
+ notebookNewDataPrepare(ctx)
+ ctx.RenderWithErr(ctx.Tr("repo.modelconvert.manage.model_not_exist"), tplModelArtsNotebookNew, &form)
+ return
+ }
+ req.ModelName = form.ModelName
+ req.LabelName = form.LabelName
+ req.CkptName = form.CkptName
+ req.ModelVersion = form.ModelVersion
+ req.PreTrainModelUrl = form.PreTrainModelUrl
+
+ }
+
if setting.ModelartsCD.Enabled {
- _, err = modelarts_cd.GenerateNotebook(ctx, displayJobName, jobName, uuid, description, imageId, spec, "", modelarts.AutoStopDurationMs)
+ _, err = modelarts_cd.GenerateNotebook(ctx, req)
} else {
- _, err = modelarts.GenerateNotebook2(ctx, displayJobName, jobName, uuid, description, imageId, spec, "", modelarts.AutoStopDurationMs)
+ _, err = modelarts.GenerateNotebook2(ctx, req)
}
if err != nil {
@@ -279,11 +322,17 @@ func NotebookShow(ctx *context.Context) {
}
- datasetDownload := make([]models.DatasetDownload, 0)
+ datasetDownload := make([]*models.DatasetDownload, 0)
+ var modelDownload models.ModelDownload
if ctx.IsSigned {
if task.Uuid != "" && task.UserID == ctx.User.ID {
datasetDownload = GetCloudBrainDataSetInfo(task.Uuid, task.DatasetName, true)
}
+ if task.ModelName != "" && task.UserID == ctx.User.ID {
+ modelDownload = GetModelDownload(task)
+
+ }
+
}
user, err := models.GetUserByID(task.UserID)
if err == nil {
@@ -304,6 +353,7 @@ func NotebookShow(ctx *context.Context) {
}
ctx.Data["duration"] = task.TrainJobDuration
ctx.Data["datasetDownload"] = datasetDownload
+ ctx.Data["modelDownload"] = modelDownload
ctx.Data["task"] = task
ctx.Data["ID"] = ID
ctx.Data["jobName"] = task.JobName
@@ -311,8 +361,25 @@ func NotebookShow(ctx *context.Context) {
ctx.HTML(200, tplModelArtsNotebookShow)
}
-func GetCloudBrainDataSetInfo(uuid string, datasetname string, isNeedDown bool) []models.DatasetDownload {
- datasetDownload := make([]models.DatasetDownload, 0)
+func GetModelDownload(task *models.Cloudbrain) models.ModelDownload { // build a signed OBS download entry for the task's pre-trained model checkpoint
+ index := strings.Index(task.PreTrainModelUrl, "/") // strip the leading bucket segment; when no "/" exists, index is -1 and the full string is kept
+ key := task.PreTrainModelUrl[index+1:] + task.CkptName // NOTE(review): assumes PreTrainModelUrl ends with "/" — otherwise key lacks a separator before CkptName; confirm with the code that writes PreTrainModelUrl
+ url, _ := storage.GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, key)
+ modelDownload := models.ModelDownload{
+ Name: task.CkptName,
+ DownloadLink: url,
+ IsDelete: false,
+ }
+
+ if !HasModelFile(task) { // mark deleted when the model file no longer exists in storage
+ log.Warn("Can not get model by path:" + task.PreTrainModelUrl)
+ modelDownload.IsDelete = true
+ }
+ return modelDownload
+}
+
+func GetCloudBrainDataSetInfo(uuid string, datasetname string, isNeedDown bool) []*models.DatasetDownload {
+ datasetDownload := make([]*models.DatasetDownload, 0)
if len(uuid) == 0 {
return datasetDownload
}
@@ -349,7 +416,7 @@ func GetCloudBrainDataSetInfo(uuid string, datasetname string, isNeedDown bool)
}
}
- datasetDownload = append(datasetDownload, models.DatasetDownload{
+ datasetDownload = append(datasetDownload, &models.DatasetDownload{
DatasetName: name,
DatasetDownloadLink: url,
RepositoryLink: link,
@@ -388,9 +455,13 @@ func NotebookDebug2(ctx *context.Context) {
ctx.RenderWithErr(err.Error(), tplModelArtsNotebookIndex, nil)
return
}
- if task.BootFile != "" {
- ctx.Redirect(getFileUrl(result.Url, task.BootFile) + "?token=" + result.Token)
+
+ if ctx.QueryTrim("file") != "" {
+ ctx.Redirect(getFileUrl(result.Url, ctx.QueryTrim("file")) + "?token=" + result.Token)
} else {
+ if task.BootFile != "" {
+ go cloudbrainTask.UploadNotebookFiles(task)
+ }
ctx.Redirect(result.Url + "?token=" + result.Token)
}
@@ -412,7 +483,7 @@ func getFileUrl(url string, filename string) string {
}
}
- return url + middle + path.Base(filename)
+ return url + middle + filename
}
func NotebookRestart(ctx *context.Context) {
@@ -476,6 +547,16 @@ func NotebookRestart(ctx *context.Context) {
errorMsg = ctx.Tr("points.insufficient_points_balance")
break
}
+ if !HasModelFile(task) { //使用预训练模型训练
+ errorMsg = ctx.Tr("repo.debug.manage.model_not_exist")
+ break
+ }
+
+ if hasDatasetDeleted(task) {
+ errorMsg = ctx.Tr("repo.debug.manage.dataset_not_exist")
+ break
+ }
+
createTime := timeutil.TimeStampNow()
param := models.NotebookAction{
Action: models.ActionStart,
@@ -511,21 +592,26 @@ func NotebookRestart(ctx *context.Context) {
}
newTask := &models.Cloudbrain{
- Status: res.Status,
- UserID: task.UserID,
- RepoID: task.RepoID,
- JobID: task.JobID,
- JobName: task.JobName,
- DisplayJobName: task.DisplayJobName,
- JobType: task.JobType,
- Type: task.Type,
- Uuid: task.Uuid,
- Image: task.Image,
- ComputeResource: task.ComputeResource,
- Description: task.Description,
- CreatedUnix: createTime,
- UpdatedUnix: createTime,
- Spec: spec,
+ Status: res.Status,
+ UserID: task.UserID,
+ RepoID: task.RepoID,
+ JobID: task.JobID,
+ JobName: task.JobName,
+ DisplayJobName: task.DisplayJobName,
+ JobType: task.JobType,
+ Type: task.Type,
+ Uuid: task.Uuid,
+ Image: task.Image,
+ ComputeResource: task.ComputeResource,
+ Description: task.Description,
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ Spec: spec,
+ ModelName: task.ModelName,
+ ModelVersion: task.ModelVersion,
+ LabelName: task.LabelName,
+ PreTrainModelUrl: task.PreTrainModelUrl,
+ CkptName: task.CkptName,
}
err = models.RestartCloudbrain(task, newTask)
@@ -564,21 +650,11 @@ func NotebookStop(ctx *context.Context) {
if task.Status != string(models.ModelArtsRunning) {
log.Error("the job(%s) is not running", task.JobName, ctx.Data["MsgID"])
resultCode = "-1"
- errorMsg = "the job is not running"
+ errorMsg = ctx.Tr("cloudbrain.Already_stopped")
break
}
- param := models.NotebookAction{
- Action: models.ActionStop,
- }
-
- var err error
- var res *models.NotebookActionResult
- if task.Type == models.TypeCloudBrainTwo {
- res, err = modelarts.ManageNotebook2(task.JobID, param)
- } else if task.Type == models.TypeCDCenter {
- res, err = modelarts_cd.ManageNotebook(task.JobID, param)
- }
+ err, res := StopModelArtsNotebook(task)
if err != nil {
log.Error("ManageNotebook2(%s) failed:%v", task.JobName, err.Error(), ctx.Data["MsgID"])
@@ -619,6 +695,21 @@ func NotebookStop(ctx *context.Context) {
})
}
+func StopModelArtsNotebook(task *models.Cloudbrain) (error, *models.NotebookActionResult) { // send a stop action to the cluster matching the task type; NOTE(review): the (error, result) return order is unconventional Go
+ param := models.NotebookAction{
+ Action: models.ActionStop,
+ }
+
+ var err error
+ var res *models.NotebookActionResult
+ if task.Type == models.TypeCloudBrainTwo {
+ res, err = modelarts.ManageNotebook2(task.JobID, param)
+ } else if task.Type == models.TypeCDCenter {
+ res, err = modelarts_cd.ManageNotebook(task.JobID, param)
+ } // any other task type falls through and returns (nil, nil)
+ return err, res
+}
+
func NotebookDel(ctx *context.Context) {
var listType = ctx.Query("debugListType")
task := ctx.Cloudbrain
@@ -1791,7 +1882,7 @@ func TrainJobShow(ctx *context.Context) {
return
}
ctx.Data["canNewJob"] = canNewJob
- datasetList := make([][]models.DatasetDownload, 0)
+ datasetList := make([][]*models.DatasetDownload, 0)
//将运行参数转化为epoch_size = 3, device_target = Ascend的格式
for i, task := range VersionListTasks {
@@ -2518,7 +2609,8 @@ func inferenceJobErrorNewDataPrepare(ctx *context.Context, form auth.CreateModel
ctx.Data["datasetType"] = models.TypeCloudBrainTwo
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "")
ctx.Data["WaitCount"] = waitCount
-
+ NotStopTaskCount, _ := cloudbrainTask.GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainTwo, string(models.JobTypeInference))
+ ctx.Data["NotStopTaskCount"] = NotStopTaskCount
return nil
}
func InferenceJobShow(ctx *context.Context) {
@@ -2582,6 +2674,46 @@ func InferenceJobShow(ctx *context.Context) {
ctx.HTML(http.StatusOK, tplModelArtsInferenceJobShow)
}
+func MultiModelDownload(ctx *context.Context) { // stream a train job's output model directory as one zip file
+ var (
+ err error
+ )
+ jobID := ctx.Params(":jobid")
+ versionName := ctx.Query("version_name")
+ parentDir := ctx.Query("parent_dir")
+
+ task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, versionName)
+ if err != nil {
+ log.Error("GetCloudbrainByJobIDAndVersionName(%s) failed:%v", jobID, err.Error()) // log jobID: task is nil when the lookup failed
+ return
+ }
+
+ if task.ComputeResource == models.NPUResource { // NPU output lives in OBS
+ objectPrefix := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, task.JobName, setting.OutPutPath, versionName, parentDir), "/") // renamed so the local no longer shadows the path package
+ objectPrefix = strings.TrimSuffix(objectPrefix, "/")
+ objectPrefix += "/" // normalize to exactly one trailing slash so the prefix lists directory contents
+ allFile, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, objectPrefix)
+ if err == nil {
+ returnFileName := task.DisplayJobName + ".zip"
+ ObsDownloadManyFile(objectPrefix, ctx, returnFileName, allFile)
+ } else {
+ log.Info("error,msg=" + err.Error())
+ ctx.ServerError("no file to download.", err)
+ }
+ } else if task.ComputeResource == models.GPUResource { // GPU output lives in Minio
+ filePath := setting.CBCodePathPrefix + task.JobName + cloudbrain.ModelMountPath + "/" + parentDir
+ allFile, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, filePath)
+ if err == nil {
+ returnFileName := task.DisplayJobName + ".zip"
+ MinioDownloadManyFile(filePath, ctx, returnFileName, allFile)
+ } else {
+ log.Info("error,msg=" + err.Error())
+ ctx.ServerError("no file to download.", err)
+ }
+ }
+
+}
+
func ModelDownload(ctx *context.Context) {
var (
err error
diff --git a/routers/repo/repo_statistic.go b/routers/repo/repo_statistic.go
index 468e6fa85..c889046e3 100755
--- a/routers/repo/repo_statistic.go
+++ b/routers/repo/repo_statistic.go
@@ -75,7 +75,7 @@ func RepoStatisticDaily(date string) {
if repo.NumIssues != 0 {
issueFixedRate = float32(repo.NumClosedIssues) / float32(repo.NumIssues)
} else {
- issueFixedRate = 1.0
+ issueFixedRate = float32(setting.RadarMap.ProjectHealth0IssueCloseRatio)
}
var numVersions int64
@@ -124,7 +124,7 @@ func RepoStatisticDaily(date string) {
NumDevMonths: numDevMonths,
RepoSize: repo.Size,
DatasetSize: datasetSize,
- NumModels: 0,
+ NumModels: repo.ModelCnt,
NumWikiViews: numWikiViews,
NumCommits: numCommits,
NumIssues: int64(repo.NumIssues),
@@ -135,6 +135,9 @@ func RepoStatisticDaily(date string) {
NumCommitsGrowth: numCommitsGrowth,
NumCommitLinesGrowth: numCommitLinesGrowth,
NumContributorsGrowth: numContributorsGrowth,
+ NumCloudbrain: repo.AiTaskCnt,
+ NumDatasetFile: repo.DatasetCnt,
+ NumModelConvert: models.QueryModelConvertCountByRepoID(repo.ID),
}
dayBeforeDate := t.AddDate(0, 0, -1).Format("2006-01-02")
@@ -155,6 +158,10 @@ func RepoStatisticDaily(date string) {
repoStat.NumIssuesAdded = repoStat.NumIssues - repoStatisticBefore.NumIssues
repoStat.NumPullsAdded = repoStat.NumPulls - repoStatisticBefore.NumPulls
repoStat.NumContributorAdded = repoStat.NumContributor - repoStatisticBefore.NumContributor
+ repoStat.NumModelsAdded = repoStat.NumModels - repoStatisticBefore.NumModels
+ repoStat.NumCloudbrainAdded = repoStat.NumCloudbrain - repoStatisticBefore.NumCloudbrain
+ repoStat.NumModelConvertAdded = repoStat.NumModelConvert - repoStatisticBefore.NumModelConvert
+ repoStat.NumDatasetFileAdded = repoStat.NumDatasetFile - repoStatisticBefore.NumDatasetFile
}
}
day4MonthsAgo := t.AddDate(0, -4, 0)
@@ -166,6 +173,8 @@ func RepoStatisticDaily(date string) {
repoStat.NumIssuesGrowth = repoStat.NumIssues - repoStatisticFourMonthsAgo.NumIssues
}
+ models.SyncStatDataToRepo(repo)
+
if _, err = models.InsertRepoStat(&repoStat); err != nil {
log.Error("InsertRepoStat failed(%s): %v", projectName, err)
log.Error("failed statistic: %s", projectName)
diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go
index a6de283a4..8121f167c 100755
--- a/routers/repo/user_data_analysis.go
+++ b/routers/repo/user_data_analysis.go
@@ -21,6 +21,7 @@ import (
const (
PAGE_SIZE = 2000
Excel_File_Path = "/useranalysis/"
+ USER_YEAR = 2022
)
func getUserMetricsExcelHeader(ctx *context.Context) map[string]string {
@@ -104,6 +105,7 @@ func getExcelHeader(ctx *context.Context) map[string]string {
excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainRunTime"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CommitDatasetNum"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CommitModelCount"))
+ excelHeader = append(excelHeader, ctx.Tr("user.static.ModelConvertCount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.FocusOtherUser"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CollectDataset"))
@@ -178,6 +180,8 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount)
tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ModelConvertCount)
+ tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset)
@@ -256,6 +260,8 @@ func writeExcelPage(row int, xlsx *excelize.File, sheetName string, userRecord *
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount)
tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ModelConvertCount)
+ tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset)
@@ -714,6 +720,12 @@ func TimingCountDataByDateAndReCount(date string, isReCount bool) {
log.Info("startTime time:" + startTime.Format("2006-01-02 15:04:05"))
log.Info("endTime time:" + endTime.Format("2006-01-02 15:04:05"))
warnEmailMessage := "用户统计信息入库失败,请尽快定位。"
+
+ startYear := time.Date(USER_YEAR, 1, 1, 0, 0, 0, 1, t.Location())
+ endYear := startYear.AddDate(1, 0, 0)
+
+ models.RefreshUserYearTable(startYear, endYear)
+
//query wiki data
log.Info("start to time count data")
wikiMap, err := queryWikiCountMap(startTime, endTime)
diff --git a/routers/routes/routes.go b/routers/routes/routes.go
index 80ff96364..85e1b8a25 100755
--- a/routers/routes/routes.go
+++ b/routers/routes/routes.go
@@ -371,7 +371,18 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/images/custom", repo.GetCustomImages)
m.Get("/images/star", repo.GetStarImages)
- m.Get("/repos", routers.ExploreRepos)
+ m.Group("/repos", func() {
+ //m.Get("", routers.ExploreRepos)
+ m.Get("", routers.GetRepoSearchPage)
+ m.Group("/square", func() {
+ m.Get("", routers.GetRepoSquarePage)
+ m.Get("/tab", routers.RepoSquare)
+ m.Get("/active-user", routers.ActiveUser)
+ m.Get("/active-org", routers.ActiveOrg)
+ })
+
+ m.Get("/search", routers.RepoFind)
+ })
m.Get("/datasets", routers.ExploreDatasets)
m.Get("/users", routers.ExploreUsers)
m.Get("/organizations", routers.ExploreOrganizations)
@@ -1175,6 +1186,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/rate", reqRepoCloudBrainReader, repo.GetRate)
m.Get("/models", reqRepoCloudBrainReader, repo.CloudBrainShowModels)
m.Get("/download_model", cloudbrain.AdminOrJobCreaterRight, repo.CloudBrainDownloadModel)
+ m.Get("/download_multi_model", cloudbrain.AdminOrJobCreaterRight, repo.CloudBrainDownloadMultiModel)
})
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.CloudBrainNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainCreate)
@@ -1198,6 +1210,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.CloudBrainTrainJobDel)
//m.Get("/models", reqRepoCloudBrainReader, repo.CloudBrainShowModels)
m.Get("/download_model", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.CloudBrainDownloadModel)
+ m.Get("/download_multi_model", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.CloudBrainDownloadMultiModel)
//m.Get("/get_log", cloudbrain.AdminOrJobCreaterRightForTrain, repo.GetLogFromModelDir)
//m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateModelArtsTrainJobForm{}), repo.TrainJobCreateVersion)
m.Get("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, repo.CloudBrainTrainJobVersionNew)
@@ -1210,20 +1223,34 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/:jobid", func() {
m.Get("", reqRepoCloudBrainReader, repo.InferenceCloudBrainJobShow)
m.Get("/result_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.CloudBrainDownloadInferenceResult)
-
- m.Get("/downloadall", repo.DownloadInferenceResultFile)
+ m.Get("/download_multi_model", cloudbrain.AdminOrJobCreaterRightForTrain, repo.CloudBrainDownloadMultiModel)
+ m.Get("/downloadall", cloudbrain.AdminOrJobCreaterRightForTrain, repo.DownloadGPUInferenceResultFile)
})
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.InferenceCloudBrainJobNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainInferencForm{}), repo.CloudBrainInferenceJobCreate)
})
}, context.RepoRef())
m.Group("/grampus", func() {
+ m.Group("/notebook", func() {
+ m.Group("/:id", func() {
+ m.Get("", reqRepoCloudBrainReader, repo.GrampusNotebookShow)
+ m.Get("/debug", reqWechatBind, cloudbrain.AdminOrJobCreaterRight, repo.GrampusNotebookDebug)
+ m.Post("/restart", reqWechatBind, cloudbrain.AdminOrJobCreaterRight, repo.GrampusNotebookRestart)
+ m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.GrampusStopJob)
+ m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.GrampusNotebookDel)
+ })
+
+ m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.GrampusNotebookNew)
+ m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateGrampusNotebookForm{}), repo.GrampusNotebookCreate)
+ })
+
m.Group("/train-job", func() {
m.Group("/:jobid", func() {
m.Get("", reqRepoCloudBrainReader, repo.GrampusTrainJobShow)
- m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.GrampusStopJob)
+ m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.GrampusStopJob)
m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.GrampusTrainJobDel)
m.Get("/model_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.ModelDownload)
+ m.Get("/download_multi_model", cloudbrain.AdminOrJobCreaterRightForTrain, repo.MultiModelDownload)
m.Get("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, repo.GrampusTrainJobVersionNew)
m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateGrampusTrainJobForm{}), repo.GrampusTrainJobVersionCreate)
})
@@ -1291,16 +1318,6 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/modelarts", func() {
m.Group("/notebook", func() {
- /* v1.0
- m.Group("/:jobid", func() {
- m.Get("", reqRepoCloudBrainReader, repo.NotebookShow)
- m.Get("/debug", cloudbrain.AdminOrJobCreaterRight, repo.NotebookDebug)
- m.Post("/:action", reqRepoCloudBrainWriter, repo.NotebookManage)
- m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.NotebookDel)
- })
- m.Get("/create", reqRepoCloudBrainWriter, repo.NotebookNew)
- m.Post("/create", reqRepoCloudBrainWriter, bindIgnErr(auth.CreateModelArtsNotebookForm{}), repo.NotebookCreate)
- */
m.Group("/:id", func() {
m.Get("", reqRepoCloudBrainReader, repo.NotebookShow)
m.Get("/debug", cloudbrain.AdminOrJobCreaterRight, repo.NotebookDebug2)
@@ -1319,6 +1336,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.TrainJobStop)
m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.TrainJobDel)
m.Get("/model_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.ModelDownload)
+ m.Get("/download_multi_model", cloudbrain.AdminOrJobCreaterRightForTrain, repo.MultiModelDownload)
m.Get("/download_log_file", cloudbrain.AdminOrJobCreaterRightForTrain, repo.TrainJobDownloadLogFile)
m.Get("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, context.PointAccount(), repo.TrainJobNewVersion)
m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateModelArtsTrainJobForm{}), repo.TrainJobCreateVersion)
@@ -1334,7 +1352,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/:jobid", func() {
m.Get("", reqRepoCloudBrainReader, repo.InferenceJobShow)
m.Get("/result_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.ResultDownload)
- m.Get("/downloadall", repo.DownloadMultiResultFile)
+ m.Get("/downloadall", cloudbrain.AdminOrJobCreaterRightForTrain, repo.DownloadMultiResultFile)
})
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.InferenceJobNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateModelArtsInferenceJobForm{}), repo.InferenceJobCreate)
diff --git a/routers/user/home.go b/routers/user/home.go
index 62b0357ad..e4ff50a45 100755
--- a/routers/user/home.go
+++ b/routers/user/home.go
@@ -779,7 +779,7 @@ func Cloudbrains(ctx *context.Context) {
var jobTypes []string
jobTypeNot := false
if jobType == string(models.JobTypeBenchmark) {
- jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet))
+ jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet), string(models.JobTypeSnn4Ecoset))
} else if jobType != "all" && jobType != "" {
jobTypes = append(jobTypes, jobType)
}
diff --git a/services/cloudbrain/clear.go b/services/cloudbrain/clear.go
index 44613ee3c..e14414c74 100644
--- a/services/cloudbrain/clear.go
+++ b/services/cloudbrain/clear.go
@@ -14,21 +14,21 @@ import (
func ClearCloudbrainResultSpace() {
log.Info("clear cloudbrain one result space begin.")
- if !setting.ClearStrategy.Enabled{
+ if !setting.ClearStrategy.Enabled {
return
}
- tasks, err := models.GetCloudBrainOneStoppedNotDebugJobDaysAgo(setting.ClearStrategy.ResultSaveDays, setting.ClearStrategy.BatchSize)
+ tasks, err := models.GetGPUStoppedNotDebugJobDaysAgo(setting.ClearStrategy.ResultSaveDays, setting.ClearStrategy.BatchSize)
if err != nil {
log.Warn("Failed to get cloudbrain, clear result failed.", err)
return
}
- debugTasks, err := models.GetCloudBrainOneStoppedDebugJobDaysAgo(setting.ClearStrategy.ResultSaveDays, setting.ClearStrategy.DebugJobSize)
+ debugTasks, err := models.GetGPUStoppedDebugJobDaysAgo(setting.ClearStrategy.ResultSaveDays, setting.ClearStrategy.DebugJobSize)
if err != nil {
log.Warn("Failed to get debug cloudbrain.", err)
}
- tasks=append(tasks,debugTasks...)
+ tasks = append(tasks, debugTasks...)
if err != nil {
log.Warn("Failed to get cloudbrain, clear result failed.", err)
@@ -38,7 +38,7 @@ func ClearCloudbrainResultSpace() {
for _, task := range tasks {
err := DeleteCloudbrainOneJobStorage(task.JobName)
if err == nil {
- log.Info("clear job in cloudbrain table:"+task.JobName)
+ log.Info("clear job in cloudbrain table:" + task.JobName)
ids = append(ids, task.ID)
}
}
@@ -69,10 +69,10 @@ func clearMinioHistoryTrashFile() {
SortModTimeAscend(miniofiles)
for _, file := range miniofiles {
- if file.Name()!="" && file.ModTime().Before(time.Now().AddDate(0, 0, -setting.ClearStrategy.TrashSaveDays)) {
+ if file.Name() != "" && file.ModTime().Before(time.Now().AddDate(0, 0, -setting.ClearStrategy.TrashSaveDays)) {
- has,err:=models.IsCloudbrainExistByJobName(file.Name())
- if err==nil && !has {
+ has, err := models.IsCloudbrainExistByJobName(file.Name())
+ if err == nil && !has {
dirPath := setting.CBCodePathPrefix + file.Name() + "/"
log.Info("clear job in minio trash:" + file.Name())
storage.Attachments.DeleteDir(dirPath)
@@ -90,7 +90,7 @@ func clearMinioHistoryTrashFile() {
}
}
-func clearLocalHistoryTrashFile() {
+func clearLocalHistoryTrashFile() {
files, err := ioutil.ReadDir(setting.JobPath)
processCount := 0
if err != nil {
@@ -99,11 +99,11 @@ func clearLocalHistoryTrashFile() {
SortModTimeAscend(files)
for _, file := range files {
//清理n天前的历史垃圾数据,清理job目录
- if file.Name()!="" && file.ModTime().Before(time.Now().AddDate(0, 0, -setting.ClearStrategy.TrashSaveDays)) {
- has,err:=models.IsCloudbrainExistByJobName(file.Name())
- if err==nil && !has{
+ if file.Name() != "" && file.ModTime().Before(time.Now().AddDate(0, 0, -setting.ClearStrategy.TrashSaveDays)) {
+ has, err := models.IsCloudbrainExistByJobName(file.Name())
+ if err == nil && !has {
os.RemoveAll(setting.JobPath + file.Name())
- log.Info("clear job in local trash:"+file.Name())
+ log.Info("clear job in local trash:" + file.Name())
processCount++
}
if processCount == setting.ClearStrategy.BatchSize {
@@ -127,7 +127,7 @@ func SortModTimeAscend(files []os.FileInfo) {
func DeleteCloudbrainOneJobStorage(jobName string) error {
- if jobName==""{
+ if jobName == "" {
return nil
}
//delete local
diff --git a/services/cloudbrain/cloudbrainTask/count.go b/services/cloudbrain/cloudbrainTask/count.go
index 985706911..172fa1502 100644
--- a/services/cloudbrain/cloudbrainTask/count.go
+++ b/services/cloudbrain/cloudbrainTask/count.go
@@ -34,7 +34,7 @@ var StatusInfoDict = map[string]StatusInfo{string(models.JobTypeDebug) + "-" + s
ComputeResource: models.GPUResource,
}, string(models.JobTypeBenchmark) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
- JobType: []models.JobType{models.JobTypeBenchmark, models.JobTypeBrainScore, models.JobTypeSnn4imagenet},
+ JobType: []models.JobType{models.JobTypeBenchmark, models.JobTypeBrainScore, models.JobTypeSnn4imagenet, models.JobTypeSnn4Ecoset},
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): {
@@ -62,11 +62,21 @@ var StatusInfoDict = map[string]StatusInfo{string(models.JobTypeDebug) + "-" + s
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: GrampusNotFinalStatuses,
ComputeResource: models.NPUResource,
+}, string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.GPUResource: {
+ CloudBrainTypes: []int{models.TypeC2Net},
+ JobType: []models.JobType{models.JobTypeDebug},
+ NotFinalStatuses: GrampusNotFinalStatuses,
+ ComputeResource: models.GPUResource,
+}, string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.NPUResource: {
+ CloudBrainTypes: []int{models.TypeC2Net},
+ JobType: []models.JobType{models.JobTypeDebug},
+ NotFinalStatuses: GrampusNotFinalStatuses,
+ ComputeResource: models.NPUResource,
}}
func GetNotFinalStatusTaskCount(uid int64, cloudbrainType int, jobType string, computeResource ...string) (int, error) {
jobNewType := jobType
- if jobType == string(models.JobTypeSnn4imagenet) || jobType == string(models.JobTypeBrainScore) {
+ if models.IsModelBenchMarkJobType(jobType) {
jobNewType = string(models.JobTypeBenchmark)
}
diff --git a/services/cloudbrain/cloudbrainTask/notebook.go b/services/cloudbrain/cloudbrainTask/notebook.go
index 6b2fcf707..3526f6549 100644
--- a/services/cloudbrain/cloudbrainTask/notebook.go
+++ b/services/cloudbrain/cloudbrainTask/notebook.go
@@ -4,6 +4,9 @@ import (
"fmt"
"net/http"
"path"
+ "strings"
+
+ "code.gitea.io/gitea/modules/notebook"
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/modelarts_cd"
@@ -29,6 +32,9 @@ import (
)
const NoteBookExtension = ".ipynb"
+const CPUType = 0
+const GPUType = 1
+const NPUType = 2
func FileNotebookCreate(ctx *context.Context, option api.CreateFileNotebookJobOption) {
@@ -66,7 +72,7 @@ func FileNotebookCreate(ctx *context.Context, option api.CreateFileNotebookJobOp
}
//create repo if not exist
- repo, err := models.GetRepositoryByName(ctx.User.ID, setting.FileNoteBook.ProjectName)
+ repo, _ := models.GetRepositoryByName(ctx.User.ID, setting.FileNoteBook.ProjectName)
if repo == nil {
repo, err = repo_service.CreateRepository(ctx.User, ctx.User, models.CreateRepoOptions{
Name: setting.FileNoteBook.ProjectName,
@@ -80,17 +86,220 @@ func FileNotebookCreate(ctx *context.Context, option api.CreateFileNotebookJobOp
AutoInit: true,
DefaultBranch: "master",
})
+ if err != nil {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.failed_to_create_notebook_repo", setting.FileNoteBook.ProjectName)))
+ return
+ }
+ } else {
+
+ noteBook, _ := models.GetWaitOrRunFileNotebookByRepo(repo.ID, getCloudbrainType(option.Type))
+ if noteBook != nil {
+
+ if isRepoConfilcts(option, noteBook) {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_repo_conflict")))
+ return
+ }
+
+ if isNotebookSpecMath(option, noteBook) {
+ if !isRepoMatch(option, noteBook) {
+ err = downloadCode(sourceRepo, getCodePath(noteBook.JobName, sourceRepo), option.BranchName)
+ if err != nil {
+ log.Error("download code failed", err)
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
+ return
+ }
+ }
+ if !isRepoFileMatch(option, noteBook) {
+ noteBook.BootFile += ";" + getBootFile(option.File, option.OwnerName, option.ProjectName)
+ noteBook.BranchName += ";" + option.BranchName
+ noteBook.Description += ";" + getDescription(option)
+
+ err := models.UpdateJob(noteBook)
+ if err != nil {
+ log.Error("GenerateNotebook2 failed, %v", err, ctx.Data["MsgID"])
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
+ return
+ }
+ }
+
+ ctx.JSON(http.StatusOK, models.BaseMessageApi{
+ Code: 0,
+ Message: noteBook.JobID,
+ })
+ return
+ }
+
+ }
+ }
+
+ if option.Type <= GPUType {
+ cloudBrainFileNoteBookCreate(ctx, option, repo, sourceRepo)
+ } else {
+ modelartsFileNoteBookCreate(ctx, option, repo, sourceRepo)
+ }
+
+}
+func FileNotebookStatus(ctx *context.Context, option api.CreateFileNotebookJobOption) {
+ if ctx.Written() {
+ return
+ }
+
+ if path.Ext(option.File) != NoteBookExtension {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_select_wrong")))
+ return
+ }
+
+ isNotebookFileExist, _ := isNoteBookFileExist(ctx, option)
+ if !isNotebookFileExist {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_not_exist")))
+ return
}
+
+ task, err := models.GetCloudbrainByJobID(option.JobId)
if err != nil {
- ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.failed_to_create_notebook_repo",setting.FileNoteBook.ProjectName)))
+ log.Error("job not found:"+option.JobId, err)
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Job id may not be right. can not find job."))
return
}
- if option.Type <= 1 {
- cloudBrainFileNoteBookCreate(ctx, option, repo, sourceRepo)
+ if task.BootFile == "" || task.Status != string(models.ModelArtsRunning) {
+ log.Warn("Boot file is empty or status is running. ")
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Boot file is empty or status is running."))
+ return
+ }
+ if !isRepoFileMatch(option, task) {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("can not math repo file."))
+ return
+ }
+ debugBaseUrl, token, err := getBaseUrlAndToken(task)
+ if err != nil {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
+ return
+ }
+
+ if uploadNotebookFileIfCannotBroswer(debugBaseUrl, getBootFile(option.File, option.OwnerName, option.ProjectName), task, token) {
+ ctx.JSON(http.StatusOK, models.BaseOKMessageApi)
} else {
- modelartsFileNoteBookCreate(ctx, option, repo, sourceRepo)
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("upload failed."))
+
+ }
+
+}
+
+func getBaseUrlAndToken(task *models.Cloudbrain) (string, string, error) {
+ var debugBaseUrl string
+ var token string
+ if task.Type == models.TypeCloudBrainOne {
+ debugBaseUrl = setting.DebugServerHost + "jpylab_" + task.JobID + "_" + task.SubTaskName + "/lab"
+
+ } else {
+ var result *models.GetNotebook2Result
+ var err error
+ if task.Type == models.TypeCloudBrainTwo {
+ result, err = modelarts.GetNotebook2(task.JobID)
+ } else if task.Type == models.TypeCDCenter {
+ result, err = modelarts_cd.GetNotebook(task.JobID)
+ }
+ if err != nil || result == nil || result.Status != string(models.ModelArtsRunning) || result.Url == "" {
+ log.Error("notebook job not found:"+task.JobID, err)
+ return "", "", fmt.Errorf("can not get job or job is invalid.")
+ }
+
+ debugBaseUrl = result.Url
+ token = result.Token
+
+ }
+ return debugBaseUrl, token, nil
+}
+
+func uploadNotebookFileIfCannotBroswer(debugBaseUrl string, bootFile string, task *models.Cloudbrain, token string) bool {
+ c := ¬ebook.NotebookContent{
+ Url: debugBaseUrl,
+ Path: bootFile,
+ PathType: "file",
+ Token: token,
+ }
+ if c.IsNotebookFileCanBrowser() {
+ return true
+ } else {
+ c.SetCookiesAndCsrf()
+ c.UploadNoteBookFile(task)
+ return c.IsNotebookFileCanBrowser()
+ }
+
+}
+
+func isNotebookSpecMath(option api.CreateFileNotebookJobOption, book *models.Cloudbrain) bool {
+ if option.Type == NPUType || option.Type == CPUType {
+ return true
+ }
+ spec, err := models.GetCloudbrainSpecByID(book.ID)
+ if err != nil {
+ log.Warn("can not get spec ", err)
+ return false
+ }
+ return spec.AccCardsNum > 0
+}
+
+func isRepoConfilcts(option api.CreateFileNotebookJobOption, book *models.Cloudbrain) bool {
+ bootFiles := strings.Split(book.BootFile, ";")
+ branches := strings.Split(book.BranchName, ";")
+
+ for i, bootFile := range bootFiles {
+ splits := strings.Split(bootFile, "/")
+ if len(splits) >= 3 {
+ if splits[0] == option.OwnerName && splits[1] == option.ProjectName && branches[i] != option.BranchName {
+ return true
+ }
+ }
+ }
+
+ return false
+
+}
+
+func isRepoMatch(option api.CreateFileNotebookJobOption, book *models.Cloudbrain) bool {
+ bootFiles := strings.Split(book.BootFile, ";")
+
+ for _, bootFile := range bootFiles {
+ splits := strings.Split(bootFile, "/")
+ if len(splits) >= 3 {
+ if splits[0] == option.OwnerName && splits[1] == option.ProjectName {
+ return true
+ }
+ }
+ }
+ return false
+
+}
+
+func isRepoFileMatch(option api.CreateFileNotebookJobOption, book *models.Cloudbrain) bool {
+ bootFiles := strings.Split(book.BootFile, ";")
+ branches := strings.Split(book.BranchName, ";")
+
+ for i, bootFile := range bootFiles {
+ if branches[i] == option.BranchName && getBootFile(option.File, option.OwnerName, option.ProjectName) == bootFile {
+ return true
+ }
}
+ return false
+
+}
+func UploadNotebookFiles(task *models.Cloudbrain) {
+ if task.Status == string(models.JobRunning) && task.BootFile != "" {
+
+ debugBaseUrl, token, err := getBaseUrlAndToken(task)
+ if err != nil {
+ log.Error("can not get base url:", err)
+ return
+ }
+ bootFiles := strings.Split(task.BootFile, ";")
+
+ for _, bootFile := range bootFiles {
+ uploadNotebookFileIfCannotBroswer(debugBaseUrl, bootFile, task, token)
+ }
+
+ }
}
func cloudBrainFileNoteBookCreate(ctx *context.Context, option api.CreateFileNotebookJobOption, repo *models.Repository, sourceRepo *models.Repository) {
@@ -131,17 +340,18 @@ func cloudBrainFileNoteBookCreate(ctx *context.Context, option api.CreateFileNot
} else {
if count >= 1 {
log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
- ctx.JSON(http.StatusOK,models.BaseMessageApi{
- Code: 2,
+ ctx.JSON(http.StatusOK, models.BaseMessageApi{
+ Code: 2,
Message: ctx.Tr("repo.cloudbrain.morethanonejob"),
})
return
}
}
- errStr := uploadCodeFile(sourceRepo, getCodePath(jobName), option.BranchName, option.File, jobName)
- if errStr != "" {
- ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_not_exist")))
+ err = downloadCode(sourceRepo, getCodePath(jobName, sourceRepo), option.BranchName)
+ if err != nil {
+ log.Error("download code failed", err)
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}
command := cloudbrain.GetCloudbrainDebugCommand()
@@ -185,7 +395,7 @@ func cloudBrainFileNoteBookCreate(ctx *context.Context, option api.CreateFileNot
JobType: jobType,
Description: getDescription(option),
BranchName: option.BranchName,
- BootFile: option.File,
+ BootFile: getBootFile(option.File, option.OwnerName, option.ProjectName),
Params: "{\"parameter\":[]}",
CommitID: "",
BenchmarkTypeID: 0,
@@ -206,8 +416,18 @@ func cloudBrainFileNoteBookCreate(ctx *context.Context, option api.CreateFileNot
}
-func getCodePath(jobName string) string {
- return setting.JobPath + jobName + cloudbrain.CodeMountPath
+func getCloudbrainType(optionType int) int {
+ if optionType < 1 {
+ return models.TypeCloudBrainOne
+ }
+ if setting.ModelartsCD.Enabled {
+ return models.TypeCDCenter
+ }
+ return models.TypeCloudBrainTwo
+}
+
+func getCodePath(jobName string, repo *models.Repository) string {
+ return setting.JobPath + jobName + cloudbrain.CodeMountPath + "/" + repo.OwnerName + "/" + repo.Name
}
func getDescription(option api.CreateFileNotebookJobOption) string {
@@ -237,8 +457,8 @@ func modelartsFileNoteBookCreate(ctx *context.Context, option api.CreateFileNote
} else {
if count >= 1 {
log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
- ctx.JSON(http.StatusOK,models.BaseMessageApi{
- Code: 2,
+ ctx.JSON(http.StatusOK, models.BaseMessageApi{
+ Code: 2,
Message: ctx.Tr("repo.cloudbrain.morethanonejob"),
})
return
@@ -260,7 +480,7 @@ func modelartsFileNoteBookCreate(ctx *context.Context, option api.CreateFileNote
}
}
- err = downloadCode(sourceRepo, getCodePath(jobName), option.BranchName)
+ err = downloadCode(sourceRepo, getCodePath(jobName, sourceRepo), option.BranchName)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
@@ -291,10 +511,22 @@ func modelartsFileNoteBookCreate(ctx *context.Context, option api.CreateFileNote
}
var jobId string
+ req := cloudbrain.GenerateModelArtsNotebookReq{
+ DisplayJobName: displayJobName,
+ JobName: jobName,
+ Description: getDescription(option),
+ ImageId: setting.FileNoteBook.ImageIdNPU,
+ Spec: spec,
+ BootFile: getBootFile(option.File, option.OwnerName, option.ProjectName),
+ AutoStopDurationMs: modelarts.AutoStopDurationMs / 4,
+ BranchName: option.BranchName,
+ }
+
if setting.ModelartsCD.Enabled {
- jobId, err = modelarts_cd.GenerateNotebook(ctx, displayJobName, jobName, "", getDescription(option), setting.FileNoteBook.ImageIdNPUCD, spec, option.File,modelarts.AutoStopDurationMs/4)
+ req.ImageId = setting.FileNoteBook.ImageIdNPUCD
+ jobId, err = modelarts_cd.GenerateNotebook(ctx, req)
} else {
- jobId, err = modelarts.GenerateNotebook2(ctx, displayJobName, jobName, "", getDescription(option), setting.FileNoteBook.ImageIdNPU, spec, option.File,modelarts.AutoStopDurationMs/4)
+ jobId, err = modelarts.GenerateNotebook2(ctx, req)
}
if err != nil {
@@ -336,17 +568,8 @@ func isNoteBookFileExist(ctx *context.Context, option api.CreateFileNotebookJobO
return true, nil
}
-func uploadCodeFile(repo *models.Repository, codePath string, branchName string, filePath string, jobName string) string {
- err := downloadCode(repo, codePath, branchName)
- if err != nil {
- return "cloudbrain.load_code_failed"
- }
-
- err = uploadOneFileToMinio(codePath, filePath, jobName, cloudbrain.CodeMountPath+"/")
- if err != nil {
- return "cloudbrain.load_code_failed"
- }
- return ""
+func getBootFile(filePath string, ownerName string, projectName string) string {
+ return ownerName + "/" + projectName + "/" + filePath
}
func fileExists(gitRepo *git.Repository, path string, branch string) (bool, error) {
diff --git a/services/cloudbrain/cloudbrainTask/sync_status.go b/services/cloudbrain/cloudbrainTask/sync_status.go
index 973b9bbc2..3bc09071c 100644
--- a/services/cloudbrain/cloudbrainTask/sync_status.go
+++ b/services/cloudbrain/cloudbrainTask/sync_status.go
@@ -3,9 +3,13 @@ package cloudbrainTask
import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
+ "code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/modelarts"
+ "code.gitea.io/gitea/modules/modelarts_cd"
"code.gitea.io/gitea/modules/notification"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
"net/http"
"strconv"
)
@@ -58,6 +62,55 @@ func SyncCloudBrainOneStatus(task *models.Cloudbrain) (*models.Cloudbrain, error
}
+func SyncGrampusNotebookStatus(job *models.Cloudbrain) (*models.Cloudbrain, error) {
+ result, err := grampus.GetNotebookJob(job.JobID)
+ if err != nil {
+
+ log.Error("GetJob(%s) failed:%v", job.JobName, err)
+
+ return job, err
+ }
+
+ if job.StartTime == 0 && result.JobInfo.StartedAt > 0 {
+ job.StartTime = timeutil.TimeStamp(result.JobInfo.StartedAt)
+ }
+ oldStatus := job.Status
+ job.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
+ job.Duration = result.JobInfo.RunSec
+ job.TrainJobDuration = models.ConvertDurationToStr(job.Duration)
+
+ if job.EndTime == 0 && models.IsTrainJobTerminal(job.Status) && job.StartTime > 0 {
+ job.EndTime = job.StartTime.Add(job.Duration)
+ }
+ job.CorrectCreateUnix()
+
+ if len(job.AiCenter) == 0 {
+ if len(result.JobInfo.Tasks) > 0 {
+ if len(result.JobInfo.Tasks[0].CenterID) > 0 && len(result.JobInfo.Tasks[0].CenterName) > 0 {
+ job.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
+ }
+ }
+ }
+
+ if job.Status != models.GrampusStatusWaiting {
+ if oldStatus != job.Status {
+ notification.NotifyChangeCloudbrainStatus(job, oldStatus)
+ }
+ if job.ComputeResource == models.NPUResource {
+ job.TrainUrl = result.JobInfo.Tasks[0].CodeUrl
+ job.DataUrl = result.JobInfo.Tasks[0].DataUrl
+ }
+ err = models.UpdateJob(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ return nil, err
+ }
+ }
+
+ return job, nil
+
+}
+
func isNoteBookReady(task *models.Cloudbrain) bool {
if task.JobType != string(models.JobTypeDebug) {
return true
@@ -90,3 +143,28 @@ func isNoteBookReady(task *models.Cloudbrain) bool {
return false
}
+
+func StopDebugJob(task *models.Cloudbrain) error {
+ param := models.NotebookAction{
+ Action: models.ActionStop,
+ }
+ var err error = nil
+
+ if task.JobType == string(models.JobTypeDebug) {
+ if task.Type == models.TypeCloudBrainOne {
+ return cloudbrain.StopJob(task.JobID)
+ } else if task.Type == models.TypeCloudBrainTwo {
+ _, err = modelarts.ManageNotebook2(task.JobID, param)
+
+ } else if task.Type == models.TypeCDCenter {
+ _, err = modelarts_cd.ManageNotebook(task.JobID, param)
+
+ } else if task.Type == models.TypeC2Net {
+ _, err = grampus.StopJob(task.JobID, task.JobType)
+
+ }
+
+ }
+ return err
+
+}
diff --git a/services/repository/contributor.go b/services/repository/contributor.go
new file mode 100644
index 000000000..9a86b91dc
--- /dev/null
+++ b/services/repository/contributor.go
@@ -0,0 +1,88 @@
+package repository
+
+import (
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/redis/redis_client"
+ "code.gitea.io/gitea/modules/redis/redis_key"
+ "encoding/json"
+ "github.com/patrickmn/go-cache"
+ "time"
+)
+
+var repoContributorCache = cache.New(5*time.Minute, 1*time.Minute)
+
+type ContributorCacheVal struct {
+ Contributors []*models.ContributorInfo
+ Total int
+}
+
+func GetRepoTopNContributors(repo *models.Repository, N int) ([]*models.ContributorInfo, int) {
+ val, _ := redis_client.Get(redis_key.RepoTopNContributors(repo.ID, N))
+ if val != "" {
+ log.Debug("Get RepoTopNContributors from redis,repo.ID = %d value = %v", repo.ID, val)
+ temp := &ContributorCacheVal{}
+ json.Unmarshal([]byte(val), temp)
+ return temp.Contributors, temp.Total
+ }
+
+ contributorInfos, total := getRepoTopNContributorsFromDisk(repo, N)
+ log.Debug("Get RepoTopNContributors from disk,repo.ID = %d ", repo.ID)
+ jsonVal, err := json.Marshal(&ContributorCacheVal{Contributors: contributorInfos, Total: total})
+ if err == nil {
+ redis_client.Setex(redis_key.RepoTopNContributors(repo.ID, N), string(jsonVal), 2*time.Minute)
+ }
+ return contributorInfos, total
+}
+
+func getRepoTopNContributorsFromDisk(repo *models.Repository, N int) ([]*models.ContributorInfo, int) {
+ contributorInfos := make([]*models.ContributorInfo, 0)
+
+ branchName := GetDefaultBranchName(repo)
+ if branchName == "" {
+ return contributorInfos, 0
+ }
+
+ contributors, err := git.GetContributors(repo.RepoPath(), branchName)
+ if err == nil && contributors != nil {
+ contributorInfoHash := make(map[string]*models.ContributorInfo)
+ for _, c := range contributors {
+ if len(contributorInfos) >= N {
+ break
+ }
+ if c.Email == "" {
+ continue
+ }
+ // get user info from committer email
+ user, err := models.GetUserByActivateEmail(c.Email)
+ if err == nil {
+ // committer is system user, get info through user's primary email
+ if existedContributorInfo, ok := contributorInfoHash[user.Email]; ok {
+ // existed: same primary email, different committer name
+ existedContributorInfo.CommitCnt += c.CommitCnt
+ } else {
+ // new committer info
+ var newContributor = &models.ContributorInfo{
+ user.RelAvatarLink(), user.Name, user.Email, c.CommitCnt,
+ }
+ contributorInfos = append(contributorInfos, newContributor)
+ contributorInfoHash[user.Email] = newContributor
+ }
+ } else {
+ // committer is not system user
+ if existedContributorInfo, ok := contributorInfoHash[c.Email]; ok {
+ // existed: same committer email, different committer name
+ existedContributorInfo.CommitCnt += c.CommitCnt
+ } else {
+ var newContributor = &models.ContributorInfo{
+ "", "", c.Email, c.CommitCnt,
+ }
+ contributorInfos = append(contributorInfos, newContributor)
+ contributorInfoHash[c.Email] = newContributor
+ }
+ }
+ }
+ }
+ return contributorInfos, len(contributors)
+}
diff --git a/services/repository/repository.go b/services/repository/repository.go
index db25010ea..a5c7c2fc4 100644
--- a/services/repository/repository.go
+++ b/services/repository/repository.go
@@ -5,18 +5,19 @@
package repository
import (
- "fmt"
- "io/ioutil"
- "net/http"
- "os"
- "strings"
-
"code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/notification"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
pull_service "code.gitea.io/gitea/services/pull"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "os"
+ "strings"
+ "xorm.io/xorm"
)
const SHELL_FLAG_ON = 1
@@ -328,3 +329,47 @@ func IsUploadFileInvalidErr(err error) bool {
_, ok := err.(UploadFileInvalidErr)
return ok
}
+
+func IncreaseRepoDatasetNum(datasetID int64, engines ...*xorm.Engine) error {
+ dataset, err := models.GetDatasetByID(datasetID)
+ if err != nil {
+ return err
+ }
+ return models.OperateRepoDatasetNum(dataset.RepoID, 1, engines...)
+}
+
+func IncreaseRepoModelNum(repoId int64, engines ...*xorm.Engine) error {
+ return models.OperateRepoModelNum(repoId, 1, engines...)
+}
+
+func ResetRepoModelNum(repoId int64) error {
+ return models.ResetRepoModelNum(repoId)
+}
+
+func DecreaseRepoDatasetNum(datasetID int64, engines ...*xorm.Engine) error {
+ dataset, err := models.GetDatasetByID(datasetID)
+ if err != nil {
+ return err
+ }
+ return models.OperateRepoDatasetNum(dataset.RepoID, -1, engines...)
+}
+
+func DecreaseRepoModelNum(repoId int64, engines ...*xorm.Engine) error {
+ return models.OperateRepoModelNum(repoId, -1, engines...)
+}
+
+func GetDefaultBranchName(repo *models.Repository) string {
+ gitRepo, err := git.OpenRepository(repo.RepoPath())
+ if err != nil {
+ return ""
+ }
+ defer gitRepo.Close()
+ if len(repo.DefaultBranch) > 0 && gitRepo.IsBranchExist(repo.DefaultBranch) {
+ return repo.DefaultBranch
+ }
+ brs, _, err := gitRepo.GetBranches(0, 0)
+ if len(brs) > 0 {
+ return brs[0]
+ }
+ return ""
+}
diff --git a/services/repository/square.go b/services/repository/square.go
new file mode 100644
index 000000000..d68e5b189
--- /dev/null
+++ b/services/repository/square.go
@@ -0,0 +1,315 @@
+package repository
+
+import (
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "encoding/json"
+ "github.com/patrickmn/go-cache"
+ "time"
+)
+
+var repoSquareCache = cache.New(2*time.Minute, 1*time.Minute)
+
+const (
+ RREFERED_CACHE = "PreferredRepos"
+ REPO_BANNER_CACHE = "RepoBanner"
+ TOPICS_CACHE = "RepoTopics"
+ RECOMMEND_CACHE = "RecommendRepos"
+)
+
+func GetBanners() []map[string]string {
+ v, success := repoSquareCache.Get(REPO_BANNER_CACHE)
+ if success {
+ log.Debug("GetBanners from cache,value = %v", v)
+ if v == nil {
+ return nil
+ }
+ r := v.([]map[string]string)
+ return r
+ }
+ repoMap := getMapContent("repos/square_banner")
+ repoSquareCache.Set(REPO_BANNER_CACHE, repoMap, 1*time.Minute)
+ return repoMap
+}
+
+func GetTopics() []string {
+ v, success := repoSquareCache.Get(TOPICS_CACHE)
+ if success {
+ log.Debug("GetTopics from cache,value = %v", v)
+ if v == nil {
+ return nil
+ }
+ r := v.([]string)
+ return r
+ }
+ topics := getArrayContent("repos/recommend_topics")
+ repoSquareCache.Set(TOPICS_CACHE, topics, 1*time.Minute)
+ return topics
+}
+
+func getMapContent(fileName string) []map[string]string {
+ url := setting.RecommentRepoAddr + fileName
+ result, err := RecommendContentFromPromote(url)
+ remap := make([]map[string]string, 0)
+ if err == nil {
+ json.Unmarshal([]byte(result), &remap)
+ }
+ return remap
+}
+
+func getArrayContent(fileName string) []string {
+ url := setting.RecommentRepoAddr + fileName
+ result, err := RecommendContentFromPromote(url)
+ r := make([]string, 0)
+ if err == nil {
+ json.Unmarshal([]byte(result), &r)
+ }
+ return r
+}
+
+func GetRecommendRepos() []map[string]interface{} {
+ v, success := repoSquareCache.Get(RECOMMEND_CACHE)
+ if success {
+ log.Debug("GetRecommendRepos from cache,value = %v", v)
+ if v == nil {
+ return nil
+ }
+ r := v.([]map[string]interface{})
+ return r
+ }
+ repoMap := getMapContent("home/projects")
+ r, _ := GetRecommendRepoFromPromote(repoMap)
+ repoSquareCache.Set(RECOMMEND_CACHE, r, 1*time.Minute)
+ return r
+}
+
+func GetPreferredRepos() ([]*models.Repository4Card, error) {
+ v, success := repoSquareCache.Get(RREFERED_CACHE)
+ if success {
+ log.Debug("GetPreferredRepos from cache,value = %v", v)
+ if v == nil {
+ return nil, nil
+ }
+ r := v.([]*models.Repository4Card)
+ return r, nil
+ }
+
+ repos, err := models.GetSelectedRepos(models.FindSelectedReposOpts{
+ ListOptions: models.ListOptions{
+ PageSize: 10,
+ Page: 1,
+ },
+ OnlyPublic: true,
+ })
+ if err != nil {
+ return nil, err
+ }
+ result := make([]*models.Repository4Card, len(repos))
+ for i, r := range repos {
+ result[i] = r.ToCardFormat()
+ }
+
+ repoSquareCache.Set(RREFERED_CACHE, result, 1*time.Minute)
+ return result, nil
+}
+
+func GetIncubationRepos() ([]*models.Repository4Card, error) {
+ org, err := models.GetOrgByName(setting.IncubationSourceOrgName)
+ if models.IsErrOrgNotExist(err) {
+ return make([]*models.Repository4Card, 0), nil
+ }
+ if err != nil {
+ return nil, err
+ }
+ repos, err := models.GetSelectedRepos(models.FindSelectedReposOpts{
+ ListOptions: models.ListOptions{
+ PageSize: 10,
+ Page: 1,
+ },
+ OrgId: org.ID,
+ OnlyPublic: true,
+ })
+ if err != nil {
+ return nil, err
+ }
+ result := make([]*models.Repository4Card, len(repos))
+ for i, r := range repos {
+ result[i] = r.ToCardFormat()
+ }
+ return result, nil
+}
+
+func GetHotPaperRepos() ([]*models.Repository4Card, error) {
+ rlist, _, err := models.SearchRepository(&models.SearchRepoOptions{
+ ListOptions: models.ListOptions{
+ Page: 1,
+ PageSize: 10,
+ },
+ OrderBy: models.SearchOrderByLastMonthVisitsReverse + "," + models.SearchOrderByRecentUpdated,
+ TopicOnly: true,
+ TopicName: setting.PaperRepoTopicName,
+ AllPublic: true,
+ })
+ if err != nil {
+ return nil, err
+ }
+ result := make([]*models.Repository4Card, len(rlist))
+ for i, r := range rlist {
+ result[i] = r.ToCardFormat()
+ }
+ return result, nil
+}
+
+type FindReposOptions struct {
+ models.ListOptions
+ Actor *models.User
+ Sort string
+ Keyword string
+ Topic string
+ Private bool
+ OwnerID int64
+}
+
+func FindRepos(opts FindReposOptions) (*models.FindReposResponse, error) {
+
+ var (
+ repos []*models.Repository
+ count int64
+ err error
+ orderBy models.SearchOrderBy
+ )
+
+ switch opts.Sort {
+ //1. Most popular: sorted by last-month visit count desc; tie-break: last-month visits > recently updated > project name asc
+ case "mostpopular":
+ orderBy = models.SearchOrderByLastMonthVisitsReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
+ //2. Most active: sorted by commit growth (commits in the last 4 months) desc; tie-break: commit growth > recently updated > project name asc.
+ case "mostactive":
+ orderBy = models.SearchOrderByLastFourMonthCommitsReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
+ //3. Recently updated: sorted by recently updated > project name asc.
+ case "recentupdate":
+ orderBy = models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
+ //4. Newest: sorted by project creation time, newest first; newest > project name asc.
+ case "newest":
+ orderBy = models.SearchOrderByNewest + "," + models.SearchOrderByAlphabetically
+ //5. Most stars: sorted by star count desc; stars > recently updated > project name asc.
+ case "moststars":
+ orderBy = models.SearchOrderByStarsReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
+ //6. Most forks: sorted by fork count desc; forks > recently updated > project name asc.
+ case "mostforks":
+ orderBy = models.SearchOrderByForksReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
+ //7. Most datasets: sorted by number of dataset files in the project desc; dataset files > recently updated > project name asc.
+ case "mostdatasets":
+ orderBy = models.SearchOrderByDatasetCntReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
+ //8. Most AI tasks: sorted by number of AI tasks in the project desc; AI tasks > recently updated > project name asc.
+ case "mostaitasks":
+ orderBy = models.SearchOrderByAiTaskCntReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
+ //9. Most models: sorted by number of models in the project desc (models of size 0 are not counted); models > recently updated > project name asc.
+ case "mostmodels":
+ orderBy = models.SearchOrderByModelCntReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
+
+ default:
+ orderBy = models.SearchOrderByLastMonthVisitsReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
+ }
+
+ repos, count, err = models.SearchRepository(&models.SearchRepoOptions{
+ ListOptions: opts.ListOptions,
+ Actor: opts.Actor,
+ OrderBy: orderBy,
+ Private: opts.Private,
+ Keyword: opts.Keyword,
+ OwnerID: opts.OwnerID,
+ AllPublic: true,
+ AllLimited: true,
+ TopicName: opts.Topic,
+ IncludeDescription: setting.UI.SearchRepoDescription,
+ })
+ if err != nil {
+ log.Error("FindRepos error when SearchRepository.%v", err)
+ return nil, err
+ }
+ result := make([]*models.Repository4Card, len(repos))
+ for i, r := range repos {
+ t := r.ToCardFormat()
+ contributors, _ := GetRepoTopNContributors(r, 6)
+ t.Contributors = contributors
+ result[i] = t
+ }
+
+ return &models.FindReposResponse{
+ Repos: result,
+ Total: count,
+ Page: opts.Page,
+ PageSize: opts.PageSize,
+ }, nil
+}
+
+type ActiveUser struct {
+ User *models.User4Front
+ Followed bool
+ ShowButton bool
+}
+
+func GetActiveUser4Square(currentUserId int64) ([]*ActiveUser, error) {
+ result := make([]*ActiveUser, 0)
+ userIds, err := models.QueryLast30DaysHighestIndexUsers(5)
+ if err != nil {
+ log.Error("ActiveUser err. %v", err)
+ return result, err
+ }
+ if len(userIds) == 0 {
+ return result, nil
+ }
+
+ users, err := models.GetUsersByIDs(userIds)
+ if err != nil {
+ return result, nil
+ }
+ usersMap := make(map[int64]*models.User)
+ for _, v := range users {
+ usersMap[v.ID] = v
+ }
+
+ for i := 0; i < len(userIds); i++ {
+ userId := userIds[i]
+ user := usersMap[userId]
+ if user == nil {
+ continue
+ }
+ isFollowed := false
+ if currentUserId != 0 {
+ isFollowed = models.IsFollowing(currentUserId, userId)
+ }
+ a := &ActiveUser{
+ Followed: isFollowed,
+ User: user.ToFrontFormat(),
+ ShowButton: currentUserId != userId,
+ }
+ result = append(result, a)
+ }
+ return result, nil
+}
+
+func GetActiveOrgs() ([]*models.User4Front, error) {
+ orgScores, err := models.FindTopNOpenIOrgs(5)
+ if err != nil {
+ return nil, err
+ }
+ orgs := make([]*models.User4Front, len(orgScores))
+ for i, v := range orgScores {
+ orgs[i] = v.ToFrontFormat()
+ }
+ return orgs, nil
+}
+
+func RefreshRepoStatData() {
+ repos, err := models.GetAllRepositories()
+ if err != nil {
+ log.Error("RefreshRepoStatData GetAllRepositories failed: %v", err.Error())
+ return
+ }
+ for _, repo := range repos {
+ models.SyncStatDataToRepo(repo)
+ }
+}
diff --git a/services/socketwrap/clientManager.go b/services/socketwrap/clientManager.go
index 7470b1198..7bac92ab8 100755
--- a/services/socketwrap/clientManager.go
+++ b/services/socketwrap/clientManager.go
@@ -10,7 +10,7 @@ import (
"github.com/elliotchance/orderedmap"
)
-var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35}
+var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35, 39, 40}
type ClientsManager struct {
Clients *orderedmap.OrderedMap
diff --git a/templates/admin/cloudbrain/list.tmpl b/templates/admin/cloudbrain/list.tmpl
index 94f80c0fa..f6d20216a 100755
--- a/templates/admin/cloudbrain/list.tmpl
+++ b/templates/admin/cloudbrain/list.tmpl
@@ -98,7 +98,7 @@
- {{if .Cluster}}{{.Cluster}}{{else}}--{{end}}
+ {{if .Cluster}}{{.Cluster}}{{else}}--{{end}}
- {{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}}
+ {{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}}
diff --git a/templates/base/head_navbar.tmpl b/templates/base/head_navbar.tmpl
index 5fb9c09d3..de55e6452 100755
--- a/templates/base/head_navbar.tmpl
+++ b/templates/base/head_navbar.tmpl
@@ -35,7 +35,7 @@
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -48,7 +48,7 @@
{{.i18n.Tr "explore"}}
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -89,7 +89,7 @@
{{.i18n.Tr "explore"}}
{{else if .IsLandingPageExplore}}
- {{.i18n.Tr "home"}}
+ {{.i18n.Tr "home"}}
{{else if .IsLandingPageOrganizations}}
{{.i18n.Tr "home"}}
{{end}}
diff --git a/templates/base/head_navbar_fluid.tmpl b/templates/base/head_navbar_fluid.tmpl
index 63291d6fb..8a4682e9d 100644
--- a/templates/base/head_navbar_fluid.tmpl
+++ b/templates/base/head_navbar_fluid.tmpl
@@ -32,7 +32,7 @@
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -45,7 +45,7 @@
{{.i18n.Tr "explore"}}
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -84,7 +84,7 @@
{{.i18n.Tr "explore"}}
{{else if .IsLandingPageExplore}}
- {{.i18n.Tr "home"}}
+ {{.i18n.Tr "home"}}
{{else if .IsLandingPageOrganizations}}
{{.i18n.Tr "home"}}
{{end}}
diff --git a/templates/base/head_navbar_home.tmpl b/templates/base/head_navbar_home.tmpl
index 334ef5a33..f6741b7c8 100644
--- a/templates/base/head_navbar_home.tmpl
+++ b/templates/base/head_navbar_home.tmpl
@@ -24,7 +24,7 @@
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -37,7 +37,7 @@
{{.i18n.Tr "explore"}}
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -77,7 +77,7 @@
{{.i18n.Tr "explore"}}
{{else if .IsLandingPageExplore}}
- {{.i18n.Tr "home"}}
+ {{.i18n.Tr "home"}}
{{else if .IsLandingPageOrganizations}}
{{.i18n.Tr "home"}}
{{end}}
diff --git a/templates/base/head_navbar_pro.tmpl b/templates/base/head_navbar_pro.tmpl
index 55a090128..9e1c1ebf7 100644
--- a/templates/base/head_navbar_pro.tmpl
+++ b/templates/base/head_navbar_pro.tmpl
@@ -34,7 +34,7 @@
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -47,7 +47,7 @@
{{.i18n.Tr "explore"}}
{{else if .IsLandingPageExplore}}
-
{{.i18n.Tr "home"}}
+
{{.i18n.Tr "home"}}
{{else if .IsLandingPageOrganizations}}
{{.i18n.Tr "home"}}
{{end}}
diff --git a/templates/custom/select_model.tmpl b/templates/custom/select_model.tmpl
index 81332b873..d5e0a998b 100644
--- a/templates/custom/select_model.tmpl
+++ b/templates/custom/select_model.tmpl
@@ -1,6 +1,6 @@
-
-
+
+
diff --git a/templates/custom/task_wait_count.tmpl b/templates/custom/task_wait_count.tmpl
index fb8ee71fb..997a0f1c3 100644
--- a/templates/custom/task_wait_count.tmpl
+++ b/templates/custom/task_wait_count.tmpl
@@ -1,7 +1,7 @@
- {{.i18n.Tr "repo.wait_count_start"}} {{.WaitCount}} {{.i18n.Tr "repo.wait_count_end"}}
+ {{.i18n.Tr "repo.wait_count_start"}} {{if not .WaitCount}}1{{else}}{{addOne .WaitCount}}{{end}} {{.i18n.Tr "repo.wait_count_end"}}
+
+
+{{template "base/footer" .}}
diff --git a/templates/explore/repos/square.tmpl b/templates/explore/repos/square.tmpl
new file mode 100644
index 000000000..da3698f9f
--- /dev/null
+++ b/templates/explore/repos/square.tmpl
@@ -0,0 +1,16 @@
+{{template "base/head_home" .}}
+{{ if .SquareBanners }}
+ {{ range .SquareBanners }}
+

+ {{ end }}
+{{ end }}
+
+
+
+
+{{template "base/footer" .}}
diff --git a/templates/repo/cloudbrain/benchmark/new.tmpl b/templates/repo/cloudbrain/benchmark/new.tmpl
index d1e42b54a..db7d52ba5 100755
--- a/templates/repo/cloudbrain/benchmark/new.tmpl
+++ b/templates/repo/cloudbrain/benchmark/new.tmpl
@@ -1,37 +1,9 @@
{{template "base/head" .}}
-
{{template "custom/global_mask" .}}
{{template "repo/header" .}}
+
{{if eq .NotStopTaskCount 0}}
{{template "base/alert" .}}
{{end}}
@@ -78,21 +50,12 @@
onkeydown="this.value=this.value.substring(0, 255)"
onkeyup="this.value=this.value.substring(0, 255)">{{.description}}
-
-
-
+
-
+ {{template "custom/select_model" .}}
- {{template "custom/select_dataset_train" .}}
-
+
@@ -199,20 +150,11 @@
onkeyup="this.value=this.value.substring(0, 255)">{{.description}}
-
-
+
-
-
-
{{.i18n.Tr "cloudbrain.view_sample"}}
-
-
-
-
- {{else if eq .benchmarkMode "aisafety"}}
-
{{template "base/footer" .}}
+
+
\ No newline at end of file
diff --git a/templates/repo/cloudbrain/trainjob/new.tmpl b/templates/repo/cloudbrain/trainjob/new.tmpl
index 8c6a6b3b2..804f2bde5 100755
--- a/templates/repo/cloudbrain/trainjob/new.tmpl
+++ b/templates/repo/cloudbrain/trainjob/new.tmpl
@@ -1,11 +1,5 @@
{{template "base/head" .}}
{{template "custom/global_mask" .}}
{{template "repo/header" .}}
-
+
{{if eq .NotStopTaskCount 0}}
{{template "base/alert" .}}
{{end}}
@@ -85,7 +71,7 @@
-
+