diff --git a/models/ai_model_manage.go b/models/ai_model_manage.go
new file mode 100644
index 000000000..af96444ac
--- /dev/null
+++ b/models/ai_model_manage.go
@@ -0,0 +1,203 @@
+package models
+
+import (
+	"fmt"
+
+	"code.gitea.io/gitea/modules/log"
+	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/timeutil"
+	"xorm.io/builder"
+	"xorm.io/xorm"
+)
+
+type AiModelManage struct {
+	ID                string             `xorm:"pk"`
+	Name              string             `xorm:"NOT NULL"`
+	Version           string             `xorm:"NOT NULL"`
+	VersionCount      int                `xorm:"NOT NULL DEFAULT 0"`
+	New               int                `xorm:"NOT NULL"`
+	Type              int                `xorm:"NOT NULL"`
+	Size              int64              `xorm:"NOT NULL"`
+	Description       string             `xorm:"varchar(2000)"`
+	Label             string             `xorm:"varchar(1000)"`
+	Path              string             `xorm:"varchar(400) NOT NULL"`
+	DownloadCount     int                `xorm:"NOT NULL DEFAULT 0"`
+	Engine            int64              `xorm:"NOT NULL DEFAULT 0"`
+	Status            int                `xorm:"NOT NULL DEFAULT 0"`
+	Accuracy          string             `xorm:"varchar(1000)"`
+	AttachmentId      string             `xorm:"NULL"`
+	RepoId            int64              `xorm:"NULL"`
+	CodeBranch        string             `xorm:"varchar(400) NULL"`
+	CodeCommitID      string             `xorm:"NULL"`
+	UserId            int64              `xorm:"NOT NULL"`
+	UserName          string             `xorm:"NULL"`
+	UserRelAvatarLink string             `xorm:"NULL"`
+	TrainTaskInfo     string             `xorm:"text NULL"`
+	CreatedUnix       timeutil.TimeStamp `xorm:"created"`
+	UpdatedUnix       timeutil.TimeStamp `xorm:"INDEX updated"`
+	IsCanOper         bool
+}
+
+type AiModelQueryOptions struct {
+	ListOptions
+	RepoID   int64 // include all repos if empty
+	UserID   int64
+	ModelID  string
+	SortType string
+	New      int
+	// JobStatus CloudbrainStatus
+	Type int
+}
+
+func SaveModelToDb(model *AiModelManage) error {
+	sess := x.NewSession()
+	defer sess.Close()
+
+	re, err := sess.Insert(model)
+	if err != nil {
+		log.Info("insert error." + err.Error())
+		return err
+	}
+	log.Info("success to save db.re=" + fmt.Sprint((re)))
+	return nil
+}
+
+func QueryModelById(id string) (*AiModelManage, error) {
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("*").Table("ai_model_manage").
+		Where("id = ?", id)
+	aiModelManageList := make([]*AiModelManage, 0)
+	err := sess.Find(&aiModelManageList)
+	if err == nil {
+		if len(aiModelManageList) == 1 {
+			return aiModelManageList[0], nil
+		}
+	}
+	return nil, err
+}
+
+func DeleteModelById(id string) error {
+	sess := x.NewSession()
+	defer sess.Close()
+
+	re, err := sess.Delete(&AiModelManage{
+		ID: id,
+	})
+	if err != nil {
+		return err
+	}
+	log.Info("success to delete from db.re=" + fmt.Sprint((re)))
+	return nil
+
+}
+
+func ModifyModelDescription(id string, description string) error {
+	var sess *xorm.Session
+	sess = x.ID(id)
+	defer sess.Close()
+	re, err := sess.Cols("description").Update(&AiModelManage{
+		Description: description,
+	})
+	if err != nil {
+		return err
+	}
+	log.Info("success to update description from db.re=" + fmt.Sprint((re)))
+	return nil
+}
+
+func ModifyModelNewProperty(id string, new int, versioncount int) error {
+	var sess *xorm.Session
+	sess = x.ID(id)
+	defer sess.Close()
+	re, err := sess.Cols("new", "version_count").Update(&AiModelManage{
+		New:          new,
+		VersionCount: versioncount,
+	})
+	if err != nil {
+		return err
+	}
+	log.Info("success to update new property from db.re=" + fmt.Sprint((re)))
+	return nil
+}
+
+func ModifyModelDownloadCount(id string) error {
+	sess := x.NewSession()
+	defer sess.Close()
+	if _, err := sess.Exec("UPDATE `ai_model_manage` SET download_count = download_count + 1 WHERE id = ?", id); err != nil {
+		return err
+	}
+
+	return nil
+}
+
+func QueryModelByName(name string, repoId int64) []*AiModelManage {
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("*").Table("ai_model_manage").
+		Where("name = ? and repo_id = ?", name, repoId).OrderBy("version desc")
+	aiModelManageList := make([]*AiModelManage, 0)
+	sess.Find(&aiModelManageList)
+	return aiModelManageList
+}
+
+func QueryModel(opts *AiModelQueryOptions) ([]*AiModelManage, int64, error) {
+	sess := x.NewSession()
+	defer sess.Close()
+
+	var cond = builder.NewCond()
+	if opts.RepoID > 0 {
+		cond = cond.And(
+			builder.Eq{"ai_model_manage.repo_id": opts.RepoID},
+		)
+	}
+
+	if opts.UserID > 0 {
+		cond = cond.And(
+			builder.Eq{"ai_model_manage.user_id": opts.UserID},
+		)
+	}
+
+	if opts.New >= 0 {
+		cond = cond.And(
+			builder.Eq{"ai_model_manage.new": opts.New},
+		)
+	}
+
+	if len(opts.ModelID) > 0 {
+		cond = cond.And(
+			builder.Eq{"ai_model_manage.id": opts.ModelID},
+		)
+	}
+
+	if (opts.Type) >= 0 {
+		cond = cond.And(
+			builder.Eq{"ai_model_manage.type": opts.Type},
+		)
+	}
+
+	count, err := sess.Where(cond).Count(new(AiModelManage))
+	if err != nil {
+		return nil, 0, fmt.Errorf("Count: %v", err)
+	}
+
+	if opts.Page >= 0 && opts.PageSize > 0 {
+		var start int
+		if opts.Page == 0 {
+			start = 0
+		} else {
+			start = (opts.Page - 1) * opts.PageSize
+		}
+		sess.Limit(opts.PageSize, start)
+	}
+
+	sess.OrderBy("ai_model_manage.created_unix DESC")
+	aiModelManages := make([]*AiModelManage, 0, setting.UI.IssuePagingNum)
+	if err := sess.Table("ai_model_manage").Where(cond).
+		Find(&aiModelManages); err != nil {
+		return nil, 0, fmt.Errorf("Find: %v", err)
+	}
+	sess.Close()
+
+	return aiModelManages, count, nil
+}
diff --git a/models/cloudbrain.go b/models/cloudbrain.go
index 8ecacb4fe..6e8ee1505 100755
--- a/models/cloudbrain.go
+++ b/models/cloudbrain.go
@@ -929,6 +929,48 @@ func Cloudbrains(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) {
 	return cloudbrains, count, nil
 }
 
+func QueryModelTrainJobVersionList(jobId string) ([]*CloudbrainInfo, int, error) {
+	sess := x.NewSession()
+	defer sess.Close()
+
+	var cond = builder.NewCond()
+
+	cond = cond.And(
+		builder.Eq{"cloudbrain.job_id": jobId},
+	)
+	cond = cond.And(
+		builder.Eq{"cloudbrain.Status": "COMPLETED"},
+	)
+
+	sess.OrderBy("cloudbrain.created_unix DESC")
+	cloudbrains := make([]*CloudbrainInfo, 0)
+	if err := sess.Table(&Cloudbrain{}).Where(cond).
+		Find(&cloudbrains); err != nil {
+		return nil, 0, fmt.Errorf("Find: %v", err)
+	}
+
+	return cloudbrains, int(len(cloudbrains)), nil
+}
+
+func QueryModelTrainJobList(repoId int64) ([]*CloudbrainInfo, int, error) {
+	sess := x.NewSession()
+	defer sess.Close()
+	var cond = builder.NewCond()
+	cond = cond.And(
+		builder.Eq{"repo_id": repoId},
+	)
+	cond = cond.And(
+		builder.Eq{"Status": "COMPLETED"},
+	)
+	sess.OrderBy("job_id DESC")
+	cloudbrains := make([]*CloudbrainInfo, 0)
+	if err := sess.Distinct("job_id,job_name").Table(&Cloudbrain{}).Where(cond).
+ Find(&cloudbrains); err != nil { + return nil, 0, fmt.Errorf("Find: %v", err) + } + return cloudbrains, int(len(cloudbrains)), nil +} + func CloudbrainsVersionList(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int, error) { sess := x.NewSession() defer sess.Close() diff --git a/models/models.go b/models/models.go index 809f9b0ca..e8a71bbd8 100755 --- a/models/models.go +++ b/models/models.go @@ -133,6 +133,7 @@ func init() { new(FileChunk), new(BlockChain), new(RecommendOrg), + new(AiModelManage), ) tablesStatistic = append(tablesStatistic, diff --git a/models/repo.go b/models/repo.go index 6d73aa28a..7948346c5 100755 --- a/models/repo.go +++ b/models/repo.go @@ -1114,6 +1114,12 @@ func CreateRepository(ctx DBContext, doer, u *User, repo *Repository) (err error Type: tp, Config: &BlockChainConfig{EnableBlockChain: true}, }) + } else if tp == UnitTypeModelManage { + units = append(units, RepoUnit{ + RepoID: repo.ID, + Type: tp, + Config: &ModelManageConfig{EnableModelManage: true}, + }) } else { units = append(units, RepoUnit{ RepoID: repo.ID, diff --git a/models/repo_unit.go b/models/repo_unit.go index 518c4b979..5f118029f 100755 --- a/models/repo_unit.go +++ b/models/repo_unit.go @@ -131,6 +131,20 @@ type CloudBrainConfig struct { EnableCloudBrain bool } +type ModelManageConfig struct { + EnableModelManage bool +} + +// FromDB fills up a CloudBrainConfig from serialized format. +func (cfg *ModelManageConfig) FromDB(bs []byte) error { + return json.Unmarshal(bs, &cfg) +} + +// ToDB exports a CloudBrainConfig to a serialized format. +func (cfg *ModelManageConfig) ToDB() ([]byte, error) { + return json.Marshal(cfg) +} + // FromDB fills up a CloudBrainConfig from serialized format. 
func (cfg *CloudBrainConfig) FromDB(bs []byte) error { return json.Unmarshal(bs, &cfg) @@ -176,6 +190,8 @@ func (r *RepoUnit) BeforeSet(colName string, val xorm.Cell) { r.Config = new(CloudBrainConfig) case UnitTypeBlockChain: r.Config = new(BlockChainConfig) + case UnitTypeModelManage: + r.Config = new(ModelManageConfig) default: panic("unrecognized repo unit type: " + com.ToStr(*val)) } diff --git a/models/unit.go b/models/unit.go index e2b73841a..381491388 100755 --- a/models/unit.go +++ b/models/unit.go @@ -27,6 +27,7 @@ const ( UnitTypeDatasets UnitType = 10 // 10 Dataset UnitTypeCloudBrain UnitType = 11 // 11 CloudBrain UnitTypeBlockChain UnitType = 12 // 12 BlockChain + UnitTypeModelManage UnitType = 13 // 13 ModelManage ) // Value returns integer value for unit type @@ -56,6 +57,8 @@ func (u UnitType) String() string { return "UnitTypeCloudBrain" case UnitTypeBlockChain: return "UnitTypeBlockChain" + case UnitTypeModelManage: + return "UnitTypeModelManage" } return fmt.Sprintf("Unknown UnitType %d", u) } @@ -80,6 +83,7 @@ var ( UnitTypeDatasets, UnitTypeCloudBrain, UnitTypeBlockChain, + UnitTypeModelManage, } // DefaultRepoUnits contains the default unit types @@ -92,6 +96,7 @@ var ( UnitTypeDatasets, UnitTypeCloudBrain, UnitTypeBlockChain, + UnitTypeModelManage, } // NotAllowedDefaultRepoUnits contains units that can't be default @@ -281,6 +286,14 @@ var ( 7, } + UnitModelManage = Unit{ + UnitTypeModelManage, + "repo.modelmanage", + "/modelmanage", + "repo.modelmanage.desc", + 8, + } + // Units contains all the units Units = map[UnitType]Unit{ UnitTypeCode: UnitCode, @@ -293,6 +306,7 @@ var ( UnitTypeDatasets: UnitDataset, UnitTypeCloudBrain: UnitCloudBrain, UnitTypeBlockChain: UnitBlockChain, + UnitTypeModelManage: UnitModelManage, } ) diff --git a/modules/auth/repo_form.go b/modules/auth/repo_form.go index 8352026fe..8061c6469 100755 --- a/modules/auth/repo_form.go +++ b/modules/auth/repo_form.go @@ -122,6 +122,7 @@ type RepoSettingForm struct { // 
Advanced settings EnableDataset bool EnableCloudBrain bool + EnableModelManager bool EnableWiki bool EnableExternalWiki bool ExternalWikiURL string diff --git a/modules/context/repo.go b/modules/context/repo.go index 9f8a178fc..de494c1bc 100755 --- a/modules/context/repo.go +++ b/modules/context/repo.go @@ -821,5 +821,6 @@ func UnitTypes() macaron.Handler { ctx.Data["UnitTypeExternalWiki"] = models.UnitTypeExternalWiki ctx.Data["UnitTypeExternalTracker"] = models.UnitTypeExternalTracker ctx.Data["UnitTypeBlockChain"] = models.UnitTypeBlockChain + ctx.Data["UnitTypeModelManage"] = models.UnitTypeModelManage } } diff --git a/modules/storage/obs.go b/modules/storage/obs.go index a5c463bb0..367ffe1e8 100755 --- a/modules/storage/obs.go +++ b/modules/storage/obs.go @@ -5,6 +5,7 @@ package storage import ( + "errors" "io" "net/url" "path" @@ -140,11 +141,51 @@ func ObsMultiPartUpload(uuid string, uploadId string, partNumber int, fileName s } -func ObsDownload(uuid string, fileName string) (io.ReadCloser, error) { +//delete all file under the dir path +func ObsRemoveObject(bucket string, path string) error { + log.Info("Bucket=" + bucket + " path=" + path) + if len(path) == 0 { + return errors.New("path canot be null.") + } + input := &obs.ListObjectsInput{} + input.Bucket = bucket + // 设置每页100个对象 + input.MaxKeys = 100 + input.Prefix = path + index := 1 + log.Info("prefix=" + input.Prefix) + for { + output, err := ObsCli.ListObjects(input) + if err == nil { + log.Info("Page:%d\n", index) + index++ + for _, val := range output.Contents { + log.Info("delete obs file:" + val.Key) + delObj := &obs.DeleteObjectInput{} + delObj.Bucket = setting.Bucket + delObj.Key = val.Key + ObsCli.DeleteObject(delObj) + } + if output.IsTruncated { + input.Marker = output.NextMarker + } else { + break + } + } else { + if obsError, ok := err.(obs.ObsError); ok { + log.Info("Code:%s\n", obsError.Code) + log.Info("Message:%s\n", obsError.Message) + } + return err + } + } + return nil +} + +func 
ObsDownloadAFile(bucket string, key string) (io.ReadCloser, error) { input := &obs.GetObjectInput{} - input.Bucket = setting.Bucket - input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") - // input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/") + input.Bucket = bucket + input.Key = key output, err := ObsCli.GetObject(input) if err == nil { log.Info("StorageClass:%s, ETag:%s, ContentType:%s, ContentLength:%d, LastModified:%s\n", @@ -158,6 +199,11 @@ func ObsDownload(uuid string, fileName string) (io.ReadCloser, error) { } } +func ObsDownload(uuid string, fileName string) (io.ReadCloser, error) { + + return ObsDownloadAFile(setting.Bucket, strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")) +} + func ObsModelDownload(JobName string, fileName string) (io.ReadCloser, error) { input := &obs.GetObjectInput{} input.Bucket = setting.Bucket @@ -176,6 +222,160 @@ func ObsModelDownload(JobName string, fileName string) (io.ReadCloser, error) { } } +func ObsCopyManyFile(srcBucket string, srcPath string, destBucket string, destPath string) (int64, error) { + input := &obs.ListObjectsInput{} + input.Bucket = srcBucket + // 设置每页100个对象 + input.MaxKeys = 100 + input.Prefix = srcPath + index := 1 + length := len(srcPath) + var fileTotalSize int64 + log.Info("prefix=" + input.Prefix) + for { + output, err := ObsCli.ListObjects(input) + if err == nil { + log.Info("Page:%d\n", index) + index++ + for _, val := range output.Contents { + destKey := destPath + val.Key[length:] + obsCopyFile(srcBucket, val.Key, destBucket, destKey) + fileTotalSize += val.Size + } + if output.IsTruncated { + input.Marker = output.NextMarker + } else { + break + } + } else { + if obsError, ok := err.(obs.ObsError); ok { + log.Info("Code:%s\n", obsError.Code) + log.Info("Message:%s\n", obsError.Message) + } + return 0, err + } + } + return 
fileTotalSize, nil +} + +func obsCopyFile(srcBucket string, srcKeyName string, destBucket string, destKeyName string) error { + input := &obs.CopyObjectInput{} + input.Bucket = destBucket + input.Key = destKeyName + input.CopySourceBucket = srcBucket + input.CopySourceKey = srcKeyName + _, err := ObsCli.CopyObject(input) + if err == nil { + log.Info("copy success,destBuckName:%s, destkeyname:%s", destBucket, destKeyName) + } else { + log.Info("copy failed,,destBuckName:%s, destkeyname:%s", destBucket, destKeyName) + if obsError, ok := err.(obs.ObsError); ok { + log.Info(obsError.Code) + log.Info(obsError.Message) + } + return err + } + return nil +} + +func GetOneLevelAllObjectUnderDir(bucket string, prefixRootPath string, relativePath string) ([]FileInfo, error) { + input := &obs.ListObjectsInput{} + input.Bucket = bucket + input.Prefix = prefixRootPath + relativePath + if !strings.HasSuffix(input.Prefix, "/") { + input.Prefix += "/" + } + output, err := ObsCli.ListObjects(input) + fileInfos := make([]FileInfo, 0) + prefixLen := len(input.Prefix) + if err == nil { + for _, val := range output.Contents { + log.Info("val key=" + val.Key) + var isDir bool + var fileName string + if val.Key == input.Prefix { + continue + } + if strings.Contains(val.Key[prefixLen:len(val.Key)-1], "/") { + continue + } + if strings.HasSuffix(val.Key, "/") { + isDir = true + fileName = val.Key[prefixLen : len(val.Key)-1] + relativePath += val.Key[prefixLen:] + } else { + isDir = false + fileName = val.Key[prefixLen:] + } + fileInfo := FileInfo{ + ModTime: val.LastModified.Local().Format("2006-01-02 15:04:05"), + FileName: fileName, + Size: val.Size, + IsDir: isDir, + ParenDir: relativePath, + } + fileInfos = append(fileInfos, fileInfo) + } + return fileInfos, err + } else { + if obsError, ok := err.(obs.ObsError); ok { + log.Error("Code:%s, Message:%s", obsError.Code, obsError.Message) + } + return nil, err + } + +} + +func GetAllObjectByBucketAndPrefix(bucket string, prefix string) 
([]FileInfo, error) { + input := &obs.ListObjectsInput{} + input.Bucket = bucket + // 设置每页100个对象 + input.MaxKeys = 100 + input.Prefix = prefix + index := 1 + fileInfos := make([]FileInfo, 0) + prefixLen := len(prefix) + log.Info("prefix=" + input.Prefix) + for { + output, err := ObsCli.ListObjects(input) + if err == nil { + log.Info("Page:%d\n", index) + index++ + for _, val := range output.Contents { + var isDir bool + if prefixLen == len(val.Key) { + continue + } + if strings.HasSuffix(val.Key, "/") { + isDir = true + } else { + isDir = false + } + fileInfo := FileInfo{ + ModTime: val.LastModified.Format("2006-01-02 15:04:05"), + FileName: val.Key[prefixLen:], + Size: val.Size, + IsDir: isDir, + ParenDir: "", + } + fileInfos = append(fileInfos, fileInfo) + } + if output.IsTruncated { + input.Marker = output.NextMarker + } else { + break + } + } else { + if obsError, ok := err.(obs.ObsError); ok { + log.Info("Code:%s\n", obsError.Code) + log.Info("Message:%s\n", obsError.Message) + } + return nil, err + } + } + return fileInfos, nil +} + func GetObsListObject(jobName, parentDir, versionName string) ([]FileInfo, error) { input := &obs.ListObjectsInput{} input.Bucket = setting.Bucket @@ -258,27 +458,6 @@ func ObsGenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, file return output.SignedUrl, nil } -func GetObsCreateSignedUrl(jobName, parentDir, fileName string) (string, error) { - input := &obs.CreateSignedUrlInput{} - input.Bucket = setting.Bucket - input.Key = strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, parentDir, fileName), "/") - - input.Expires = 60 * 60 - input.Method = obs.HttpMethodGet - - reqParams := make(map[string]string) - fileName = url.QueryEscape(fileName) - reqParams["response-content-disposition"] = "attachment; filename=\"" + fileName + "\"" - input.QueryParams = reqParams - output, err := ObsCli.CreateSignedUrl(input) - if err != nil { - log.Error("CreateSignedUrl failed:", err.Error()) - 
return "", err - } - log.Info("SignedUrl:%s", output.SignedUrl) - return output.SignedUrl, nil -} - func GetObsCreateSignedUrlByBucketAndKey(bucket, key string) (string, error) { input := &obs.CreateSignedUrlInput{} input.Bucket = bucket @@ -302,7 +481,10 @@ func GetObsCreateSignedUrlByBucketAndKey(bucket, key string) (string, error) { } return output.SignedUrl, nil +} +func GetObsCreateSignedUrl(jobName, parentDir, fileName string) (string, error) { + return GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, parentDir, fileName), "/")) } func ObsGetPreSignedUrl(uuid, fileName string) (string, error) { diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 440147d58..3ab3a00c9 100644 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -816,6 +816,11 @@ get_repo_info_error=Can not get the information of the repository. generate_statistic_file_error=Fail to generate file. 
repo_stat_inspect=ProjectAnalysis all=All +modelarts.status=Status +modelarts.createtime=CreateTime +modelarts.version_nums = Version Nums +modelarts.version = Version +modelarts.computing_resources=compute Resources modelarts.notebook=Debug Task modelarts.train_job=Train Task modelarts.train_job.new_debug= New Debug Task @@ -823,6 +828,10 @@ modelarts.train_job.new_train=New Train Task modelarts.train_job.config=Configuration information modelarts.train_job.new=New train Task modelarts.train_job.new_place=The description should not exceed 256 characters +modelarts.model_name=Model Name +modelarts.model_size=Model Size +modelarts.import_model=Import Model + modelarts.modify=Modify modelarts.current_version=Current version modelarts.parent_version=Parent Version @@ -874,6 +883,20 @@ modelarts.train_job_para_admin=train_job_para_admin modelarts.train_job_para.edit=train_job_para.edit modelarts.train_job_para.connfirm=train_job_para.connfirm +model.manage.import_new_model=Import New Model +model.manage.create_error=Equal Name and Version has existed. +model.manage.model_name = Model Name +model.manage.version = Version +model.manage.label = Label +model.manage.size = Size +model.manage.create_time = Create Time +model.manage.Description = Description +model.manage.Accuracy = Accuracy +model.manage.F1 = F1 +model.manage.Precision = Precision +model.manage.Recall = Recall + + template.items = Template Items template.git_content = Git Content (Default Branch) template.git_hooks = Git Hooks @@ -1552,6 +1575,7 @@ settings.external_wiki_url_error = The external wiki URL is not a valid URL. settings.external_wiki_url_desc = Visitors are redirected to the external wiki URL when clicking the wiki tab. 
settings.dataset_desc = Enable Repository Dataset settings.cloudbrain_desc = Enable Cloudbarin +settings.model_desc = Enable Model Manage settings.issues_desc = Enable Repository Issue Tracker settings.use_internal_issue_tracker = Use Built-In Issue Tracker settings.use_external_issue_tracker = Use External Issue Tracker diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index 54e362a0a..8bd04459a 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -782,6 +782,9 @@ datasets=数据集 datasets.desc=数据集功能 cloudbrain_helper=使用GPU/NPU资源,开启Notebook、模型训练任务等 +model_manager = 模型管理 +model_noright=无权限操作 + debug=调试 stop=停止 delete=删除 @@ -824,6 +827,7 @@ all=所有 modelarts.status=状态 modelarts.createtime=创建时间 modelarts.version_nums=版本数 +modelarts.version=版本 modelarts.computing_resources=计算资源 modelarts.notebook=调试任务 modelarts.train_job=训练任务 @@ -831,6 +835,10 @@ modelarts.train_job.new_debug=新建调试任务 modelarts.train_job.new_train=新建训练任务 modelarts.train_job.config=配置信息 modelarts.train_job.new=新建训练任务 +modelarts.train_job.new_place=描述字数不超过256个字符 +modelarts.model_name=模型名称 +modelarts.model_size=模型大小 +modelarts.import_model=导入模型 modelarts.train_job.new_place=描述字数不超过255个字符 modelarts.modify=修改 modelarts.current_version=当前版本 @@ -887,6 +895,18 @@ modelarts.train_job_para_admin=任务参数管理 modelarts.train_job_para.edit=编辑 modelarts.train_job_para.connfirm=确定 +model.manage.import_new_model=导入新模型 +model.manage.create_error=相同的名称和版本的模型已经存在。 +model.manage.model_name = 模型名称 +model.manage.version = 版本 +model.manage.label = 标签 +model.manage.size = 大小 +model.manage.create_time = 创建时间 +model.manage.description = 描述 +model.manage.Accuracy = 准确率 +model.manage.F1 = F1值 +model.manage.Precision = 精确率 +model.manage.Recall = 召回率 template.items=模板选项 template.git_content=Git数据(默认分支) @@ -1566,6 +1586,7 @@ settings.external_wiki_url_error=外部百科链接无效 settings.external_wiki_url_desc=当点击任务标签时,访问者将被重定向到外部任务系统的URL。 settings.dataset_desc=启用数据集 settings.cloudbrain_desc = 
启用云脑 +settings.model_desc = 启用模型管理 settings.issues_desc=启用任务系统 settings.use_internal_issue_tracker=使用内置的轻量级任务管理系统 settings.use_external_issue_tracker=使用外部的任务管理系统 diff --git a/routers/repo/ai_model_manage.go b/routers/repo/ai_model_manage.go new file mode 100644 index 000000000..c6ec6c6ae --- /dev/null +++ b/routers/repo/ai_model_manage.go @@ -0,0 +1,513 @@ +package repo + +import ( + "archive/zip" + "encoding/json" + "errors" + "fmt" + "net/http" + "path" + "strings" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/storage" + uuid "github.com/satori/go.uuid" +) + +const ( + Model_prefix = "aimodels/" + tplModelManageIndex = "repo/modelmanage/index" + tplModelManageDownload = "repo/modelmanage/download" + tplModelInfo = "repo/modelmanage/showinfo" + MODEL_LATEST = 1 + MODEL_NOT_LATEST = 0 +) + +func saveModelByParameters(jobId string, versionName string, name string, version string, label string, description string, ctx *context.Context) error { + aiTask, err := models.GetCloudbrainByJobIDAndVersionName(jobId, versionName) + //aiTask, err := models.GetCloudbrainByJobID(jobId) + if err != nil { + log.Info("query task error." 
+ err.Error()) + return err + } + + uuid := uuid.NewV4() + id := uuid.String() + modelPath := id + var lastNewModelId string + var modelSize int64 + cloudType := models.TypeCloudBrainTwo + + log.Info("find task name:" + aiTask.JobName) + aimodels := models.QueryModelByName(name, aiTask.RepoID) + if len(aimodels) > 0 { + for _, model := range aimodels { + if model.Version == version { + return errors.New(ctx.Tr("repo.model.manage.create_error")) + } + if model.New == MODEL_LATEST { + lastNewModelId = model.ID + } + } + } + cloudType = aiTask.Type + //download model zip //train type + if cloudType == models.TypeCloudBrainTwo { + modelPath, modelSize, err = downloadModelFromCloudBrainTwo(id, aiTask.JobName, "") + if err != nil { + log.Info("download model from CloudBrainTwo faild." + err.Error()) + return err + } + } + accuracy := make(map[string]string) + accuracy["F1"] = "" + accuracy["Recall"] = "" + accuracy["Accuracy"] = "" + accuracy["Precision"] = "" + accuracyJson, _ := json.Marshal(accuracy) + log.Info("accuracyJson=" + string(accuracyJson)) + aiTaskJson, _ := json.Marshal(aiTask) + + //taskConfigInfo,err := models.GetCloudbrainByJobIDAndVersionName(jobId,aiTask.VersionName) + model := &models.AiModelManage{ + ID: id, + Version: version, + VersionCount: len(aimodels) + 1, + Label: label, + Name: name, + Description: description, + New: MODEL_LATEST, + Type: cloudType, + Path: modelPath, + Size: modelSize, + AttachmentId: aiTask.Uuid, + RepoId: aiTask.RepoID, + UserId: ctx.User.ID, + UserName: ctx.User.Name, + UserRelAvatarLink: ctx.User.RelAvatarLink(), + CodeBranch: aiTask.BranchName, + CodeCommitID: aiTask.CommitID, + Engine: aiTask.EngineID, + TrainTaskInfo: string(aiTaskJson), + Accuracy: string(accuracyJson), + } + + err = models.SaveModelToDb(model) + if err != nil { + return err + } + if len(lastNewModelId) > 0 { + //udpate status and version count + models.ModifyModelNewProperty(lastNewModelId, MODEL_NOT_LATEST, 0) + } + + log.Info("save model end.") 
+ + return nil +} + +func SaveModel(ctx *context.Context) { + log.Info("save model start.") + JobId := ctx.Query("JobId") + VersionName := ctx.Query("VersionName") + name := ctx.Query("Name") + version := ctx.Query("Version") + label := ctx.Query("Label") + description := ctx.Query("Description") + + if !ctx.Repo.CanWrite(models.UnitTypeModelManage) { + ctx.ServerError("No right.", errors.New(ctx.Tr("repo.model_noright"))) + return + } + + if JobId == "" || VersionName == "" { + ctx.Error(500, fmt.Sprintf("JobId or VersionName is null.")) + return + } + + if name == "" || version == "" { + ctx.Error(500, fmt.Sprintf("name or version is null.")) + return + } + + err := saveModelByParameters(JobId, VersionName, name, version, label, description, ctx) + + if err != nil { + log.Info("save model error." + err.Error()) + ctx.Error(500, fmt.Sprintf("save model error. %v", err)) + return + } + + log.Info("save model end.") +} + +func downloadModelFromCloudBrainTwo(modelUUID string, jobName string, parentDir string) (string, int64, error) { + + objectkey := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, parentDir), "/") + modelDbResult, err := storage.GetOneLevelAllObjectUnderDir(setting.Bucket, objectkey, "") + log.Info("bucket=" + setting.Bucket + " objectkey=" + objectkey) + if err != nil { + log.Info("get TrainJobListModel failed:", err) + return "", 0, err + } + if len(modelDbResult) == 0 { + return "", 0, errors.New("cannot create model, as model is empty.") + } + + prefix := objectkey + "/" + destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(modelUUID) + "/" + + size, err := storage.ObsCopyManyFile(setting.Bucket, prefix, setting.Bucket, destKeyNamePrefix) + + dataActualPath := setting.Bucket + "/" + destKeyNamePrefix + return dataActualPath, size, nil +} + +func DeleteModel(ctx *context.Context) { + log.Info("delete model start.") + id := ctx.Query("ID") + err := deleteModelByID(ctx, id) + if err != nil { + 
ctx.JSON(500, err.Error()) + } else { + ctx.JSON(200, map[string]string{ + "result_code": "0", + }) + } +} +func isCanDeleteOrDownload(ctx *context.Context, model *models.AiModelManage) bool { + if ctx.User.IsAdmin || ctx.User.ID == model.UserId { + return true + } + if ctx.Repo.IsOwner() { + return true + } + return false +} + +func deleteModelByID(ctx *context.Context, id string) error { + log.Info("delete model start. id=" + id) + model, err := models.QueryModelById(id) + if !isCanDeleteOrDownload(ctx, model) { + return errors.New(ctx.Tr("repo.model_noright")) + } + if err == nil { + log.Info("bucket=" + setting.Bucket + " path=" + model.Path) + if strings.HasPrefix(model.Path, setting.Bucket+"/"+Model_prefix) { + err := storage.ObsRemoveObject(setting.Bucket, model.Path[len(setting.Bucket)+1:]) + if err != nil { + log.Info("Failed to delete model. id=" + id) + return err + } + } + err = models.DeleteModelById(id) + if err == nil { //find a model to change new + aimodels := models.QueryModelByName(model.Name, model.RepoId) + if model.New == MODEL_LATEST { + if len(aimodels) > 0 { + //udpate status and version count + models.ModifyModelNewProperty(aimodels[0].ID, MODEL_LATEST, len(aimodels)) + } + } else { + for _, tmpModel := range aimodels { + if tmpModel.New == MODEL_LATEST { + models.ModifyModelNewProperty(tmpModel.ID, MODEL_LATEST, len(aimodels)) + break + } + } + } + } + } + return err +} + +func QueryModelByParameters(repoId int64, page int) ([]*models.AiModelManage, int64, error) { + + return models.QueryModel(&models.AiModelQueryOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: setting.UI.IssuePagingNum, + }, + RepoID: repoId, + Type: -1, + New: MODEL_LATEST, + }) +} + +func DownloadMultiModelFile(ctx *context.Context) { + log.Info("DownloadMultiModelFile start.") + id := ctx.Query("ID") + log.Info("id=" + id) + task, err := models.QueryModelById(id) + if err != nil { + log.Error("no such model!", err.Error()) + ctx.ServerError("no 
such model:", err) + return + } + if !isCanDeleteOrDownload(ctx, task) { + ctx.ServerError("no right.", errors.New(ctx.Tr("repo.model_noright"))) + return + } + + path := Model_prefix + models.AttachmentRelativePath(id) + "/" + + allFile, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, path) + if err == nil { + //count++ + models.ModifyModelDownloadCount(id) + + returnFileName := task.Name + "_" + task.Version + ".zip" + ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+returnFileName) + ctx.Resp.Header().Set("Content-Type", "application/octet-stream") + w := zip.NewWriter(ctx.Resp) + defer w.Close() + for _, oneFile := range allFile { + if oneFile.IsDir { + log.Info("zip dir name:" + oneFile.FileName) + } else { + log.Info("zip file name:" + oneFile.FileName) + fDest, err := w.Create(oneFile.FileName) + if err != nil { + log.Info("create zip entry error, download file failed: %s\n", err.Error()) + ctx.ServerError("download file failed:", err) + return + } + body, err := storage.ObsDownloadAFile(setting.Bucket, path+oneFile.FileName) + if err != nil { + log.Info("download file failed: %s\n", err.Error()) + ctx.ServerError("download file failed:", err) + return + } else { + defer body.Close() + p := make([]byte, 1024) + var readErr error + var readCount int + // 读取对象内容 + for { + readCount, readErr = body.Read(p) + if readCount > 0 { + fDest.Write(p[:readCount]) + } + if readErr != nil { + break + } + } + } + } + } + } else { + log.Info("error,msg=" + err.Error()) + ctx.ServerError("no file to download.", err) + } +} + +func QueryTrainJobVersionList(ctx *context.Context) { + log.Info("query train job version list. 
start.") + JobID := ctx.Query("JobID") + + VersionListTasks, count, err := models.QueryModelTrainJobVersionList(JobID) + + log.Info("query return count=" + fmt.Sprint(count)) + + if err != nil { + ctx.ServerError("QueryTrainJobList:", err) + } else { + ctx.JSON(200, VersionListTasks) + } +} + +func QueryTrainJobList(ctx *context.Context) { + log.Info("query train job list. start.") + repoId := ctx.QueryInt64("repoId") + + VersionListTasks, count, err := models.QueryModelTrainJobList(repoId) + log.Info("query return count=" + fmt.Sprint(count)) + + if err != nil { + ctx.ServerError("QueryTrainJobList:", err) + } else { + ctx.JSON(200, VersionListTasks) + } + +} + +func DownloadSingleModelFile(ctx *context.Context) { + log.Info("DownloadSingleModelFile start.") + id := ctx.Params(":ID") + parentDir := ctx.Query("parentDir") + fileName := ctx.Query("fileName") + path := Model_prefix + models.AttachmentRelativePath(id) + "/" + parentDir + fileName + + if setting.PROXYURL != "" { + body, err := storage.ObsDownloadAFile(setting.Bucket, path) + if err != nil { + log.Info("download error.") + } else { + //count++ + models.ModifyModelDownloadCount(id) + defer body.Close() + ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+fileName) + ctx.Resp.Header().Set("Content-Type", "application/octet-stream") + p := make([]byte, 1024) + var readErr error + var readCount int + // 读取对象内容 + for { + readCount, readErr = body.Read(p) + if readCount > 0 { + ctx.Resp.Write(p[:readCount]) + //fmt.Printf("%s", p[:readCount]) + } + if readErr != nil { + break + } + } + } + } else { + url, err := storage.GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, path) + if err != nil { + log.Error("GetObsCreateSignedUrl failed: %v", err.Error(), ctx.Data["msgID"]) + ctx.ServerError("GetObsCreateSignedUrl", err) + return + } + //count++ + models.ModifyModelDownloadCount(id) + http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently) + } +} + +func ShowModelInfo(ctx 
*context.Context) { + ctx.Data["ID"] = ctx.Query("ID") + ctx.Data["name"] = ctx.Query("name") + ctx.Data["isModelManage"] = true + ctx.HTML(200, tplModelInfo) +} + +func ShowSingleModel(ctx *context.Context) { + name := ctx.Query("name") + + log.Info("Show single ModelInfo start.name=" + name) + models := models.QueryModelByName(name, ctx.Repo.Repository.ID) + + ctx.JSON(http.StatusOK, models) +} + +func ShowOneVersionOtherModel(ctx *context.Context) { + repoId := ctx.Repo.Repository.ID + name := ctx.Query("name") + aimodels := models.QueryModelByName(name, repoId) + for _, model := range aimodels { + log.Info("model=" + model.Name) + log.Info("model.UserId=" + fmt.Sprint(model.UserId)) + model.IsCanOper = isOper(ctx, model.UserId) + } + if len(aimodels) > 0 { + ctx.JSON(200, aimodels[1:]) + } else { + ctx.JSON(200, aimodels) + } +} + +func ShowModelTemplate(ctx *context.Context) { + ctx.Data["isModelManage"] = true + ctx.HTML(200, tplModelManageIndex) +} + +func isQueryRight(ctx *context.Context) bool { + if ctx.Repo.Repository.IsPrivate { + if ctx.Repo.CanRead(models.UnitTypeModelManage) || ctx.User.IsAdmin || ctx.Repo.IsAdmin() || ctx.Repo.IsOwner() { + return true + } + return false + } else { + return true + } +} + +func isOper(ctx *context.Context, modelUserId int64) bool { + if ctx.User == nil { + return false + } + if ctx.User.IsAdmin || ctx.Repo.IsAdmin() || ctx.Repo.IsOwner() || ctx.User.ID == modelUserId { + return true + } + return false +} + +func ShowModelPageInfo(ctx *context.Context) { + log.Info("ShowModelInfo start.") + if !isQueryRight(ctx) { + ctx.ServerError("no right.", errors.New(ctx.Tr("repo.model_noright"))) + return + } + page := ctx.QueryInt("page") + if page <= 0 { + page = 1 + } + repoId := ctx.Repo.Repository.ID + Type := -1 + modelResult, count, err := models.QueryModel(&models.AiModelQueryOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: setting.UI.IssuePagingNum, + }, + RepoID: repoId, + Type: Type, + New: 
MODEL_LATEST, + }) + if err != nil { + ctx.ServerError("Cloudbrain", err) + return + } + + for _, model := range modelResult { + log.Info("model=" + model.Name) + log.Info("model.UserId=" + fmt.Sprint(model.UserId)) + model.IsCanOper = isOper(ctx, model.UserId) + } + + mapInterface := make(map[string]interface{}) + mapInterface["data"] = modelResult + mapInterface["count"] = count + ctx.JSON(http.StatusOK, mapInterface) +} + +func ModifyModel(id string, description string) error { + err := models.ModifyModelDescription(id, description) + if err == nil { + log.Info("modify success.") + } else { + log.Info("Failed to modify.id=" + id + " desc=" + description + " error:" + err.Error()) + } + return err +} + +func ModifyModelInfo(ctx *context.Context) { + log.Info("modify model start.") + id := ctx.Query("ID") + description := ctx.Query("Description") + + task, err := models.QueryModelById(id) + if err != nil { + log.Error("no such model!", err.Error()) + ctx.ServerError("no such model:", err) + return + } + if !isCanDeleteOrDownload(ctx, task) { + ctx.ServerError("no right.", errors.New(ctx.Tr("repo.model_noright"))) + return + } + + err = ModifyModel(id, description) + + if err != nil { + log.Info("modify error," + err.Error()) + ctx.ServerError("error.", err) + } else { + ctx.JSON(200, "success") + } + +} diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 7b61e01e2..293481ddf 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -552,10 +552,10 @@ func CloudBrainShowModels(ctx *context.Context) { } //get dirs - dirs, err := getModelDirs(task.JobName, parentDir) + dirs, err := GetModelDirs(task.JobName, parentDir) if err != nil { - log.Error("getModelDirs failed:%v", err.Error(), ctx.Data["msgID"]) - ctx.ServerError("getModelDirs failed:", err) + log.Error("GetModelDirs failed:%v", err.Error(), ctx.Data["msgID"]) + ctx.ServerError("GetModelDirs failed:", err) return } @@ -615,7 +615,7 @@ func getImages(ctx 
*context.Context, imageType string) { log.Info("Get images end") } -func getModelDirs(jobName string, parentDir string) (string, error) { +func GetModelDirs(jobName string, parentDir string) (string, error) { var req string modelActualPath := setting.JobPath + jobName + "/model/" if parentDir == "" { diff --git a/routers/repo/dir.go b/routers/repo/dir.go index 406f3dc73..81549e76a 100755 --- a/routers/repo/dir.go +++ b/routers/repo/dir.go @@ -12,7 +12,6 @@ import ( "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/obs" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" ) @@ -70,40 +69,10 @@ func DeleteAllUnzipFile(attachment *models.Attachment, parentDir string) { } } if attachment.Type == models.TypeCloudBrainTwo { - - input := &obs.ListObjectsInput{} - input.Bucket = setting.Bucket - // 设置每页100个对象 - input.MaxKeys = 100 - input.Prefix = setting.BasePath + attachment.RelativePath() + attachment.UUID - index := 1 - log.Info("prefix=" + input.Prefix) - for { - output, err := storage.ObsCli.ListObjects(input) - if err == nil { - log.Info("Page:%d\n", index) - index++ - for _, val := range output.Contents { - log.Info("delete obs file:" + val.Key) - delObj := &obs.DeleteObjectInput{} - delObj.Bucket = setting.Bucket - delObj.Key = val.Key - storage.ObsCli.DeleteObject(delObj) - } - if output.IsTruncated { - input.Marker = output.NextMarker - } else { - break - } - } else { - if obsError, ok := err.(obs.ObsError); ok { - log.Info("Code:%s\n", obsError.Code) - log.Info("Message:%s\n", obsError.Message) - } - break - } + err := storage.ObsRemoveObject(setting.Bucket, setting.BasePath+attachment.RelativePath()+attachment.UUID) + if err != nil { + log.Info("delete file error.") } - } } diff --git a/routers/repo/http.go b/routers/repo/http.go index ad2abf567..87406a2c3 100644 --- a/routers/repo/http.go +++ b/routers/repo/http.go @@ -257,7 +257,6 @@ func HTTP(ctx 
*context.Context) { models.EnvPusherID + fmt.Sprintf("=%d", authUser.ID), models.EnvIsDeployKey + "=false", } - if !authUser.KeepEmailPrivate { environ = append(environ, models.EnvPusherEmail+"="+authUser.Email) } @@ -559,6 +558,7 @@ func serviceRPC(h serviceHandler, service string) { if service == "receive-pack" { cmd.Env = append(os.Environ(), h.environ...) } + cmd.Stdout = h.w cmd.Stdin = reqBody cmd.Stderr = &stderr diff --git a/routers/repo/setting.go b/routers/repo/setting.go index bf11f9e5a..f7da8f4a8 100644 --- a/routers/repo/setting.go +++ b/routers/repo/setting.go @@ -239,6 +239,18 @@ func SettingsPost(ctx *context.Context, form auth.RepoSettingForm) { deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeCloudBrain) } + if form.EnableModelManager && !models.UnitTypeModelManage.UnitGlobalDisabled() { + units = append(units, models.RepoUnit{ + RepoID: repo.ID, + Type: models.UnitTypeModelManage, + Config: &models.ModelManageConfig{ + EnableModelManage: form.EnableModelManager, + }, + }) + } else if !models.UnitTypeModelManage.UnitGlobalDisabled() { + deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeModelManage) + } + if form.EnableWiki && form.EnableExternalWiki && !models.UnitTypeExternalWiki.UnitGlobalDisabled() { if !validation.IsValidExternalURL(form.ExternalWikiURL) { ctx.Flash.Error(ctx.Tr("repo.settings.external_wiki_url_error")) diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 9552b3139..fdb7fc2a6 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -614,6 +614,8 @@ func RegisterRoutes(m *macaron.Macaron) { reqRepoDatasetWriter := context.RequireRepoWriter(models.UnitTypeDatasets) reqRepoCloudBrainReader := context.RequireRepoReader(models.UnitTypeCloudBrain) reqRepoCloudBrainWriter := context.RequireRepoWriter(models.UnitTypeCloudBrain) + reqRepoModelManageReader := context.RequireRepoReader(models.UnitTypeModelManage) + reqRepoModelManageWriter := 
context.RequireRepoWriter(models.UnitTypeModelManage) //reqRepoBlockChainReader := context.RequireRepoReader(models.UnitTypeBlockChain) //reqRepoBlockChainWriter := context.RequireRepoWriter(models.UnitTypeBlockChain) @@ -970,6 +972,23 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/create", reqRepoCloudBrainWriter, repo.CloudBrainNew) m.Post("/create", reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainCreate) }, context.RepoRef()) + m.Group("/modelmanage", func() { + m.Post("/create_model", reqRepoModelManageWriter, repo.SaveModel) + m.Delete("/delete_model", repo.DeleteModel) + m.Put("/modify_model", repo.ModifyModelInfo) + m.Get("/show_model", reqRepoModelManageReader, repo.ShowModelTemplate) + m.Get("/show_model_info", repo.ShowModelInfo) + m.Get("/show_model_info_api", repo.ShowSingleModel) + m.Get("/show_model_api", repo.ShowModelPageInfo) + m.Get("/show_model_child_api", repo.ShowOneVersionOtherModel) + m.Get("/query_train_job", reqRepoCloudBrainReader, repo.QueryTrainJobList) + m.Get("/query_train_job_version", reqRepoCloudBrainReader, repo.QueryTrainJobVersionList) + m.Group("/:ID", func() { + m.Get("", repo.ShowSingleModel) + m.Get("/downloadsingle", repo.DownloadSingleModelFile) + }) + m.Get("/downloadall", repo.DownloadMultiModelFile) + }, context.RepoRef()) m.Group("/debugjob", func() { m.Get("", reqRepoCloudBrainReader, repo.DebugJobIndex) diff --git a/templates/repo/debugjob/index.tmpl b/templates/repo/debugjob/index.tmpl index 9517d0c79..15b020f4a 100644 --- a/templates/repo/debugjob/index.tmpl +++ b/templates/repo/debugjob/index.tmpl @@ -368,7 +368,10 @@ {{$.i18n.Tr "repo.delete"}} +<<<<<<< HEAD +======= +>>>>>>> V20211213 {{end}} @@ -415,7 +418,10 @@ +<<<<<<< HEAD {{end}} +======= +>>>>>>> V20211213 diff --git a/templates/repo/header.tmpl b/templates/repo/header.tmpl index 0257b6080..03514ff15 100755 --- a/templates/repo/header.tmpl +++ b/templates/repo/header.tmpl @@ -137,6 +137,12 @@ {{svg "octicon-inbox" 16}} 
{{.i18n.Tr "datasets"}} {{end}} + {{if .Permission.CanRead $.UnitTypeModelManage}} + + + {{.i18n.Tr "repo.model_manager"}} + + {{end}} {{if .Permission.CanRead $.UnitTypeCloudBrain}} {{svg "octicon-server" 16}} {{.i18n.Tr "repo.cloudbrain"}} @@ -164,86 +170,5 @@
- - \ No newline at end of file diff --git a/templates/repo/modelarts/notebook/index.tmpl b/templates/repo/modelarts/notebook/index.tmpl index 5e6ae12ae..f3030eaeb 100755 --- a/templates/repo/modelarts/notebook/index.tmpl +++ b/templates/repo/modelarts/notebook/index.tmpl @@ -1,187 +1,6 @@ {{template "base/head" .}} - -
diff --git a/templates/repo/modelarts/notebook/new.tmpl b/templates/repo/modelarts/notebook/new.tmpl index ee629875d..5f28e1d67 100755 --- a/templates/repo/modelarts/notebook/new.tmpl +++ b/templates/repo/modelarts/notebook/new.tmpl @@ -1,89 +1,8 @@ {{template "base/head" .}}
diff --git a/templates/repo/modelarts/trainjob/index.tmpl b/templates/repo/modelarts/trainjob/index.tmpl index 80de9ae74..123f645ea 100755 --- a/templates/repo/modelarts/trainjob/index.tmpl +++ b/templates/repo/modelarts/trainjob/index.tmpl @@ -2,184 +2,6 @@ {{template "base/head" .}} diff --git a/templates/repo/modelarts/trainjob/version_new.tmpl b/templates/repo/modelarts/trainjob/version_new.tmpl index 18fcd844e..3c680d2c5 100644 --- a/templates/repo/modelarts/trainjob/version_new.tmpl +++ b/templates/repo/modelarts/trainjob/version_new.tmpl @@ -49,87 +49,6 @@ text-align: center; color: #C2C7CC;" } -#mask { - position: fixed; - top: 0px; - left: 0px; - right: 0px; - bottom: 0px; - filter: alpha(opacity=60); - background-color: #777; - z-index: 1000; - display: none; - opacity: 0.8; - -moz-opacity: 0.5; - padding-top: 100px; - color: #000000 - } - /* 加载圈css效果图 */ - -#loadingPage { - margin: 200px auto; - width: 50px; - height: 40px; - text-align: center; - font-size: 10px; - display: block; -} - -#loadingPage>div { - background-color: green; - height: 100%; - width: 6px; - display: inline-block; - -webkit-animation: sk-stretchdelay 1.2s infinite ease-in-out; - animation: sk-stretchdelay 1.2s infinite ease-in-out; - } - - #loadingPage .rect2 { - -webkit-animation-delay: -1.1s; - animation-delay: -1.1s; - } - - #loadingPage .rect3 { - -webkit-animation-delay: -1.0s; - animation-delay: -1.0s; - } - - #loadingPage .rect4 { - -webkit-animation-delay: -0.9s; - animation-delay: -0.9s; - } - - #loadingPage .rect5 { - -webkit-animation-delay: -0.8s; - animation-delay: -0.8s; - } - .left2{ - margin-left: -2px; - } - - @-webkit-keyframes sk-stretchdelay { - 0%, - 40%, - 100% { - -webkit-transform: scaleY(0.4) - } - 20% { - -webkit-transform: scaleY(1.0) - } - } - - @keyframes sk-stretchdelay { - 0%, - 40%, - 100% { - transform: scaleY(0.4); - -webkit-transform: scaleY(0.4); - } - 20% { - transform: scaleY(1.0); - -webkit-transform: scaleY(1.0); - } -} +{{template 
"base/head" .}} + + +
+
+
+
+
+
+
+
+
+{{$repository := .Repository.ID}} + +
+ +
+ + +
+ + + + +
+ +
+
+ + + +{{template "base/footer" .}} + + + diff --git a/templates/repo/modelmanage/showinfo.tmpl b/templates/repo/modelmanage/showinfo.tmpl new file mode 100644 index 000000000..821340eb0 --- /dev/null +++ b/templates/repo/modelmanage/showinfo.tmpl @@ -0,0 +1,218 @@ +{{template "base/head" .}} +
+ {{template "repo/header" .}} + +
+

+ + + +

+
+
+
+
+{{template "base/footer" .}} + \ No newline at end of file diff --git a/templates/repo/settings/options.tmpl b/templates/repo/settings/options.tmpl index ed30e97a9..de31aa59b 100644 --- a/templates/repo/settings/options.tmpl +++ b/templates/repo/settings/options.tmpl @@ -158,7 +158,14 @@
- + {{$isModelMangeEnabled := .Repository.UnitEnabled $.UnitTypeModelManage }} +
+ +
+ + +
+
{{$isWikiEnabled := or (.Repository.UnitEnabled $.UnitTypeWiki) (.Repository.UnitEnabled $.UnitTypeExternalWiki)}}
diff --git a/web_src/js/components/Model.vue b/web_src/js/components/Model.vue new file mode 100644 index 000000000..dafcce29f --- /dev/null +++ b/web_src/js/components/Model.vue @@ -0,0 +1,427 @@ + + + + + diff --git a/web_src/js/index.js b/web_src/js/index.js index b8d969206..a1f044b4a 100755 --- a/web_src/js/index.js +++ b/web_src/js/index.js @@ -39,6 +39,7 @@ import Images from './components/Images.vue'; import EditTopics from './components/EditTopics.vue'; import DataAnalysis from './components/DataAnalysis.vue' import Contributors from './components/Contributors.vue' +import Model from './components/Model.vue'; Vue.use(ElementUI); Vue.prototype.$axios = axios; @@ -2916,11 +2917,12 @@ $(document).ready(async () => { initVueEditTopic(); initVueContributors(); initVueImages(); + initVueModel(); initVueDataAnalysis(); initTeamSettings(); initCtrlEnterSubmit(); initNavbarContentToggle(); - // initTopicbar(); + // initTopicbar();vim // closeTopicbar(); initU2FAuth(); initU2FRegister(); @@ -3647,7 +3649,7 @@ function initVueContributors() { function initVueImages() { const el = document.getElementById('images'); - console.log("el",el) + if (!el) { return; @@ -3659,6 +3661,20 @@ function initVueImages() { render: h => h(Images) }); } +function initVueModel() { + const el = document.getElementById('model_list'); + + + if (!el) { + return; + } + + new Vue({ + el: el, + + render: h => h(Model) + }); +} function initVueDataAnalysis() { const el = document.getElementById('data_analysis'); console.log("el",el) diff --git a/web_src/less/openi.less b/web_src/less/openi.less index d09d60fa6..608dffab4 100644 --- a/web_src/less/openi.less +++ b/web_src/less/openi.less @@ -368,4 +368,186 @@ display: block; color: #3F3F40; font-size: 18px; margin-bottom: 1rem; + + +.selectcloudbrain .active.item{ + color: #0087f5 !important; + border: 1px solid #0087f5; + margin: -1px; + background: #FFF !important; +} +#deletemodel { + width: 100%; + height: 100%; +} +/* 弹窗 */ + +#mask { + 
position: fixed; + top: 0px; + left: 0px; + right: 0px; + bottom: 0px; + filter: alpha(opacity=60); + background-color: #777; + z-index: 1000; + display: none; + opacity: 0.8; + -moz-opacity: 0.5; + padding-top: 100px; + color: #000000 +} + +#loadingPage { + margin: 200px auto; + width: 50px; + height: 40px; + text-align: center; + font-size: 10px; + display: block; +} + +#loadingPage>div { + background-color: green; + height: 100%; + width: 6px; + display: inline-block; + -webkit-animation: sk-stretchdelay 1.2s infinite ease-in-out; + animation: sk-stretchdelay 1.2s infinite ease-in-out; +} + +#loadingPage .rect2 { + -webkit-animation-delay: -1.1s; + animation-delay: -1.1s; +} + +#loadingPage .rect3 { + -webkit-animation-delay: -1.0s; + animation-delay: -1.0s; +} + +#loadingPage .rect4 { + -webkit-animation-delay: -0.9s; + animation-delay: -0.9s; +} + +#loadingPage .rect5 { + -webkit-animation-delay: -0.8s; + animation-delay: -0.8s; +} + +@-webkit-keyframes sk-stretchdelay { + 0%, + 40%, + 100% { + -webkit-transform: scaleY(0.4) + } + 20% { + -webkit-transform: scaleY(1.0) + } +} + +@keyframes sk-stretchdelay { + 0%, + 40%, + 100% { + transform: scaleY(0.4); + -webkit-transform: scaleY(0.4); + } + 20% { + transform: scaleY(1.0); + -webkit-transform: scaleY(1.0); + } +} +/* 消息框 */ + +.alert { + display: none; + position: fixed; + width: 100%; + z-index: 1001; + padding: 15px; + border: 1px solid transparent; + border-radius: 4px; + text-align: center; + font-weight: bold; +} + +.alert-success { + color: #3c763d; + background-color: #dff0d8; + border-color: #d6e9c6; +} + +.alert-info { + color: #31708f; + background-color: #d9edf7; + border-color: #bce8f1; +} + +.alert-warning { + color: #8a6d3b; + background-color: #fcf8e3; + border-color: #faebcc; +} + +.alert-danger { + color: #a94442; + background-color: #f2dede; + border-color: #ebccd1; +} + +.pusher { + width: calc(100% - 260px); + box-sizing: border-box; +} +/* 弹窗 (background) */ + +#imageModal { + display: 
none; + position: fixed; + z-index: 1; + left: 0; + top: 0; + width: 100%; + height: 100%; + overflow: auto; + background-color: rgb(0, 0, 0); + background-color: rgba(0, 0, 0, 0.4); +} +/* 弹窗内容 */ + +.modal-content { + background-color: #fefefe; + margin: 15% auto; + padding: 20px; + border: 1px solid #888; + width: 30%; +} +/* 关闭按钮 */ + +.close { + color: #aaa; + float: right; + font-size: 28px; + font-weight: bold; +} + +.close:hover, +.close:focus { + color: black; + text-decoration: none; + cursor: pointer; +} + +.dis { + margin-bottom: 20px; +} + +.disabled { + cursor: pointer; + pointer-events: none; +} +.letf2{ + margin-left: -2px; } \ No newline at end of file