Browse Source

#3169

update
tags/v1.22.12.1^2
chenyifan01 3 years ago
parent
commit
8e7aff452a
5 changed files with 170 additions and 11 deletions
  1. +22
    -1
      models/cloudbrain.go
  2. +105
    -0
      models/repo.go
  3. +19
    -4
      routers/repo/ai_model_manage.go
  4. +3
    -0
      routers/repo/attachment.go
  5. +21
    -6
      services/repository/repository.go

+ 22
- 1
models/cloudbrain.go View File

@@ -1861,6 +1861,7 @@ func CreateCloudbrain(cloudbrain *Cloudbrain) (err error) {
session.Commit()

go IncreaseDatasetUseCount(cloudbrain.Uuid)
go ResetRepoAITaskNum(cloudbrain.RepoID)
return nil
}

@@ -2010,9 +2011,29 @@ func DeleteJob(job *Cloudbrain) error {

// deleteJob removes the cloudbrain job row through the supplied engine and,
// when the delete succeeds, asynchronously refreshes the owning repository's
// AI task counter.
func deleteJob(e Engine, job *Cloudbrain) error {
	if _, err := e.ID(job.ID).Delete(job); err != nil {
		return err
	}
	go updateAITaskNumWhenDeleteJob(job)
	return nil
}

// updateAITaskNumWhenDeleteJob refreshes the owning repository's AI task
// counter (ai_task_cnt) after a cloudbrain job has been deleted.
//
// The caller may pass a job struct that carries only the ID (RepoID == 0);
// in that case the soft-deleted row is re-read with Unscoped to recover the
// repository ID before recounting.
func updateAITaskNumWhenDeleteJob(job *Cloudbrain) {
	repoID := job.RepoID
	if repoID == 0 {
		deleted := &Cloudbrain{}
		has, err := x.ID(job.ID).Unscoped().Get(deleted)
		if err != nil {
			log.Error("updateAITaskNumWhenDeleteJob error.%v", err)
			return
		}
		if !has {
			// Row is gone entirely; nothing to recount.
			return
		}
		repoID = deleted.RepoID
	}

	if repoID > 0 {
		// Original code dropped this error; log it so a stale counter is visible.
		if err := ResetRepoAITaskNum(repoID); err != nil {
			log.Error("updateAITaskNumWhenDeleteJob error.%v", err)
		}
	}
}

func GetCloudbrainByName(jobName string) (*Cloudbrain, error) {
cb := &Cloudbrain{JobName: jobName}
return getRepoCloudBrain(cb)
@@ -2138,7 +2159,7 @@ func RestartCloudbrain(old *Cloudbrain, new *Cloudbrain) (err error) {
}

go IncreaseDatasetUseCount(new.Uuid)
go ResetRepoAITaskNum(new.RepoID)
return nil
}
func CloudbrainAll(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) {


+ 105
- 0
models/repo.go View File

@@ -2429,6 +2429,75 @@ func CheckRepoStats(ctx context.Context) error {
}
}
// ***** END: Repository.NumForks *****

// ***** START: Repository.DatasetCnt *****
desc = "repository count 'dataset_cnt'"
results, err = x.Query("SELECT repository.id FROM `repository` WHERE repository.dataset_cnt!=(select count(1) from attachment inner join dataset on attachment.dataset_id = dataset.id where dataset.repo_id = repository.id)")
if err != nil {
log.Error("Select %s: %v", desc, err)
} else {
for _, result := range results {
id := com.StrTo(result["id"]).MustInt64()
select {
case <-ctx.Done():
log.Warn("CheckRepoStats: Cancelled")
return ErrCancelledf("during %s for repo ID %d", desc, id)
default:
}
log.Trace("Updating %s: %d", desc, id)
err = ResetRepoDatasetNum(id)
if err != nil {
log.Error("Update %s[%d]: %v", desc, id, err)
}
}
}
// ***** END: Repository.DatasetCnt *****

// ***** START: Repository.ModelCnt *****
desc = "repository count 'model_cnt'"
results, err = x.Query("SELECT repository.id FROM `repository` WHERE repository.model_cnt!=(select count(1) from ai_model_manage where repository.id = ai_model_manage.repo_id and ai_model_manage.size > 0 )")
if err != nil {
log.Error("Select %s: %v", desc, err)
} else {
for _, result := range results {
id := com.StrTo(result["id"]).MustInt64()
select {
case <-ctx.Done():
log.Warn("CheckRepoStats: Cancelled")
return ErrCancelledf("during %s for repo ID %d", desc, id)
default:
}
log.Trace("Updating %s: %d", desc, id)
err = ResetRepoModelNum(id)
if err != nil {
log.Error("Update %s[%d]: %v", desc, id, err)
}
}
}
// ***** END: Repository.ModelCnt *****

// ***** START: Repository.AiTaskCnt *****
desc = "repository count 'ai_task_cnt'"
results, err = x.Query("SELECT repository.id FROM `repository` WHERE repository.ai_task_cnt!=(select count(1) from cloudbrain where repository.id = cloudbrain.repo_id and (cloudbrain.deleted_at is null or cloudbrain.deleted_at = '0001-01-01 00:00:00') )")
if err != nil {
log.Error("Select %s: %v", desc, err)
} else {
for _, result := range results {
id := com.StrTo(result["id"]).MustInt64()
select {
case <-ctx.Done():
log.Warn("CheckRepoStats: Cancelled")
return ErrCancelledf("during %s for repo ID %d", desc, id)
default:
}
log.Trace("Updating %s: %d", desc, id)
err = ResetRepoAITaskNum(id)
if err != nil {
log.Error("Update %s[%d]: %v", desc, id, err)
}
}
}
// ***** END: Repository.AiTaskCnt *****
return nil
}

@@ -2825,3 +2894,39 @@ func ReadLatestFileInRepo(userName, repoName, refName, treePath string) (*RepoFi
}
return &RepoFile{CommitId: commitId, Content: d}, nil
}

// ResetRepoAITaskNum recounts the repository's cloudbrain tasks and persists
// the result into the ai_task_cnt column.
func ResetRepoAITaskNum(repoId int64) error {
	count, err := x.Where("repo_id = ? ", repoId).Count(&Cloudbrain{})
	if err != nil {
		return err
	}
	_, err = x.Cols("ai_task_cnt").
		Where("id = ?", repoId).
		Update(&Repository{AiTaskCnt: count})
	return err
}

// ResetRepoDatasetNum recounts the attachments of the repository's datasets
// and persists the result into the dataset_cnt column.
func ResetRepoDatasetNum(repoId int64) error {
	count, err := x.Table("attachment").
		Join("inner", "dataset", "attachment.dataset_id = dataset.id").
		Where("dataset.repo_id = ?", repoId).
		Count()
	if err != nil {
		return err
	}
	_, err = x.Cols("dataset_cnt").
		Where("id = ?", repoId).
		Update(&Repository{DatasetCnt: count})
	return err
}

// ResetRepoModelNum recounts the repository's non-empty models (size > 0)
// and persists the result into the model_cnt column.
func ResetRepoModelNum(repoId int64) error {
	count, err := x.Where("repo_id = ? and size > 0 ", repoId).Count(&AiModelManage{})
	if err != nil {
		return err
	}
	_, err = x.Cols("model_cnt").
		Where("id = ?", repoId).
		Update(&Repository{ModelCnt: count})
	return err
}

+ 19
- 4
routers/repo/ai_model_manage.go View File

@@ -2,6 +2,7 @@ package repo

import (
"archive/zip"
"code.gitea.io/gitea/services/repository"
"encoding/json"
"errors"
"fmt"
@@ -120,6 +121,9 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
if err != nil {
return "", err
}
if modelSize > 0 {
go repository.ResetRepoModelNum(aiTask.RepoID)
}
if len(lastNewModelId) > 0 {
//update status and version count
models.ModifyModelNewProperty(lastNewModelId, MODEL_NOT_LATEST, 0)
@@ -305,13 +309,14 @@ func getSize(files []storage.FileInfo) int64 {
func UpdateModelSize(modeluuid string) {
model, err := models.QueryModelById(modeluuid)
if err == nil {
var size int64
if model.Type == models.TypeCloudBrainOne {
if strings.HasPrefix(model.Path, setting.Attachment.Minio.Bucket+"/"+Model_prefix) {
files, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, model.Path[len(setting.Attachment.Minio.Bucket)+1:])
if err != nil {
log.Info("Failed to query model size from minio. id=" + modeluuid)
}
size := getSize(files)
size = getSize(files)
models.ModifyModelSize(modeluuid, size)
}
} else if model.Type == models.TypeCloudBrainTwo {
@@ -320,10 +325,13 @@ func UpdateModelSize(modeluuid string) {
if err != nil {
log.Info("Failed to query model size from obs. id=" + modeluuid)
}
size := getSize(files)
size = getSize(files)
models.ModifyModelSize(modeluuid, size)
}
}
if model.Size == 0 && size > 0 {
repository.ResetRepoModelNum(model.RepoId)
}
} else {
log.Info("not found model,uuid=" + modeluuid)
}
@@ -438,13 +446,14 @@ func DeleteModelFile(ctx *context.Context) {
fileName := ctx.Query("fileName")
model, err := models.QueryModelById(id)
if err == nil {
var totalSize int64
if model.ModelType == MODEL_LOCAL_TYPE {
if model.Type == models.TypeCloudBrainOne {
bucketName := setting.Attachment.Minio.Bucket
objectName := model.Path[len(bucketName)+1:] + fileName
log.Info("delete bucket=" + bucketName + " path=" + objectName)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
totalSize := storage.MinioGetFilesSize(bucketName, []string{objectName})
totalSize = storage.MinioGetFilesSize(bucketName, []string{objectName})
err := storage.Attachments.DeleteDir(objectName)
if err != nil {
log.Info("Failed to delete model. id=" + id)
@@ -464,7 +473,7 @@ func DeleteModelFile(ctx *context.Context) {
objectName := model.Path[len(setting.Bucket)+1:] + fileName
log.Info("delete bucket=" + setting.Bucket + " path=" + objectName)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
totalSize := storage.ObsGetFilesSize(bucketName, []string{objectName})
totalSize = storage.ObsGetFilesSize(bucketName, []string{objectName})
err := storage.ObsRemoveObject(bucketName, objectName)
if err != nil {
log.Info("Failed to delete model. id=" + id)
@@ -481,6 +490,9 @@ func DeleteModelFile(ctx *context.Context) {
}
}
}
if (model.Size - totalSize) <= 0 {
repository.ResetRepoModelNum(model.RepoId)
}
}
ctx.JSON(200, map[string]string{
"code": "0",
@@ -549,6 +561,9 @@ func deleteModelByID(ctx *context.Context, id string) error {
}
}
}
if model.Size > 0 {
go repository.ResetRepoModelNum(model.RepoId)
}
}
}
return err


+ 3
- 0
routers/repo/attachment.go View File

@@ -29,6 +29,7 @@ import (
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/upload"
"code.gitea.io/gitea/modules/worker"
repo_service "code.gitea.io/gitea/services/repository"
gouuid "github.com/satori/go.uuid"
)

@@ -180,6 +181,7 @@ func DeleteAttachment(ctx *context.Context) {
ctx.Error(500, fmt.Sprintf("DeleteAttachment: %v", err))
return
}
go repo_service.ResetRepoDatasetNumByDatasetId(attach.DatasetID)

attachjson, _ := json.Marshal(attach)
labelmsg.SendDeleteAttachToLabelSys(string(attachjson))
@@ -894,6 +896,7 @@ func CompleteMultipart(ctx *context.Context) {
return
}
attachment.UpdateDatasetUpdateUnix()
go repo_service.ResetRepoDatasetNumByDatasetId(dataset.ID)
repository, _ := models.GetRepositoryByID(dataset.RepoID)
notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment)
if attachment.DatasetID != 0 {


+ 21
- 6
services/repository/repository.go View File

@@ -5,18 +5,17 @@
package repository

import (
"fmt"
"io/ioutil"
"net/http"
"os"
"strings"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/notification"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
pull_service "code.gitea.io/gitea/services/pull"
"fmt"
"io/ioutil"
"net/http"
"os"
"strings"
)

const SHELL_FLAG_ON = 1
@@ -332,3 +331,19 @@ func IsUploadFileInvalidErr(err error) bool {
_, ok := err.(UploadFileInvalidErr)
return ok
}

// ResetRepoDatasetNumByDatasetId looks up the dataset's owning repository
// and recalculates that repository's dataset counter.
func ResetRepoDatasetNumByDatasetId(datasetID int64) error {
	ds, err := models.GetDatasetByID(datasetID)
	if err != nil {
		return err
	}
	return models.ResetRepoDatasetNum(ds.RepoID)
}

// ResetRepoModelNum recalculates and persists the repository's model count
// by delegating to the models layer.
func ResetRepoModelNum(repoId int64) error {
	return models.ResetRepoModelNum(repoId)
}

// ResetRepoAITaskNum recalculates and persists the repository's AI task count
// by delegating to the models layer.
func ResetRepoAITaskNum(repoId int64) error {
	return models.ResetRepoAITaskNum(repoId)
}

Loading…
Cancel
Save