
dataset.go 15 kB

package models

import (
	"errors"
	"fmt"
	"sort"
	"strings"

	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/timeutil"

	"xorm.io/builder"
)

const (
	DatasetStatusPrivate int32 = iota
	DatasetStatusPublic
	DatasetStatusDeleted
)

type Dataset struct {
	ID            int64  `xorm:"pk autoincr"`
	Title         string `xorm:"INDEX NOT NULL"`
	Status        int32  `xorm:"INDEX"` // normal_private: 0, public: 1, is_delete: 2
	Category      string
	Description   string `xorm:"TEXT"`
	DownloadTimes int64
	UseCount      int64 `xorm:"DEFAULT 0"`
	NumStars      int   `xorm:"INDEX NOT NULL DEFAULT 0"`
	Recommend     bool  `xorm:"INDEX NOT NULL DEFAULT false"`
	License       string
	Task          string
	ReleaseID     int64              `xorm:"INDEX"`
	UserID        int64              `xorm:"INDEX"`
	RepoID        int64              `xorm:"INDEX"`
	Repo          *Repository        `xorm:"-"`
	CreatedUnix   timeutil.TimeStamp `xorm:"INDEX created"`
	UpdatedUnix   timeutil.TimeStamp `xorm:"INDEX updated"`

	User        *User         `xorm:"-"`
	Attachments []*Attachment `xorm:"-"`
}

type DatasetWithStar struct {
	Dataset
	IsStaring bool
}

func (d *Dataset) IsPrivate() bool {
	switch d.Status {
	case DatasetStatusPrivate:
		return true
	case DatasetStatusPublic:
		return false
	case DatasetStatusDeleted:
		return false
	default:
		return false
	}
}

type DatasetList []*Dataset

func (datasets DatasetList) loadAttributes(e Engine) error {
	if len(datasets) == 0 {
		return nil
	}

	set := make(map[int64]struct{})
	userIdSet := make(map[int64]struct{})
	datasetIDs := make([]int64, len(datasets))
	for i := range datasets {
		userIdSet[datasets[i].UserID] = struct{}{}
		set[datasets[i].RepoID] = struct{}{}
		datasetIDs[i] = datasets[i].ID
	}

	// Load owners.
	users := make(map[int64]*User, len(userIdSet))
	repos := make(map[int64]*Repository, len(set))
	if err := e.
		Where("id > 0").
		In("id", keysInt64(userIdSet)).
		Cols("id", "lower_name", "name", "full_name", "email").
		Find(&users); err != nil {
		return fmt.Errorf("find users: %v", err)
	}
	if err := e.
		Where("id > 0").
		In("id", keysInt64(set)).
		Cols("id", "owner_id", "owner_name", "lower_name", "name", "description", "alias", "lower_alias").
		Find(&repos); err != nil {
		return fmt.Errorf("find repos: %v", err)
	}

	for i := range datasets {
		datasets[i].User = users[datasets[i].UserID]
		datasets[i].Repo = repos[datasets[i].RepoID]
	}
	return nil
}

func (datasets DatasetList) loadAttachmentAttributes(opts *SearchDatasetOptions) error {
	if len(datasets) == 0 {
		return nil
	}
	datasetIDs := make([]int64, len(datasets))
	for i := range datasets {
		datasetIDs[i] = datasets[i].ID
	}
	attachments, err := AttachmentsByDatasetOption(datasetIDs, opts)
	if err != nil {
		return fmt.Errorf("AttachmentsByDatasetOption failed: %v", err)
	}
	permissionMap := make(map[int64]*Permission, len(datasets))
	for _, attachment := range attachments {
		for i := range datasets {
			if attachment.DatasetID == datasets[i].ID {
				if opts.StarByMe {
					permission := permissionMap[datasets[i].ID]
					if permission == nil {
						permissionInstance, err := GetUserRepoPermission(datasets[i].Repo, opts.User)
						if err != nil {
							return fmt.Errorf("GetUserRepoPermission failed: %v", err)
						}
						permission = &permissionInstance
						permissionMap[datasets[i].ID] = permission
					}
					if permission.HasAccess() || !attachment.IsPrivate {
						datasets[i].Attachments = append(datasets[i].Attachments, attachment)
					}
				} else {
					datasets[i].Attachments = append(datasets[i].Attachments, attachment)
				}
			}
		}
	}
	for i := range datasets {
		datasets[i].Repo.Owner = nil
	}
	return nil
}

type SearchDatasetOptions struct {
	Keyword        string
	OwnerID        int64
	User           *User
	RepoID         int64
	IncludePublic  bool
	RecommendOnly  bool
	Category       string
	Task           string
	License        string
	DatasetIDs     []int64 // currently only takes effect when StarByMe is true
	ListOptions
	SearchOrderBy
	IsOwner              bool
	StarByMe             bool
	CloudBrainType       int // 0: cloudbrain, 1: modelarts, -1: all
	PublicOnly           bool
	JustNeedZipFile      bool
	NeedAttachment       bool
	UploadAttachmentByMe bool
}

func CreateDataset(dataset *Dataset) (err error) {
	sess := x.NewSession()
	defer sess.Close()

	if err := sess.Begin(); err != nil {
		return err
	}

	datasetByRepoId := &Dataset{RepoID: dataset.RepoID}
	has, err := sess.Get(datasetByRepoId)
	if err != nil {
		return err
	}
	if has {
		return errors.New("dataset already exists")
	}

	if _, err = sess.Insert(dataset); err != nil {
		return err
	}
	return sess.Commit()
}

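// exampleCreateRepoDataset is a usage sketch, not part of the original file: it shows how a
// caller might create the dataset record bound to a repository. The field choices below are
// illustrative assumptions, not the documented creation flow.
func exampleCreateRepoDataset(repo *Repository) error {
	dataset := &Dataset{
		Title:  repo.Name,
		RepoID: repo.ID,
		UserID: repo.OwnerID,
		Status: DatasetStatusPrivate,
	}
	// CreateDataset refuses to create a second dataset for the same repository.
	return CreateDataset(dataset)
}
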
func RecommendDataset(dataSetId int64, recommend bool) error {
	dataset := Dataset{Recommend: recommend}
	_, err := x.ID(dataSetId).Cols("recommend").Update(dataset)
	return err
}

func SearchDataset(opts *SearchDatasetOptions) (DatasetList, int64, error) {
	cond := SearchDatasetCondition(opts)
	return SearchDatasetByCondition(opts, cond)
}

func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
	var cond = builder.NewCond()
	cond = cond.And(builder.Neq{"dataset.status": DatasetStatusDeleted})
	cond = generateFilterCond(opts, cond)

	if opts.RepoID > 0 {
		cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID})
	}

	if opts.PublicOnly {
		cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
		cond = cond.And(builder.Eq{"attachment.is_private": false})
	} else if opts.IncludePublic {
		cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
		cond = cond.And(builder.Eq{"attachment.is_private": false})
		if opts.OwnerID > 0 {
			subCon := builder.NewCond()
			subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID})
			subCon = generateFilterCond(opts, subCon)
			cond = cond.Or(subCon)
		}
	} else if opts.OwnerID > 0 && !opts.StarByMe && !opts.UploadAttachmentByMe {
		cond = cond.And(builder.Eq{"repository.owner_id": opts.OwnerID})
		if !opts.IsOwner {
			cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
			cond = cond.And(builder.Eq{"attachment.is_private": false})
		}
	}

	if len(opts.DatasetIDs) > 0 {
		if opts.StarByMe {
			cond = cond.And(builder.In("dataset.id", opts.DatasetIDs))
		} else {
			subCon := builder.NewCond()
			subCon = subCon.And(builder.In("dataset.id", opts.DatasetIDs))
			cond = cond.Or(subCon)
		}
	}
	return cond
}

func generateFilterCond(opts *SearchDatasetOptions, cond builder.Cond) builder.Cond {
	if len(opts.Keyword) > 0 {
		cond = cond.And(builder.Or(
			builder.Like{"LOWER(dataset.title)", strings.ToLower(opts.Keyword)},
			builder.Like{"LOWER(dataset.description)", strings.ToLower(opts.Keyword)},
		))
	}
	if len(opts.Category) > 0 {
		cond = cond.And(builder.Eq{"dataset.category": opts.Category})
	}
	if len(opts.Task) > 0 {
		cond = cond.And(builder.Eq{"dataset.task": opts.Task})
	}
	if len(opts.License) > 0 {
		cond = cond.And(builder.Eq{"dataset.license": opts.License})
	}
	if opts.RecommendOnly {
		cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly})
	}
	if opts.JustNeedZipFile {
		cond = cond.And(builder.Gt{"attachment.decompress_state": 0})
	}
	if opts.CloudBrainType >= 0 {
		cond = cond.And(builder.Eq{"attachment.type": opts.CloudBrainType})
	}
	if opts.UploadAttachmentByMe {
		cond = cond.And(builder.Eq{"attachment.uploader_id": opts.OwnerID})
	}

	return cond
}

func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (DatasetList, int64, error) {
	if opts.Page <= 0 {
		opts.Page = 1
	}

	var err error
	sess := x.NewSession()
	defer sess.Close()

	datasets := make(DatasetList, 0, opts.PageSize)
	selectColumnsSql := "distinct dataset.id, dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix, dataset.updated_unix, dataset.num_stars, dataset.recommend, dataset.use_count"

	count, err := sess.Distinct("dataset.id").Join("INNER", "repository", "repository.id = dataset.repo_id").
		Join("INNER", "attachment", "attachment.dataset_id = dataset.id").
		Where(cond).Count(new(Dataset))
	if err != nil {
		return nil, 0, fmt.Errorf("Count: %v", err)
	}

	sess.Select(selectColumnsSql).Join("INNER", "repository", "repository.id = dataset.repo_id").
		Join("INNER", "attachment", "attachment.dataset_id = dataset.id").
		Where(cond).OrderBy(opts.SearchOrderBy.String())
	if opts.PageSize > 0 {
		sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
	}
	if err = sess.Find(&datasets); err != nil {
		return nil, 0, fmt.Errorf("Dataset: %v", err)
	}

	if err = datasets.loadAttributes(sess); err != nil {
		return nil, 0, fmt.Errorf("LoadAttributes: %v", err)
	}

	if opts.NeedAttachment {
		if err = datasets.loadAttachmentAttributes(opts); err != nil {
			return nil, 0, fmt.Errorf("LoadAttachmentAttributes: %v", err)
		}
	}
	return datasets, count, nil
}

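// exampleSearchPublicDatasets is a usage sketch, not part of the original file: it shows one
// way a caller might page through public datasets with SearchDataset. The option values and
// the ordering string are illustrative assumptions.
func exampleSearchPublicDatasets(keyword string, page int) (DatasetList, int64, error) {
	opts := &SearchDatasetOptions{
		Keyword:        keyword,
		IncludePublic:  true,
		NeedAttachment: true, // also load the visible attachments of each result
		CloudBrainType: -1,   // -1: do not filter on attachment type
		ListOptions:    ListOptions{Page: page, PageSize: 10},
		SearchOrderBy:  SearchOrderBy("dataset.updated_unix DESC"),
	}
	return SearchDataset(opts)
}
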
type datasetMetaSearch struct {
	ID  []int64
	Rel []*Dataset
}

func (s datasetMetaSearch) Len() int {
	return len(s.ID)
}
func (s datasetMetaSearch) Swap(i, j int) {
	s.ID[i], s.ID[j] = s.ID[j], s.ID[i]
	s.Rel[i], s.Rel[j] = s.Rel[j], s.Rel[i]
}
func (s datasetMetaSearch) Less(i, j int) bool {
	return s.ID[i] < s.ID[j]
}

func GetDatasetAttachments(typeCloudBrain int, isSigned bool, user *User, rels ...*Dataset) (err error) {
	return getDatasetAttachments(x, typeCloudBrain, isSigned, user, rels...)
}

func getDatasetAttachments(e Engine, typeCloudBrain int, isSigned bool, user *User, rels ...*Dataset) (err error) {
	if len(rels) == 0 {
		return
	}

	// To keep this as efficient as possible, sort all datasets by ID,
	// select attachments by dataset ID, then merge-join them.

	// Sort
	var sortedRels = datasetMetaSearch{ID: make([]int64, len(rels)), Rel: make([]*Dataset, len(rels))}
	var attachments []*Attachment
	for index, element := range rels {
		element.Attachments = []*Attachment{}
		sortedRels.ID[index] = element.ID
		sortedRels.Rel[index] = element
	}
	sort.Sort(sortedRels)

	// Select attachments
	if typeCloudBrain == -1 {
		err = e.
			Asc("dataset_id").
			In("dataset_id", sortedRels.ID).
			Find(&attachments, Attachment{})
		if err != nil {
			return err
		}
	} else {
		err = e.
			Asc("dataset_id").
			In("dataset_id", sortedRels.ID).
			And("type = ?", typeCloudBrain).
			Find(&attachments, Attachment{})
		if err != nil {
			return err
		}
	}

	// Merge join
	var currentIndex = 0
	for _, attachment := range attachments {
		for sortedRels.ID[currentIndex] < attachment.DatasetID {
			currentIndex++
		}

		fileChunks := make([]*FileChunk, 0, 10)
		err = e.
			Where("uuid = ?", attachment.UUID).
			Find(&fileChunks)
		if err != nil {
			return err
		}
		if len(fileChunks) > 0 {
			attachment.Md5 = fileChunks[0].Md5
		} else {
			log.Error("attachment record exists, but no matching file_chunk record was found")
			attachment.Md5 = "no_record"
		}

		attachment.CanDel = CanDelAttachment(isSigned, user, attachment)
		sortedRels.Rel[currentIndex].Attachments = append(sortedRels.Rel[currentIndex].Attachments, attachment)
	}

	return
}

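// exampleLoadRepoDatasetAttachments is a usage sketch, not part of the original file: it loads
// the dataset bound to a repository and then fills its Attachments via GetDatasetAttachments.
// Passing -1 (all attachment types) and a possibly-nil doer are illustrative assumptions.
func exampleLoadRepoDatasetAttachments(repo *Repository, doer *User) (*Dataset, error) {
	dataset, err := GetDatasetByRepo(repo)
	if err != nil {
		return nil, err
	}
	// isSigned reflects whether a signed-in user is making the request.
	if err := GetDatasetAttachments(-1, doer != nil, doer, dataset); err != nil {
		return nil, err
	}
	return dataset, nil
}
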
// AddDatasetAttachments binds the given attachments to a dataset.
func AddDatasetAttachments(DatasetID int64, attachmentUUIDs []string) (err error) {
	// Check attachments
	attachments, err := GetAttachmentsByUUIDs(attachmentUUIDs)
	if err != nil {
		return fmt.Errorf("GetAttachmentsByUUIDs [uuids: %v]: %v", attachmentUUIDs, err)
	}

	for i := range attachments {
		attachments[i].DatasetID = DatasetID
		// The assigned DatasetID is never 0, so AllCols() is not needed; xorm skips zero-value fields by default.
		if _, err = x.ID(attachments[i].ID).Update(attachments[i]); err != nil {
			return fmt.Errorf("update attachment [%d]: %v", attachments[i].ID, err)
		}
	}

	return
}

func UpdateDataset(ctx DBContext, rel *Dataset) error {
	_, err := ctx.e.ID(rel.ID).AllCols().Update(rel)
	return err
}

func IncreaseDatasetUseCount(uuid string) {
	IncreaseAttachmentUseNumber(uuid)

	attachment, _ := GetAttachmentByUUID(uuid)
	if attachment != nil {
		x.Exec("UPDATE `dataset` SET use_count=use_count+1 WHERE id=?", attachment.DatasetID)
	}
}

// GetDatasetByID returns Dataset with given ID.
func GetDatasetByID(id int64) (*Dataset, error) {
	rel := new(Dataset)
	has, err := x.
		ID(id).
		Get(rel)
	if err != nil {
		return nil, err
	} else if !has {
		return nil, ErrDatasetNotExist{id}
	}
	return rel, nil
}

func GetDatasetByRepo(repo *Repository) (*Dataset, error) {
	dataset := &Dataset{RepoID: repo.ID}
	has, err := x.Get(dataset)
	if err != nil {
		return nil, err
	}
	if has {
		return dataset, nil
	}
	return nil, ErrNotExist{repo.ID}
}

func GetDatasetStarByUser(user *User) ([]*DatasetStar, error) {
	datasetStars := make([]*DatasetStar, 0)
	err := x.Cols("id", "uid", "dataset_id", "created_unix").Where("uid=?", user.ID).Find(&datasetStars)
	return datasetStars, err
}

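// exampleSearchStarredDatasets is a usage sketch, not part of the original file: it combines
// GetDatasetStarByUser with SearchDataset to list the datasets a user has starred. Reading
// DatasetID from DatasetStar and the option values below are illustrative assumptions.
func exampleSearchStarredDatasets(user *User) (DatasetList, int64, error) {
	stars, err := GetDatasetStarByUser(user)
	if err != nil {
		return nil, 0, err
	}
	ids := make([]int64, 0, len(stars))
	for _, star := range stars {
		ids = append(ids, star.DatasetID)
	}

	opts := &SearchDatasetOptions{
		User:           user,
		StarByMe:       true,
		DatasetIDs:     ids, // only honoured when StarByMe is true (see the field comment)
		NeedAttachment: true,
		CloudBrainType: -1,
		ListOptions:    ListOptions{Page: 1, PageSize: 10},
		SearchOrderBy:  SearchOrderBy("dataset.updated_unix DESC"),
	}
	return SearchDataset(opts)
}
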
func DeleteDataset(datasetID int64, uid int64) error {
	var err error
	sess := x.NewSession()
	defer sess.Close()

	if err = sess.Begin(); err != nil {
		return err
	}

	dataset := &Dataset{ID: datasetID, UserID: uid}
	has, err := sess.Get(dataset)
	if err != nil {
		return err
	} else if !has {
		return errors.New("dataset not found")
	}

	if cnt, err := sess.ID(datasetID).Delete(new(Dataset)); err != nil {
		return err
	} else if cnt != 1 {
		return errors.New("dataset not found")
	}

	if err = sess.Commit(); err != nil {
		return fmt.Errorf("Commit: %v", err)
	}

	return nil
}

func GetOwnerDatasetByID(id int64, user *User) (*Dataset, error) {
	dataset, err := GetDatasetByID(id)
	if err != nil {
		return nil, err
	}
	if !dataset.IsPrivate() {
		return dataset, nil
	}
	if user != nil && user.ID == dataset.UserID {
		return dataset, nil
	}
	return nil, errors.New("dataset not found")
}

func IncreaseDownloadCount(datasetID int64) error {
	// Update download count.
	if _, err := x.Exec("UPDATE `dataset` SET download_times=download_times+1 WHERE id=?", datasetID); err != nil {
		return fmt.Errorf("increase dataset download count: %v", err)
	}

	return nil
}

func GetCollaboratorDatasetIdsByUserID(userID int64) []int64 {
	var datasets []int64
	_ = x.Table("dataset").Join("INNER", "collaboration", "dataset.repo_id = collaboration.repo_id and collaboration.mode > 0 and collaboration.user_id = ?", userID).
		Cols("dataset.id").Find(&datasets)
	return datasets
}

func GetTeamDatasetIdsByUserID(userID int64) []int64 {
	var datasets []int64
	_ = x.Table("dataset").Join("INNER", "team_repo", "dataset.repo_id = team_repo.repo_id").
		Join("INNER", "team_user", "team_repo.team_id = team_user.team_id and team_user.uid = ?", userID).
		Cols("dataset.id").Find(&datasets)
	return datasets
}