
dataset.go 17 kB

package repo

import (
    "encoding/json"
    "fmt"
    "net/http"
    "sort"
    "strconv"
    "strings"
    "unicode/utf8"

    "code.gitea.io/gitea/models"
    "code.gitea.io/gitea/modules/auth"
    "code.gitea.io/gitea/modules/base"
    "code.gitea.io/gitea/modules/context"
    "code.gitea.io/gitea/modules/log"
    "code.gitea.io/gitea/modules/setting"
)

const (
    tplIndex         base.TplName = "repo/datasets/index"
    tplDatasetCreate base.TplName = "repo/datasets/create"
    tplDatasetEdit   base.TplName = "repo/datasets/edit"
    taskstplIndex    base.TplName = "repo/datasets/tasks/index"
)

// MustEnableDataset checks that the current user can read the repository's
// dataset unit and responds with 404 Not Found otherwise.
func MustEnableDataset(ctx *context.Context) {
    if !ctx.Repo.CanRead(models.UnitTypeDatasets) {
        ctx.NotFound("MustEnableDataset", nil)
        return
    }
}
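
// newFilterPrivateAttachments filters out private attachments that the current
// user may not see. Users with write permission on the dataset unit, members of
// the owning organization, and repository collaborators get the full list;
// everyone else only gets public attachments.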
func newFilterPrivateAttachments(ctx *context.Context, list []*models.Attachment, repo *models.Repository) []*models.Attachment {
    if ctx.Repo.CanWrite(models.UnitTypeDatasets) {
        log.Info("can write.")
        return list
    } else {
        if repo.Owner == nil {
            repo.GetOwner()
        }
        permission := false
        if repo.Owner.IsOrganization() && ctx.User != nil {
            if repo.Owner.IsUserPartOfOrg(ctx.User.ID) {
                log.Info("user is member of org.")
                permission = true
            }
        }
        if !permission && ctx.User != nil {
            isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
            if isCollaborator {
                log.Info("collaborator can access the attachment.")
                permission = true
            }
        }
        var publicList []*models.Attachment
        for _, attach := range list {
            if !attach.IsPrivate {
                publicList = append(publicList, attach)
            } else {
                if permission {
                    publicList = append(publicList, attach)
                }
            }
        }
        return publicList
    }
}
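
// QueryDataSet returns the dataset attachments of the current repository for the
// requested cloudbrain type, filtered by visibility and sorted by creation time
// in descending order.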
func QueryDataSet(ctx *context.Context) []*models.Attachment {
    repo := ctx.Repo.Repository
    dataset, err := models.GetDatasetByRepo(repo)
    if err != nil {
        log.Error("dataset not found for repository: %v", err)
        ctx.NotFound("GetDatasetByRepo", err)
        return nil
    }
    if ctx.Query("type") == "" {
        log.Error("the type parameter is required")
        ctx.NotFound("type error", nil)
        return nil
    }
    err = models.GetDatasetAttachments(ctx.QueryInt("type"), ctx.IsSigned, ctx.User, dataset)
    if err != nil {
        ctx.ServerError("GetDatasetAttachments", err)
        return nil
    }
    attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo)
    ctx.Data["SortType"] = ctx.Query("sort")
    sort.Slice(attachments, func(i, j int) bool {
        return attachments[i].CreatedUnix > attachments[j].CreatedUnix
    })
    return attachments
}
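
// DatasetIndex renders the dataset index page of a repository, listing its
// attachments with sorting and pagination taken from the query parameters.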
func DatasetIndex(ctx *context.Context) {
    log.Info("dataset index")
    MustEnableDataset(ctx)
    ctx.Data["PageIsDataset"] = true
    ctx.Data["SortType"] = ctx.Query("sort")
    repo := ctx.Repo.Repository
    dataset, err := models.GetDatasetByRepo(repo)
    ctx.Data["CanWrite"] = ctx.Repo.CanWrite(models.UnitTypeDatasets)
    if err != nil {
        log.Warn("query dataset, not found.")
        ctx.HTML(200, tplIndex)
        return
    }
    cloudbrainType := -1
    if ctx.Query("type") != "" {
        cloudbrainType = ctx.QueryInt("type")
    }
    err = models.GetDatasetAttachments(cloudbrainType, ctx.IsSigned, ctx.User, dataset)
    if err != nil {
        ctx.ServerError("GetDatasetAttachments", err)
        return
    }
    attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo)
    if ctx.Data["SortType"] == "nameAsc" {
        sort.Slice(attachments, func(i, j int) bool {
            return strings.ToLower(attachments[i].Name) < strings.ToLower(attachments[j].Name)
        })
    } else if ctx.Data["SortType"] == "nameDesc" {
        sort.Slice(attachments, func(i, j int) bool {
            return strings.ToLower(attachments[i].Name) > strings.ToLower(attachments[j].Name)
        })
    } else if ctx.Data["SortType"] == "sizeAsc" {
        sort.Slice(attachments, func(i, j int) bool {
            return attachments[i].Size < attachments[j].Size
        })
    } else if ctx.Data["SortType"] == "sizeDesc" {
        sort.Slice(attachments, func(i, j int) bool {
            return attachments[i].Size > attachments[j].Size
        })
    } else if ctx.Data["SortType"] == "timeAsc" {
        sort.Slice(attachments, func(i, j int) bool {
            return attachments[i].CreatedUnix < attachments[j].CreatedUnix
        })
    } else {
        sort.Slice(attachments, func(i, j int) bool {
            return attachments[i].CreatedUnix > attachments[j].CreatedUnix
        })
    }
    page := ctx.QueryInt("page")
    if page <= 0 {
        page = 1
    }
    pagesize := ctx.QueryInt("pagesize")
    if pagesize <= 0 {
        pagesize = 10
    }
    pager := context.NewPagination(len(attachments), pagesize, page, 5)
    pageAttachments := getPageAttachments(attachments, page, pagesize)
    // load the uploader of each attachment
    for _, attachment := range pageAttachments {
        uploader, _ := models.GetUserByID(attachment.UploaderID)
        attachment.Uploader = uploader
        if !strings.HasSuffix(attachment.Name, ".zip") && !strings.HasSuffix(attachment.Name, ".tar.gz") {
            attachment.DecompressState = -1 // not a compressed archive
        }
    }
    ctx.Data["Page"] = pager
    ctx.Data["Title"] = ctx.Tr("dataset.show_dataset")
    ctx.Data["Link"] = ctx.Repo.RepoLink + "/datasets"
    ctx.Data["dataset"] = dataset
    ctx.Data["Attachments"] = pageAttachments
    ctx.Data["IsOwner"] = true
    ctx.Data["StoreType"] = setting.Attachment.StoreType
    ctx.Data["Type"] = cloudbrainType
    renderAttachmentSettings(ctx)
    ctx.HTML(200, tplIndex)
}
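
// getPageAttachments returns the slice of attachments that belongs to the given
// page, or nil if the page is out of range.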
func getPageAttachments(attachments []*models.Attachment, page int, pagesize int) []*models.Attachment {
    begin := (page - 1) * pagesize
    end := page * pagesize
    if begin > len(attachments)-1 {
        return nil
    }
    if end > len(attachments)-1 {
        return attachments[begin:]
    } else {
        return attachments[begin:end]
    }
}
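
// CreateDataset renders the dataset creation page.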
func CreateDataset(ctx *context.Context) {
    MustEnableDataset(ctx)
    ctx.Data["PageIsDataset"] = true
    ctx.HTML(200, tplDatasetCreate)
}
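
// EditDataset renders the edit page for the dataset identified by the ":id" URL
// parameter.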
func EditDataset(ctx *context.Context) {
    MustEnableDataset(ctx)
    ctx.Data["PageIsDataset"] = true
    datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)
    dataset, _ := models.GetDatasetByID(datasetId)
    if dataset == nil {
        ctx.Error(http.StatusNotFound, "")
        return
    }
    ctx.Data["Dataset"] = dataset
    ctx.HTML(200, tplDatasetEdit)
}
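
// CreateDatasetPost handles the dataset creation form: it validates the title and
// description, then creates the dataset with a status derived from the repository
// visibility.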
func CreateDatasetPost(ctx *context.Context, form auth.CreateDatasetForm) {
    dataset := &models.Dataset{}
    if !NamePattern.MatchString(form.Title) {
        ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err")))
        return
    }
    if utf8.RuneCountInString(form.Description) > 1024 {
        ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 1024)))
        return
    }
    dataset.RepoID = ctx.Repo.Repository.ID
    dataset.UserID = ctx.User.ID
    dataset.Category = form.Category
    dataset.Task = form.Task
    dataset.Title = form.Title
    dataset.License = form.License
    dataset.Description = form.Description
    dataset.DownloadTimes = 0
    if ctx.Repo.Repository.IsPrivate {
        dataset.Status = 0
    } else {
        dataset.Status = 1
    }
    err := models.CreateDataset(dataset)
    if err != nil {
        log.Error("failed to create dataset: %v", err)
        ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.create_dataset_fail")))
    } else {
        ctx.JSON(http.StatusOK, models.BaseOKMessage)
    }
}
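
// EditDatasetPost handles the dataset edit form: it validates the title and
// description, then updates the stored dataset metadata.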
func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) {
    ctx.Data["PageIsDataset"] = true
    ctx.Data["Title"] = ctx.Tr("dataset.edit_dataset")
    if !NamePattern.MatchString(form.Title) {
        ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err")))
        return
    }
    if utf8.RuneCountInString(form.Description) > 1024 {
        ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 1024)))
        return
    }
    rel, err := models.GetDatasetByID(form.ID)
    ctx.Data["dataset"] = rel
    if err != nil {
        log.Error("failed to query dataset: %v", err)
        ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail")))
        return
    }
    rel.Title = form.Title
    rel.Description = form.Description
    rel.Category = form.Category
    rel.Task = form.Task
    rel.License = form.License
    if err = models.UpdateDataset(models.DefaultDBContext(), rel); err != nil {
        ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail")))
        return
    }
    ctx.JSON(http.StatusOK, models.BaseOKMessage)
}
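
// DatasetAction stars or unstars a dataset for the current user depending on the
// ":action" URL parameter.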
func DatasetAction(ctx *context.Context) {
    var err error
    datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)
    switch ctx.Params(":action") {
    case "star":
        err = models.StarDataset(ctx.User.ID, datasetId, true)
    case "unstar":
        err = models.StarDataset(ctx.User.ID, datasetId, false)
    }
    if err != nil {
        ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action"))))
    } else {
        ctx.JSON(http.StatusOK, models.BaseOKMessage)
    }
}
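
// CurrentRepoDataset returns, as JSON, the zip attachments of the current
// repository's dataset that match the query keyword and cloudbrain type.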
func CurrentRepoDataset(ctx *context.Context) {
    page := ctx.QueryInt("page")
    cloudbrainType := ctx.QueryInt("type")
    keyword := strings.Trim(ctx.Query("q"), " ")
    repo := ctx.Repo.Repository
    var datasetIDs []int64
    dataset, err := models.GetDatasetByRepo(repo)
    if err != nil {
        ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err)))
        return
    }
    datasetIDs = append(datasetIDs, dataset.ID)
    datasets, count, err := models.Attachments(&models.AttachmentsOptions{
        ListOptions: models.ListOptions{
            Page:     page,
            PageSize: setting.UI.DatasetPagingNum,
        },
        Keyword:         keyword,
        NeedDatasetIDs:  true,
        DatasetIDs:      datasetIDs,
        Type:            cloudbrainType,
        NeedIsPrivate:   false,
        JustNeedZipFile: true,
        NeedRepoInfo:    true,
    })
    if err != nil {
        ctx.ServerError("datasets", err)
        return
    }
    data, err := json.Marshal(datasets)
    if err != nil {
        log.Error("json.Marshal failed: %s", err.Error())
        ctx.JSON(200, map[string]string{
            "result_code": "-1",
            "error_msg":   err.Error(),
            "data":        "",
        })
        return
    }
    ctx.JSON(200, map[string]string{
        "result_code": "0",
        "data":        string(data),
        "count":       strconv.FormatInt(count, 10),
    })
}
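
// MyDatasets returns, as JSON, the zip attachments uploaded by the current user
// that match the query keyword and cloudbrain type.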
func MyDatasets(ctx *context.Context) {
    page := ctx.QueryInt("page")
    cloudbrainType := ctx.QueryInt("type")
    keyword := strings.Trim(ctx.Query("q"), " ")
    uploaderID := ctx.User.ID
    datasets, count, err := models.Attachments(&models.AttachmentsOptions{
        ListOptions: models.ListOptions{
            Page:     page,
            PageSize: setting.UI.DatasetPagingNum,
        },
        Keyword:         keyword,
        NeedDatasetIDs:  false,
        UploaderID:      uploaderID,
        Type:            cloudbrainType,
        NeedIsPrivate:   false,
        JustNeedZipFile: true,
        NeedRepoInfo:    true,
        RecommendOnly:   ctx.QueryBool("recommend"),
    })
    if err != nil {
        ctx.ServerError("datasets", err)
        return
    }
    data, err := json.Marshal(datasets)
    if err != nil {
        log.Error("json.Marshal failed: %s", err.Error())
        ctx.JSON(200, map[string]string{
            "result_code": "-1",
            "error_msg":   err.Error(),
            "data":        "",
        })
        return
    }
    ctx.JSON(200, map[string]string{
        "result_code": "0",
        "data":        string(data),
        "count":       strconv.FormatInt(count, 10),
    })
}
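
// PublicDataset returns, as JSON, all public zip attachments that match the query
// keyword and cloudbrain type.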
func PublicDataset(ctx *context.Context) {
    page := ctx.QueryInt("page")
    cloudbrainType := ctx.QueryInt("type")
    keyword := strings.Trim(ctx.Query("q"), " ")
    datasets, count, err := models.Attachments(&models.AttachmentsOptions{
        ListOptions: models.ListOptions{
            Page:     page,
            PageSize: setting.UI.DatasetPagingNum,
        },
        Keyword:         keyword,
        NeedDatasetIDs:  false,
        NeedIsPrivate:   true,
        IsPrivate:       false,
        Type:            cloudbrainType,
        JustNeedZipFile: true,
        NeedRepoInfo:    true,
        RecommendOnly:   ctx.QueryBool("recommend"),
    })
    if err != nil {
        ctx.ServerError("datasets", err)
        return
    }
    data, err := json.Marshal(datasets)
    if err != nil {
        log.Error("json.Marshal failed: %s", err.Error())
        ctx.JSON(200, map[string]string{
            "result_code": "-1",
            "error_msg":   err.Error(),
            "data":        "",
        })
        return
    }
    ctx.JSON(200, map[string]string{
        "result_code": "0",
        "data":        string(data),
        "count":       strconv.FormatInt(count, 10),
    })
}
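
// MyFavoriteDataset returns, as JSON, the attachments of the datasets starred by
// the current user. Datasets owned by the user or shared with them as a
// collaborator are included regardless of visibility; other starred datasets are
// included only if public.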
func MyFavoriteDataset(ctx *context.Context) {
    UserId := ctx.User.ID
    cloudbrainType := ctx.QueryInt("type")
    keyword := strings.Trim(ctx.Query("q"), " ")
    var NotColDatasetIDs []int64
    var IsColDatasetIDs []int64
    datasetStars, err := models.GetDatasetStarByUser(ctx.User)
    if err != nil {
        log.Error("GetDatasetStarByUser failed: %s", err.Error())
        ctx.JSON(200, map[string]string{
            "result_code": "-1",
            "error_msg":   err.Error(),
            "data":        "",
        })
        return
    }
    // If the dataset has been deleted, it is not counted.
    for _, datasetStar := range datasetStars {
        IsExist, repo, dataset, err := IsDatasetStarExist(datasetStar)
        if err != nil {
            log.Error("IsDatasetStarExist error: %s", err.Error())
        }
        if IsExist {
            DatasetIsCollaborator := DatasetIsCollaborator(ctx, dataset)
            if repo.OwnerID == ctx.User.ID || DatasetIsCollaborator {
                IsColDatasetIDs = append(IsColDatasetIDs, datasetStar.DatasetID)
            } else {
                NotColDatasetIDs = append(NotColDatasetIDs, datasetStar.DatasetID)
            }
        }
    }
    NotColDatasets, NotColcount, err := models.Attachments(&models.AttachmentsOptions{
        Keyword:         keyword,
        NeedDatasetIDs:  true,
        DatasetIDs:      NotColDatasetIDs,
        NeedIsPrivate:   true,
        IsPrivate:       false,
        Type:            cloudbrainType,
        JustNeedZipFile: true,
        NeedRepoInfo:    true,
        RecommendOnly:   ctx.QueryBool("recommend"),
        UserId:          UserId,
    })
    if err != nil {
        ctx.ServerError("datasets", err)
        return
    }
    // If the user is a collaborator, there is no need to check whether the dataset is private or public.
    IsColDatasets, IsColcount, err := models.Attachments(&models.AttachmentsOptions{
        Keyword:         keyword,
        NeedDatasetIDs:  true,
        DatasetIDs:      IsColDatasetIDs,
        NeedIsPrivate:   false,
        Type:            cloudbrainType,
        JustNeedZipFile: true,
        NeedRepoInfo:    true,
        RecommendOnly:   ctx.QueryBool("recommend"),
        UserId:          UserId,
    })
    if err != nil {
        ctx.ServerError("datasets", err)
        return
    }
    for _, NotColDataset := range NotColDatasets {
        IsColDatasets = append(IsColDatasets, NotColDataset)
    }
    datasets := IsColDatasets
    count := NotColcount + IsColcount
    sort.Slice(datasets, func(i, j int) bool {
        return datasets[i].Attachment.CreatedUnix > datasets[j].Attachment.CreatedUnix
    })
    page := ctx.QueryInt("page")
    if page <= 0 {
        page = 1
    }
    pagesize := ctx.QueryInt("pagesize")
    if pagesize <= 0 {
        pagesize = 5
    }
    pageDatasetsInfo := getPageDatasets(datasets, page, pagesize)
    if pageDatasetsInfo == nil {
        ctx.JSON(200, map[string]string{
            "result_code": "0",
            "data":        "[]",
            "count":       strconv.FormatInt(count, 10),
        })
        return
    }
    data, err := json.Marshal(pageDatasetsInfo)
    log.Info("data: %s", data)
    if err != nil {
        log.Error("json.Marshal failed: %s", err.Error())
        ctx.JSON(200, map[string]string{
            "result_code": "-1",
            "error_msg":   err.Error(),
            "data":        "",
        })
        return
    }
    ctx.JSON(200, map[string]string{
        "result_code": "0",
        "data":        string(data),
        "count":       strconv.FormatInt(count, 10),
    })
}
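
// getPageDatasets returns the slice of attachment infos that belongs to the given
// page, or nil if the page is out of range.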
func getPageDatasets(AttachmentInfos []*models.AttachmentInfo, page int, pagesize int) []*models.AttachmentInfo {
    begin := (page - 1) * pagesize
    end := page * pagesize
    if begin > len(AttachmentInfos)-1 {
        return nil
    }
    if end > len(AttachmentInfos)-1 {
        return AttachmentInfos[begin:]
    } else {
        return AttachmentInfos[begin:end]
    }
}
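
// getTotalPage returns the number of pages needed to show total items with the
// given page size.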
func getTotalPage(total int64, pageSize int) int {
    another := 0
    if int(total)%pageSize != 0 {
        another = 1
    }
    return int(total)/pageSize + another
}
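
// GetDatasetStatus returns, as JSON, the decompress state of the attachment
// identified by the ":uuid" URL parameter.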
func GetDatasetStatus(ctx *context.Context) {
    var (
        err error
    )
    UUID := ctx.Params(":uuid")
    attachment, err := models.GetAttachmentByUUID(UUID)
    if err != nil {
        log.Error("GetAttachmentByUUID failed: %s", err.Error())
        ctx.JSON(200, map[string]string{
            "result_code": "-1",
            "error_msg":   err.Error(),
            "data":        "",
        })
        return
    }
    ctx.JSON(200, map[string]string{
        "result_code":      "0",
        "UUID":             UUID,
        "AttachmentStatus": fmt.Sprint(attachment.DecompressState),
    })
}
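
// DatasetIsCollaborator reports whether the current user can access the dataset's
// repository as an organization team member, organization owner, or repository
// collaborator.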
func DatasetIsCollaborator(ctx *context.Context, dataset *models.Dataset) bool {
    repo, err := models.GetRepositoryByID(dataset.RepoID)
    if err != nil {
        log.Error("query repo error: %s", err.Error())
    } else {
        repo.GetOwner()
        if ctx.User != nil {
            if repo.Owner.IsOrganization() {
                if repo.Owner.IsUserPartOfOrg(ctx.User.ID) {
                    for _, t := range repo.Owner.Teams {
                        if t.IsMember(ctx.User.ID) && t.HasRepository(repo.ID) {
                            return true
                        }
                    }
                    isOwner, _ := models.IsOrganizationOwner(repo.OwnerID, ctx.User.ID)
                    if isOwner {
                        return isOwner
                    }
                    return false
                }
            }
            isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
            if isCollaborator {
                return true
            }
        }
    }
    return false
}
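
// IsDatasetStarExist reports whether the dataset referenced by a star record still
// exists, and returns its repository and dataset when it does.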
func IsDatasetStarExist(datasetStar *models.DatasetStar) (bool, *models.Repository, *models.Dataset, error) {
    dataset, err := models.GetDatasetByID(datasetStar.DatasetID)
    if err != nil {
        log.Error("query dataset error: %s", err.Error())
        return false, nil, nil, err
    } else {
        repo, err := models.GetRepositoryByID(dataset.RepoID)
        if err != nil {
            log.Error("GetRepositoryByID error: %s", err.Error())
            return false, nil, nil, err
        }
        return true, repo, dataset, nil
    }
}