
Merge pull request 'V20221116' (#3243) from V20221116 into develop

Reviewed-on: https://openi.pcl.ac.cn/OpenI/aiforge/pulls/3243
tags/v1.22.11.2
ychao_1983 3 years ago
commit eefdb716c5
100 changed files with 6437 additions and 21769 deletions
  1. README.md (+10, -10)
  2. models/ai_model_manage.go (+90, -58)
  3. models/attachment.go (+2, -140)
  4. models/base_message.go (+21, -0)
  5. models/cloudbrain.go (+58, -6)
  6. models/cloudbrain_static.go (+50, -11)
  7. models/dataset.go (+2, -2)
  8. models/file_chunk.go (+75, -0)
  9. models/models.go (+2, -0)
  10. models/repo.go (+2, -2)
  11. models/resource_queue.go (+3, -3)
  12. models/resource_scene.go (+2, -2)
  13. models/resource_specification.go (+21, -8)
  14. models/user_business_analysis.go (+377, -227)
  15. models/user_business_struct.go (+200, -0)
  16. modules/auth/modelarts.go (+20, -23)
  17. modules/cloudbrain/cloudbrain.go (+6, -6)
  18. modules/convert/cloudbrain.go (+111, -0)
  19. modules/grampus/grampus.go (+4, -4)
  20. modules/grampus/resty.go (+26, -0)
  21. modules/modelarts/modelarts.go (+13, -13)
  22. modules/setting/setting.go (+30, -16)
  23. modules/storage/minio.go (+2, -2)
  24. modules/storage/minio_ext.go (+26, -10)
  25. modules/storage/obs.go (+24, -25)
  26. modules/structs/attachment.go (+45, -0)
  27. modules/structs/cloudbrain.go (+84, -0)
  28. modules/structs/tagger.go (+7, -0)
  29. options/locale/locale_en-US.ini (+6, -2)
  30. options/locale/locale_zh-CN.ini (+6, -2)
  31. package-lock.json (+17, -19346)
  32. routers/admin/cloudbrains.go (+6, -2)
  33. routers/admin/resources.go (+22, -1)
  34. routers/api/v1/api.go (+63, -0)
  35. routers/api/v1/repo/attachments.go (+25, -0)
  36. routers/api/v1/repo/cloudbrain.go (+112, -38)
  37. routers/api/v1/repo/cloudbrain_dashboard.go (+188, -89)
  38. routers/api/v1/repo/datasets.go (+123, -0)
  39. routers/api/v1/repo/images.go (+141, -0)
  40. routers/api/v1/repo/mlops.go (+71, -0)
  41. routers/api/v1/repo/modelarts.go (+16, -41)
  42. routers/api/v1/repo/modelmanage.go (+115, -0)
  43. routers/api/v1/repo/spec.go (+36, -0)
  44. routers/private/internal.go (+4, -1)
  45. routers/repo/ai_model_convert.go (+44, -23)
  46. routers/repo/ai_model_manage.go (+402, -80)
  47. routers/repo/aisafety.go (+2, -2)
  48. routers/repo/attachment.go (+18, -17)
  49. routers/repo/attachment_model.go (+323, -0)
  50. routers/repo/cloudbrain.go (+26, -47)
  51. routers/repo/cloudbrain_statistic.go (+160, -62)
  52. routers/repo/dataset.go (+2, -318)
  53. routers/repo/grampus.go (+73, -62)
  54. routers/repo/modelarts.go (+2, -2)
  55. routers/response/api_response.go (+30, -0)
  56. routers/response/response.go (+5, -1)
  57. routers/response/response_list.go (+2, -2)
  58. routers/routes/routes.go (+20, -4)
  59. routers/user/home.go (+4, -0)
  60. routers/user/notification.go (+2, -2)
  61. services/cloudbrain/cloudbrainTask/count.go (+12, -12)
  62. services/cloudbrain/cloudbrainTask/inference.go (+631, -0)
  63. services/cloudbrain/cloudbrainTask/sync_status.go (+83, -0)
  64. services/cloudbrain/cloudbrainTask/train.go (+1210, -0)
  65. services/cloudbrain/resource/resource_queue.go (+1, -1)
  66. services/cloudbrain/resource/resource_specification.go (+62, -7)
  67. services/cloudbrain/util.go (+46, -7)
  68. templates/admin/cloudbrain/list.tmpl (+6, -6)
  69. templates/admin/cloudbrain/search.tmpl (+4, -6)
  70. templates/admin/cloudbrain/search_dashboard.tmpl (+4, -6)
  71. templates/base/footer_content.tmpl (+21, -2)
  72. templates/base/footer_content_fluid.tmpl (+21, -1)
  73. templates/base/head_navbar.tmpl (+2, -2)
  74. templates/base/head_navbar_fluid.tmpl (+2, -2)
  75. templates/base/head_navbar_home.tmpl (+2, -2)
  76. templates/base/head_navbar_pro.tmpl (+2, -2)
  77. templates/custom/max_log.tmpl (+1, -1)
  78. templates/repo/cloudbrain/inference/new.tmpl (+2, -4)
  79. templates/repo/cloudbrain/inference/show.tmpl (+0, -71)
  80. templates/repo/cloudbrain/trainjob/show.tmpl (+31, -161)
  81. templates/repo/grampus/trainjob/show.tmpl (+32, -25)
  82. templates/repo/modelarts/inferencejob/new.tmpl (+2, -4)
  83. templates/repo/modelarts/trainjob/show.tmpl (+25, -24)
  84. templates/repo/modelmanage/convertIndex.tmpl (+39, -38)
  85. templates/repo/modelmanage/create_local_1.tmpl (+10, -0)
  86. templates/repo/modelmanage/create_local_2.tmpl (+11, -0)
  87. templates/repo/modelmanage/create_online.tmpl (+581, -0)
  88. templates/repo/modelmanage/index.tmpl (+53, -28)
  89. templates/repo/modelmanage/showinfo.tmpl (+5, -530)
  90. templates/repo/modelsafety/show.tmpl (+3, -1)
  91. templates/repo/view_file.tmpl (+2, -1)
  92. templates/user/dashboard/cloudbrains.tmpl (+7, -7)
  93. templates/user/dashboard/navbar.tmpl (+1, -1)
  94. web_src/js/components/Model.vue (+124, -91)
  95. web_src/js/components/basic/editDialog.vue (+1, -1)
  96. web_src/js/components/images/Images.vue (+6, -6)
  97. web_src/js/components/images/adminImages.vue (+2, -2)
  98. web_src/js/features/cloudbrainShow.js (+4, -4)
  99. web_src/js/features/cloudrbanin.js (+1, -1)
  100. web_src/js/features/i18nVue.js (+6, -0)

README.md (+10, -10)

@@ -2,7 +2,7 @@

<h1><img src="public/img/favicon.png" alt="logo" width="30" height="30">AiForge - OpenI AI Development Collaboration Platform</h1>

[![release](https://img.shields.io/badge/release-1.21.11.1-blue)](https://git.openi.org.cn/OpenI/aiforge/releases/latest)
[![release](https://img.shields.io/badge/release-1.21.11.1-blue)](https://openi.pcl.ac.cn/OpenI/aiforge/releases/latest)
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)


@@ -10,7 +10,7 @@

The OpenI AI Development Collaboration Platform is an online web application that provides a collaborative working environment for developing AI algorithms and models. It offers <b>code hosting, dataset management and sharing, free cloud compute resources (GPU/NPU), and shared images</b>, among other features.

[OpenI AI Development Collaboration Platform](https://git.openi.org.cn) is the online service built from this project; you can try it out directly via the link.
[OpenI AI Development Collaboration Platform](https://openi.pcl.ac.cn) is the online service built from this project; you can try it out directly via the link.

This project evolved from [Gitea](https://github.com/go-gitea/gitea): we forked it and extended it with the features AI development needs, such as dataset management and model training. For the code-hosting features, see the [Gitea documentation](https://docs.gitea.io/zh-cn/).

@@ -20,7 +20,7 @@
The backend services cover the AI model development pipeline, including collaborative coding, data management, model debugging, training, inference, and deployment (*model deployment is not yet supported*). For each development stage we also provide rich tooling such as data annotation, data filtering, model conversion, model compression, and code inspection, and we welcome the community to contribute more tools to make development on the platform more efficient.
![System architecture](assets/架构图.png)
## Using the Online Service
For detailed help on using the online service, see this project's [wiki](https://git.openi.org.cn/OpenI/aiforge/wiki).
For detailed help on using the online service, see this project's [wiki](https://openi.pcl.ac.cn/OpenI/aiforge/wiki).
- How to create an account
- How to create an organization and manage member permissions
- How to create a project repository
@@ -39,22 +39,22 @@
[Installing from source](https://docs.gitea.io/zh-cn/install-from-source/)

## License
This project is released under the MIT open-source license; the full terms are in the [LICENSE](https://git.openi.org.cn/OpenI/aiforge/src/branch/develop/LICENSE) file.
This project is released under the MIT open-source license; the full terms are in the [LICENSE](https://openi.pcl.ac.cn/OpenI/aiforge/src/branch/develop/LICENSE) file.


## Need Help?
If you run into problems while using or developing the platform, you can reach us through the following channels:
- Submit an issue online [here](https://git.openi.org.cn/OpenI/aiforge/issues) (click the green **创建任务** button at the top right of the page)
- Submit an issue online [here](https://openi.pcl.ac.cn/OpenI/aiforge/issues) (click the green **创建任务** button at the top right of the page)
- Join the WeChat group for real-time discussion and further support
<img src="https://git.openi.org.cn/OpenI/aiforge/wiki/raw/img/wechatgroup.jpg" width=200px />
<img src="https://openi.pcl.ac.cn/OpenI/aiforge/wiki/raw/img/wechatgroup.jpg" width=200px />

## OpenI Community Beginner Bootcamp:
- Case-based walkthroughs of how to use the community platform, helping beginners without a technical background grow into OpenI community experts (https://git.openi.org.cn/zeizei/OpenI_Learning)
- Case-based walkthroughs of how to use the community platform, helping beginners without a technical background grow into OpenI community experts (https://openi.pcl.ac.cn/zeizei/OpenI_Learning)

## Citing the Platform
If this platform has helped your research, you can add the following to the acknowledgments of your paper:
English: ```Thanks for the support provided by OpenI Community (https://git.openi.org.cn).```
Chinese: ```感谢启智社区提供的技术支持(https://git.openi.org.cn)。```
English: ```Thanks for the support provided by OpenI Community (https://openi.pcl.ac.cn).```
Chinese: ```感谢启智社区提供的技术支持(https://openi.pcl.ac.cn)。```

If your work cites this platform, you are also welcome to submit information about your results to the following open-source project:
https://git.openi.org.cn/OpenIOSSG/references
https://openi.pcl.ac.cn/OpenIOSSG/references

models/ai_model_manage.go (+90, -58)

@@ -12,67 +12,68 @@ import (
)

type AiModelManage struct {
ID string `xorm:"pk"`
Name string `xorm:"INDEX NOT NULL"`
Version string `xorm:"NOT NULL"`
VersionCount int `xorm:"NOT NULL DEFAULT 0"`
New int `xorm:"NOT NULL"`
Type int `xorm:"NOT NULL"`
Size int64 `xorm:"NOT NULL"`
Description string `xorm:"varchar(2000)"`
Label string `xorm:"varchar(1000)"`
Path string `xorm:"varchar(400) NOT NULL"`
DownloadCount int `xorm:"NOT NULL DEFAULT 0"`
Engine int64 `xorm:"NOT NULL DEFAULT 0"`
Status int `xorm:"NOT NULL DEFAULT 0"`
StatusDesc string `xorm:"varchar(500)"`
Accuracy string `xorm:"varchar(1000)"`
AttachmentId string `xorm:"NULL"`
RepoId int64 `xorm:"INDEX NULL"`
CodeBranch string `xorm:"varchar(400) NULL"`
CodeCommitID string `xorm:"NULL"`
UserId int64 `xorm:"NOT NULL"`
UserName string
UserRelAvatarLink string
TrainTaskInfo string `xorm:"text NULL"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
IsCanOper bool
IsCanDelete bool
ID string `xorm:"pk" json:"id"`
Name string `xorm:"INDEX NOT NULL" json:"name"`
ModelType int `xorm:"NULL" json:"modelType"`
Version string `xorm:"NOT NULL" json:"version"`
VersionCount int `xorm:"NOT NULL DEFAULT 0" json:"versionCount"`
New int `xorm:"NOT NULL" json:"new"`
Type int `xorm:"NOT NULL" json:"type"`
Size int64 `xorm:"NOT NULL" json:"size"`
Description string `xorm:"varchar(2000)" json:"description"`
Label string `xorm:"varchar(1000)" json:"label"`
Path string `xorm:"varchar(400) NOT NULL" json:"path"`
DownloadCount int `xorm:"NOT NULL DEFAULT 0" json:"downloadCount"`
Engine int64 `xorm:"NOT NULL DEFAULT 0" json:"engine"`
Status int `xorm:"NOT NULL DEFAULT 0" json:"status"`
StatusDesc string `xorm:"varchar(500)" json:"statusDesc"`
Accuracy string `xorm:"varchar(1000)" json:"accuracy"`
AttachmentId string `xorm:"NULL" json:"attachmentId"`
RepoId int64 `xorm:"INDEX NULL" json:"repoId"`
CodeBranch string `xorm:"varchar(400) NULL" json:"codeBranch"`
CodeCommitID string `xorm:"NULL" json:"codeCommitID"`
UserId int64 `xorm:"NOT NULL" json:"userId"`
UserName string `json:"userName"`
UserRelAvatarLink string `json:"userRelAvatarLink"`
TrainTaskInfo string `xorm:"text NULL" json:"trainTaskInfo"`
CreatedUnix timeutil.TimeStamp `xorm:"created" json:"createdUnix"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated" json:"updatedUnix"`
IsCanOper bool `json:"isCanOper"`
IsCanDelete bool `json:"isCanDelete"`
}

type AiModelConvert struct {
ID string `xorm:"pk"`
Name string `xorm:"INDEX NOT NULL"`
Status string `xorm:"NULL"`
StatusResult string `xorm:"NULL"`
SrcEngine int `xorm:"NOT NULL DEFAULT 0"`
RepoId int64 `xorm:"INDEX NULL"`
ModelId string `xorm:"NOT NULL"`
ModelName string `xorm:"NULL"`
ModelVersion string `xorm:"NOT NULL"`
ModelPath string `xorm:"NULL"`
DestFormat int `xorm:"NOT NULL DEFAULT 0"`
NetOutputFormat int `xorm:"NULL"`
UserId int64 `xorm:"NOT NULL"`
CloudBrainTaskId string `xorm:"NULL"`
ModelArtsVersionId string `xorm:"NULL"`
ContainerID string
ContainerIp string
RunTime int64 `xorm:"NULL"`
TrainJobDuration string
InputShape string `xorm:"varchar(2000)"`
InputDataFormat string `xorm:"NOT NULL"`
Description string `xorm:"varchar(2000)"`
Path string `xorm:"varchar(400) NOT NULL"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
StartTime timeutil.TimeStamp
EndTime timeutil.TimeStamp
UserName string
UserRelAvatarLink string
IsCanOper bool
IsCanDelete bool
ID string `xorm:"pk" json:"id"`
Name string `xorm:"INDEX NOT NULL" json:"name"`
Status string `xorm:"NULL" json:"status"`
StatusResult string `xorm:"NULL" json:"statusResult"`
SrcEngine int `xorm:"NOT NULL DEFAULT 0" json:"srcEngine"`
RepoId int64 `xorm:"INDEX NULL" json:"repoId"`
ModelId string `xorm:"NOT NULL" json:"modelId"`
ModelName string `xorm:"NULL" json:"modelName"`
ModelVersion string `xorm:"NOT NULL" json:"modelVersion"`
ModelPath string `xorm:"NULL" json:"modelPath"`
DestFormat int `xorm:"NOT NULL DEFAULT 0" json:"destFormat"`
NetOutputFormat int `xorm:"NULL" json:"netOutputFormat"`
UserId int64 `xorm:"NOT NULL" json:"userId"`
CloudBrainTaskId string `xorm:"NULL" json:"cloudBrainTaskId"`
ModelArtsVersionId string `xorm:"NULL" json:"modelArtsVersionId"`
ContainerID string `json:"containerID"`
ContainerIp string `json:"containerIp"`
RunTime int64 `xorm:"NULL" json:"runTime"`
TrainJobDuration string `json:"trainJobDuration"`
InputShape string `xorm:"varchar(2000)" json:"inputShape"`
InputDataFormat string `xorm:"NOT NULL" json:"inputDataFormat"`
Description string `xorm:"varchar(2000)" json:"description"`
Path string `xorm:"varchar(400) NOT NULL" json:"path"`
CreatedUnix timeutil.TimeStamp `xorm:"created" json:"createdUnix"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated" json:"updatedUnix"`
StartTime timeutil.TimeStamp `json:"startTime"`
EndTime timeutil.TimeStamp `json:"endTime"`
UserName string `json:"userName"`
UserRelAvatarLink string `json:"userRelAvatarLink"`
IsCanOper bool `json:"isCanOper"`
IsCanDelete bool `json:"isCanDelete"`
}

type AiModelQueryOptions struct {
@@ -287,6 +288,37 @@ func ModifyModelDescription(id string, description string) error {
return nil
}

func ModifyLocalModel(id string, name, label, description string, engine int) error {
var sess *xorm.Session
sess = x.ID(id)
defer sess.Close()
re, err := sess.Cols("name", "label", "description", "engine").Update(&AiModelManage{
Description: description,
Name: name,
Label: label,
Engine: int64(engine),
})
if err != nil {
return err
}
log.Info("success to update description from db.re=" + fmt.Sprint((re)))
return nil
}

func ModifyModelSize(id string, size int64) error {
var sess *xorm.Session
sess = x.ID(id)
defer sess.Close()
re, err := sess.Cols("size").Update(&AiModelManage{
Size: size,
})
if err != nil {
return err
}
log.Info("success to update size from db.re=" + fmt.Sprint((re)))
return nil
}

func ModifyModelStatus(id string, modelSize int64, status int, modelPath string, statusDesc string) error {
var sess *xorm.Session
sess = x.ID(id)


models/attachment.go (+2, -140)

@@ -61,30 +61,6 @@ type AttachmentUsername struct {
Name string
}

type AttachmentInfo struct {
Attachment `xorm:"extends"`
Repo *Repository `xorm:"extends"`
RelAvatarLink string `xorm:"extends"`
UserName string `xorm:"extends"`
Recommend bool `xorm:"-"`
}

type AttachmentsOptions struct {
ListOptions
DatasetIDs []int64
DecompressState int
Type int
UploaderID int64
NeedDatasetIDs bool
NeedIsPrivate bool
IsPrivate bool
JustNeedZipFile bool
NeedRepoInfo bool
Keyword string
RecommendOnly bool
UserId int64
}

func (a *Attachment) AfterUpdate() {
if a.DatasetID > 0 {
datasetIsPublicCount, err := x.Where("dataset_id = ? AND is_private = ?", a.DatasetID, false).Count(new(Attachment))
@@ -158,7 +134,8 @@ func (a *Attachment) S3DownloadURL() string {
if a.Type == TypeCloudBrainOne {
url, _ = storage.Attachments.PresignedGetURL(setting.Attachment.Minio.BasePath+AttachmentRelativePath(a.UUID), a.Name)
} else if a.Type == TypeCloudBrainTwo {
url, _ = storage.ObsGetPreSignedUrl(a.UUID, a.Name)
objectName := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(a.UUID[0:1], a.UUID[1:2], a.UUID, a.Name)), "/")
url, _ = storage.ObsGetPreSignedUrl(objectName, a.Name)
}

return url
@@ -493,19 +470,6 @@ func getPrivateAttachments(e Engine, userID int64) ([]*AttachmentUsername, error
return attachments, nil
}

func getAllUserAttachments(e Engine, userID int64) ([]*AttachmentUsername, error) {
attachments := make([]*AttachmentUsername, 0, 10)
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id "+
"= `user`.id").Where("decompress_state= ? and attachment.type = ? and (uploader_id= ? or is_private = ?)", DecompressStateDone, TypeCloudBrainOne, userID, false).Find(&attachments); err != nil {
return nil, err
}
return attachments, nil
}

func GetAllUserAttachments(userID int64) ([]*AttachmentUsername, error) {
return getAllUserAttachments(x, userID)
}

func getModelArtsUserAttachments(e Engine, userID int64) ([]*AttachmentUsername, error) {
attachments := make([]*AttachmentUsername, 0, 10)
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id "+
@@ -587,7 +551,6 @@ func AttachmentsByDatasetOption(datasets []int64, opts *SearchDatasetOptions) ([
)
}


attachments := make([]*Attachment, 0)
if err := sess.Table(&Attachment{}).Where(cond).Desc("id").
Find(&attachments); err != nil {
@@ -601,107 +564,6 @@ func GetAllAttachmentSize() (int64, error) {
return x.SumInt(&Attachment{}, "size")
}

func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) {
sess := x.NewSession()
defer sess.Close()

var cond = builder.NewCond()
if opts.NeedDatasetIDs {
cond = cond.And(
builder.In("attachment.dataset_id", opts.DatasetIDs),
)
}

if opts.UploaderID > 0 {
cond = cond.And(
builder.Eq{"attachment.uploader_id": opts.UploaderID},
)
}

if (opts.Type) >= 0 {
cond = cond.And(
builder.Eq{"attachment.type": opts.Type},
)
}

if opts.NeedIsPrivate {
cond = cond.And(
builder.Eq{"attachment.is_private": opts.IsPrivate},
)
}
if opts.RecommendOnly {
cond = cond.And(builder.In("attachment.id", builder.Select("attachment.id").
From("attachment").
Join("INNER", "dataset", "attachment.dataset_id = dataset.id and dataset.recommend=true")))
}

if opts.JustNeedZipFile {
var DecompressState []int32
DecompressState = append(DecompressState, DecompressStateDone, DecompressStateIng, DecompressStateFailed)
cond = cond.And(
builder.In("attachment.decompress_state", DecompressState),
)
}

var count int64
var err error
if len(opts.Keyword) == 0 {
count, err = sess.Where(cond).Count(new(Attachment))
} else {
lowerKeyWord := strings.ToLower(opts.Keyword)

cond = cond.And(builder.Or(builder.Like{"LOWER(attachment.name)", lowerKeyWord}, builder.Like{"LOWER(attachment.description)", lowerKeyWord}))
count, err = sess.Table(&Attachment{}).Where(cond).Count(new(AttachmentInfo))

}

if err != nil {
return nil, 0, fmt.Errorf("Count: %v", err)
}

if opts.Page >= 0 && opts.PageSize > 0 {
var start int
if opts.Page == 0 {
start = 0
} else {
start = (opts.Page - 1) * opts.PageSize
}
sess.Limit(opts.PageSize, start)
}

sess.OrderBy("attachment.created_unix DESC")
attachments := make([]*AttachmentInfo, 0, setting.UI.DatasetPagingNum)
if err := sess.Table(&Attachment{}).Where(cond).
Find(&attachments); err != nil {
return nil, 0, fmt.Errorf("Find: %v", err)
}

if opts.NeedRepoInfo {
for _, attachment := range attachments {
dataset, err := GetDatasetByID(attachment.DatasetID)
if err != nil {
return nil, 0, fmt.Errorf("GetDatasetByID failed error: %v", err)
}
attachment.Recommend = dataset.Recommend
repo, err := GetRepositoryByID(dataset.RepoID)
if err == nil {
attachment.Repo = repo
} else {
return nil, 0, fmt.Errorf("GetRepositoryByID failed error: %v", err)
}
user, err := GetUserByID(attachment.UploaderID)
if err == nil {
attachment.RelAvatarLink = user.RelAvatarLink()
attachment.UserName = user.Name
} else {
return nil, 0, fmt.Errorf("GetUserByID failed error: %v", err)
}
}
}

return attachments, count, nil
}

func GetAllDatasetContributorByDatasetId(datasetId int64) ([]*User, error) {
r := make([]*User, 0)
if err := x.Select("distinct(public.user.*)").Table("attachment").Join("LEFT", "user", "public.user.ID = attachment.uploader_id").Where("attachment.dataset_id = ?", datasetId).Find(&r); err != nil {


models/base_message.go (+21, -0)

@@ -14,3 +14,24 @@ func BaseErrorMessage(message string) BaseMessage {
1, message,
}
}

type BaseMessageApi struct {
Code int `json:"code"`
Message string `json:"message"`
}

var BaseOKMessageApi = BaseMessageApi{
0, "",
}

func BaseErrorMessageApi(message string) BaseMessageApi {
return BaseMessageApi{
1, message,
}
}

type BaseMessageWithDataApi struct {
Code int `json:"code"`
Message string `json:"message"`
Data interface{} `json:"data"`
}
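
The new `BaseMessageApi` types standardize JSON payloads for the v1 API. A small sketch of the wire format they produce, assuming this repository's `models` package is on the import path:

```go
package main

import (
	"encoding/json"
	"fmt"

	"code.gitea.io/gitea/models"
)

func main() {
	// Success: code 0 and an empty message.
	ok, _ := json.Marshal(models.BaseOKMessageApi)
	fmt.Println(string(ok)) // {"code":0,"message":""}

	// Failure: code 1 plus a human-readable reason.
	bad, _ := json.Marshal(models.BaseErrorMessageApi("model not found"))
	fmt.Println(string(bad)) // {"code":1,"message":"model not found"}

	// Success with a payload attached.
	withData, _ := json.Marshal(models.BaseMessageWithDataApi{Code: 0, Data: []string{"a", "b"}})
	fmt.Println(string(withData)) // {"code":0,"message":"","data":["a","b"]}
}
```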

models/cloudbrain.go (+58, -6)

@@ -291,12 +291,30 @@ func (task *Cloudbrain) IsRunning() bool {
status == string(JobRunning) || status == GrampusStatusRunning
}

func (task *Cloudbrain) IsUserHasRight(user *User) bool {
if user == nil {
return false
}
return user.IsAdmin || user.ID == task.UserID
}

func ConvertDurationToStr(duration int64) string {
if duration <= 0 {
return DURATION_STR_ZERO
}
return util.AddZero(duration/3600) + ":" + util.AddZero(duration%3600/60) + ":" + util.AddZero(duration%60)
}
func ConvertStrToDuration(trainJobDuration string) int64 {
trainJobDurationList := strings.Split(trainJobDuration, ":")
if len(trainJobDurationList) == 3 {
i, _ := strconv.ParseInt(trainJobDurationList[0], 10, 64)
j, _ := strconv.ParseInt(trainJobDurationList[1], 10, 64)
k, _ := strconv.ParseInt(trainJobDurationList[2], 10, 64)
return i*3600 + j*60 + k
} else {
return 0
}
}

func IsTrainJobTerminal(status string) bool {
return status == string(ModelArtsTrainJobCompleted) || status == string(ModelArtsTrainJobFailed) || status == string(ModelArtsTrainJobKilled) || status == GrampusStatusFailed || status == GrampusStatusStopped || status == GrampusStatusSucceeded
@@ -1589,9 +1607,23 @@ func Cloudbrains(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) {
}
}
if (opts.AiCenter) != "" {
cond = cond.And(
builder.Like{"cloudbrain.ai_center", opts.AiCenter},
)
if opts.AiCenter == AICenterOfCloudBrainOne {
cond = cond.And(
builder.Eq{"cloudbrain.type": TypeCloudBrainOne},
)
} else if opts.AiCenter == AICenterOfCloudBrainTwo {
cond = cond.And(
builder.Eq{"cloudbrain.type": TypeCloudBrainTwo},
)
} else if opts.AiCenter == AICenterOfChengdu {
cond = cond.And(
builder.Eq{"cloudbrain.type": TypeCDCenter},
)
} else {
cond = cond.And(
builder.Like{"cloudbrain.ai_center", opts.AiCenter},
)
}
}
if (opts.Cluster) != "" {
if opts.Cluster == "resource_cluster_openi" {
@@ -1968,7 +2000,7 @@ func UpdateTrainJobVersion(job *Cloudbrain) error {
func updateJobTrainVersion(e Engine, job *Cloudbrain) error {
var sess *xorm.Session
sess = e.Where("job_id = ? AND version_name=?", job.JobID, job.VersionName)
_, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time", "created_unix").Update(job)
_, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time", "created_unix", "ai_center").Update(job)
return err
}

@@ -2030,10 +2062,17 @@ func GetStoppedJobWithNoStartTimeEndTime() ([]*Cloudbrain, error) {
cloudbrains := make([]*Cloudbrain, 0)
return cloudbrains, x.SQL("select * from cloudbrain where status in (?,?,?,?,?,?,?) and (start_time is null or end_time is null) limit 100", ModelArtsTrainJobCompleted, ModelArtsTrainJobFailed, ModelArtsTrainJobKilled, ModelArtsStopped, JobStopped, JobFailed, JobSucceeded).Find(&cloudbrains)
}
func GetC2NetWithAiCenterWrongJob() ([]*Cloudbrain, error) {
cloudbrains := make([]*Cloudbrain, 0)
return cloudbrains, x.
In("status", ModelArtsTrainJobCompleted, ModelArtsTrainJobFailed, ModelArtsTrainJobKilled, ModelArtsStopped, JobStopped, JobFailed, JobSucceeded).
Where("type = ?", TypeC2Net).
Find(&cloudbrains)
}

func GetModelSafetyTestTask() ([]*Cloudbrain, error) {
cloudbrains := make([]*Cloudbrain, 0)
sess := x.Where("job_type = ?", string(JobTypeModelSafety))
sess := x.Where("job_type=?", string(JobTypeModelSafety))
err := sess.Find(&cloudbrains)
return cloudbrains, err
}
@@ -2285,10 +2324,23 @@ func CloudbrainAllStatic(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, er
}
// sess.OrderBy("cloudbrain.created_unix DESC")
cloudbrains := make([]*CloudbrainInfo, 0, setting.UI.IssuePagingNum)
if err := sess.Cols("status", "type", "job_type", "train_job_duration", "duration", "compute_resource", "created_unix", "start_time", "end_time", "work_server_number").Table(&Cloudbrain{}).Unscoped().Where(cond).
if err := sess.Table(&Cloudbrain{}).Unscoped().Where(cond).
Find(&cloudbrains); err != nil {
return nil, 0, fmt.Errorf("Find: %v", err)
}
if opts.NeedRepoInfo {
var ids []int64
for _, task := range cloudbrains {
ids = append(ids, task.RepoID)
}
repositoryMap, err := GetRepositoriesMapByIDs(ids)
if err == nil {
for _, task := range cloudbrains {
task.Repo = repositoryMap[task.RepoID]
}
}

}
return cloudbrains, count, nil
}
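
`ConvertDurationToStr` and `ConvertStrToDuration` are pure helpers, so a quick round-trip illustrates the format they agree on. This sketch assumes it is built against this repository:

```go
package main

import (
	"fmt"

	"code.gitea.io/gitea/models"
)

func main() {
	// 2 hours, 5 minutes, 9 seconds -> zero-padded "02:05:09".
	d := int64(2*3600 + 5*60 + 9)
	s := models.ConvertDurationToStr(d)
	fmt.Println(s)

	// Round-trip back to seconds; malformed strings fall back to 0.
	fmt.Println(models.ConvertStrToDuration(s))     // 7509
	fmt.Println(models.ConvertStrToDuration("bad")) // 0
}
```

Note the guard in `ConvertDurationToStr`: non-positive durations return `DURATION_STR_ZERO` rather than a negative string.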



models/cloudbrain_static.go (+50, -11)

@@ -42,14 +42,14 @@ type TaskDetail struct {
type CloudbrainDurationStatistic struct {
ID int64 `xorm:"pk autoincr"`
Cluster string
AiCenterCode string
AiCenterCode string `xorm:"INDEX"`
AiCenterName string
ComputeResource string
AccCardType string
AccCardType string `xorm:"INDEX"`

DateTime string
DayTime string
HourTime int
DateTime timeutil.TimeStamp `xorm:"INDEX"`
DayTime string `xorm:"INDEX"`
HourTime int `xorm:"INDEX"`
CardsUseDuration int
CardsTotalDuration int
CardsTotalNum int
@@ -275,11 +275,15 @@ func GetCloudbrainByTime(beginTime int64, endTime int64) ([]*CloudbrainInfo, err
sess := x.NewSession()
defer sess.Close()
var cond = builder.NewCond()
cond = cond.And(
builder.And(builder.Gte{"cloudbrain.end_time": beginTime}, builder.Lte{"cloudbrain.end_time": endTime}),
sess.Exec("if ")
cond = cond.Or(
builder.And(builder.Gte{"cloudbrain.end_time": beginTime}, builder.Lte{"cloudbrain.start_time": beginTime}, builder.Gt{"cloudbrain.start_time": 0}),
)
cond = cond.Or(
builder.Eq{"cloudbrain.status": string(JobRunning)},
builder.And(builder.Gte{"cloudbrain.start_time": beginTime}, builder.Lte{"cloudbrain.start_time": endTime}, builder.Gt{"cloudbrain.start_time": 0}),
)
cond = cond.Or(
builder.And(builder.Eq{"cloudbrain.status": string(JobRunning)}),
)
sess.OrderBy("cloudbrain.created_unix ASC")
cloudbrains := make([]*CloudbrainInfo, 0, 10)
@@ -309,6 +313,20 @@ func InsertCloudbrainDurationStatistic(cloudbrainDurationStatistic *CloudbrainDu
return xStatistic.Insert(cloudbrainDurationStatistic)
}

func GetDurationStatisticByDate(date string, hour int, aiCenterCode string, accCardType string) (*CloudbrainDurationStatistic, error) {
cb := &CloudbrainDurationStatistic{DayTime: date, HourTime: hour, AiCenterCode: aiCenterCode, AccCardType: accCardType}
return getDurationStatistic(cb)
}
func getDurationStatistic(cb *CloudbrainDurationStatistic) (*CloudbrainDurationStatistic, error) {
has, err := x.Get(cb)
if err != nil {
return nil, err
} else if !has {
return nil, ErrJobNotExist{}
}
return cb, nil
}

func DeleteCloudbrainDurationStatisticHour(date string, hour int, aiCenterCode string, accCardType string) error {
sess := xStatistic.NewSession()
defer sess.Close()
@@ -332,7 +350,7 @@ func DeleteCloudbrainDurationStatisticHour(date string, hour int, aiCenterCode s
func GetCanUseCardInfo() ([]*ResourceQueue, error) {
sess := x.NewSession()
defer sess.Close()
sess.OrderBy("resource_queue.id ASC")
sess.OrderBy("resource_queue.cluster DESC, resource_queue.ai_center_code ASC")
ResourceQueues := make([]*ResourceQueue, 0, 10)
if err := sess.Table(&ResourceQueue{}).Find(&ResourceQueues); err != nil {
log.Info("find error.")
@@ -346,7 +364,7 @@ func GetCardDurationStatistics(opts *DurationStatisticOptions) ([]*CloudbrainDur
var cond = builder.NewCond()
if opts.BeginTime.Unix() > 0 && opts.EndTime.Unix() > 0 {
cond = cond.And(
builder.And(builder.Gte{"cloudbrain_duration_statistic.created_unix": opts.BeginTime.Unix()}, builder.Lte{"cloudbrain_duration_statistic.created_unix": opts.EndTime.Unix()}),
builder.And(builder.Gte{"cloudbrain_duration_statistic.date_time": opts.BeginTime.Unix()}, builder.Lt{"cloudbrain_duration_statistic.date_time": opts.EndTime.Unix()}),
)
}
if opts.AiCenterCode != "" {
@@ -365,10 +383,31 @@ func GetCardDurationStatistics(opts *DurationStatisticOptions) ([]*CloudbrainDur
func GetDurationRecordBeginTime() ([]*CloudbrainDurationStatistic, error) {
sess := xStatistic.NewSession()
defer sess.Close()
sess.OrderBy("cloudbrain_duration_statistic.id ASC limit 1")
sess.OrderBy("cloudbrain_duration_statistic.date_time ASC limit 1")
CloudbrainDurationStatistics := make([]*CloudbrainDurationStatistic, 0)
if err := sess.Table(&CloudbrainDurationStatistic{}).Find(&CloudbrainDurationStatistics); err != nil {
log.Info("find error.")
}
return CloudbrainDurationStatistics, nil
}

func GetDurationRecordUpdateTime() ([]*CloudbrainDurationStatistic, error) {
sess := xStatistic.NewSession()
defer sess.Close()
sess.OrderBy("cloudbrain_duration_statistic.date_time DESC limit 1")
CloudbrainDurationStatistics := make([]*CloudbrainDurationStatistic, 0)
if err := sess.Table(&CloudbrainDurationStatistic{}).Find(&CloudbrainDurationStatistics); err != nil {
log.Info("find error.")
}
return CloudbrainDurationStatistics, nil
}

func DeleteCloudbrainDurationStatistic() error {
sess := xStatistic.NewSession()
defer sess.Close()
if _, err := sess.Exec("TRUNCATE TABLE cloudbrain_duration_statistic"); err != nil {
log.Info("TRUNCATE cloudbrain_duration_statistic error.")
return err
}
return nil
}
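
A hedged sketch of how the new `GetDurationStatisticByDate` lookup might be used, distinguishing "no record for that hour yet" (`ErrJobNotExist`) from a real failure. The AI-center and card-type codes are hypothetical, and the statistics engine is assumed to be initialized by the service:

```go
package main

import (
	"fmt"

	"code.gitea.io/gitea/models"
)

func main() {
	// Hourly card-usage record for one AI center and accelerator type (hypothetical codes).
	stat, err := models.GetDurationStatisticByDate("2022-11-16", 10, "center-a", "NPU-910")
	if err != nil {
		if _, missing := err.(models.ErrJobNotExist); missing {
			fmt.Println("no record for that hour yet")
			return
		}
		panic(err)
	}
	fmt.Printf("card time in use: %d of %d\n", stat.CardsUseDuration, stat.CardsTotalDuration)
}
```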

models/dataset.go (+2, -2)

@@ -22,8 +22,8 @@ const (

type Dataset struct {
ID int64 `xorm:"pk autoincr"`
Title string `xorm:"INDEX NOT NULL"`
Status int32 `xorm:"INDEX"` // normal_private: 0, pulbic: 1, is_delete: 2
Title string `xorm:"INDEX NOT NULL""`
Status int32 `xorm:"INDEX""` // normal_private: 0, pulbic: 1, is_delete: 2
Category string
Description string `xorm:"TEXT"`
DownloadTimes int64


models/file_chunk.go (+75, -0)

@@ -28,6 +28,23 @@ type FileChunk struct {
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}

type ModelFileChunk struct {
ID int64 `xorm:"pk autoincr"`
UUID string `xorm:"INDEX"`
Md5 string `xorm:"INDEX"`
ModelUUID string `xorm:"INDEX"`
ObjectName string `xorm:"DEFAULT ''"`
IsUploaded int `xorm:"DEFAULT 0"` // not uploaded: 0, uploaded: 1
UploadID string `xorm:"UNIQUE"` //minio upload id
TotalChunks int
Size int64
UserID int64 `xorm:"INDEX"`
Type int `xorm:"INDEX DEFAULT 0"`
CompletedParts []string `xorm:"DEFAULT ''"` // chunkNumber+etag eg: ,1-asqwewqe21312312.2-123hjkas
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}

// GetFileChunkByMD5 returns fileChunk by given id
func GetFileChunkByMD5(md5 string) (*FileChunk, error) {
return getFileChunkByMD5(x, md5)
@@ -49,6 +66,21 @@ func GetFileChunkByMD5AndUser(md5 string, userID int64, typeCloudBrain int) (*Fi
return getFileChunkByMD5AndUser(x, md5, userID, typeCloudBrain)
}

func GetModelFileChunkByMD5AndUser(md5 string, userID int64, typeCloudBrain int, uuid string) (*ModelFileChunk, error) {
return getModelFileChunkByMD5AndUser(x, md5, userID, typeCloudBrain, uuid)
}

func getModelFileChunkByMD5AndUser(e Engine, md5 string, userID int64, typeCloudBrain int, uuid string) (*ModelFileChunk, error) {
fileChunk := new(ModelFileChunk)

if has, err := e.Where("md5 = ? and user_id = ? and type = ? and model_uuid= ?", md5, userID, typeCloudBrain, uuid).Get(fileChunk); err != nil {
return nil, err
} else if !has {
return nil, ErrFileChunkNotExist{md5, ""}
}
return fileChunk, nil
}

func getFileChunkByMD5AndUser(e Engine, md5 string, userID int64, typeCloudBrain int) (*FileChunk, error) {
fileChunk := new(FileChunk)

@@ -76,6 +108,21 @@ func getFileChunkByUUID(e Engine, uuid string) (*FileChunk, error) {
return fileChunk, nil
}

func GetModelFileChunkByUUID(uuid string) (*ModelFileChunk, error) {
return getModelFileChunkByUUID(x, uuid)
}

func getModelFileChunkByUUID(e Engine, uuid string) (*ModelFileChunk, error) {
fileChunk := new(ModelFileChunk)

if has, err := e.Where("uuid = ?", uuid).Get(fileChunk); err != nil {
return nil, err
} else if !has {
return nil, ErrFileChunkNotExist{"", uuid}
}
return fileChunk, nil
}

// InsertFileChunk insert a record into file_chunk.
func InsertFileChunk(fileChunk *FileChunk) (_ *FileChunk, err error) {
if _, err := x.Insert(fileChunk); err != nil {
@@ -85,6 +132,14 @@ func InsertFileChunk(fileChunk *FileChunk) (_ *FileChunk, err error) {
return fileChunk, nil
}

// InsertFileChunk insert a record into file_chunk.
func InsertModelFileChunk(fileChunk *ModelFileChunk) (_ *ModelFileChunk, err error) {
if _, err := x.Insert(fileChunk); err != nil {
return nil, err
}
return fileChunk, nil
}

func DeleteFileChunkById(uuid string) (*FileChunk, error) {
return deleteFileChunkById(x, uuid)
}
@@ -106,6 +161,17 @@ func deleteFileChunkById(e Engine, uuid string) (*FileChunk, error) {
}
}

func UpdateModelFileChunk(fileChunk *ModelFileChunk) error {
return updateModelFileChunk(x, fileChunk)
}

func updateModelFileChunk(e Engine, fileChunk *ModelFileChunk) error {
var sess *xorm.Session
sess = e.Where("uuid = ?", fileChunk.UUID)
_, err := sess.Cols("is_uploaded").Update(fileChunk)
return err
}

// UpdateFileChunk updates the given file_chunk in database
func UpdateFileChunk(fileChunk *FileChunk) error {
return updateFileChunk(x, fileChunk)
@@ -127,3 +193,12 @@ func deleteFileChunk(e Engine, fileChunk *FileChunk) error {
_, err := e.ID(fileChunk.ID).Delete(fileChunk)
return err
}

func DeleteModelFileChunk(fileChunk *ModelFileChunk) error {
return deleteModelFileChunk(x, fileChunk)
}

func deleteModelFileChunk(e Engine, fileChunk *ModelFileChunk) error {
_, err := e.ID(fileChunk.ID).Delete(fileChunk)
return err
}
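
Taken together, the `ModelFileChunk` helpers support resumable model uploads. Below is a rough sketch of the bookkeeping flow (look up by md5, insert on first sight, flip `is_uploaded` when done); the UUIDs, sizes and chunk counts are illustrative, the actual upload to object storage is elided, and an initialized `models` package is assumed:

```go
package main

import (
	"fmt"

	"code.gitea.io/gitea/models"
)

func trackModelChunk(md5 string, userID int64, modelUUID string) error {
	chunk, err := models.GetModelFileChunkByMD5AndUser(md5, userID, models.TypeCloudBrainOne, modelUUID)
	if err != nil {
		if _, notFound := err.(models.ErrFileChunkNotExist); !notFound {
			return err
		}
		// First time we see this file for this model: record it.
		chunk, err = models.InsertModelFileChunk(&models.ModelFileChunk{
			UUID:        "uuid-of-upload", // hypothetical
			Md5:         md5,
			ModelUUID:   modelUUID,
			UserID:      userID,
			Type:        models.TypeCloudBrainOne,
			TotalChunks: 4,
			Size:        64 << 20,
		})
		if err != nil {
			return err
		}
	}

	// ...upload the parts, then flip the flag; only is_uploaded is persisted.
	chunk.IsUploaded = 1 // uploaded: 1
	return models.UpdateModelFileChunk(chunk)
}

func main() {
	fmt.Println(trackModelChunk("d41d8cd98f00b204e9800998ecf8427e", 1, "model-uuid"))
}
```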

models/models.go (+2, -0)

@@ -136,6 +136,7 @@ func init() {
new(ImageTopic),
new(ImageTopicRelation),
new(FileChunk),
new(ModelFileChunk),
new(BlockChain),
new(RecommendOrg),
new(AiModelManage),
@@ -185,6 +186,7 @@ func init() {
new(UserAnalysisPara),
new(Invitation),
new(CloudbrainDurationStatistic),
new(UserSummaryCurrentYear),
)

gonicNames := []string{"SSL", "UID"}


models/repo.go (+2, -2)

@@ -223,10 +223,10 @@ type Repository struct {
BlockChainStatus RepoBlockChainStatus `xorm:"NOT NULL DEFAULT 0"`

// git clone and git pull total count
CloneCnt int64 `xorm:"NOT NULL DEFAULT 0"`
CloneCnt int64 `xorm:"NOT NULL DEFAULT 0" json:"clone_cnt"`

// only git clone total count
GitCloneCnt int64 `xorm:"NOT NULL DEFAULT 0"`
GitCloneCnt int64 `xorm:"NOT NULL DEFAULT 0" json:"git_clone_cnt"`

CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`


models/resource_queue.go (+3, -3)

@@ -143,6 +143,9 @@ func InsertResourceQueue(queue ResourceQueue) (int64, error) {
func UpdateResourceQueueById(queueId int64, queue ResourceQueue) (int64, error) {
return x.ID(queueId).Update(&queue)
}
func UpdateResourceCardsTotalNum(queueId int64, queue ResourceQueue) (int64, error) {
return x.ID(queueId).Cols("cards_total_num", "remark").Update(&queue)
}

func SearchResourceQueue(opts SearchResourceQueueOptions) (int64, []ResourceQueue, error) {
var cond = builder.NewCond()
@@ -313,9 +316,6 @@ func SyncGrampusQueues(updateList []ResourceQueue, insertList []ResourceQueue, e
if _, err = sess.In("id", deleteSpcIds).Update(&ResourceSpecification{Status: SpecOffShelf}); err != nil {
return err
}
if _, err = sess.In("spec_id", deleteSpcIds).Delete(&ResourceSceneSpec{}); err != nil {
return err
}
}

}
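
A minimal sketch of calling the new `UpdateResourceCardsTotalNum`, assuming `ResourceQueue` exposes `CardsTotalNum` and `Remark` fields that map to the `cards_total_num` and `remark` columns under xorm's default naming (those fields are not shown in this hunk):

```go
package main

import (
	"fmt"

	"code.gitea.io/gitea/models"
)

func main() {
	// Hypothetical queue id; only the cards_total_num and remark columns are written.
	affected, err := models.UpdateResourceCardsTotalNum(42, models.ResourceQueue{
		CardsTotalNum: 64,
		Remark:        "expanded by 32 cards",
	})
	if err != nil {
		panic(err)
	}
	fmt.Println("rows updated:", affected)
}
```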


models/resource_scene.go (+2, -2)

@@ -116,7 +116,7 @@ func InsertResourceScene(r ResourceSceneReq) error {

//check
specs := make([]ResourceSpecification, 0)
cond := builder.In("id", r.SpecIds).And(builder.Eq{"status": SpecOnShelf})
cond := builder.In("id", r.SpecIds)
if err := sess.Where(cond).Find(&specs); err != nil {
return err
}
@@ -175,7 +175,7 @@ func UpdateResourceScene(r ResourceSceneReq) error {
}
//check specification
specs := make([]ResourceSpecification, 0)
cond := builder.In("id", r.SpecIds).And(builder.Eq{"status": SpecOnShelf})
cond := builder.In("id", r.SpecIds)
if err := sess.Where(cond).Find(&specs); err != nil {
return err
}


models/resource_specification.go (+21, -8)

@@ -12,6 +12,13 @@ const (
SpecOffShelf
)

type SearchSpecOrderBy int

const (
SearchSpecOrderById SearchSpecOrderBy = iota
SearchSpecOrder4Standard
)

type ResourceSpecification struct {
ID int64 `xorm:"pk autoincr"`
QueueId int64 `xorm:"INDEX"`
@@ -85,6 +92,7 @@ type SearchResourceSpecificationOptions struct {
Status int
Cluster string
AvailableCode int
OrderBy SearchSpecOrderBy
}

type SearchResourceBriefSpecificationOptions struct {
@@ -168,6 +176,7 @@ type FindSpecsOptions struct {
UseShareMemGiB bool
//if true,find specs no matter used or not used in scene. if false,only find specs used in scene
RequestAll bool
SpecStatus int
}

type Specification struct {
@@ -232,10 +241,18 @@ func SearchResourceSpecification(opts SearchResourceSpecificationOptions) (int64
return 0, nil, err
}

var orderby = ""
switch opts.OrderBy {
case SearchSpecOrder4Standard:
orderby = "resource_queue.compute_resource asc,resource_queue.acc_card_type asc,resource_specification.acc_cards_num asc,resource_specification.cpu_cores asc,resource_specification.mem_gi_b asc,resource_specification.share_mem_gi_b asc"
default:
orderby = "resource_specification.id desc"
}

r := make([]ResourceSpecAndQueue, 0)
err = x.Where(cond).
Join("INNER", "resource_queue", "resource_queue.ID = resource_specification.queue_id").
Desc("resource_specification.id").
OrderBy(orderby).
Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).
Unscoped().Find(&r)
if err != nil {
@@ -269,10 +286,6 @@ func ResourceSpecOffShelf(id int64) (int64, error) {
}
sess.Close()
}()
//delete scene spec relation
if _, err = sess.Where("spec_id = ?", id).Delete(&ResourceSceneSpec{}); err != nil {
return 0, err
}

param := ResourceSpecification{
Status: SpecOffShelf,
@@ -317,9 +330,6 @@ func SyncGrampusSpecs(updateList []ResourceSpecification, insertList []ResourceS
if _, err = sess.Cols("status", "is_available").In("id", deleteIds).Update(&ResourceSpecification{Status: SpecOffShelf, IsAvailable: false}); err != nil {
return err
}
if _, err = sess.In("spec_id", deleteIds).Delete(&ResourceSceneSpec{}); err != nil {
return err
}
}

//update exists specs
@@ -384,6 +394,9 @@ func FindSpecs(opts FindSpecsOptions) ([]*Specification, error) {
if opts.UseShareMemGiB {
cond = cond.And(builder.Eq{"resource_specification.share_mem_gi_b": opts.ShareMemGiB})
}
if opts.SpecStatus > 0 {
cond = cond.And(builder.Eq{"resource_specification.status": opts.SpecStatus})
}
r := make([]*Specification, 0)
s := x.Where(cond).
Join("INNER", "resource_queue", "resource_queue.id = resource_specification.queue_id")


models/user_business_analysis.go (+377, -227)

@@ -3,12 +3,15 @@ package models
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"sort"
"strconv"
"strings"
"time"

"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/builder"
"xorm.io/xorm"
@@ -19,185 +22,6 @@ const (
BATCH_INSERT_SIZE = 50
)

type UserBusinessAnalysisAll struct {
ID int64 `xorm:"pk"`

CountDate int64 `xorm:"pk"`

//action :ActionMergePullRequest // 11
CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCommitRepo // 5
CommitCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCreateIssue // 10
IssueCount int `xorm:"NOT NULL DEFAULT 0"`

//comment table current date
CommentCount int `xorm:"NOT NULL DEFAULT 0"`

//watch table current date
FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//star table current date
StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//follow table
WatchedCount int `xorm:"NOT NULL DEFAULT 0"`

// user table
GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`

//
CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`

//attachement table
CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`

//0
CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`

//issue, issueassignees
SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`

//baike
EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`

//user
RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

//repo
CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//login count, from elk
LoginCount int `xorm:"NOT NULL DEFAULT 0"`

//openi index
OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`

//user
Email string `xorm:"NOT NULL"`

//user
Name string `xorm:"NOT NULL"`

DataDate string `xorm:"NULL"`

//cloudbraintask
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`

Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysis struct {
ID int64 `xorm:"pk"`
DataDate string `xorm:"pk"`
CountDate int64 `xorm:"NULL"`

//action :ActionMergePullRequest // 11
CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCommitRepo // 5
CommitCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCreateIssue // 6
IssueCount int `xorm:"NOT NULL DEFAULT 0"`

//comment table current date
CommentCount int `xorm:"NOT NULL DEFAULT 0"`

//watch table current date
FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//star table current date
StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//follow table
WatchedCount int `xorm:"NOT NULL DEFAULT 0"`

// user table
GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`

//
CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`

//attachement table
CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`

//0
CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`

//issue, issueassignees
SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`

//baike
EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`

//user
RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

//repo
CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//login count, from elk
LoginCount int `xorm:"NOT NULL DEFAULT 0"`

//openi index
OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`

//user
Email string `xorm:"NOT NULL"`

//user
Name string `xorm:"NOT NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`

Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisQueryOptions struct {
ListOptions
UserName string
@@ -499,7 +323,7 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi
DataDate := currentTimeNow.Format("2006-01-02 15:04")

CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
CommitCountMap, _ := queryCommitAction(start_unix, end_unix, 5)
IssueCountMap := queryCreateIssue(start_unix, end_unix)

CommentCountMap := queryComment(start_unix, end_unix)
@@ -517,16 +341,16 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi
CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
}
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

@@ -752,7 +576,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
startTime := currentTimeNow.AddDate(0, 0, -1)

CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
CommitCountMap, mostActiveMap := queryCommitAction(start_unix, end_unix, 5)
IssueCountMap := queryCreateIssue(start_unix, end_unix)

CommentCountMap := queryComment(start_unix, end_unix)
@@ -764,13 +588,13 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
log.Info("query commit code errr.")
} else {
log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
//CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
//log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
}
//CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
CommitDatasetSizeMap, CommitDatasetNumMap, dataSetDownloadMap := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
CreateRepoCountMap, DetailInfoMap, MostDownloadMap := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)

OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix)
@@ -778,14 +602,19 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
RecommendDataset, CreatedDataset := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

InvitationMap := queryUserInvitationCount(start_unix, end_unix)

DataDate := currentTimeNow.Format("2006-01-02") + " 00:01"

bonusMap := make(map[string]map[string]int)
if tableName == "user_business_analysis_current_year" {
bonusMap = getBonusMap()
log.Info("truncate all data from table:user_summary_current_year ")
statictisSess.Exec("TRUNCATE TABLE user_summary_current_year")
}
cond := "type != 1 and is_active=true"
count, err := sess.Where(cond).Count(new(User))
if err != nil {
@@ -883,6 +712,37 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
userMetrics["TotalHasActivityUser"] = getMapKeyStringValue("TotalHasActivityUser", userMetrics) + 1
}
}
if tableName == "user_business_analysis_current_year" {
//年度数据
subTime := time.Now().UTC().Sub(dateRecordAll.RegistDate.AsTime().UTC())
mostActiveDay := ""
if userInfo, ok := mostActiveMap[dateRecordAll.ID]; ok {
mostActiveDay = getMostActiveJson(userInfo)
}
scoreMap := make(map[string]float64)
repoInfo := getRepoDetailInfo(DetailInfoMap, dateRecordAll.ID, MostDownloadMap)
dataSetInfo, datasetscore := getDataSetInfo(dateRecordAll.ID, CreatedDataset, dataSetDownloadMap, CommitDatasetNumMap, CollectedDataset)
scoreMap["datasetscore"] = datasetscore
codeInfo, codescore := getCodeInfo(dateRecordAll)
scoreMap["codescore"] = codescore
cloudBrainInfo := getCloudBrainInfo(dateRecordAll, CloudBrainTaskItemMap, scoreMap)
playARoll := getPlayARoll(bonusMap, dateRecordAll.Name, scoreMap)
re := &UserSummaryCurrentYear{
ID: dateRecordAll.ID,
Name: dateRecordAll.Name,
Email: dateRecordAll.Email,
Phone: dateRecordAll.Phone,
RegistDate: dateRecordAll.RegistDate,
DateCount: int(subTime.Hours()) / 24,
MostActiveDay: mostActiveDay,
RepoInfo: repoInfo,
DataSetInfo: dataSetInfo,
CodeInfo: codeInfo,
CloudBrainInfo: cloudBrainInfo,
PlayARoll: playARoll,
}
statictisSess.Insert(re)
}
}
if len(dateRecordBatch) > 0 {
err := insertTable(dateRecordBatch, tableName, statictisSess)
@@ -890,6 +750,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
if err != nil {
log.Info("insert all data failed." + err.Error())
}

}
indexTotal += PAGE_SIZE
if indexTotal >= count {
@@ -911,6 +772,204 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
log.Info("refresh data finished.tableName=" + tableName + " total record:" + fmt.Sprint(insertCount))
}

func getBonusMap() map[string]map[string]int {
bonusMap := make(map[string]map[string]int)
url := setting.RecommentRepoAddr + "bonus/record.txt"
content, err := GetContentFromPromote(url)
if err == nil {
filenames := strings.Split(content, "\n")
for i := 0; i < len(filenames); i++ {
url = setting.RecommentRepoAddr + "bonus/" + filenames[i]
csvContent, err1 := GetContentFromPromote(url)
if err1 == nil {
//read csv
lines := strings.Split(csvContent, "\n")
for j := 1; j < len(lines); j++ {
aLine := strings.Split(lines[j], ",")
if len(aLine) < 7 {
continue
}
userName := aLine[1]
//email := lines[2]
record, ok := bonusMap[userName]
if !ok {
record = make(map[string]int)
}
record["times"] = getMapKeyStringValue("times", record) + getIntValue(aLine[3])
record["total_bonus"] = getMapKeyStringValue("total_bonus", record) + getIntValue(aLine[4])
record["total_cardtime"] = getMapKeyStringValue("total_cardtime", record) + getIntValue(aLine[5])
record["total_giveup"] = getMapKeyStringValue("total_giveup", record) + getIntValue(aLine[6])
}
}
}
}
return bonusMap
}

func getIntValue(val string) int {
i, err := strconv.Atoi(val)
if err == nil {
return i
}
return 0
}

func getPlayARoll(bonusMap map[string]map[string]int, userName string, scoreMap map[string]float64) string {
bonusInfo := make(map[string]string)
record, ok := bonusMap[userName]
if ok {
rollscore := 0.0
bonusInfo["times"] = fmt.Sprint(record["times"])
if record["times"] >= 4 {
rollscore = float64(record["times"]) / float64(4)
}
scoreMap["rollscore"] = rollscore
bonusInfo["total_bonus"] = fmt.Sprint(record["total_bonus"])
bonusInfo["total_cardtime"] = fmt.Sprint(record["total_cardtime"])
bonusInfo["total_giveup"] = fmt.Sprint(record["total_giveup"])
bonusInfoJson, _ := json.Marshal(bonusInfo)
return string(bonusInfoJson)
} else {
return ""
}
}

func getCloudBrainInfo(dateRecordAll UserBusinessAnalysisAll, CloudBrainTaskItemMap map[string]int, scoreMap map[string]float64) string {
trainscore := 0.0
debugscore := 0.0
runtime := 0.0
if dateRecordAll.CloudBrainTaskNum > 0 {
cloudBrainInfo := make(map[string]string)
cloudBrainInfo["create_task_num"] = fmt.Sprint(dateRecordAll.CloudBrainTaskNum)
cloudBrainInfo["debug_task_num"] = fmt.Sprint(dateRecordAll.GpuDebugJob + dateRecordAll.NpuDebugJob)
if dateRecordAll.GpuDebugJob+dateRecordAll.NpuDebugJob >= 50 {
debugscore = float64(dateRecordAll.GpuDebugJob+dateRecordAll.NpuDebugJob) / float64(50)
}
cloudBrainInfo["train_task_num"] = fmt.Sprint(dateRecordAll.GpuTrainJob + dateRecordAll.NpuTrainJob)
if dateRecordAll.GpuTrainJob+dateRecordAll.NpuTrainJob >= 50 {
trainscore = float64(dateRecordAll.GpuTrainJob+dateRecordAll.NpuTrainJob) / float64(50)
}
cloudBrainInfo["inference_task_num"] = fmt.Sprint(dateRecordAll.NpuInferenceJob + CloudBrainTaskItemMap[fmt.Sprint(dateRecordAll.ID)+"_GpuInferenceJob"])
cloudBrainInfo["card_runtime"] = fmt.Sprint(dateRecordAll.CloudBrainRunTime)
if dateRecordAll.CloudBrainRunTime >= 100 {
runtime = float64(dateRecordAll.CloudBrainRunTime) / float64(100)
}
cloudBrainInfo["card_runtime_money"] = fmt.Sprint(dateRecordAll.CloudBrainRunTime * 5)
cloudBrainInfo["CloudBrainOne"] = fmt.Sprint(CloudBrainTaskItemMap[fmt.Sprint(dateRecordAll.ID)+"_CloudBrainOne"])
cloudBrainInfo["CloudBrainTwo"] = fmt.Sprint(CloudBrainTaskItemMap[fmt.Sprint(dateRecordAll.ID)+"_CloudBrainTwo"])
cloudBrainInfo["C2Net"] = fmt.Sprint(CloudBrainTaskItemMap[fmt.Sprint(dateRecordAll.ID)+"_C2Net"])

cloudBrainInfoJson, _ := json.Marshal(cloudBrainInfo)
scoreMap["trainscore"] = trainscore
scoreMap["debugscore"] = debugscore
scoreMap["runtime"] = runtime
return string(cloudBrainInfoJson)
} else {
scoreMap["trainscore"] = trainscore
scoreMap["debugscore"] = debugscore
scoreMap["runtime"] = runtime
return ""
}
}

func getCodeInfo(dateRecordAll UserBusinessAnalysisAll) (string, float64) {
if dateRecordAll.CommitCount > 0 {
codeInfo := make(map[string]string)
codeInfo["commit_count"] = fmt.Sprint(dateRecordAll.CommitCount)
codeInfo["commit_line"] = fmt.Sprint(dateRecordAll.CommitCodeSize)
score := 0.0
score = float64(dateRecordAll.CommitCodeSize) / float64(dateRecordAll.CommitCount) / float64(20000)
if score < (float64(dateRecordAll.CommitCount) / float64(100)) {
score = float64(dateRecordAll.CommitCount) / float64(100)
}
codeInfo["score"] = fmt.Sprintf("%.2f", score)

codeInfoJson, _ := json.Marshal(codeInfo)
return string(codeInfoJson), score
} else {
return "", 0
}
}

func getDataSetInfo(userId int64, CreatedDataset map[int64]int, dataSetDownloadMap map[int64]int, CommitDatasetNumMap map[int64]int, CollectedDataset map[int64]int) (string, float64) {
datasetInfo := make(map[string]string)
score := 0.0
if create_count, ok := CreatedDataset[userId]; ok {
datasetInfo["create_count"] = fmt.Sprint(create_count)
score = float64(create_count) / 10
}
if upload_count, ok := CommitDatasetNumMap[userId]; ok {
datasetInfo["upload_file_count"] = fmt.Sprint(upload_count)
}
if download_count, ok := dataSetDownloadMap[userId]; ok {
datasetInfo["download_count"] = fmt.Sprint(download_count)
}
if cllected_count, ok := CollectedDataset[userId]; ok {
datasetInfo["cllected_count"] = fmt.Sprint(cllected_count)
}

if len(datasetInfo) > 0 {
datasetInfoJson, _ := json.Marshal(datasetInfo)
return string(datasetInfoJson), score
} else {
return "", score
}
}

func getRepoDetailInfo(repoDetailInfoMap map[string]int, userId int64, mostDownload map[int64]string) string {
repoDetailInfo := make(map[string]string)
if total, ok := repoDetailInfoMap[fmt.Sprint(userId)+"_total"]; ok {
repoDetailInfo["repo_total"] = fmt.Sprint(total)
}
if private, ok := repoDetailInfoMap[fmt.Sprint(userId)+"_is_private"]; ok {
repoDetailInfo["repo_is_private"] = fmt.Sprint(private)
}
if public, ok := repoDetailInfoMap[fmt.Sprint(userId)+"_is_public"]; ok {
repoDetailInfo["repo_is_public"] = fmt.Sprint(public)
}
if download, ok := repoDetailInfoMap[fmt.Sprint(userId)+"_total_download"]; ok {
repoDetailInfo["repo_total_download"] = fmt.Sprint(download)
}
if mostdownload, ok := repoDetailInfoMap[fmt.Sprint(userId)+"_most_download"]; ok {
repoDetailInfo["repo_most_download_count"] = fmt.Sprint(mostdownload)
}
if mostdownloadName, ok := mostDownload[userId]; ok {
repoDetailInfo["repo_most_download_name"] = mostdownloadName
}
if len(repoDetailInfo) > 0 {
repoDetailInfoJson, _ := json.Marshal(repoDetailInfo)
return string(repoDetailInfoJson)
} else {
return ""
}
}

func getMostActiveJson(userInfo map[string]int) string {
mostActiveMap := make(map[string]string)
if day, ok := userInfo["hour_day"]; ok {
hour := userInfo["hour_hour"]
month := userInfo["hour_month"]
year := userInfo["hour_year"]
delete(userInfo, "hour_day")
delete(userInfo, "hour_hour")
delete(userInfo, "hour_month")
delete(userInfo, "hour_year")
mostActiveMap["before_dawn"] = fmt.Sprint(year) + "/" + fmt.Sprint(month) + "/" + fmt.Sprint(day) + " " + fmt.Sprint(hour)
}
max := 0
max_day := ""
for key, value := range userInfo {
if value > max {
max = value
max_day = key
}
}
mostActiveMap["most_active_day"] = max_day
mostActiveMap["most_active_num"] = fmt.Sprint(max)
mostActiveMapJson, _ := json.Marshal(mostActiveMap)
return string(mostActiveMapJson)
}
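
getMostActiveJson consumes the per-user map built by queryCommitAction below: plain date keys count actions per day, while the hour_* keys record a before-dawn (00:00-05:59) action hour and its date. A minimal sketch of that contract with made-up values (not part of this diff):

package main

import "fmt"

func main() {
	// Per-user activity map as built by queryCommitAction (illustrative values).
	userInfo := map[string]int{
		"2022-11-01": 3,
		"2022-11-07": 9, // most active day
		"hour_year":  2022,
		"hour_month": 11,
		"hour_day":   7,
		"hour_hour":  2, // a before-dawn action at 02:xx on 2022-11-07
	}
	fmt.Println(len(userInfo))
	// getMostActiveJson(userInfo) would then return roughly:
	// {"before_dawn":"2022/11/7 2","most_active_day":"2022-11-07","most_active_num":"9"}
}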

func updateUserIndex(tableName string, statictisSess *xorm.Session, userId int64, userIndex float64) {
updateSql := "UPDATE public." + tableName + " set user_index=" + fmt.Sprint(userIndex*100) + " where id=" + fmt.Sprint(userId)
statictisSess.Exec(updateSql)
@@ -997,7 +1056,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,

DataDate := CountDate.Format("2006-01-02")
CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
CommitCountMap, _ := queryCommitAction(start_unix, end_unix, 5)
IssueCountMap := queryCreateIssue(start_unix, end_unix)

CommentCountMap := queryComment(start_unix, end_unix)
@@ -1010,19 +1069,19 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
log.Info("query commit code errr.")
} else {
//log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
//CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
//log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
}
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

@@ -1490,41 +1549,65 @@ func queryPullRequest(start_unix int64, end_unix int64) map[int64]int {
return resultMap
}

func queryCommitAction(start_unix int64, end_unix int64, actionType int64) map[int64]int {
func queryCommitAction(start_unix int64, end_unix int64, actionType int64) (map[int64]int, map[int64]map[string]int) {
sess := x.NewSession()
defer sess.Close()
resultMap := make(map[int64]int)
cond := "user_id=act_user_id and op_type=" + fmt.Sprint(actionType) + " and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
mostActiveMap := make(map[int64]map[string]int)
cond := "user_id=act_user_id and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)

count, err := sess.Where(cond).Count(new(Action))
if err != nil {
log.Info("query action error. return.")
return resultMap
return resultMap, mostActiveMap
}

var indexTotal int64
indexTotal = 0
for {
sess.Select("id,user_id,op_type,act_user_id").Table("action").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
sess.Select("id,user_id,op_type,act_user_id,created_unix").Table("action").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
actionList := make([]*Action, 0)
sess.Find(&actionList)

log.Info("query action size=" + fmt.Sprint(len(actionList)))
for _, actionRecord := range actionList {
if _, ok := resultMap[actionRecord.UserID]; !ok {
resultMap[actionRecord.UserID] = 1
if int64(actionRecord.OpType) == actionType {
if _, ok := resultMap[actionRecord.UserID]; !ok {
resultMap[actionRecord.UserID] = 1
} else {
resultMap[actionRecord.UserID] += 1
}
}
key := getDate(actionRecord.CreatedUnix)
if _, ok := mostActiveMap[actionRecord.UserID]; !ok {
tmpMap := make(map[string]int)
tmpMap[key] = 1
mostActiveMap[actionRecord.UserID] = tmpMap
} else {
resultMap[actionRecord.UserID] += 1
mostActiveMap[actionRecord.UserID][key] = getMapKeyStringValue(key, mostActiveMap[actionRecord.UserID]) + 1
}
utcTime := actionRecord.CreatedUnix.AsTime()
hour := utcTime.Hour()
if hour >= 0 && hour <= 5 {
key = "hour_hour"
if getMapKeyStringValue(key, mostActiveMap[actionRecord.UserID]) < hour {
mostActiveMap[actionRecord.UserID][key] = hour
mostActiveMap[actionRecord.UserID]["hour_day"] = utcTime.Day()
mostActiveMap[actionRecord.UserID]["hour_month"] = int(utcTime.Month())
mostActiveMap[actionRecord.UserID]["hour_year"] = utcTime.Year()
}
}
}

indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}

return resultMap
return resultMap, mostActiveMap
}
func getDate(createTime timeutil.TimeStamp) string {
return createTime.Format("2006-01-02")
}

func queryCreateIssue(start_unix int64, end_unix int64) map[int64]int {
@@ -1714,15 +1797,16 @@ func queryFollow(start_unix int64, end_unix int64) (map[int64]int, map[int64]int
return resultMap, resultFocusedByOtherMap
}

func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int {
func queryRecommedDataSet(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
sess := x.NewSession()
defer sess.Close()
userIdDdatasetMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and recommend=true"
userIdRecommentDatasetMap := make(map[int64]int)
userIdCreateDatasetMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
count, err := sess.Where(cond).Count(new(Dataset))
if err != nil {
log.Info("query recommend dataset error. return.")
return userIdDdatasetMap
return userIdRecommentDatasetMap, userIdCreateDatasetMap
}
var indexTotal int64
indexTotal = 0
@@ -1732,18 +1816,21 @@ func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int {
sess.Find(&datasetList)
log.Info("query datasetList size=" + fmt.Sprint(len(datasetList)))
for _, datasetRecord := range datasetList {
if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok {
userIdDdatasetMap[datasetRecord.UserID] = 1
} else {
userIdDdatasetMap[datasetRecord.UserID] += 1
if datasetRecord.Recommend {
if _, ok := userIdRecommentDatasetMap[datasetRecord.UserID]; !ok {
userIdRecommentDatasetMap[datasetRecord.UserID] = 1
} else {
userIdRecommentDatasetMap[datasetRecord.UserID] += 1
}
}
userIdCreateDatasetMap[datasetRecord.UserID] = getMapValue(datasetRecord.UserID, userIdCreateDatasetMap) + 1
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return userIdDdatasetMap
return userIdRecommentDatasetMap, userIdCreateDatasetMap
}

func queryAllDataSet() (map[int64]int64, map[int64]int64) {
@@ -1922,22 +2009,23 @@ func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64
return imageCollect, imageCollected
}

func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int, map[int64]int) {
sess := x.NewSession()
defer sess.Close()
resultSizeMap := make(map[int64]int)
resultNumMap := make(map[int64]int)
resultDownloadMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)

count, err := sess.Where(cond).Count(new(Attachment))
if err != nil {
log.Info("query attachment error. return.")
return resultSizeMap, resultNumMap
return resultSizeMap, resultNumMap, resultDownloadMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,uploader_id,size").Table("attachment").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
sess.Select("id,uploader_id,size,download_count").Table("attachment").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
attachmentList := make([]*Attachment, 0)
sess.Find(&attachmentList)

@@ -1946,9 +2034,11 @@ func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int6
if _, ok := resultSizeMap[attachRecord.UploaderID]; !ok {
resultSizeMap[attachRecord.UploaderID] = int(attachRecord.Size / (1024 * 1024)) //MB
resultNumMap[attachRecord.UploaderID] = 1
resultDownloadMap[attachRecord.UploaderID] = int(attachRecord.DownloadCount)
} else {
resultSizeMap[attachRecord.UploaderID] += int(attachRecord.Size / (1024 * 1024)) //MB
resultNumMap[attachRecord.UploaderID] += 1
resultDownloadMap[attachRecord.UploaderID] += int(attachRecord.DownloadCount)
}
}

@@ -1958,32 +2048,50 @@ func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int6
}
}

return resultSizeMap, resultNumMap
return resultSizeMap, resultNumMap, resultDownloadMap
}

func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int {
func queryUserCreateRepo(start_unix int64, end_unix int64) (map[int64]int, map[string]int, map[int64]string) {
sess := x.NewSession()
defer sess.Close()
resultMap := make(map[int64]int)

detailInfoMap := make(map[string]int)
mostDownloadMap := make(map[int64]string)

cond := "is_fork=false and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
count, err := sess.Where(cond).Count(new(Repository))
if err != nil {
log.Info("query Repository error. return.")
return resultMap
return resultMap, detailInfoMap, mostDownloadMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,owner_id,name").Table("repository").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
sess.Select("id,owner_id,name,is_private,clone_cnt").Table("repository").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
repoList := make([]*Repository, 0)
sess.Find(&repoList)
log.Info("query Repository size=" + fmt.Sprint(len(repoList)))
for _, repoRecord := range repoList {
if _, ok := resultMap[repoRecord.OwnerID]; !ok {
resultMap[repoRecord.OwnerID] = 1
resultMap[repoRecord.OwnerID] = getMapValue(repoRecord.OwnerID, resultMap) + 1

key := fmt.Sprint(repoRecord.OwnerID) + "_total"
detailInfoMap[key] = getMapKeyStringValue(key, detailInfoMap) + 1

if repoRecord.IsPrivate {
key := fmt.Sprint(repoRecord.OwnerID) + "_is_private"
detailInfoMap[key] = getMapKeyStringValue(key, detailInfoMap) + 1
} else {
resultMap[repoRecord.OwnerID] += 1
key := fmt.Sprint(repoRecord.OwnerID) + "_is_public"
detailInfoMap[key] = getMapKeyStringValue(key, detailInfoMap) + 1
}
key = fmt.Sprint(repoRecord.OwnerID) + "_total_download"
detailInfoMap[key] = getMapKeyStringValue(key, detailInfoMap) + int(repoRecord.CloneCnt)

key = fmt.Sprint(repoRecord.OwnerID) + "_most_download"
if int(repoRecord.CloneCnt) > getMapKeyStringValue(key, detailInfoMap) {
detailInfoMap[key] = int(repoRecord.CloneCnt)
mostDownloadMap[repoRecord.OwnerID] = repoRecord.DisplayName()
}
}
indexTotal += PAGE_SIZE
@@ -1992,7 +2100,7 @@ func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int {
}
}

return resultMap
return resultMap, detailInfoMap, mostDownloadMap
}
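
getRepoDetailInfo above reads these maps back through the "<userId>_total", "_is_private", "_is_public", "_total_download" and "_most_download" key suffixes. A small sketch of the shared convention, using a hypothetical user id 42:

package main

import "fmt"

func main() {
	// Illustrative values for hypothetical user id 42 with 3 repos (2 public, 1 private).
	detailInfoMap := map[string]int{
		"42_total":          3,
		"42_is_public":      2,
		"42_is_private":     1,
		"42_total_download": 120,
		"42_most_download":  80,
	}
	mostDownload := map[int64]string{42: "owner/most-cloned-repo"}
	fmt.Println(detailInfoMap["42_total"], mostDownload[42])
}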

func queryUserRepoOpenIIndex(start_unix int64, end_unix int64) map[int64]float64 {
@@ -2180,6 +2288,7 @@ func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[s
setMapKey("CloudBrainRunTime", cloudTaskRecord.UserID, int(cloudTaskRecord.Duration), resultItemMap)
}
if cloudTaskRecord.Type == 1 { //npu
setMapKey("CloudBrainTwo", cloudTaskRecord.UserID, 1, resultItemMap)
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("NpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else if cloudTaskRecord.JobType == "INFERENCE" {
@@ -2187,14 +2296,32 @@ func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[s
} else {
setMapKey("NpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
} else { //type=0 gpu
} else if cloudTaskRecord.Type == 0 { //type=0 gpu
setMapKey("CloudBrainOne", cloudTaskRecord.UserID, 1, resultItemMap)
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("GpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else if cloudTaskRecord.JobType == "INFERENCE" {
setMapKey("GpuInferenceJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else if cloudTaskRecord.JobType == "BENCHMARK" {
setMapKey("GpuBenchMarkJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else {
setMapKey("GpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
} else if cloudTaskRecord.Type == 2 {
setMapKey("C2Net", cloudTaskRecord.UserID, 1, resultItemMap)
if cloudTaskRecord.ComputeResource == NPUResource {
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("NpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else {
setMapKey("NpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
} else if cloudTaskRecord.ComputeResource == GPUResource {
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("GpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else {
setMapKey("GpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
}
}
}
indexTotal += PAGE_SIZE
@@ -2274,3 +2401,26 @@ func subMonth(t1, t2 time.Time) (month int) {
}
return month
}

func GetContentFromPromote(url string) (string, error) {
defer func() {
if err := recover(); err != nil {
log.Info("not error.", err)
return
}
}()
resp, err := http.Get(url)
if err != nil {
log.Info("Get organizations url error=" + err.Error())
return "", err
}
if resp.StatusCode != 200 {
resp.Body.Close()
log.Info("Get organizations url failed, status code=" + fmt.Sprint(resp.StatusCode))
return "", fmt.Errorf("unexpected status code %d", resp.StatusCode)
}

bytes, err := ioutil.ReadAll(resp.Body)
resp.Body.Close()
if err != nil {
log.Info("read organizations response body error=" + err.Error())
return "", err
}
allLineStr := string(bytes)
return allLineStr, nil
}
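
GetContentFromPromote simply returns the body of a GET request as a string. A minimal usage sketch (the URL is a placeholder, not one configured by this change):

package main

import (
	"fmt"

	"code.gitea.io/gitea/models"
)

func main() {
	// Placeholder URL; real callers pass promotion page URLs from configuration.
	content, err := models.GetContentFromPromote("https://example.com/promote/orgs.txt")
	if err != nil {
		fmt.Println("fetch failed:", err)
		return
	}
	fmt.Println(len(content), "bytes fetched")
}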

+ 200
- 0
models/user_business_struct.go View File

@@ -2,6 +2,27 @@ package models

import "code.gitea.io/gitea/modules/timeutil"

type UserSummaryCurrentYear struct {
ID int64 `xorm:"pk"`
Email string `xorm:"NOT NULL"`
//user
Name string `xorm:"NOT NULL"`
Phone string `xorm:"NULL"`
//user
RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

DateCount int `xorm:"NOT NULL DEFAULT 0"`
MostActiveDay string `xorm:" NULL "` //08.05
RepoInfo string `xorm:"varchar(1000)"` //Created XX repositories (XX public, XX private), downloaded XXX times in total; the "XXXXXXX" repository got the most downloads (XXX times)
DataSetInfo string `xorm:"varchar(500)"` //Created XX datasets, uploaded XX dataset files, downloaded XX times and collected XX times in total
CodeInfo string `xorm:"varchar(500)"` //Commit count, total lines of code committed, latest commit time
CloudBrainInfo string `xorm:"varchar(1000)"` //Created XX cloudbrain tasks (XX debug, XX train, XX inference), ran XXXX card-hours in total, saving xxxxx yuan
//Of these free computing resources, XX% came from Pengcheng Cloudbrain I, XX% from Pengcheng Cloudbrain II, and XX% from the intelligent computing network (C2Net)
PlayARoll string `xorm:"varchar(500)"` //You joined the "我为开源打榜狂" (open source leaderboard) campaign XX times, made the leaderboard XX times, and earned XXX yuan of community incentives in total

Label string `xorm:"varchar(500)"`
}

type UserBusinessAnalysisCurrentYear struct {
ID int64 `xorm:"pk"`
CountDate int64 `xorm:"pk"`
@@ -505,3 +526,182 @@ type UserMetrics struct {
ActivityUserJson string `xorm:"text NULL"` //激活用户列表
CurrentDayRegistUser int `xorm:"NOT NULL DEFAULT 0"` //当天注册用户
}

type UserBusinessAnalysisAll struct {
ID int64 `xorm:"pk"`

CountDate int64 `xorm:"pk"`

//action :ActionMergePullRequest // 11
CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCommitRepo // 5
CommitCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCreateIssue // 10
IssueCount int `xorm:"NOT NULL DEFAULT 0"`

//comment table current date
CommentCount int `xorm:"NOT NULL DEFAULT 0"`

//watch table current date
FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//star table current date
StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//follow table
WatchedCount int `xorm:"NOT NULL DEFAULT 0"`

// user table
GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`

//
CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`

//attachment table
CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`

//0
CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`

//issue, issueassignees
SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`

//baike
EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`

//user
RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

//repo
CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//login count, from elk
LoginCount int `xorm:"NOT NULL DEFAULT 0"`

//openi index
OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`

//user
Email string `xorm:"NOT NULL"`

//user
Name string `xorm:"NOT NULL"`

DataDate string `xorm:"NULL"`

//cloudbraintask
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`

Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysis struct {
ID int64 `xorm:"pk"`
DataDate string `xorm:"pk"`
CountDate int64 `xorm:"NULL"`

//action :ActionMergePullRequest // 11
CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCommitRepo // 5
CommitCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCreateIssue // 6
IssueCount int `xorm:"NOT NULL DEFAULT 0"`

//comment table current date
CommentCount int `xorm:"NOT NULL DEFAULT 0"`

//watch table current date
FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//star table current date
StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//follow table
WatchedCount int `xorm:"NOT NULL DEFAULT 0"`

// user table
GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`

//
CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`

//attachment table
CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`

//0
CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`

//issue, issueassignees
SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`

//baike
EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`

//user
RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

//repo
CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//login count, from elk
LoginCount int `xorm:"NOT NULL DEFAULT 0"`

//openi index
OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`

//user
Email string `xorm:"NOT NULL"`

//user
Name string `xorm:"NOT NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`

Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
}

+ 20
- 23
modules/auth/modelarts.go View File

@@ -57,29 +57,26 @@ type CreateModelArtsTrainJobForm struct {
}

type CreateModelArtsInferenceJobForm struct {
DisplayJobName string `form:"display_job_name" binding:"Required"`
JobName string `form:"job_name" binding:"Required"`
Attachment string `form:"attachment" binding:"Required"`
BootFile string `form:"boot_file" binding:"Required"`
WorkServerNumber int `form:"work_server_number" binding:"Required"`
EngineID int `form:"engine_id" binding:"Required"`
PoolID string `form:"pool_id" binding:"Required"`
Flavor string `form:"flavor" binding:"Required"`
Params string `form:"run_para_list" binding:"Required"`
Description string `form:"description"`
IsSaveParam string `form:"is_save_para"`
ParameterTemplateName string `form:"parameter_template_name"`
PrameterDescription string `form:"parameter_description"`
BranchName string `form:"branch_name" binding:"Required"`
VersionName string `form:"version_name" binding:"Required"`
FlavorName string `form:"flaver_names" binding:"Required"`
EngineName string `form:"engine_names" binding:"Required"`
LabelName string `form:"label_names" binding:"Required"`
TrainUrl string `form:"train_url" binding:"Required"`
ModelName string `form:"model_name" binding:"Required"`
ModelVersion string `form:"model_version" binding:"Required"`
CkptName string `form:"ckpt_name" binding:"Required"`
SpecId int64 `form:"spec_id" binding:"Required"`
DisplayJobName string `form:"display_job_name" binding:"Required"`
JobName string `form:"job_name" binding:"Required"`
Attachment string `form:"attachment" binding:"Required"`
BootFile string `form:"boot_file" binding:"Required"`
WorkServerNumber int `form:"work_server_number" binding:"Required"`
EngineID int `form:"engine_id" binding:"Required"`
PoolID string `form:"pool_id" binding:"Required"`
Flavor string `form:"flavor" binding:"Required"`
Params string `form:"run_para_list" binding:"Required"`
Description string `form:"description"`
BranchName string `form:"branch_name" binding:"Required"`
VersionName string `form:"version_name" binding:"Required"`
FlavorName string `form:"flaver_names" binding:"Required"`
EngineName string `form:"engine_names" binding:"Required"`
LabelName string `form:"label_names" binding:"Required"`
TrainUrl string `form:"train_url" binding:"Required"`
ModelName string `form:"model_name" binding:"Required"`
ModelVersion string `form:"model_version" binding:"Required"`
CkptName string `form:"ckpt_name" binding:"Required"`
SpecId int64 `form:"spec_id" binding:"Required"`
}

func (f *CreateModelArtsTrainJobForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {


+ 6
- 6
modules/cloudbrain/cloudbrain.go View File

@@ -228,7 +228,7 @@ func AdminOrImageCreaterRight(ctx *context.Context) {

}

func GenerateTask(req GenerateCloudBrainTaskReq) error {
func GenerateTask(req GenerateCloudBrainTaskReq) (string, error) {
var versionCount int
if req.JobType == string(models.JobTypeTrain) {
versionCount = 1
@@ -335,11 +335,11 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
})
if err != nil {
log.Error("CreateJob failed:", err.Error(), req.Ctx.Data["MsgID"])
return err
return "", err
}
if jobResult.Code != Success {
log.Error("CreateJob(%s) failed:%s", req.JobName, jobResult.Msg, req.Ctx.Data["MsgID"])
return errors.New(jobResult.Msg)
return "", errors.New(jobResult.Msg)
}

var jobID = jobResult.Payload["jobId"].(string)
@@ -380,13 +380,13 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
})

if err != nil {
return err
return "", err
}

task, err := models.GetCloudbrainByJobID(jobID)
if err != nil {
log.Error("GetCloudbrainByJobID failed: %v", err.Error())
return err
return "", err
}

stringId := strconv.FormatInt(task.ID, 10)
@@ -401,7 +401,7 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
notification.NotifyOtherTask(req.Ctx.User, req.Ctx.Repo.Repository, stringId, req.DisplayJobName, models.ActionCreateDebugGPUTask)
}

return nil
return jobID, nil
}

func IsBenchmarkJob(jobType string) bool {


+ 111
- 0
modules/convert/cloudbrain.go View File

@@ -0,0 +1,111 @@
package convert

import (
"code.gitea.io/gitea/models"
api "code.gitea.io/gitea/modules/structs"
)

func ToCloudBrain(task *models.Cloudbrain) *api.Cloudbrain {
return &api.Cloudbrain{
ID: task.ID,
JobID: task.JobID,
JobType: task.JobType,
Type: task.Type,
DisplayJobName: task.DisplayJobName,
Status: task.Status,
CreatedUnix: int64(task.CreatedUnix),
RepoID: task.RepoID,
Duration: task.Duration,
TrainJobDuration: task.TrainJobDuration,
ImageID: task.ImageID,
Image: task.Image,
Uuid: task.Uuid,
DatasetName: task.DatasetName,
ComputeResource: task.ComputeResource,
AiCenter: task.AiCenter,
BranchName: task.BranchName,
Parameters: task.Parameters,
BootFile: task.BootFile,
Description: task.Description,
ModelName: task.ModelName,

ModelVersion: task.ModelVersion,
CkptName: task.CkptName,

StartTime: int64(task.StartTime),
EndTime: int64(task.EndTime),

Spec: ToSpecification(task.Spec),
}
}
func ToAttachment(attachment *models.Attachment) *api.AttachmentShow {
return &api.AttachmentShow{
ID: attachment.ID,
UUID: attachment.UUID,
DatasetID: attachment.DatasetID,
ReleaseID: attachment.ReleaseID,
UploaderID: attachment.UploaderID,
CommentID: attachment.CommentID,
Name: attachment.Name,
Description: attachment.Description,
DownloadCount: attachment.DownloadCount,
UseNumber: attachment.UseNumber,
Size: attachment.Size,
IsPrivate: attachment.IsPrivate,
DecompressState: attachment.DecompressState,
Type: attachment.Type,
CreatedUnix: int64(attachment.CreatedUnix),
}
}

func ToDataset(dataset *models.Dataset) *api.Dataset {
var convertAttachments []*api.AttachmentShow
for _, attachment := range dataset.Attachments {
convertAttachments = append(convertAttachments, ToAttachment(attachment))
}
return &api.Dataset{
ID: dataset.ID,
Title: dataset.Title,
Status: dataset.Status,
Category: dataset.Category,
Description: dataset.Description,
DownloadTimes: dataset.DownloadTimes,
UseCount: dataset.UseCount,
NumStars: dataset.NumStars,
Recommend: dataset.Recommend,
License: dataset.License,
Task: dataset.Task,
ReleaseID: dataset.ReleaseID,
UserID: dataset.UserID,
RepoID: dataset.RepoID,
Repo: &api.RepositoryShow{
OwnerName: dataset.Repo.OwnerName,
Name: dataset.Repo.Name,
},
CreatedUnix: int64(dataset.CreatedUnix),
UpdatedUnix: int64(dataset.UpdatedUnix),
Attachments: convertAttachments,
}
}

func ToSpecification(s *models.Specification) *api.SpecificationShow {
return &api.SpecificationShow{
ID: s.ID,
AccCardsNum: s.AccCardsNum,
AccCardType: s.AccCardType,
CpuCores: s.CpuCores,
MemGiB: s.MemGiB,
GPUMemGiB: s.GPUMemGiB,
ShareMemGiB: s.ShareMemGiB,
ComputeResource: s.ComputeResource,
UnitPrice: s.UnitPrice,
}
}

func ToTagger(user *models.User) *api.Tagger {
return &api.Tagger{
Name: user.Name,
RelAvatarURL: user.RelAvatarLink(),
Email: user.Email,
}
}
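
These converters map internal models onto the new API structs. A hypothetical handler sketch showing how ToCloudBrain might be used; the handler name and route parameter are illustrative, only models.GetCloudbrainByJobID and the convert call come from this change:

package repo

import (
	"net/http"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/context"
	"code.gitea.io/gitea/modules/convert"
)

// GetCloudbrainTaskByJobID is a hypothetical handler name, not part of this change.
func GetCloudbrainTaskByJobID(ctx *context.APIContext) {
	task, err := models.GetCloudbrainByJobID(ctx.Params(":jobid"))
	if err != nil {
		ctx.Error(http.StatusNotFound, "GetCloudbrainByJobID", err)
		return
	}
	ctx.JSON(http.StatusOK, convert.ToCloudBrain(task))
}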

+ 4
- 4
modules/grampus/grampus.go View File

@@ -102,7 +102,7 @@ func getDatasetGrampus(datasetInfos map[string]models.DatasetInfo) []models.Gram
return datasetGrampus
}

func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error) {
func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) {
createTime := timeutil.TimeStampNow()

centerID, centerName := getCentersParamter(ctx, req)
@@ -150,7 +150,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
})
if err != nil {
log.Error("createJob failed: %v", err.Error())
return err
return "", err
}

jobID := jobResult.JobInfo.JobID
@@ -191,7 +191,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error

if err != nil {
log.Error("CreateCloudbrain(%s) failed:%v", req.DisplayJobName, err.Error())
return err
return "", err
}

var actionType models.ActionType
@@ -202,7 +202,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
}
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, actionType)

return nil
return jobID, nil
}

func getCentersParamter(ctx *context.Context, req *GenerateTrainJobReq) ([]string, []string) {


+ 26
- 0
modules/grampus/resty.go View File

@@ -245,6 +245,32 @@ func GetTrainJobLog(jobID string) (string, error) {
return logContent, nil
}

func GetGrampusMetrics(jobID string) (models.GetTrainJobMetricStatisticResult, error) {
checkSetting()
client := getRestyClient()
var result models.GetTrainJobMetricStatisticResult
res, err := client.R().
SetAuthToken(TOKEN).
Get(HOST + urlTrainJob + "/" + jobID + "/task/0/replica/0/metrics")

if err != nil {
return result, fmt.Errorf("resty GetTrainJobLog: %v", err)
}
if err = json.Unmarshal([]byte(res.String()), &result); err != nil {
log.Error("GetGrampusMetrics json.Unmarshal failed(%s): %v", res.String(), err.Error())
return result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error())
}
if res.StatusCode() != http.StatusOK {
log.Error("Call GrampusMetrics failed(%d):%s(%s)", res.StatusCode(), result.ErrorCode, result.ErrorMsg)
return result, fmt.Errorf("Call GrampusMetrics failed(%d):%d(%s)", res.StatusCode(), result.ErrorCode, result.ErrorMsg)
}
if !result.IsSuccess {
log.Error("GetGrampusMetrics(%s) failed", jobID)
return result, fmt.Errorf("GetGrampusMetrics failed:%s", result.ErrorMsg)
}
return result, nil
}
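
A hypothetical caller sketch for GetGrampusMetrics; the helper name and its placement are assumptions, and only the IsSuccess/ErrorCode/ErrorMsg fields of the result are visible in this diff:

package cloudbrainTask

import (
	"code.gitea.io/gitea/modules/grampus"
	"code.gitea.io/gitea/modules/log"
)

// logGrampusMetrics is an illustrative helper, not part of this change.
func logGrampusMetrics(jobID string) {
	result, err := grampus.GetGrampusMetrics(jobID)
	if err != nil {
		log.Error("GetGrampusMetrics(%s) failed: %v", jobID, err)
		return
	}
	// Only IsSuccess/ErrorCode/ErrorMsg are visible in this diff, so just log success here.
	log.Info("GetGrampusMetrics(%s) succeeded: %v", jobID, result.IsSuccess)
}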

func StopJob(jobID string) (*models.GrampusStopJobResponse, error) {
checkSetting()
client := getRestyClient()


+ 13
- 13
modules/modelarts/modelarts.go View File

@@ -350,7 +350,7 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc
return nil
}

func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error) {
func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) {
createTime := timeutil.TimeStampNow()
var jobResult *models.CreateTrainJobResult
var createErr error
@@ -410,17 +410,17 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
})
if errTemp != nil {
log.Error("InsertCloudbrainTemp failed: %v", errTemp.Error())
return errTemp
return "", errTemp
}
}
return createErr
return "", createErr
}
jobId := strconv.FormatInt(jobResult.JobID, 10)
jobID := strconv.FormatInt(jobResult.JobID, 10)
createErr = models.CreateCloudbrain(&models.Cloudbrain{
Status: TransTrainJobStatus(jobResult.Status),
UserID: ctx.User.ID,
RepoID: ctx.Repo.Repository.ID,
JobID: jobId,
JobID: jobID,
JobName: req.JobName,
DisplayJobName: req.DisplayJobName,
JobType: string(models.JobTypeTrain),
@@ -458,10 +458,10 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error

if createErr != nil {
log.Error("CreateCloudbrain(%s) failed:%v", req.DisplayJobName, createErr.Error())
return createErr
return "", createErr
}
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobId, req.DisplayJobName, models.ActionCreateTrainTask)
return nil
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, models.ActionCreateTrainTask)
return jobID, nil
}

func GenerateModelConvertTrainJob(req *GenerateTrainJobReq) (*models.CreateTrainJobResult, error) {
@@ -682,7 +682,7 @@ func GetOutputPathByCount(TotalVersionCount int) (VersionOutputPath string) {
return VersionOutputPath
}

func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (err error) {
func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (jobId string, err error) {
createTime := timeutil.TimeStampNow()
var jobResult *models.CreateTrainJobResult
var createErr error
@@ -742,10 +742,10 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e
})
if err != nil {
log.Error("InsertCloudbrainTemp failed: %v", err.Error())
return err
return "", err
}
}
return err
return "", err
}

// attach, err := models.GetAttachmentByUUID(req.Uuid)
@@ -796,7 +796,7 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e

if err != nil {
log.Error("CreateCloudbrain(%s) failed:%v", req.JobName, err.Error())
return err
return "", err
}
if req.JobType == string(models.JobTypeModelSafety) {
task, err := models.GetCloudbrainByJobID(jobID)
@@ -807,7 +807,7 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, models.ActionCreateInferenceTask)
}

return nil
return jobID, nil
}

func GetNotebookImageName(imageId string) (string, error) {


+ 30
- 16
modules/setting/setting.go View File

@@ -598,20 +598,23 @@ var (

//grampus config
Grampus = struct {
Env string
Host string
UserName string
Password string
SpecialPools string
C2NetSequence string
SyncScriptProject string
LocalCenterID string
AiCenterInfo string
Env string
Host string
UserName string
Password string
SpecialPools string
C2NetSequence string
SyncScriptProject string
LocalCenterID string
AiCenterInfo string
AiCenterCodeAndNameInfo string
UsageRateBeginTime string
}{}

C2NetInfos *C2NetSqInfos
CenterInfos *AiCenterInfos
C2NetMapInfo map[string]*C2NetSequenceInfo
C2NetInfos *C2NetSqInfos
CenterInfos *AiCenterInfos
C2NetMapInfo map[string]*C2NetSequenceInfo
AiCenterCodeAndNameMapInfo map[string]*C2NetSequenceInfo

//elk config
ElkUrl string
@@ -1451,7 +1454,7 @@ func NewContext() {
MaxDuration = sec.Key("MAX_DURATION").MustInt64(14400)
TrainGpuTypes = sec.Key("TRAIN_GPU_TYPES").MustString("")
TrainResourceSpecs = sec.Key("TRAIN_RESOURCE_SPECS").MustString("")
MaxModelSize = sec.Key("MAX_MODEL_SIZE").MustFloat64(500)
MaxModelSize = sec.Key("MAX_MODEL_SIZE").MustFloat64(200)
InferenceGpuTypes = sec.Key("INFERENCE_GPU_TYPES").MustString("")
InferenceResourceSpecs = sec.Key("INFERENCE_RESOURCE_SPECS").MustString("")
SpecialPools = sec.Key("SPECIAL_POOL").MustString("")
@@ -1651,13 +1654,24 @@ func getGrampusConfig() {
Grampus.Password = sec.Key("PASSWORD").MustString("")
Grampus.SpecialPools = sec.Key("SPECIAL_POOL").MustString("")
Grampus.C2NetSequence = sec.Key("C2NET_SEQUENCE").MustString("{\"sequence\":[{\"id\":1,\"name\":\"cloudbrain_one\",\"content\":\"鹏城云脑一号\",\"content_en\":\"Pencheng Cloudbrain Ⅰ\"},{\"id\":2,\"name\":\"cloudbrain_two\",\"content\":\"鹏城云脑二号\",\"content_en\":\"Pencheng Cloudbrain Ⅱ\"},{\"id\":3,\"name\":\"beida\",\"content\":\"北大人工智能集群系统\",\"content_en\":\"Peking University AI Center\"},{\"id\":4,\"name\":\"hefei\",\"content\":\"合肥类脑智能开放平台\",\"content_en\":\"Hefei AI Center\"},{\"id\":5,\"name\":\"wuhan\",\"content\":\"武汉人工智能计算中心\",\"content_en\":\"Wuhan AI Center\"},{\"id\":6,\"name\":\"xian\",\"content\":\"西安未来人工智能计算中心\",\"content_en\":\"Xi'an AI Center\"},{\"id\":7,\"pclcci\":\"more\",\"content\":\"鹏城云计算所\",\"content_en\":\"Pengcheng Cloud Computing Institute\"},{\"id\":8,\"name\":\"xuchang\",\"content\":\"中原人工智能计算中心\",\"content_en\":\"Zhongyuan AI Center\"},{\"id\":9,\"name\":\"chengdu\",\"content\":\"成都人工智能计算中心\",\"content_en\":\"Chengdu AI Center\"},{\"id\":10,\"name\":\"more\",\"content\":\"横琴先进智能计算中心\",\"content_en\":\"Hengqin AI Center\"},{\"id\":11,\"name\":\"more\",\"content\":\"国家超级计算济南中心\",\"content_en\":\"HPC & AI Center\"}]}")
Grampus.AiCenterCodeAndNameInfo = sec.Key("AI_CENTER_CODE_AND_NAME").MustString("{\"sequence\":[{\"id\":1,\"name\":\"cloudbrain_one\",\"content\":\"鹏城云脑一号\",\"content_en\":\"Pencheng Cloudbrain Ⅰ\"},{\"id\":2,\"name\":\"cloudbrain_two\",\"content\":\"鹏城云脑二号\",\"content_en\":\"Pencheng Cloudbrain Ⅱ\"},{\"id\":3,\"name\":\"beida\",\"content\":\"北大人工智能集群系统\",\"content_en\":\"Peking University AI Center\"},{\"id\":4,\"name\":\"hefei\",\"content\":\"合肥类脑智能开放平台\",\"content_en\":\"Hefei AI Center\"},{\"id\":5,\"name\":\"wuhan\",\"content\":\"武汉人工智能计算中心\",\"content_en\":\"Wuhan AI Center\"},{\"id\":6,\"name\":\"xian\",\"content\":\"西安未来人工智能计算中心\",\"content_en\":\"Xi'an AI Center\"},{\"id\":7,\"pclcci\":\"more\",\"content\":\"鹏城云计算所\",\"content_en\":\"Pengcheng Cloud Computing Institute\"},{\"id\":8,\"name\":\"xuchang\",\"content\":\"中原人工智能计算中心\",\"content_en\":\"Zhongyuan AI Center\"},{\"id\":9,\"name\":\"chengdu\",\"content\":\"成都人工智能计算中心\",\"content_en\":\"Chengdu AI Center\"},{\"id\":10,\"name\":\"more\",\"content\":\"横琴先进智能计算中心\",\"content_en\":\"Hengqin AI Center\"},{\"id\":11,\"name\":\"more\",\"content\":\"国家超级计算济南中心\",\"content_en\":\"HPC & AI Center\"}]}")
Grampus.UsageRateBeginTime = sec.Key("USAGE_RATE_BEGIN_TIME").MustString("2021-01-01 00:00:00")
if Grampus.C2NetSequence != "" {
if err := json.Unmarshal([]byte(Grampus.C2NetSequence), &C2NetInfos); err != nil {
log.Error("Unmarshal(C2NetSequence) failed:%v", err)
}
C2NetMapInfo=make(map[string]*C2NetSequenceInfo)
for _,value :=range C2NetInfos.C2NetSqInfo{
C2NetMapInfo[value.Name]=value
C2NetMapInfo = make(map[string]*C2NetSequenceInfo)
for _, value := range C2NetInfos.C2NetSqInfo {
C2NetMapInfo[value.Name] = value
}
}
if Grampus.AiCenterCodeAndNameInfo != "" {
if err := json.Unmarshal([]byte(Grampus.AiCenterCodeAndNameInfo), &C2NetInfos); err != nil {
log.Error("Unmarshal(AiCenterCodeAndNameInfo) failed:%v", err)
}
AiCenterCodeAndNameMapInfo = make(map[string]*C2NetSequenceInfo)
for _, value := range C2NetInfos.C2NetSqInfo {
AiCenterCodeAndNameMapInfo[value.Name] = value
}
}
Grampus.SyncScriptProject = sec.Key("SYNC_SCRIPT_PROJECT").MustString("script_for_grampus")


+ 2
- 2
modules/storage/minio.go View File

@@ -144,8 +144,8 @@ func (m *MinioStorage) HasObject(path string) (bool, error) {

// Indicate to our routine to exit cleanly upon return.
defer close(doneCh)
objectCh := m.client.ListObjects(m.bucket, m.buildMinioPath(path), false, doneCh)
//objectCh := m.client.ListObjects(m.bucket, m.buildMinioPath(path), false, doneCh)
objectCh := m.client.ListObjects(m.bucket, path, false, doneCh)
for object := range objectCh {
if object.Err != nil {
return hasObject, object.Err


+ 26
- 10
modules/storage/minio_ext.go View File

@@ -3,7 +3,6 @@ package storage
import (
"encoding/xml"
"errors"
"path"
"sort"
"strconv"
"strings"
@@ -101,7 +100,7 @@ func getClients() (*minio_ext.Client, *miniov6.Core, error) {
return client, core, nil
}

func GenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, partSize int64) (string, error) {
func GenMultiPartSignedUrl(objectName string, uploadId string, partNumber int, partSize int64) (string, error) {
minioClient, _, err := getClients()
if err != nil {
log.Error("getClients failed:", err.Error())
@@ -110,7 +109,7 @@ func GenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, partSiz

minio := setting.Attachment.Minio
bucketName := minio.Bucket
objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
//objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")

return minioClient.GenUploadPartSignedUrl(uploadId, bucketName, objectName, partNumber, partSize, PresignedUploadPartUrlExpireTime, setting.Attachment.Minio.Location)
}
@@ -268,6 +267,23 @@ func MinioCopyFiles(bucketName string, srcPath string, destPath string, Files []
return fileTotalSize, nil
}

func MinioCopyAFile(srcBucketName, srcObjectName, destBucketName, destObjectName string) (int64, error) {
_, core, err := getClients()
var fileTotalSize int64
fileTotalSize = 0
if err != nil {
log.Error("getClients failed:", err.Error())
return fileTotalSize, err
}
meta, err := core.StatObject(srcBucketName, srcObjectName, miniov6.StatObjectOptions{})
if err != nil {
log.Info("Get file error:" + err.Error())
return fileTotalSize, err
}
if _, err = core.CopyObject(srcBucketName, srcObjectName, destBucketName, destObjectName, meta.UserMetadata); err != nil {
log.Error("CopyObject failed:" + err.Error())
return fileTotalSize, err
}
fileTotalSize = meta.Size
return fileTotalSize, nil
}
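
A minimal usage sketch for MinioCopyAFile; bucket and object names are placeholders:

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/storage"
)

func main() {
	// Bucket and object names are placeholders.
	size, err := storage.MinioCopyAFile("bucket-a", "dataset/src.zip", "bucket-b", "dataset/dst.zip")
	if err != nil {
		fmt.Println("copy failed:", err)
		return
	}
	fmt.Println("copied", size, "bytes")
}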

func MinioPathCopy(bucketName string, srcPath string, destPath string) (int64, error) {
_, core, err := getClients()
var fileTotalSize int64
@@ -301,7 +317,7 @@ func MinioPathCopy(bucketName string, srcPath string, destPath string) (int64, e
return fileTotalSize, nil
}

func NewMultiPartUpload(uuid string) (string, error) {
func NewMultiPartUpload(objectName string) (string, error) {
_, core, err := getClients()
if err != nil {
log.Error("getClients failed:", err.Error())
@@ -310,12 +326,12 @@ func NewMultiPartUpload(uuid string) (string, error) {

minio := setting.Attachment.Minio
bucketName := minio.Bucket
objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
//objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")

return core.NewMultipartUpload(bucketName, objectName, miniov6.PutObjectOptions{})
}

func CompleteMultiPartUpload(uuid string, uploadID string, totalChunks int) (string, error) {
func CompleteMultiPartUpload(objectName string, uploadID string, totalChunks int) (string, error) {
client, core, err := getClients()
if err != nil {
log.Error("getClients failed:", err.Error())
@@ -324,8 +340,8 @@ func CompleteMultiPartUpload(uuid string, uploadID string, totalChunks int) (str

minio := setting.Attachment.Minio
bucketName := minio.Bucket
objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
//objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
log.Info("bucketName=" + bucketName + " objectName=" + objectName + " uploadID=" + uploadID)
partInfos, err := client.ListObjectParts(bucketName, objectName, uploadID)
if err != nil {
log.Error("ListObjectParts failed:", err.Error())
@@ -351,7 +367,7 @@ func CompleteMultiPartUpload(uuid string, uploadID string, totalChunks int) (str
return core.CompleteMultipartUpload(bucketName, objectName, uploadID, complMultipartUpload.Parts)
}

func GetPartInfos(uuid string, uploadID string) (string, error) {
func GetPartInfos(objectName string, uploadID string) (string, error) {
minioClient, _, err := getClients()
if err != nil {
log.Error("getClients failed:", err.Error())
@@ -360,7 +376,7 @@ func GetPartInfos(uuid string, uploadID string) (string, error) {

minio := setting.Attachment.Minio
bucketName := minio.Bucket
objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
//objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")

partInfos, err := minioClient.ListObjectParts(bucketName, objectName, uploadID)
if err != nil {


+ 24
- 25
modules/storage/obs.go View File

@@ -90,17 +90,16 @@ func listAllParts(uuid, uploadID, key string) (output *obs.ListPartsOutput, err
} else {
continue
}

break
}

return output, nil
}

func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) {
key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
func GetObsPartInfos(objectName, uploadID string) (string, error) {
key := objectName
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")

allParts, err := listAllParts(uuid, uploadID, key)
allParts, err := listAllParts(objectName, uploadID, key)
if err != nil {
log.Error("listAllParts failed: %v", err)
return "", err
@@ -114,10 +113,11 @@ func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) {
return chunks, nil
}

func NewObsMultiPartUpload(uuid, fileName string) (string, error) {
func NewObsMultiPartUpload(objectName string) (string, error) {
input := &obs.InitiateMultipartUploadInput{}
input.Bucket = setting.Bucket
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Key = objectName
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")

output, err := ObsCli.InitiateMultipartUpload(input)
if err != nil {
@@ -128,13 +128,14 @@ func NewObsMultiPartUpload(uuid, fileName string) (string, error) {
return output.UploadId, nil
}

func CompleteObsMultiPartUpload(uuid, uploadID, fileName string, totalChunks int) error {
func CompleteObsMultiPartUpload(objectName, uploadID string, totalChunks int) error {
input := &obs.CompleteMultipartUploadInput{}
input.Bucket = setting.Bucket
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
//input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Key = objectName
input.UploadId = uploadID

allParts, err := listAllParts(uuid, uploadID, input.Key)
allParts, err := listAllParts(objectName, uploadID, input.Key)
if err != nil {
log.Error("listAllParts failed: %v", err)
return err
@@ -153,15 +154,16 @@ func CompleteObsMultiPartUpload(uuid, uploadID, fileName string, totalChunks int
return err
}

log.Info("uuid:%s, RequestId:%s", uuid, output.RequestId)
log.Info("uuid:%s, RequestId:%s", objectName, output.RequestId)

return nil
}

func ObsMultiPartUpload(uuid string, uploadId string, partNumber int, fileName string, putBody io.ReadCloser) error {
func ObsMultiPartUpload(objectName string, uploadId string, partNumber int, fileName string, putBody io.ReadCloser) error {
input := &obs.UploadPartInput{}
input.Bucket = setting.Bucket
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Key = objectName
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.UploadId = uploadId
input.PartNumber = partNumber
input.Body = putBody
@@ -241,11 +243,6 @@ func ObsDownloadAFile(bucket string, key string) (io.ReadCloser, error) {
}
}

func ObsDownload(uuid string, fileName string) (io.ReadCloser, error) {

return ObsDownloadAFile(setting.Bucket, strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/"))
}

func ObsModelDownload(JobName string, fileName string) (io.ReadCloser, error) {
input := &obs.GetObjectInput{}
input.Bucket = setting.Bucket
@@ -297,7 +294,7 @@ func ObsCopyManyFile(srcBucket string, srcPath string, destBucket string, destPa
log.Info("Get File error, error=" + err.Error())
continue
}
obsCopyFile(srcBucket, srcKey, destBucket, destKey)
ObsCopyFile(srcBucket, srcKey, destBucket, destKey)
fileTotalSize += out.ContentLength
}

@@ -321,7 +318,7 @@ func ObsCopyAllFile(srcBucket string, srcPath string, destBucket string, destPat
index++
for _, val := range output.Contents {
destKey := destPath + val.Key[length:]
obsCopyFile(srcBucket, val.Key, destBucket, destKey)
ObsCopyFile(srcBucket, val.Key, destBucket, destKey)
fileTotalSize += val.Size
}
if output.IsTruncated {
@@ -340,7 +337,7 @@ func ObsCopyAllFile(srcBucket string, srcPath string, destBucket string, destPat
return fileTotalSize, nil
}

func obsCopyFile(srcBucket string, srcKeyName string, destBucket string, destKeyName string) error {
func ObsCopyFile(srcBucket string, srcKeyName string, destBucket string, destKeyName string) error {
input := &obs.CopyObjectInput{}
input.Bucket = destBucket
input.Key = destKeyName
@@ -529,11 +526,12 @@ func GetObsListObject(jobName, outPutPath, parentDir, versionName string) ([]Fil
}
}

func ObsGenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, fileName string) (string, error) {
func ObsGenMultiPartSignedUrl(objectName string, uploadId string, partNumber int) (string, error) {

input := &obs.CreateSignedUrlInput{}
input.Bucket = setting.Bucket
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Key = objectName
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Expires = 60 * 60
input.Method = obs.HttpMethodPut

@@ -581,10 +579,11 @@ func GetObsCreateSignedUrl(jobName, parentDir, fileName string) (string, error)
return GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, parentDir, fileName), "/"))
}

func ObsGetPreSignedUrl(uuid, fileName string) (string, error) {
func ObsGetPreSignedUrl(objectName, fileName string) (string, error) {
input := &obs.CreateSignedUrlInput{}
input.Method = obs.HttpMethodGet
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Key = objectName
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Bucket = setting.Bucket
input.Expires = 60 * 60



+ 45
- 0
modules/structs/attachment.go View File

@@ -27,3 +27,48 @@ type Attachment struct {
type EditAttachmentOptions struct {
Name string `json:"name"`
}

type Dataset struct {
ID int64 `json:"id"`
Title string `json:"title"`
Status int32 `json:"status"`
Category string `json:"category"`
Description string `json:"description"`
DownloadTimes int64 `json:"downloadTimes"`
UseCount int64 `json:"useCount"`
NumStars int `json:"numStars"`
Recommend bool `json:"recommend"`
License string `json:"license"`
Task string `json:"task"`
ReleaseID int64 `json:"releaseId"`
UserID int64 `json:"userId"`
RepoID int64 `json:"repoId"`
Repo *RepositoryShow `json:"repo"`
CreatedUnix int64 `json:"createdUnix"`
UpdatedUnix int64 `json:"updatedUnix"`

Attachments []*AttachmentShow `json:"attachments"`
}

type RepositoryShow struct {
OwnerName string `json:"ownerName"`
Name string `json:"name"`
}

type AttachmentShow struct {
ID int64 `json:"id"`
UUID string `json:"uuid"`
DatasetID int64 `json:"datasetId"`
ReleaseID int64 `json:"releaseId"`
UploaderID int64 `json:"uploaderId"`
CommentID int64 `json:"commentId"`
Name string `json:"name"`
Description string `json:"description"`
DownloadCount int64 `json:"downloadCount"`
UseNumber int64 `json:"useNumber"`
Size int64 `json:"size"`
IsPrivate bool `json:"isPrivate"`
DecompressState int32 `json:"decompressState"`
Type int `json:"type"`
CreatedUnix int64 `json:"createdUnix"`
}

+ 84
- 0
modules/structs/cloudbrain.go View File

@@ -0,0 +1,84 @@
package structs

type CreateGrampusTrainJobOption struct {
DisplayJobName string `json:"display_job_name" binding:"Required"`
JobName string `json:"job_name" binding:"Required" `
Attachment string `json:"attachment" binding:"Required"`
BootFile string `json:"boot_file" binding:"Required"`
ImageID string `json:"image_id" binding:"Required"`
Params string `json:"run_para_list" binding:"Required"`
Description string `json:"description"`
BranchName string `json:"branch_name" binding:"Required"`
EngineName string `json:"engine_name" binding:"Required"`
WorkServerNumber int `json:"work_server_number" binding:"Required"`
Image string `json:"image" binding:"Required"`
DatasetName string `json:"dataset_name" binding:"Required"`
ModelName string `json:"model_name"`
ModelVersion string `json:"model_version"`
CkptName string `json:"ckpt_name"`
LabelName string `json:"label_names"`
PreTrainModelUrl string `json:"pre_train_model_url"`
SpecId int64 `json:"spec_id" binding:"Required"`
}

type CreateTrainJobOption struct {
Type int `json:"type"`
DisplayJobName string `json:"display_job_name" binding:"Required"`
ImageID string `json:"image_id"`
Image string `json:"image" binding:"Required"`
Attachment string `json:"attachment" binding:"Required"`
DatasetName string `json:"dataset_name" binding:"Required"`
Description string `json:"description" `
BootFile string `json:"boot_file" binding:"Required"`
BranchName string `json:"branch_name" binding:"Required"`
Params string `json:"run_para_list" binding:"Required"`
WorkServerNumber int `json:"work_server_number"`
ModelName string `json:"model_name"`
ModelVersion string `json:"model_version"`
CkptName string `json:"ckpt_name"`
LabelName string `json:"label_names"`
PreTrainModelUrl string `json:"pre_train_model_url"`
SpecId int64 `json:"spec_id" binding:"Required"`
}

type Cloudbrain struct {
ID int64 `json:"id"`
JobID string `json:"job_id"`
JobType string `json:"job_type"`
Type int `json:"type"`
DisplayJobName string `json:"display_job_name"`
Status string `json:"status"`
CreatedUnix int64 `json:"created_unix"`
RepoID int64 `json:"repo_id"`
Duration int64 `json:"duration"` //run duration in seconds
TrainJobDuration string `json:"train_job_duration"`
ImageID string `json:"image_id"` //grampus image_id
Image string `json:"image"`
Uuid string `json:"uuid"` //dataset id
DatasetName string `json:"dataset_name"`
ComputeResource string `json:"compute_resource"` //compute resource, e.g. npu
AiCenter string `json:"ai_center"` //grampus ai center: center_id+center_name
BranchName string `json:"branch_name"` //branch name
Parameters string `json:"parameters"` //param list passed to ModelArts
BootFile string `json:"boot_file"` //boot (entry) file
Description string `json:"description"` //description
ModelName string `json:"model_name"` //model name
ModelVersion string `json:"model_version"` //model version
CkptName string `json:"ckpt_name"` //checkpoint (weight) file name
StartTime int64 `json:"start_time"`
EndTime int64 `json:"end_time"`

Spec *SpecificationShow `json:"spec"`
}

type SpecificationShow struct {
ID int64 `json:"id"`
AccCardsNum int `json:"acc_cards_num"`
AccCardType string `json:"acc_card_type"`
CpuCores int `json:"cpu_cores"`
MemGiB float32 `json:"mem_gi_b"`
GPUMemGiB float32 `json:"gpu_mem_gi_b"`
ShareMemGiB float32 `json:"share_mem_gi_b"`
ComputeResource string `json:"compute_resource"`
UnitPrice int `json:"unit_price"`
}

+ 7
- 0
modules/structs/tagger.go View File

@@ -0,0 +1,7 @@
package structs

type Tagger struct {
Name string `json:"name"`
Email string `json:"email"`
RelAvatarURL string `json:"relAvatarURL"`
}

+ 6
- 2
options/locale/locale_en-US.ini View File

@@ -617,6 +617,7 @@ organization = Organizations
uid = Uid
u2f = Security Keys
bind_wechat = Bind WeChat
no_wechat_bind = Cannot perform this operation, please bind WeChat first.
wechat_bind = WeChat Binding
bind_account_information = Bind account information
bind_time = Bind Time
@@ -1036,6 +1037,7 @@ cloudbrain.time.starttime=Start run time
cloudbrain.time.endtime=End run time
cloudbrain.datasetdownload=Dataset download url
model_manager = Model
model_experience = Model Experience
model_noright=You have no right to do the operation.
model_rename=Duplicate model name, please modify model name.

@@ -1266,12 +1268,14 @@ model.manage.model_accuracy = Model Accuracy
model.convert=Model Transformation
model.list=Model List
model.manage.create_new_convert_task=Create Model Transformation Task

model.manage.import_local_model=Import Local Model
model.manage.import_online_model=Import Online Model
model.manage.notcreatemodel=No model has been created
model.manage.init1=Code version: You have not initialized the code repository, please
model.manage.init2=initialized first ;
model.manage.createtrainjob_tip=Training task: you haven't created a training task, please create it first
model.manage.createtrainjob=Training task.
model.manage.createmodel_tip=You can import a local model or an online model. To import an online model, you should first
model.manage.createtrainjob=create a training task.
model.manage.delete=Delete Model
model.manage.delete_confirm=Are you sure to delete this model? Once this model is deleted, it cannot be restored.
model.manage.select.trainjob=Select train task


+ 6
- 2
options/locale/locale_zh-CN.ini View File

@@ -622,6 +622,7 @@ organization=组织
uid=用户 ID
u2f=安全密钥
wechat_bind = 微信绑定
no_wechat_bind = 不能创建任务,请先绑定微信。
bind_wechat = 绑定微信
bind_account_information = 绑定账号信息
bind_time = 绑定时间
@@ -1036,6 +1037,7 @@ datasets.desc=数据集功能
cloudbrain_helper=使用GPU/NPU资源,开启Notebook、模型训练任务等

model_manager = 模型
model_experience = 模型体验
model_noright=您没有操作权限。
model_rename=模型名称重复,请修改模型名称

@@ -1281,12 +1283,14 @@ model.manage.model_accuracy = 模型精度
model.convert=模型转换任务
model.list=模型列表
model.manage.create_new_convert_task=创建模型转换任务

model.manage.import_local_model=导入本地模型
model.manage.import_online_model=导入线上模型
model.manage.notcreatemodel=未创建过模型
model.manage.init1=代码版本:您还没有初始化代码仓库,请先
model.manage.init2=创建代码版本;
model.manage.createtrainjob_tip=训练任务:您还没创建过训练任务,请先创建
model.manage.createtrainjob=训练任务。
model.manage.createmodel_tip=您可以导入本地模型或者导入线上模型。导入线上模型需先
model.manage.createtrainjob=创建训练任务。
model.manage.delete=删除模型
model.manage.delete_confirm=你确认删除该模型么?此模型一旦删除不可恢复。
model.manage.select.trainjob=选择训练任务


+ 17
- 19346
package-lock.json
File diff suppressed because it is too large
View File


+ 6
- 2
routers/admin/cloudbrains.go View File

@@ -17,6 +17,7 @@ import (
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
cloudbrainService "code.gitea.io/gitea/services/cloudbrain"
)

const (
@@ -95,6 +96,8 @@ func CloudBrains(ctx *context.Context) {
models.LoadSpecs4CloudbrainInfo(ciTasks)

for i, task := range ciTasks {
ciTasks[i] = cloudbrainService.UpdateCloudbrainAiCenter(ciTasks[i])
ciTasks[i].Cloudbrain.AiCenter = repo.GetAiCenterNameByCode(ciTasks[i].Cloudbrain.AiCenter, ctx.Language())
ciTasks[i].CanDebug = true
ciTasks[i].CanDel = true
ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource
@@ -186,7 +189,8 @@ func DownloadCloudBrains(ctx *context.Context) {
}
models.LoadSpecs4CloudbrainInfo(pageRecords)
for _, record := range pageRecords {

record = cloudbrainService.UpdateCloudbrainAiCenter(record)
record.Cloudbrain.AiCenter = repo.GetAiCenterNameByCode(record.Cloudbrain.AiCenter, ctx.Language())
for k, v := range allValues(row, record, ctx) {
f.SetCellValue(cloudBrain, k, v)
}
@@ -208,7 +212,7 @@ func allValues(row int, rs *models.CloudbrainInfo, ctx *context.Context) map[str
return map[string]string{getCellName("A", row): rs.DisplayJobName, getCellName("B", row): repo.GetCloudbrainCluster(rs.Cloudbrain, ctx),
getCellName("C", row): rs.JobType, getCellName("D", row): rs.Status, getCellName("E", row): time.Unix(int64(rs.Cloudbrain.CreatedUnix), 0).Format(CREATE_TIME_FORMAT),
getCellName("F", row): getDurationTime(rs), getCellName("G", row): rs.ComputeResource,
getCellName("H", row): repo.GetCloudbrainAiCenter(rs.Cloudbrain, ctx), getCellName("I", row): getCloudbrainCardType(rs),
getCellName("H", row): rs.Cloudbrain.AiCenter, getCellName("I", row): getCloudbrainCardType(rs),
getCellName("J", row): rs.Name, getCellName("K", row): getRepoPathName(rs), getCellName("L", row): rs.JobName,
}
}


+ 22  - 1  routers/admin/resources.go

@@ -127,6 +127,7 @@ func GetResourceSpecificationList(ctx *context.Context) {
Status: status,
Cluster: cluster,
AvailableCode: available,
OrderBy: models.SearchSpecOrderById,
})
if err != nil {
log.Error("GetResourceSpecificationList error.%v", err)
@@ -136,6 +137,26 @@ func GetResourceSpecificationList(ctx *context.Context) {
ctx.JSON(http.StatusOK, response.SuccessWithData(list))
}

func GetAllResourceSpecificationList(ctx *context.Context) {
queue := ctx.QueryInt64("queue")
status := ctx.QueryInt("status")
cluster := ctx.Query("cluster")
available := ctx.QueryInt("available")
list, err := resource.GetAllDistinctResourceSpecification(models.SearchResourceSpecificationOptions{
QueueId: queue,
Status: status,
Cluster: cluster,
AvailableCode: available,
})
if err != nil {
log.Error("GetAllResourceSpecificationList error.%v", err)
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}

ctx.JSON(http.StatusOK, response.SuccessWithData(list))
}

func GetResourceSpecificationScenes(ctx *context.Context) {
specId := ctx.ParamsInt64(":id")
list, err := resource.GetResourceSpecificationScenes(specId)
@@ -182,7 +203,7 @@ func UpdateResourceSpecification(ctx *context.Context, req models.ResourceSpecif

if err != nil {
log.Error("UpdateResourceSpecification error. %v", err)
ctx.JSON(http.StatusOK, response.ResponseError(err))
ctx.JSON(http.StatusOK, response.ResponseBizError(err))
return
}
ctx.JSON(http.StatusOK, response.Success())
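
Both specification list handlers read the same four query parameters (queue, status, cluster, available); the new GetAllResourceSpecificationList only differs in returning distinct specifications. A minimal sketch of how an admin client might build such a request is shown below; the host and the admin route path are placeholders, since the route registration is not part of this hunk.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Query parameters read by GetResourceSpecificationList and
	// GetAllResourceSpecificationList: queue, status, cluster, available.
	q := url.Values{}
	q.Set("queue", "1")       // resource queue id
	q.Set("status", "2")      // specification status code
	q.Set("cluster", "OpenI") // cluster name (illustrative value)
	q.Set("available", "1")   // availability filter

	// The admin route prefix below is a placeholder, not taken from this diff.
	fmt.Println("https://example.com/admin/resources/specification/list?" + q.Encode())
}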


+ 63  - 0  routers/api/v1/api.go

@@ -242,6 +242,15 @@ func reqRepoWriter(unitTypes ...models.UnitType) macaron.Handler {
}
}

func reqWeChat() macaron.Handler {
return func(ctx *context.Context) {
if setting.WechatAuthSwitch && ctx.User.WechatOpenId == "" {
ctx.JSON(http.StatusForbidden, models.BaseErrorMessageApi("settings.no_wechat_bind"))
return
}
}
}

// reqRepoReader user should have specific read permission or be a repo admin or a site admin
func reqRepoReader(unitType models.UnitType) macaron.Handler {
return func(ctx *context.Context) {
@@ -517,6 +526,25 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/markdown", bind(api.MarkdownOption{}), misc.Markdown)
m.Post("/markdown/raw", misc.MarkdownRaw)

m.Group("/images", func() {

m.Get("/public", repo.GetPublicImages)
m.Get("/custom", repo.GetCustomImages)
m.Get("/star", repo.GetStarImages)
m.Get("/npu", repo.GetNpuImages)

}, reqToken())

m.Group("/attachments", func() {

m.Get("/:uuid", repo.GetAttachment)
m.Get("/get_chunks", repo.GetSuccessChunks)
m.Get("/new_multipart", repo.NewMultipart)
m.Get("/get_multipart_url", repo.GetMultipartUploadUrl)
m.Post("/complete_multipart", repo.CompleteMultipart)

}, reqToken())

// Notifications
m.Group("/notifications", func() {
m.Combo("").
@@ -603,6 +631,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/overview_resource", repo.GetCloudbrainResourceOverview)
m.Get("/resource_usage_statistic", repo.GetDurationRateStatistic)
m.Get("/resource_usage_rate_detail", repo.GetCloudbrainResourceUsageDetail)
m.Get("/resource_queues", repo.GetResourceQueues)
m.Get("/apitest_for_statistic", repo.CloudbrainDurationStatisticForTest)
})
}, operationReq)
@@ -701,6 +730,13 @@ func RegisterRoutes(m *macaron.Macaron) {

m.Combo("/repositories/:id", reqToken()).Get(repo.GetByID)

m.Group("/datasets/:username/:reponame", func() {
m.Get("/current_repo", repo.CurrentRepoDatasetMultiple)
m.Get("/my_datasets", repo.MyDatasetsMultiple)
m.Get("/public_datasets", repo.PublicDatasetMultiple)
m.Get("/my_favorite", repo.MyFavoriteDatasetMultiple)
}, reqToken(), repoAssignment())

m.Group("/repos", func() {
m.Get("/search", repo.Search)

@@ -709,7 +745,13 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/migrate", reqToken(), bind(auth.MigrateRepoForm{}), repo.Migrate)
m.Post("/migrate/submit", reqToken(), bind(auth.MigrateRepoForm{}), repo.MigrateSubmit)

m.Group("/specification", func() {
m.Get("", repo.GetResourceSpec)
}, reqToken())

m.Group("/:username/:reponame", func() {
m.Get("/right", reqToken(), repo.GetRight)
m.Get("/tagger", reqToken(), repo.ListTagger)
m.Combo("").Get(reqAnyRepoReader(), repo.Get).
Delete(reqToken(), reqOwner(), repo.Delete).
Patch(reqToken(), reqAdmin(), bind(api.EditRepoOption{}), context.RepoRef(), repo.Edit)
@@ -938,21 +980,41 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/:id/log", repo.CloudbrainGetLog)
m.Get("/:id/download_log_file", repo.CloudbrainDownloadLogFile)
m.Group("/train-job", func() {

m.Post("/create", reqToken(), reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat(), context.ReferencesGitRepo(false), bind(api.CreateTrainJobOption{}), repo.CreateCloudBrain)

m.Group("/:jobid", func() {
m.Get("", repo.GetModelArtsTrainJobVersion)
m.Get("/detail", reqToken(), reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow)
m.Get("/model_list", repo.CloudBrainModelList)
m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.CloudBrainStop)
})
})
m.Group("/inference-job", func() {
m.Post("/create", reqToken(), reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat(), bind(api.CreateTrainJobOption{}), context.ReferencesGitRepo(false), repo.CreateCloudBrainInferenceTask)

m.Group("/:jobid", func() {
m.Get("", repo.GetCloudBrainInferenceJob)
m.Get("/detail", reqToken(), reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow)

m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.DelCloudBrainJob)
m.Get("/result_list", repo.InferencJobResultList)
})
})
}, reqRepoReader(models.UnitTypeCloudBrain))
m.Group("/modelmanage", func() {
m.Post("/create_new_model", repo.CreateNewModel)
m.Get("/show_model_api", repo.ShowModelManageApi)
m.Delete("/delete_model", repo.DeleteModel)
m.Get("/downloadall", repo.DownloadModel)
m.Get("/query_model_byId", repo.QueryModelById)
m.Get("/query_model_for_predict", repo.QueryModelListForPredict)
m.Get("/query_modelfile_for_predict", repo.QueryModelFileForPredict)
m.Get("/query_train_model", repo.QueryTrainModelList)
m.Post("/create_model_convert", repo.CreateModelConvert)
m.Get("/show_model_convert_page", repo.ShowModelConvertPage)
m.Get("/query_model_convert_byId", repo.QueryModelConvertById)

m.Get("/:id", repo.GetCloudbrainModelConvertTask)
m.Get("/:id/log", repo.CloudbrainForModelConvertGetLog)
m.Get("/:id/modelartlog", repo.TrainJobForModelConvertGetLog)
@@ -989,6 +1051,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("", repo.GetModelArtsTrainJobVersion)
m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.GrampusStopJob)
m.Get("/log", repo_ext.GrampusGetLog)
m.Get("/metrics", repo_ext.GrampusMetrics)
m.Get("/download_log", cloudbrain.AdminOrJobCreaterRightForTrain, repo_ext.GrampusDownloadLog)
})
})
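
A minimal sketch of calling one of the newly registered, token-protected API groups; the base URL and token are placeholders, and the assumption that these groups are mounted under /api/v1 follows the usual layout of this router file.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// GET the NPU image list added above; "type" selects ModelArts (0) or C2Net (non-zero).
	req, err := http.NewRequest("GET", "https://example.com/api/v1/images/npu?type=1", nil)
	if err != nil {
		panic(err)
	}
	// reqToken() guards the group, so an access token must accompany the call.
	req.Header.Set("Authorization", "token <personal-access-token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}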


+ 25  - 0  routers/api/v1/repo/attachments.go

@@ -0,0 +1,25 @@
package repo

import (
"code.gitea.io/gitea/modules/context"
routeRepo "code.gitea.io/gitea/routers/repo"
)

func GetSuccessChunks(ctx *context.APIContext) {
routeRepo.GetSuccessChunks(ctx.Context)
}

func NewMultipart(ctx *context.APIContext) {
routeRepo.NewMultipart(ctx.Context)
}
func GetMultipartUploadUrl(ctx *context.APIContext) {
routeRepo.GetMultipartUploadUrl(ctx.Context)
}

func CompleteMultipart(ctx *context.APIContext) {
routeRepo.CompleteMultipart(ctx.Context)

}
func GetAttachment(ctx *context.APIContext) {
routeRepo.GetAttachment(ctx.Context)
}

+ 112  - 38  routers/api/v1/repo/cloudbrain.go

@@ -16,6 +16,14 @@ import (
"strings"
"time"

cloudbrainService "code.gitea.io/gitea/services/cloudbrain"

"code.gitea.io/gitea/modules/convert"

"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"

api "code.gitea.io/gitea/modules/structs"

"code.gitea.io/gitea/modules/notification"

"code.gitea.io/gitea/modules/setting"
@@ -29,6 +37,77 @@ import (
routerRepo "code.gitea.io/gitea/routers/repo"
)

func CloudBrainShow(ctx *context.APIContext) {

task, err := models.GetCloudbrainByJobID(ctx.Params(":jobid"))

if err != nil {
log.Info("error:" + err.Error())
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("repo.cloudbrain_query_fail"))
return
}
cloudbrainTask.PrepareSpec4Show(task)
task.ContainerIp = ""
if cloudbrainTask.IsTaskNotStop(task) {
cloudbrainTask.SyncTaskStatus(task)
}

if task.TrainJobDuration == "" {
if task.Duration == 0 {
var duration int64
if task.Status == string(models.JobWaiting) {
duration = 0
} else if task.Status == string(models.JobRunning) {
duration = time.Now().Unix() - int64(task.CreatedUnix)
} else {
duration = int64(task.UpdatedUnix) - int64(task.CreatedUnix)
}
task.Duration = duration
}
task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
}
//to unify image output
if task.Type == models.TypeCloudBrainTwo || task.Type == models.TypeCDCenter {
task.ImageID = strconv.FormatInt(task.EngineID, 10)
task.Image = task.EngineName

} else if task.Type == models.TypeC2Net {
task.Image = task.EngineName
}
task.AiCenter = cloudbrainService.GetAiCenterShow(task.AiCenter, ctx.Context)

ctx.JSON(http.StatusOK, models.BaseMessageWithDataApi{Code: 0, Message: "", Data: convert.ToCloudBrain(task)})

}

func CreateCloudBrain(ctx *context.APIContext, option api.CreateTrainJobOption) {
if option.Type == cloudbrainTask.TaskTypeCloudbrainOne {
cloudbrainTask.CloudbrainOneTrainJobCreate(ctx.Context, option)
}
if option.Type == cloudbrainTask.TaskTypeModelArts {
cloudbrainTask.ModelArtsTrainJobNpuCreate(ctx.Context, option)
}

if option.Type == cloudbrainTask.TaskTypeGrampusGPU {
cloudbrainTask.GrampusTrainJobGpuCreate(ctx.Context, option)
}
if option.Type == cloudbrainTask.TaskTypeGrampusNPU {
cloudbrainTask.GrampusTrainJobNpuCreate(ctx.Context, option)
}

}

func CreateCloudBrainInferenceTask(ctx *context.APIContext, option api.CreateTrainJobOption) {

if option.Type == 0 {
cloudbrainTask.CloudBrainInferenceJobCreate(ctx.Context, option)
}
if option.Type == 1 {
cloudbrainTask.ModelArtsInferenceJobCreate(ctx.Context, option)
}

}

// cloudbrain get job task by jobid
func GetCloudbrainTask(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/cloudbrain/{jobid} cloudbrain jobTask
@@ -81,47 +160,22 @@ func GetCloudbrainTask(ctx *context.APIContext) {
"JobDuration": job.TrainJobDuration,
})
} else {
jobResult, err := cloudbrain.GetJob(job.JobID)
if err != nil {
ctx.NotFound(err)
log.Error("GetJob failed:", err)
return
}
result, _ := models.ConvertToJobResultPayload(jobResult.Payload)
jobAfter, err := cloudbrainTask.SyncCloudBrainOneStatus(job)

if err != nil {
ctx.NotFound(err)
log.Error("ConvertToJobResultPayload failed:", err)
log.Error("Sync cloud brain one status failed:", err)
return
}
oldStatus := job.Status
job.Status = result.JobStatus.State
taskRoles := result.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
job.ContainerID = taskRes.TaskStatuses[0].ContainerID
job.Status = taskRes.TaskStatuses[0].State
}

if result.JobStatus.State != string(models.JobWaiting) {
models.ParseAndSetDurationFromCloudBrainOne(result, job)
if oldStatus != job.Status {
notification.NotifyChangeCloudbrainStatus(job, oldStatus)
}
err = models.UpdateJob(job)
if err != nil {
log.Error("UpdateJob failed:", err)
}
}

ctx.JSON(http.StatusOK, map[string]interface{}{
"ID": ID,
"JobName": result.Config.JobName,
"JobStatus": result.JobStatus.State,
"SubState": result.JobStatus.SubState,
"CreatedTime": time.Unix(result.JobStatus.CreatedTime/1000, 0).Format("2006-01-02 15:04:05"),
"CompletedTime": time.Unix(result.JobStatus.CompletedTime/1000, 0).Format("2006-01-02 15:04:05"),
"JobDuration": job.TrainJobDuration,
"JobName": jobAfter.JobName,
"JobStatus": jobAfter.Status,
"SubState": "",
"CreatedTime": jobAfter.CreatedUnix.Format("2006-01-02 15:04:05"),
"CompletedTime": jobAfter.UpdatedUnix.Format("2006-01-02 15:04:05"),
"JobDuration": jobAfter.TrainJobDuration,
})
}
}
@@ -542,12 +596,24 @@ func CloudbrainGetLog(ctx *context.APIContext) {
existStr = taskRes.TaskStatuses[0].ExitDiagnostics
}
ctx.Data["existStr"] = existStr
log.Info("existStr=" + existStr)
} else {
ModelSafetyGetLog(ctx)
return
}
}

if job.JobType == string(models.JobTypeTrain) || job.JobType == string(models.JobTypeInference) {
if job.Type == models.TypeCloudBrainOne {
result, err := cloudbrain.GetJob(job.JobID)
existStr := ""
if err == nil && result != nil {
jobRes, _ := models.ConvertToJobResultPayload(result.Payload)
taskRoles := jobRes.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
existStr = taskRes.TaskStatuses[0].ExitDiagnostics
}
ctx.Data["existStr"] = existStr
}
}

lines := ctx.QueryInt("lines")
@@ -580,7 +646,7 @@ func CloudbrainGetLog(ctx *context.APIContext) {
endLine += 1
}
}
result = getLogFromModelDir(job.JobName, startLine, endLine, resultPath)
if result == nil {
log.Error("GetJobLog failed: %v", err, ctx.Data["MsgID"])
@@ -592,17 +658,25 @@ func CloudbrainGetLog(ctx *context.APIContext) {
if result["Content"] != nil {
content = result["Content"].(string)
}

if ctx.Data["existStr"] != nil && result["Lines"].(int) < 50 {
content = content + ctx.Data["existStr"].(string)
}

logFileName := result["FileName"]

//Logs can only be downloaded if the file exists
//and the current user is an administrator or the creator of the task
canLogDownload := logFileName != nil && logFileName != "" && job.IsUserHasRight(ctx.User)

re := map[string]interface{}{
"JobID": ID,
"LogFileName": result["FileName"],
"LogFileName": logFileName,
"StartLine": result["StartLine"],
"EndLine": result["EndLine"],
"Content": content,
"Lines": result["Lines"],
"CanLogDownload": result["FileName"] != "",
"CanLogDownload": canLogDownload,
"StartTime": job.StartTime,
}
//result := CloudbrainGetLogByJobId(job.JobID, job.JobName)
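
The duration fallback added in CloudBrainShow can be read in isolation as below. This is a paraphrase of the branch above, not new behaviour; the literal status strings stand in for models.JobWaiting / models.JobRunning, and the timestamps are plain unix seconds for simplicity.

package main

import (
	"fmt"
	"time"
)

// fallbackDuration mirrors the branch in CloudBrainShow: waiting tasks report 0,
// running tasks report "now - created", finished tasks report "updated - created".
func fallbackDuration(status string, createdUnix, updatedUnix int64) int64 {
	switch status {
	case "WAITING": // placeholder for string(models.JobWaiting)
		return 0
	case "RUNNING": // placeholder for string(models.JobRunning)
		return time.Now().Unix() - createdUnix
	default:
		return updatedUnix - createdUnix
	}
}

func main() {
	created := time.Now().Add(-90 * time.Minute).Unix()
	updated := time.Now().Add(-10 * time.Minute).Unix()
	// Prints 4800: an 80-minute gap between creation and the final update.
	fmt.Println(fallbackDuration("STOPPED", created, updated))
}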


+ 188  - 89  routers/api/v1/repo/cloudbrain_dashboard.go

@@ -12,6 +12,8 @@ import (
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/routers/repo"
cloudbrainService "code.gitea.io/gitea/services/cloudbrain"
"code.gitea.io/gitea/services/cloudbrain/resource"
"github.com/360EntSecGroup-Skylar/excelize/v2"
)

@@ -121,8 +123,8 @@ func GetOverviewDuration(ctx *context.Context) {
recordBeginTime := recordCloudbrain[0].Cloudbrain.CreatedUnix
now := time.Now()
endTime := now
worker_server_num := 1
cardNum := 1
// worker_server_num := 1
// cardNum := 1
durationAllSum := int64(0)
cardDuSum := int64(0)

@@ -148,34 +150,40 @@ func GetOverviewDuration(ctx *context.Context) {
models.LoadSpecs4CloudbrainInfo(cloudbrains)

for _, cloudbrain := range cloudbrains {
if cloudbrain.Cloudbrain.WorkServerNumber >= 1 {
worker_server_num = cloudbrain.Cloudbrain.WorkServerNumber
} else {
worker_server_num = 1
}
if cloudbrain.Cloudbrain.Spec == nil {
cardNum = 1
} else {
cardNum = cloudbrain.Cloudbrain.Spec.AccCardsNum
}
duration := cloudbrain.Duration
durationSum := cloudbrain.Duration * int64(worker_server_num) * int64(cardNum)
cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain)
CardDurationString := repo.GetCloudbrainCardDuration(cloudbrain.Cloudbrain)
CardDuration := models.ConvertStrToDuration(CardDurationString)
// if cloudbrain.Cloudbrain.WorkServerNumber >= 1 {
// worker_server_num = cloudbrain.Cloudbrain.WorkServerNumber
// } else {
// worker_server_num = 1
// }
// if cloudbrain.Cloudbrain.Spec == nil {
// cardNum = 1
// } else {
// cardNum = cloudbrain.Cloudbrain.Spec.AccCardsNum
// }
// duration := cloudbrain.Duration
// duration := cloudbrain.Duration
duration := models.ConvertStrToDuration(cloudbrain.TrainJobDuration)
// CardDuration := cloudbrain.Duration * int64(worker_server_num) * int64(cardNum)

if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainOne {
cloudBrainOneDuration += duration
cloudBrainOneCardDuSum += durationSum
cloudBrainOneCardDuSum += CardDuration
} else if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainTwo {
cloudBrainTwoDuration += duration
cloudBrainTwoCardDuSum += durationSum
cloudBrainTwoCardDuSum += CardDuration
} else if cloudbrain.Cloudbrain.Type == models.TypeC2Net {
c2NetDuration += duration
c2NetCardDuSum += durationSum
c2NetCardDuSum += CardDuration
} else if cloudbrain.Cloudbrain.Type == models.TypeCDCenter {
cDCenterDuration += duration
cDNetCardDuSum += durationSum
cDNetCardDuSum += CardDuration
}

durationAllSum += duration
cardDuSum += durationSum
cardDuSum += CardDuration
}
ctx.JSON(http.StatusOK, map[string]interface{}{
"cloudBrainOneCardDuSum": cloudBrainOneCardDuSum,
@@ -192,6 +200,28 @@ func GetOverviewDuration(ctx *context.Context) {
})
}

func GetCloudbrainCardDuration(task models.Cloudbrain) string {
cardNum := int(0)
spec, err := resource.GetCloudbrainSpec(task.ID)
if err != nil {
log.Info("error:" + err.Error())
return ""
}
if spec != nil {
cardNum = spec.AccCardsNum
} else {
cardNum = 1
}
var workServerNumber int64
if task.WorkServerNumber >= 1 {
workServerNumber = int64(task.WorkServerNumber)
} else {
workServerNumber = 1
}
cardDuration := models.ConvertDurationToStr(workServerNumber * int64(cardNum) * task.Duration)
return cardDuration
}

func GetAllCloudbrainsTrend(ctx *context.Context) {

queryType := ctx.QueryTrim("type")
@@ -703,6 +733,30 @@ func GetCloudbrainsDetailData(ctx *context.Context) {
aiCenter := ctx.Query("aiCenter")
needDeleteInfo := ctx.Query("needDeleteInfo")

if cloudBrainType == models.TypeCloudBrainOne && aiCenter == models.AICenterOfCloudBrainOne {
aiCenter = ""
}
if cloudBrainType == models.TypeCloudBrainTwo && aiCenter == models.AICenterOfCloudBrainTwo {
aiCenter = ""
}
if cloudBrainType == models.TypeCDCenter && aiCenter == models.AICenterOfChengdu {
aiCenter = ""
}
if cloudBrainType == models.TypeCloudBrainAll {
if aiCenter == models.AICenterOfCloudBrainOne {
cloudBrainType = models.TypeCloudBrainOne
aiCenter = ""
}
if aiCenter == models.AICenterOfCloudBrainTwo {
cloudBrainType = models.TypeCloudBrainTwo
aiCenter = ""
}
if aiCenter == models.AICenterOfChengdu {
cloudBrainType = models.TypeCDCenter
aiCenter = ""
}
}

page := ctx.QueryInt("page")
pageSize := ctx.QueryInt("pagesize")
if page <= 0 {
@@ -732,7 +786,7 @@ func GetCloudbrainsDetailData(ctx *context.Context) {

keyword := strings.Trim(ctx.Query("q"), " ")

ciTasks, _, err := models.CloudbrainAll(&models.CloudbrainsOptions{
ciTasks, count, err := models.CloudbrainAll(&models.CloudbrainsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: pageSize,
@@ -747,8 +801,8 @@ func GetCloudbrainsDetailData(ctx *context.Context) {
NeedRepoInfo: true,
BeginTimeUnix: int64(recordBeginTime),
EndTimeUnix: endTime.Unix(),
// AiCenter: aiCenter,
NeedDeleteInfo: needDeleteInfo,
AiCenter: aiCenter,
NeedDeleteInfo: needDeleteInfo,
})
if err != nil {
ctx.ServerError("Get job failed:", err)
@@ -758,45 +812,43 @@ func GetCloudbrainsDetailData(ctx *context.Context) {
nilTime := time.Time{}
tasks := []models.TaskDetail{}
for i, task := range ciTasks {
if aiCenter == "" || aiCenter == task.Cloudbrain.Spec.AiCenterCode {
ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource
var taskDetail models.TaskDetail
taskDetail.ID = ciTasks[i].Cloudbrain.ID
taskDetail.JobID = ciTasks[i].Cloudbrain.JobID
taskDetail.JobName = ciTasks[i].JobName
taskDetail.DisplayJobName = ciTasks[i].DisplayJobName
taskDetail.Status = ciTasks[i].Status
taskDetail.JobType = ciTasks[i].JobType
taskDetail.CreatedUnix = ciTasks[i].Cloudbrain.CreatedUnix
taskDetail.RunTime = ciTasks[i].Cloudbrain.TrainJobDuration
taskDetail.StartTime = ciTasks[i].StartTime
taskDetail.EndTime = ciTasks[i].EndTime
taskDetail.ComputeResource = ciTasks[i].ComputeResource
taskDetail.Type = ciTasks[i].Cloudbrain.Type
taskDetail.UserName = ciTasks[i].User.Name
taskDetail.RepoID = ciTasks[i].RepoID
if ciTasks[i].Repo != nil {
taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name
taskDetail.RepoAlias = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Alias
}
if ciTasks[i].Cloudbrain.WorkServerNumber >= 1 {
taskDetail.WorkServerNum = int64(ciTasks[i].Cloudbrain.WorkServerNumber)
} else {
taskDetail.WorkServerNum = 1
}
taskDetail.CardDuration = repo.GetCloudbrainCardDuration(ciTasks[i].Cloudbrain)
taskDetail.WaitTime = repo.GetCloudbrainWaitTime(ciTasks[i].Cloudbrain)
task = cloudbrainService.UpdateCloudbrainAiCenter(task)
var taskDetail models.TaskDetail
taskDetail.ID = ciTasks[i].Cloudbrain.ID
taskDetail.JobID = ciTasks[i].Cloudbrain.JobID
taskDetail.JobName = ciTasks[i].JobName
taskDetail.DisplayJobName = ciTasks[i].DisplayJobName
taskDetail.Status = ciTasks[i].Status
taskDetail.JobType = ciTasks[i].JobType
taskDetail.CreatedUnix = ciTasks[i].Cloudbrain.CreatedUnix
taskDetail.RunTime = ciTasks[i].Cloudbrain.TrainJobDuration
taskDetail.StartTime = ciTasks[i].StartTime
taskDetail.EndTime = ciTasks[i].EndTime
taskDetail.ComputeResource = ciTasks[i].ComputeResource
taskDetail.Type = ciTasks[i].Cloudbrain.Type
taskDetail.UserName = ciTasks[i].User.Name
taskDetail.RepoID = ciTasks[i].RepoID
taskDetail.AiCenter = repo.GetAiCenterNameByCode(task.Cloudbrain.AiCenter, ctx.Language())
if ciTasks[i].Repo != nil {
taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name
taskDetail.RepoAlias = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Alias
}
if ciTasks[i].Cloudbrain.WorkServerNumber >= 1 {
taskDetail.WorkServerNum = int64(ciTasks[i].Cloudbrain.WorkServerNumber)
} else {
taskDetail.WorkServerNum = 1
}
taskDetail.CardDuration = repo.GetCloudbrainCardDuration(ciTasks[i].Cloudbrain)
taskDetail.WaitTime = repo.GetCloudbrainWaitTime(ciTasks[i].Cloudbrain)

if ciTasks[i].Cloudbrain.DeletedAt != nilTime || ciTasks[i].Repo == nil {
taskDetail.IsDelete = true
} else {
taskDetail.IsDelete = false
}
taskDetail.Spec = ciTasks[i].Spec
tasks = append(tasks, taskDetail)
if ciTasks[i].Cloudbrain.DeletedAt != nilTime || ciTasks[i].Repo == nil {
taskDetail.IsDelete = true
} else {
taskDetail.IsDelete = false
}
taskDetail.Spec = ciTasks[i].Spec
tasks = append(tasks, taskDetail)
}
count := int64(len(tasks))
pager := context.NewPagination(int(count), pageSize, page, getTotalPage(count, pageSize))
pager.SetDefaultParams(ctx)
pager.AddParam(ctx, "listType", "ListType")
@@ -1176,6 +1228,12 @@ func getMonthCloudbrainInfo(beginTime time.Time, endTime time.Time) ([]DateCloud
}

func DownloadCloudBrainBoard(ctx *context.Context) {
recordCloudbrain, err := models.GetRecordBeginTime()
if err != nil {
log.Error("Can not get recordCloudbrain", err)
ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err"))
return
}

page := 1

@@ -1184,14 +1242,20 @@ func DownloadCloudBrainBoard(ctx *context.Context) {
var cloudBrain = ctx.Tr("repo.cloudbrain")
fileName := getCloudbrainFileName(cloudBrain)

recordBeginTime := recordCloudbrain[0].Cloudbrain.CreatedUnix
now := time.Now()
endTime := now

_, total, err := models.CloudbrainAll(&models.CloudbrainsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: pageSize,
},
Type: models.TypeCloudBrainAll,
NeedRepoInfo: false,
Type: models.TypeCloudBrainAll,
BeginTimeUnix: int64(recordBeginTime),
EndTimeUnix: endTime.Unix(),
})
log.Info("total count is: %v", total)

if err != nil {
log.Warn("Can not get cloud brain info", err)
@@ -1216,8 +1280,10 @@ func DownloadCloudBrainBoard(ctx *context.Context) {
Page: page,
PageSize: pageSize,
},
Type: models.TypeCloudBrainAll,
NeedRepoInfo: true,
Type: models.TypeCloudBrainAll,
BeginTimeUnix: int64(recordBeginTime),
EndTimeUnix: endTime.Unix(),
NeedRepoInfo: true,
})
if err != nil {
log.Warn("Can not get cloud brain info", err)
@@ -1225,7 +1291,8 @@ func DownloadCloudBrainBoard(ctx *context.Context) {
}
models.LoadSpecs4CloudbrainInfo(pageRecords)
for _, record := range pageRecords {

record = cloudbrainService.UpdateCloudbrainAiCenter(record)
record.Cloudbrain.AiCenter = repo.GetAiCenterNameByCode(record.Cloudbrain.AiCenter, ctx.Language())
for k, v := range allCloudbrainValues(row, record, ctx) {
f.SetCellValue(cloudBrain, k, v)
}
@@ -1264,7 +1331,7 @@ func allCloudbrainValues(row int, rs *models.CloudbrainInfo, ctx *context.Contex
getCellName("G", row): rs.TrainJobDuration, getCellName("H", row): repo.GetCloudbrainCardDuration(rs.Cloudbrain),
getCellName("I", row): getBrainStartTime(rs),
getCellName("J", row): getBrainEndTime(rs), getCellName("K", row): rs.ComputeResource, getCellName("L", row): getCloudbrainCardType(rs),
getCellName("M", row): getWorkServerNum(rs), getCellName("N", row): repo.GetCloudbrainAiCenter(rs.Cloudbrain, ctx),
getCellName("M", row): getWorkServerNum(rs), getCellName("N", row): rs.Cloudbrain.AiCenter,
getCellName("O", row): getCloudbrainFlavorName(rs), getCellName("P", row): rs.Name,
getCellName("Q", row): getBrainRepo(rs), getCellName("R", row): rs.JobName, getCellName("S", row): getBrainDeleteTime(rs),
}
@@ -1417,7 +1484,7 @@ func GetCloudbrainResourceOverview(ctx *context.Context) {
log.Error("Can not get GetDurationRecordBeginTime", err)
return
}
recordBeginTime := recordCloudbrainDuration[0].CreatedUnix
recordBeginTime := recordCloudbrainDuration[0].DateTime
recordUpdateTime := time.Now().Unix()
resourceQueues, err := models.GetCanUseCardInfo()
if err != nil {
@@ -1428,11 +1495,12 @@ func GetCloudbrainResourceOverview(ctx *context.Context) {
C2NetResourceDetail := []models.ResourceDetail{}
for _, resourceQueue := range resourceQueues {
if resourceQueue.Cluster == models.OpenICluster {
aiCenterName := repo.GetAiCenterNameByCode(resourceQueue.AiCenterCode, ctx.Language())
var resourceDetail models.ResourceDetail
resourceDetail.QueueCode = resourceQueue.QueueCode
resourceDetail.Cluster = resourceQueue.Cluster
resourceDetail.AiCenterCode = resourceQueue.AiCenterCode
resourceDetail.AiCenterName = resourceQueue.AiCenterName + "/" + resourceQueue.AiCenterCode
resourceDetail.AiCenterName = resourceQueue.AiCenterCode + "/" + aiCenterName
resourceDetail.ComputeResource = resourceQueue.ComputeResource
resourceDetail.AccCardType = resourceQueue.AccCardType + "(" + resourceQueue.ComputeResource + ")"
resourceDetail.CardsTotalNum = resourceQueue.CardsTotalNum
@@ -1440,11 +1508,12 @@ func GetCloudbrainResourceOverview(ctx *context.Context) {
OpenIResourceDetail = append(OpenIResourceDetail, resourceDetail)
}
if resourceQueue.Cluster == models.C2NetCluster {
aiCenterName := repo.GetAiCenterNameByCode(resourceQueue.AiCenterCode, ctx.Language())
var resourceDetail models.ResourceDetail
resourceDetail.QueueCode = resourceQueue.QueueCode
resourceDetail.Cluster = resourceQueue.Cluster
resourceDetail.AiCenterCode = resourceQueue.AiCenterCode
resourceDetail.AiCenterName = resourceQueue.AiCenterName + "/" + resourceQueue.AiCenterCode
resourceDetail.AiCenterName = resourceQueue.AiCenterCode + "/" + aiCenterName
resourceDetail.ComputeResource = resourceQueue.ComputeResource
resourceDetail.AccCardType = resourceQueue.AccCardType + "(" + resourceQueue.ComputeResource + ")"
resourceDetail.CardsTotalNum = resourceQueue.CardsTotalNum
@@ -1554,7 +1623,7 @@ func getBeginAndEndTime(ctx *context.Context) (time.Time, time.Time) {
ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err"))
return beginTime, endTime
}
brainRecordBeginTime := recordCloudbrainDuration[0].CreatedUnix.AsTime()
brainRecordBeginTime := recordCloudbrainDuration[0].DateTime.AsTime()
beginTime = brainRecordBeginTime
endTime = now
} else if queryType == "today" {
@@ -1596,7 +1665,7 @@ func getBeginAndEndTime(ctx *context.Context) (time.Time, time.Time) {
ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err"))
return beginTime, endTime
}
brainRecordBeginTime := recordCloudbrainDuration[0].CreatedUnix.AsTime()
brainRecordBeginTime := recordCloudbrainDuration[0].DateTime.AsTime()
beginTime = brainRecordBeginTime
endTime = now
} else {
@@ -1627,7 +1696,7 @@ func getAiCenterUsageDuration(beginTime time.Time, endTime time.Time, cloudbrain
usageRate := float64(0)

for _, cloudbrainStatistic := range cloudbrainStatistics {
if int64(cloudbrainStatistic.CreatedUnix) >= beginTime.Unix() && int64(cloudbrainStatistic.CreatedUnix) < endTime.Unix() {
if int64(cloudbrainStatistic.DateTime) >= beginTime.Unix() && int64(cloudbrainStatistic.DateTime) < endTime.Unix() {
totalDuration += cloudbrainStatistic.CardsTotalDuration
usageDuration += cloudbrainStatistic.CardsUseDuration
}
@@ -1659,28 +1728,29 @@ func getDurationStatistic(beginTime time.Time, endTime time.Time) (models.Durati
return OpenIDurationRate, C2NetDurationRate, 0
}
for _, cloudbrainStatistic := range cardDurationStatistics {
aiCenterName := cloudbrainStatistic.AiCenterCode + "/" + repo.GetAiCenterNameByCode(cloudbrainStatistic.AiCenterCode, "zh-CN")
if cloudbrainStatistic.Cluster == models.OpenICluster {
if _, ok := OpenITotalDuration[cloudbrainStatistic.AiCenterName]; !ok {
OpenITotalDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsTotalDuration
if _, ok := OpenITotalDuration[aiCenterName]; !ok {
OpenITotalDuration[aiCenterName] = cloudbrainStatistic.CardsTotalDuration
} else {
OpenITotalDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsTotalDuration
OpenITotalDuration[aiCenterName] += cloudbrainStatistic.CardsTotalDuration
}
if _, ok := OpenIUsageDuration[cloudbrainStatistic.AiCenterName]; !ok {
OpenIUsageDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsUseDuration
if _, ok := OpenIUsageDuration[aiCenterName]; !ok {
OpenIUsageDuration[aiCenterName] = cloudbrainStatistic.CardsUseDuration
} else {
OpenIUsageDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsUseDuration
OpenIUsageDuration[aiCenterName] += cloudbrainStatistic.CardsUseDuration
}
}
if cloudbrainStatistic.Cluster == models.C2NetCluster {
if _, ok := C2NetTotalDuration[cloudbrainStatistic.AiCenterName]; !ok {
C2NetTotalDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsTotalDuration
if _, ok := C2NetTotalDuration[aiCenterName]; !ok {
C2NetTotalDuration[aiCenterName] = cloudbrainStatistic.CardsTotalDuration
} else {
C2NetTotalDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsTotalDuration
C2NetTotalDuration[aiCenterName] += cloudbrainStatistic.CardsTotalDuration
}
if _, ok := C2NetUsageDuration[cloudbrainStatistic.AiCenterName]; !ok {
C2NetUsageDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsUseDuration
if _, ok := C2NetUsageDuration[aiCenterName]; !ok {
C2NetUsageDuration[aiCenterName] = cloudbrainStatistic.CardsUseDuration
} else {
C2NetUsageDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsUseDuration
C2NetUsageDuration[aiCenterName] += cloudbrainStatistic.CardsUseDuration
}
}
}
@@ -1690,16 +1760,17 @@ func getDurationStatistic(beginTime time.Time, endTime time.Time) (models.Durati
return OpenIDurationRate, C2NetDurationRate, 0
}
for _, v := range ResourceAiCenterRes {
aiCenterName := v.AiCenterCode + "/" + repo.GetAiCenterNameByCode(v.AiCenterCode, "zh-CN")
if cutString(v.AiCenterCode, 4) == cutString(models.AICenterOfCloudBrainOne, 4) {
if _, ok := OpenIUsageDuration[v.AiCenterName]; !ok {
OpenIUsageDuration[v.AiCenterName] = 0
if _, ok := OpenIUsageDuration[aiCenterName]; !ok {
OpenIUsageDuration[aiCenterName] = 0
}
if _, ok := OpenITotalDuration[v.AiCenterName]; !ok {
OpenITotalDuration[v.AiCenterName] = 0
if _, ok := OpenITotalDuration[aiCenterName]; !ok {
OpenITotalDuration[aiCenterName] = 0
}
} else {
if _, ok := C2NetUsageDuration[v.AiCenterName]; !ok {
C2NetUsageDuration[v.AiCenterName] = 0
if _, ok := C2NetUsageDuration[aiCenterName]; !ok {
C2NetUsageDuration[aiCenterName] = 0
}
}
}
@@ -1716,7 +1787,7 @@ func getDurationStatistic(beginTime time.Time, endTime time.Time) (models.Durati
for _, v := range OpenITotalDuration {
totalCanUse += float64(v)
}
for _, v := range OpenIUsageRate {
for _, v := range OpenIUsageDuration {
totalUse += float64(v)
}
if totalCanUse == 0 || totalUse == 0 {
@@ -1724,6 +1795,7 @@ func getDurationStatistic(beginTime time.Time, endTime time.Time) (models.Durati
} else {
totalUsageRate = totalUse / totalCanUse
}
delete(C2NetUsageDuration, "/")

OpenIDurationRate.AiCenterTotalDurationStat = OpenITotalDuration
OpenIDurationRate.AiCenterUsageDurationStat = OpenIUsageDuration
@@ -1831,3 +1903,30 @@ func getHourCloudbrainDuration(beginTime time.Time, endTime time.Time, aiCenterC
hourTimeStatistic.HourTimeUsageRate = hourTimeUsageRate
return hourTimeStatistic, nil
}

func CloudbrainUpdateAiCenter(ctx *context.Context) {
repo.CloudbrainDurationStatisticHour()
ctx.JSON(http.StatusOK, map[string]interface{}{
"message": 0,
})
}

func GetResourceQueues(ctx *context.Context) {
resourceQueues, err := models.GetCanUseCardInfo()
if err != nil {
log.Info("GetCanUseCardInfo err: %v", err)
return
}
Resource := make([]*models.ResourceQueue, 0)
aiCenterCodeMap := make(map[string]string)
for _, resourceQueue := range resourceQueues {
if _, ok := aiCenterCodeMap[resourceQueue.AiCenterCode]; !ok {
resourceQueue.AiCenterName = repo.GetAiCenterNameByCode(resourceQueue.AiCenterCode, ctx.Language())
aiCenterCodeMap[resourceQueue.AiCenterCode] = resourceQueue.AiCenterCode
Resource = append(Resource, resourceQueue)
}
}
ctx.JSON(http.StatusOK, map[string]interface{}{
"resourceQueues": Resource,
})
}
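
The card duration used throughout this file reduces to a single product; a standalone sketch under the same defaults as GetCloudbrainCardDuration (one card when no specification is attached, one worker when the count is unset):

package main

import "fmt"

// cardDuration follows GetCloudbrainCardDuration: card-seconds are
// worker count x accelerator cards per worker x wall-clock duration.
func cardDuration(workServerNumber, accCardsNum int, durationSec int64) int64 {
	if workServerNumber < 1 {
		workServerNumber = 1 // default when the task has no worker count
	}
	if accCardsNum < 1 {
		accCardsNum = 1 // default when no specification is attached
	}
	return int64(workServerNumber) * int64(accCardsNum) * durationSec
}

func main() {
	// 2 workers x 4 cards x 3600 s = 28800 card-seconds.
	fmt.Println(cardDuration(2, 4, 3600))
}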

+ 123  - 0  routers/api/v1/repo/datasets.go

@@ -0,0 +1,123 @@
package repo

import (
"fmt"
"strings"

"code.gitea.io/gitea/modules/convert"

api "code.gitea.io/gitea/modules/structs"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)

func PublicDatasetMultiple(ctx *context.APIContext) {

opts := &models.SearchDatasetOptions{
PublicOnly: true,
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
}
datasetMultiple(ctx, opts)

}

func MyFavoriteDatasetMultiple(ctx *context.APIContext) {

opts := &models.SearchDatasetOptions{
StarByMe: true,
DatasetIDs: models.GetDatasetIdsStarByUser(ctx.User.ID),
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
}
datasetMultiple(ctx, opts)
}

func CurrentRepoDatasetMultiple(ctx *context.APIContext) {
datasetIds := models.GetDatasetIdsByRepoID(ctx.Repo.Repository.ID)
searchOrderBy := getSearchOrderByInValues(datasetIds)
opts := &models.SearchDatasetOptions{
RepoID: ctx.Repo.Repository.ID,
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
DatasetIDs: datasetIds,
SearchOrderBy: searchOrderBy,
}

datasetMultiple(ctx, opts)

}

func MyDatasetsMultiple(ctx *context.APIContext) {

opts := &models.SearchDatasetOptions{
UploadAttachmentByMe: true,
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
}
datasetMultiple(ctx, opts)

}
func datasetMultiple(ctx *context.APIContext, opts *models.SearchDatasetOptions) {
page := ctx.QueryInt("page")
if page < 1 {
page = 1
}
pageSize := ctx.QueryInt("pageSize")
if pageSize < 1 {
pageSize = setting.UI.DatasetPagingNum
}

keyword := strings.Trim(ctx.Query("q"), " ")
opts.Keyword = keyword
if opts.SearchOrderBy.String() == "" {
opts.SearchOrderBy = models.SearchOrderByRecentUpdated
}

opts.RecommendOnly = ctx.QueryBool("recommend")
opts.ListOptions = models.ListOptions{
Page: page,
PageSize: pageSize,
}
opts.JustNeedZipFile = true
opts.User = ctx.User

datasets, count, err := models.SearchDataset(opts)

if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]interface{}{
"code": 1,
"message": err.Error(),
"data": []*api.Dataset{},
"count": 0,
})
return
}
var convertDatasets []*api.Dataset
for _, dataset := range datasets {
convertDatasets = append(convertDatasets, convert.ToDataset(dataset))
}

ctx.JSON(200, map[string]interface{}{
"code": 0,
"message": "",
"data": convertDatasets,
"count": count,
})
}

func getSearchOrderByInValues(datasetIds []int64) models.SearchOrderBy {
if len(datasetIds) == 0 {
return ""
}
searchOrderBy := "CASE id "
for i, id := range datasetIds {
searchOrderBy += fmt.Sprintf(" WHEN %d THEN %d", id, i+1)
}
searchOrderBy += " ELSE 0 END"
return models.SearchOrderBy(searchOrderBy)
}
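
getSearchOrderByInValues produces a CASE expression that keeps datasets in the order of the supplied id slice; for example, ids 12, 7 and 30 yield the string shown in the comment below. The helper here reproduces the logic for illustration only.

package main

import "fmt"

// orderByInValues reproduces getSearchOrderByInValues above.
func orderByInValues(ids []int64) string {
	if len(ids) == 0 {
		return ""
	}
	s := "CASE id "
	for i, id := range ids {
		s += fmt.Sprintf(" WHEN %d THEN %d", id, i+1)
	}
	return s + " ELSE 0 END"
}

func main() {
	// Prints: CASE id  WHEN 12 THEN 1 WHEN 7 THEN 2 WHEN 30 THEN 3 ELSE 0 END
	fmt.Println(orderByInValues([]int64{12, 7, 30}))
}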

+ 141  - 0  routers/api/v1/repo/images.go

@@ -0,0 +1,141 @@
package repo

import (
"encoding/json"
"net/http"
"strconv"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/setting"
)

type NPUImageINFO struct {
ID string `json:"id"`
Value string `json:"value"`
}

func GetPublicImages(ctx *context.APIContext) {
uid := getUID(ctx)
opts := models.SearchImageOptions{
IncludePublicOnly: true,
UID: uid,
Keyword: ctx.Query("q"),
Topics: ctx.Query("topic"),
IncludeOfficialOnly: ctx.QueryBool("recommend"),
SearchOrderBy: "type desc, num_stars desc,id desc",
Status: models.IMAGE_STATUS_SUCCESS,
CloudbrainType: ctx.QueryInt("cloudbrainType"),
}

getImages(ctx, &opts)

}

func GetCustomImages(ctx *context.APIContext) {
uid := getUID(ctx)
opts := models.SearchImageOptions{
UID: uid,
IncludeOwnerOnly: true,
Keyword: ctx.Query("q"),
Topics: ctx.Query("topic"),
Status: -1,
SearchOrderBy: "id desc",
}
getImages(ctx, &opts)

}
func GetStarImages(ctx *context.APIContext) {

uid := getUID(ctx)
opts := models.SearchImageOptions{
UID: uid,
IncludeStarByMe: true,
Keyword: ctx.Query("q"),
Topics: ctx.Query("topic"),
Status: models.IMAGE_STATUS_SUCCESS,
SearchOrderBy: "id desc",
}
getImages(ctx, &opts)

}

func GetNpuImages(ctx *context.APIContext) {
cloudbrainType := ctx.QueryInt("type")
if cloudbrainType == 0 { //modelarts
getModelArtsImages(ctx)
} else { //c2net
getC2netNpuImages(ctx)
}
}

func getModelArtsImages(ctx *context.APIContext) {

var versionInfos modelarts.VersionInfo
_ = json.Unmarshal([]byte(setting.EngineVersions), &versionInfos)
var npuImageInfos []NPUImageINFO
for _, info := range versionInfos.Version {
npuImageInfos = append(npuImageInfos, NPUImageINFO{
ID: strconv.Itoa(info.ID),
Value: info.Value,
})
}
ctx.JSON(http.StatusOK, npuImageInfos)

}

func getC2netNpuImages(ctx *context.APIContext) {
images, err := grampus.GetImages(grampus.ProcessorTypeNPU)
var npuImageInfos []NPUImageINFO
if err != nil {
log.Error("GetImages failed:", err.Error())
ctx.JSON(http.StatusOK, []NPUImageINFO{})
} else {
for _, info := range images.Infos {
npuImageInfos = append(npuImageInfos, NPUImageINFO{
ID: info.ID,
Value: info.Name,
})
}
ctx.JSON(http.StatusOK, npuImageInfos)
}
}
func getImages(ctx *context.APIContext, opts *models.SearchImageOptions) {
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}

pageSize := ctx.QueryInt("pageSize")
if pageSize <= 0 {
pageSize = 15
}
opts.ListOptions = models.ListOptions{
Page: page,
PageSize: pageSize,
}
imageList, total, err := models.SearchImage(opts)
if err != nil {
log.Error("Can not get images:%v", err)
ctx.JSON(http.StatusOK, models.ImagesPageResult{
Count: 0,
Images: []*models.Image{},
})
} else {
ctx.JSON(http.StatusOK, models.ImagesPageResult{
Count: total,
Images: imageList,
})
}
}

func getUID(ctx *context.APIContext) int64 {
var uid int64 = -1
if ctx.IsSigned {
uid = ctx.User.ID
}
return uid
}
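
The image list handlers answer with models.ImagesPageResult (Count plus Images). A client-side sketch that decodes only the fields it needs could look like the following; the struct and its JSON keys are a hypothetical client mirror, not the server type, since the field tags of ImagesPageResult are not shown in this diff.

package main

import (
	"encoding/json"
	"fmt"
)

// imagesPage is an assumed client-side mirror of models.ImagesPageResult;
// adjust the JSON keys to match the actual tags on the server type.
type imagesPage struct {
	Count  int64             `json:"count"`
	Images []json.RawMessage `json:"images"`
}

func main() {
	payload := []byte(`{"count": 1, "images": [{"id": 7, "tag": "pytorch1.8"}]}`)
	var page imagesPage
	if err := json.Unmarshal(payload, &page); err != nil {
		panic(err)
	}
	fmt.Println("total images:", page.Count, "returned:", len(page.Images))
}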

+ 71  - 0  routers/api/v1/repo/mlops.go

@@ -0,0 +1,71 @@
package repo

import (
"net/http"

"code.gitea.io/gitea/models"

"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/convert"
"code.gitea.io/gitea/modules/log"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/routers/api/v1/utils"
)

//ListTagger lists the users an annotation task can be assigned to
func ListTagger(ctx *context.APIContext) {

taggers := make([]*api.Tagger, 0)
userRemember := make(map[string]string)
collaborators, err := ctx.Repo.Repository.GetCollaborators(utils.GetListOptions(ctx))
if err != nil {
log.Warn("ListCollaborators", err)
ctx.JSON(http.StatusOK, taggers)
return
}
for _, collaborator := range collaborators {
taggers = append(taggers, convert.ToTagger(collaborator.User))
userRemember[collaborator.User.Name] = ""
}

teams, err := ctx.Repo.Repository.GetRepoTeams()
if err != nil {
log.Warn("ListTeams", err)
ctx.JSON(http.StatusOK, taggers)
return
}

for _, team := range teams {
team.GetMembers(&models.SearchMembersOptions{})
for _, user := range team.Members {
if _, ok := userRemember[user.Name]; !ok {
taggers = append(taggers, convert.ToTagger(user))
userRemember[user.Name] = ""
}
}
}
if !ctx.Repo.Owner.IsOrganization() {
if _, ok := userRemember[ctx.Repo.Owner.Name]; !ok {
taggers = append(taggers, convert.ToTagger(ctx.Repo.Owner))

}
}
ctx.JSON(http.StatusOK, taggers)

}
func GetRight(ctx *context.APIContext) {
right := "none"

if ctx.IsUserRepoReaderSpecific(models.UnitTypeCode) {
right = "read"
}

if ctx.IsUserRepoWriter([]models.UnitType{models.UnitTypeCode}) || ctx.IsUserRepoAdmin() {
right = "write"
}

ctx.JSON(http.StatusOK, map[string]string{
"right": right,
})

}
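
GetRight reduces the caller's code-unit permission to one of three strings; a hypothetical caller only needs to branch on that value, as in the sketch below (the payload is illustrative).

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// GetRight responds with {"right": "none" | "read" | "write"}.
	payload := []byte(`{"right": "write"}`)

	var resp struct {
		Right string `json:"right"`
	}
	if err := json.Unmarshal(payload, &resp); err != nil {
		panic(err)
	}

	switch resp.Right {
	case "write":
		fmt.Println("caller may create and edit annotation tasks")
	case "read":
		fmt.Println("caller may view annotation tasks")
	default:
		fmt.Println("no access")
	}
}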

+ 16  - 41  routers/api/v1/repo/modelarts.go

@@ -12,6 +12,8 @@ import (
"strconv"
"strings"

"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"

"code.gitea.io/gitea/modules/urfs_client/urchin"

"code.gitea.io/gitea/modules/notification"
@@ -20,7 +22,6 @@ import (
"code.gitea.io/gitea/modules/setting"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
@@ -109,39 +110,11 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) {
}

if job.Type == models.TypeCloudBrainOne {
jobResult, err := cloudbrain.GetJob(job.JobID)
if err != nil {
ctx.NotFound(err)
log.Error("GetJob failed:", err)
return
}
result, err := models.ConvertToJobResultPayload(jobResult.Payload)
job, err = cloudbrainTask.SyncCloudBrainOneStatus(job)
if err != nil {
ctx.NotFound(err)
log.Error("ConvertToJobResultPayload failed:", err)
return
}
oldStatus := job.Status
job.Status = result.JobStatus.State
if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
taskRoles := result.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))

job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
job.ContainerID = taskRes.TaskStatuses[0].ContainerID
job.Status = taskRes.TaskStatuses[0].State
}

if result.JobStatus.State != string(models.JobWaiting) {
models.ParseAndSetDurationFromCloudBrainOne(result, job)
if oldStatus != job.Status {
notification.NotifyChangeCloudbrainStatus(job, oldStatus)
}
err = models.UpdateJob(job)
if err != nil {
log.Error("UpdateJob failed:", err)
}
}
} else if job.Type == models.TypeCloudBrainTwo {
err := modelarts.HandleTrainJobInfo(job)
if err != nil {
@@ -173,7 +146,6 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) {
if len(result.JobInfo.Tasks) > 0 {
if len(result.JobInfo.Tasks[0].CenterID) > 0 && len(result.JobInfo.Tasks[0].CenterName) > 0 {
job.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
// aiCenterName = result.JobInfo.Tasks[0].CenterName[0]
aiCenterName = cloudbrainService.GetAiCenterShow(job.AiCenter, ctx.Context)
}
}
@@ -308,15 +280,6 @@ func TrainJobGetLog(ctx *context.APIContext) {
return
}

prefix := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, task.JobName, modelarts.LogPath, versionName), "/") + "/job"
_, err = storage.GetObsLogFileName(prefix)
var canLogDownload bool
if err != nil {
canLogDownload = false
} else {
canLogDownload = true
}

ctx.Data["log_file_name"] = resultLogFile.LogFileList[0]

ctx.JSON(http.StatusOK, map[string]interface{}{
@@ -326,11 +289,23 @@ func TrainJobGetLog(ctx *context.APIContext) {
"EndLine": result.EndLine,
"Content": result.Content,
"Lines": result.Lines,
"CanLogDownload": canLogDownload,
"CanLogDownload": canLogDownload(ctx.User, task),
"StartTime": task.StartTime,
})
}

func canLogDownload(user *models.User, task *models.Cloudbrain) bool {
if task == nil || !task.IsUserHasRight(user) {
return false
}
prefix := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, task.JobName, modelarts.LogPath, task.VersionName), "/") + "/job"
_, err := storage.GetObsLogFileName(prefix)
if err != nil {
return false
}
return true
}

func trainJobGetLogContent(jobID string, versionID int64, baseLine string, order string, lines int) (*models.GetTrainJobLogFileNamesResult, *models.GetTrainJobLogResult, error) {

resultLogFile, err := modelarts.GetTrainJobLogFileNames(jobID, strconv.FormatInt(versionID, 10))


+ 115  - 0  routers/api/v1/repo/modelmanage.go

@@ -0,0 +1,115 @@
package repo

import (
"net/http"

"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/storage"
routerRepo "code.gitea.io/gitea/routers/repo"
)

type FileInfo struct {
FileName string `json:"fileName"`
ModTime string `json:"modTime"`
IsDir bool `json:"isDir"`
Size int64 `json:"size"`
ParenDir string `json:"parenDir"`
UUID string `json:"uuid"`
}

func CreateNewModel(ctx *context.APIContext) {
log.Info("CreateNewModel by api.")
routerRepo.SaveModel(ctx.Context)
}

func ShowModelManageApi(ctx *context.APIContext) {
log.Info("ShowModelManageApi by api.")
routerRepo.ShowModelPageInfo(ctx.Context)
}

func DeleteModel(ctx *context.APIContext) {
log.Info("DeleteModel by api.")
routerRepo.DeleteModel(ctx.Context)
}

func DownloadModel(ctx *context.APIContext) {
log.Info("DownloadModel by api.")
routerRepo.DownloadMultiModelFile(ctx.Context)
}

func QueryModelById(ctx *context.APIContext) {
log.Info("QueryModelById by api.")
routerRepo.QueryModelById(ctx.Context)
}

func QueryModelListForPredict(ctx *context.APIContext) {
log.Info("QueryModelListForPredict by api.")
routerRepo.QueryModelListForPredict(ctx.Context)
}

func QueryTrainModelList(ctx *context.APIContext) {
result, err := routerRepo.QueryTrainModelFileById(ctx.Context)
if err != nil {
log.Info("query error." + err.Error())
}
re := convertFileFormat(result)
ctx.JSON(http.StatusOK, re)
}

func convertFileFormat(result []storage.FileInfo) []FileInfo {
re := make([]FileInfo, 0)
if result != nil {
for _, file := range result {
tmpFile := FileInfo{
FileName: file.FileName,
ModTime: file.ModTime,
IsDir: file.IsDir,
Size: file.Size,
ParenDir: file.ParenDir,
UUID: file.UUID,
}
re = append(re, tmpFile)
}
}
return re
}

func QueryModelFileForPredict(ctx *context.APIContext) {
log.Info("QueryModelFileForPredict by api.")
id := ctx.Query("id")
result := routerRepo.QueryModelFileByID(id)
re := convertFileFormat(result)
ctx.JSON(http.StatusOK, re)
}

func CreateModelConvert(ctx *context.APIContext) {
log.Info("CreateModelConvert by api.")
routerRepo.SaveModelConvert(ctx.Context)
}

func ShowModelConvertPage(ctx *context.APIContext) {
log.Info("ShowModelConvertPage by api.")
modelResult, count, err := routerRepo.GetModelConvertPageData(ctx.Context)
if err == nil {
mapInterface := make(map[string]interface{})
mapInterface["data"] = modelResult
mapInterface["count"] = count
ctx.JSON(http.StatusOK, mapInterface)
} else {
mapInterface := make(map[string]interface{})
mapInterface["data"] = nil
mapInterface["count"] = 0
ctx.JSON(http.StatusOK, mapInterface)
}

}

func QueryModelConvertById(ctx *context.APIContext) {
modelResult, err := routerRepo.GetModelConvertById(ctx.Context)
if err == nil {
ctx.JSON(http.StatusOK, modelResult)
} else {
ctx.JSON(http.StatusOK, nil)
}
}
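
QueryTrainModelList and QueryModelFileForPredict both return a plain JSON array of the FileInfo struct declared at the top of this file; a client can decode it with an identical struct, as sketched below (the payload values are illustrative).

package main

import (
	"encoding/json"
	"fmt"
)

// fileInfo matches the JSON tags of the FileInfo struct in modelmanage.go.
type fileInfo struct {
	FileName string `json:"fileName"`
	ModTime  string `json:"modTime"`
	IsDir    bool   `json:"isDir"`
	Size     int64  `json:"size"`
	ParenDir string `json:"parenDir"`
	UUID     string `json:"uuid"`
}

func main() {
	payload := []byte(`[{"fileName":"model.ckpt","modTime":"2022-11-16 10:00:00","isDir":false,"size":1048576,"parenDir":"","uuid":"abc123"}]`)
	var files []fileInfo
	if err := json.Unmarshal(payload, &files); err != nil {
		panic(err)
	}
	for _, f := range files {
		fmt.Printf("%s (%d bytes)\n", f.FileName, f.Size)
	}
}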

+ 36  - 0  routers/api/v1/repo/spec.go

@@ -0,0 +1,36 @@
package repo

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/cloudbrain/resource"
)

func GetResourceSpec(ctx *context.APIContext) {
jobType := ctx.Query("jobType")
computeResource := ctx.Query("compute")
cluster := ctx.Query("cluster")
aiCenterCode := ctx.Query("center")
if jobType == "" || computeResource == "" || cluster == "" {
log.Info("GetResourceSpec api.param error")
ctx.JSON(200, response.OuterBizError(response.PARAM_ERROR))
return
}
specs, err := resource.FindAvailableSpecs4Show(ctx.User.ID, models.FindSpecsOptions{
JobType: models.JobType(jobType),
ComputeResource: computeResource,
Cluster: cluster,
AiCenterCode: aiCenterCode,
})
if err != nil {
log.Error("GetResourceSpec api error. %v", err)
ctx.JSON(200, response.OuterServerError(err.Error()))
return
}

specMap := make(map[string]interface{}, 0)
specMap["specs"] = specs
ctx.JSON(200, response.OuterSuccessWithData(specMap))
}
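
GetResourceSpec requires jobType, compute and cluster and treats center as optional. A request-building sketch follows; the /api/v1/specification path matches the route added in api.go, while the host and parameter values are placeholders.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Set("jobType", "TRAIN") // required; value is illustrative
	q.Set("compute", "GPU")   // required
	q.Set("cluster", "OpenI") // required
	// "center" is optional and omitted here.

	// Missing any of the three required parameters yields response.PARAM_ERROR.
	fmt.Println("https://example.com/api/v1/specification?" + q.Encode())
}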

+ 4  - 1  routers/private/internal.go

@@ -6,9 +6,10 @@
package private

import (
"code.gitea.io/gitea/routers/admin"
"strings"

"code.gitea.io/gitea/routers/admin"

"code.gitea.io/gitea/routers/repo"

"code.gitea.io/gitea/modules/log"
@@ -52,7 +53,9 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/tool/org_stat", OrgStatisticManually)
m.Post("/tool/update_repo_visit/:date", UpdateRepoVisit)
m.Post("/task/history_handle/duration", repo.HandleTaskWithNoDuration)
m.Post("/task/history_handle/aicenter", repo.HandleTaskWithAiCenter)
m.Post("/resources/specification/handle_historical_task", admin.RefreshHistorySpec)
m.Post("/duration_statisctic/history_handle", repo.CloudbrainUpdateHistoryData)

}, CheckInternalToken)
}

+ 44  - 23  routers/repo/ai_model_convert.go

@@ -74,27 +74,27 @@ func SaveModelConvert(ctx *context.Context) {
log.Info("save model convert start.")
if !ctx.Repo.CanWrite(models.UnitTypeModelManage) {
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.no_operate_right"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.no_operate_right"),
})
return
}
name := ctx.Query("name")
desc := ctx.Query("desc")
modelId := ctx.Query("modelId")
modelPath := ctx.Query("ModelFile")
SrcEngine := ctx.QueryInt("SrcEngine")
modelPath := ctx.Query("modelFile")
SrcEngine := ctx.QueryInt("srcEngine")
InputShape := ctx.Query("inputshape")
InputDataFormat := ctx.Query("inputdataformat")
DestFormat := ctx.QueryInt("DestFormat")
NetOutputFormat := ctx.QueryInt("NetOutputFormat")
DestFormat := ctx.QueryInt("destFormat")
NetOutputFormat := ctx.QueryInt("netOutputFormat")

task, err := models.QueryModelById(modelId)
if err != nil {
log.Error("no such model!", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.model_not_exist"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.model_not_exist"),
})
return
}
@@ -105,8 +105,8 @@ func SaveModelConvert(ctx *context.Context) {
if convert.Name == name {
log.Info("convert.Name=" + name + " convert.id=" + convert.ID)
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.create_error1"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.create_error1"),
})
return
}
@@ -119,8 +119,8 @@ func SaveModelConvert(ctx *context.Context) {
if isRunningTask(convert.Status) {
log.Info("convert.Status=" + convert.Status + " convert.id=" + convert.ID)
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.create_error2"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.create_error2"),
})
return
}
@@ -150,7 +150,8 @@ func SaveModelConvert(ctx *context.Context) {
go goCreateTask(modelConvert, ctx, task)

ctx.JSON(200, map[string]string{
"result_code": "0",
"id": id,
"code": "0",
})
}

@@ -604,11 +605,11 @@ func StopModelConvert(ctx *context.Context) {
}

func ShowModelConvertInfo(ctx *context.Context) {
ctx.Data["ID"] = ctx.Query("ID")
ctx.Data["ID"] = ctx.Query("id")
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)

job, err := models.QueryModelConvertById(ctx.Query("ID"))
job, err := models.QueryModelConvertById(ctx.Query("id"))
if err == nil {
if job.TrainJobDuration == "" {
job.TrainJobDuration = "00:00:00"
@@ -707,6 +708,31 @@ func ShowModelConvertPageInfo(ctx *context.Context) {
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
return
}
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}
pageSize := ctx.QueryInt("pageSize")
if pageSize <= 0 {
pageSize = setting.UI.IssuePagingNum
}
modelResult, count, err := GetModelConvertPageData(ctx)
if err == nil {
pager := context.NewPagination(int(count), page, pageSize, 5)
ctx.Data["Page"] = pager
ctx.Data["Tasks"] = modelResult
ctx.Data["MODEL_CONVERT_COUNT"] = count
} else {
ctx.ServerError("Query data error.", err)
}
}

func GetModelConvertById(ctx *context.Context) (*models.AiModelConvert, error) {
id := ctx.Query("id")
return models.QueryModelConvertById(id)
}

func GetModelConvertPageData(ctx *context.Context) ([]*models.AiModelConvert, int64, error) {
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
@@ -725,10 +751,8 @@ func ShowModelConvertPageInfo(ctx *context.Context) {
})
if err != nil {
log.Info("query db error." + err.Error())
ctx.ServerError("Cloudbrain", err)
return
return nil, 0, err
}
ctx.Data["MODEL_CONVERT_COUNT"] = count
userIds := make([]int64, len(modelResult))
for i, model := range modelResult {
model.IsCanOper = isOper(ctx, model.UserId)
@@ -743,10 +767,7 @@ func ShowModelConvertPageInfo(ctx *context.Context) {
model.UserRelAvatarLink = value.RelAvatarLink()
}
}
pager := context.NewPagination(int(count), page, pageSize, 5)
ctx.Data["Page"] = pager
ctx.Data["Tasks"] = modelResult

return modelResult, count, nil
}

func ModelConvertDownloadModel(ctx *context.Context) {
@@ -757,7 +778,7 @@ func ModelConvertDownloadModel(ctx *context.Context) {
ctx.ServerError("Not found task.", err)
return
}
AllDownload := ctx.QueryBool("AllDownload")
AllDownload := ctx.QueryBool("allDownload")
if AllDownload {
if job.IsGpuTrainTask() {
path := setting.CBCodePathPrefix + job.ID + "/model/"


+ 402  - 80  routers/repo/ai_model_manage.go

@@ -22,25 +22,33 @@ import (
)

const (
Model_prefix = "aimodels/"
tplModelManageIndex = "repo/modelmanage/index"
tplModelManageDownload = "repo/modelmanage/download"
tplModelInfo = "repo/modelmanage/showinfo"
MODEL_LATEST = 1
MODEL_NOT_LATEST = 0
MODEL_MAX_SIZE = 1024 * 1024 * 1024
STATUS_COPY_MODEL = 1
STATUS_FINISHED = 0
STATUS_ERROR = 2
Attachment_model = "model"
Model_prefix = "aimodels/"
tplModelManageIndex = "repo/modelmanage/index"
tplModelManageDownload = "repo/modelmanage/download"
tplModelInfo = "repo/modelmanage/showinfo"
tplCreateLocalModelInfo = "repo/modelmanage/create_local_1"
tplCreateLocalForUploadModelInfo = "repo/modelmanage/create_local_2"
tplCreateOnlineModelInfo = "repo/modelmanage/create_online"

MODEL_LATEST = 1
MODEL_NOT_LATEST = 0
MODEL_MAX_SIZE = 1024 * 1024 * 1024
STATUS_COPY_MODEL = 1
STATUS_FINISHED = 0
STATUS_ERROR = 2

MODEL_LOCAL_TYPE = 1
MODEL_ONLINE_TYPE = 0
)

func saveModelByParameters(jobId string, versionName string, name string, version string, label string, description string, engine int, ctx *context.Context) error {
func saveModelByParameters(jobId string, versionName string, name string, version string, label string, description string, engine int, ctx *context.Context) (string, error) {
aiTask, err := models.GetCloudbrainByJobIDAndVersionName(jobId, versionName)
if err != nil {
aiTask, err = models.GetRepoCloudBrainByJobID(ctx.Repo.Repository.ID, jobId)
if err != nil {
log.Info("query task error." + err.Error())
return err
return "", err
} else {
log.Info("query gpu train task.")
}
@@ -56,7 +64,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
if len(aimodels) > 0 {
for _, model := range aimodels {
if model.Version == version {
return errors.New(ctx.Tr("repo.model.manage.create_error"))
return "", errors.New(ctx.Tr("repo.model.manage.create_error"))
}
if model.New == MODEL_LATEST {
lastNewModelId = model.ID
@@ -70,13 +78,12 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
cloudType = models.TypeCloudBrainTwo
} else if aiTask.ComputeResource == models.GPUResource {
cloudType = models.TypeCloudBrainOne
spec, err := resource.GetCloudbrainSpec(aiTask.ID)
if err == nil {
flaverName := "GPU: " + fmt.Sprint(spec.AccCardsNum) + "*" + spec.AccCardType + ",CPU: " + fmt.Sprint(spec.CpuCores) + "," + ctx.Tr("cloudbrain.memory") + ": " + fmt.Sprint(spec.MemGiB) + "GB," + ctx.Tr("cloudbrain.shared_memory") + ": " + fmt.Sprint(spec.ShareMemGiB) + "GB"
aiTask.FlavorName = flaverName
}
}

spec, err := resource.GetCloudbrainSpec(aiTask.ID)
if err == nil {
specJson, _ := json.Marshal(spec)
aiTask.FlavorName = string(specJson)
}
accuracy := make(map[string]string)
accuracy["F1"] = ""
accuracy["Recall"] = ""
@@ -111,7 +118,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio

err = models.SaveModelToDb(model)
if err != nil {
return err
return "", err
}
if len(lastNewModelId) > 0 {
//update status and version count
@@ -134,7 +141,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio

log.Info("save model end.")
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, name, models.ActionCreateNewModelTask)
return nil
return id, nil
}

func asyncToCopyModel(aiTask *models.Cloudbrain, id string, modelSelectedFile string) {
@@ -173,7 +180,7 @@ func SaveNewNameModel(ctx *context.Context) {
ctx.Error(403, ctx.Tr("repo.model_noright"))
return
}
name := ctx.Query("Name")
name := ctx.Query("name")
if name == "" {
ctx.Error(500, fmt.Sprintf("name or version is null."))
return
@@ -189,44 +196,181 @@ func SaveNewNameModel(ctx *context.Context) {
log.Info("save model end.")
}

func SaveLocalModel(ctx *context.Context) {
if !ctx.Repo.CanWrite(models.UnitTypeModelManage) {
ctx.Error(403, ctx.Tr("repo.model_noright"))
return
}
re := map[string]string{
"code": "-1",
}
log.Info("save SaveLocalModel start.")
uuid := uuid.NewV4()
id := uuid.String()
name := ctx.Query("name")
version := ctx.Query("version")
if version == "" {
version = "0.0.1"
}
label := ctx.Query("label")
description := ctx.Query("description")
engine := ctx.QueryInt("engine")
taskType := ctx.QueryInt("type")
modelActualPath := ""
if taskType == models.TypeCloudBrainOne {
destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(id) + "/"
modelActualPath = setting.Attachment.Minio.Bucket + "/" + destKeyNamePrefix
} else if taskType == models.TypeCloudBrainTwo {
destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(id) + "/"
modelActualPath = setting.Bucket + "/" + destKeyNamePrefix
} else {
re["msg"] = "type is error."
ctx.JSON(200, re)
return
}
var lastNewModelId string
repoId := ctx.Repo.Repository.ID
aimodels := models.QueryModelByName(name, repoId)
if len(aimodels) > 0 {
for _, model := range aimodels {
if model.Version == version {
re["msg"] = ctx.Tr("repo.model.manage.create_error")
ctx.JSON(200, re)
return
}
if model.New == MODEL_LATEST {
lastNewModelId = model.ID
}
}
}
model := &models.AiModelManage{
ID: id,
Version: version,
ModelType: MODEL_LOCAL_TYPE,
VersionCount: len(aimodels) + 1,
Label: label,
Name: name,
Description: description,
New: MODEL_LATEST,
Type: taskType,
Path: modelActualPath,
Size: 0,
AttachmentId: "",
RepoId: repoId,
UserId: ctx.User.ID,
Engine: int64(engine),
TrainTaskInfo: "",
Accuracy: "",
Status: STATUS_FINISHED,
}

err := models.SaveModelToDb(model)
if err != nil {
re["msg"] = err.Error()
ctx.JSON(200, re)
return
}
if len(lastNewModelId) > 0 {
//update status and version count
models.ModifyModelNewProperty(lastNewModelId, MODEL_NOT_LATEST, 0)
}
var units []models.RepoUnit
var deleteUnitTypes []models.UnitType
units = append(units, models.RepoUnit{
RepoID: ctx.Repo.Repository.ID,
Type: models.UnitTypeModelManage,
Config: &models.ModelManageConfig{
EnableModelManage: true,
},
})
deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeModelManage)

models.UpdateRepositoryUnits(ctx.Repo.Repository, units, deleteUnitTypes)

log.Info("save model end.")
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, name, models.ActionCreateNewModelTask)
re["code"] = "0"
re["id"] = id
ctx.JSON(200, re)
}
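
SaveLocalModel always answers HTTP 200 and signals success or failure through the "code"/"msg"/"id" fields of the JSON body. Below is a minimal client-side sketch of consuming that contract; the route path and the numeric value passed for "type" are illustrative assumptions, not taken from this patch.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// saveModelResp mirrors the map written by SaveLocalModel/SaveModel:
// "code" is "0" on success and "-1" otherwise; "id" carries the new model id.
type saveModelResp struct {
	Code string `json:"code"`
	Msg  string `json:"msg"`
	ID   string `json:"id"`
}

func createLocalModel(base, owner, repo string, form url.Values) (string, error) {
	// Hypothetical route; check routers/routes/routes.go for the real path.
	resp, err := http.PostForm(base+"/"+owner+"/"+repo+"/modelmanage/create_local_model", form)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	var r saveModelResp
	if err := json.NewDecoder(resp.Body).Decode(&r); err != nil {
		return "", err
	}
	if r.Code != "0" {
		return "", fmt.Errorf("create model failed: %s", r.Msg)
	}
	return r.ID, nil
}

func main() {
	form := url.Values{}
	form.Set("name", "resnet50")
	form.Set("version", "0.0.1")
	form.Set("type", "0") // assumed to match models.TypeCloudBrainOne; verify in models/cloudbrain.go
	id, err := createLocalModel("https://example.com", "someowner", "somerepo", form)
	fmt.Println(id, err)
}

The same map-shaped response is used by the reworked SaveModel, DeleteModel and ModifyModelInfo further down in this file, so the same decoding applies to those endpoints.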

func getSize(files []storage.FileInfo) int64 {
var size int64
for _, file := range files {
size += file.Size
}
return size
}

func UpdateModelSize(modeluuid string) {
model, err := models.QueryModelById(modeluuid)
if err == nil {
if model.Type == models.TypeCloudBrainOne {
if strings.HasPrefix(model.Path, setting.Attachment.Minio.Bucket+"/"+Model_prefix) {
files, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, model.Path[len(setting.Attachment.Minio.Bucket)+1:])
if err != nil {
log.Info("Failed to query model size from minio. id=" + modeluuid)
}
size := getSize(files)
models.ModifyModelSize(modeluuid, size)
}
} else if model.Type == models.TypeCloudBrainTwo {
if strings.HasPrefix(model.Path, setting.Bucket+"/"+Model_prefix) {
files, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, model.Path[len(setting.Bucket)+1:])
if err != nil {
log.Info("Failed to query model size from obs. id=" + modeluuid)
}
size := getSize(files)
models.ModifyModelSize(modeluuid, size)
}
}
} else {
log.Info("not found model,uuid=" + modeluuid)
}
}
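
UpdateModelSize depends on model.Path being stored as "<bucket>/<object key prefix>", so stripping the first len(bucket)+1 characters yields the prefix handed to the storage listing call. A standalone sketch of that derivation, using a made-up bucket name and model uuid:

package main

import (
	"fmt"
	"strings"
)

func main() {
	bucket := "opendata" // hypothetical bucket name
	// model.Path is stored as "<bucket>/aimodels/<id[0:1]>/<id[1:2]>/<id>/"
	modelPath := bucket + "/aimodels/1/2/12ab34cd-0000-0000-0000-000000000000/"

	if strings.HasPrefix(modelPath, bucket+"/aimodels/") {
		prefix := modelPath[len(bucket)+1:] // key prefix passed to the bucket listing call
		fmt.Println(prefix)                 // aimodels/1/2/12ab34cd-0000-0000-0000-000000000000/
	}
}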

func SaveModel(ctx *context.Context) {
if !ctx.Repo.CanWrite(models.UnitTypeModelManage) {
ctx.Error(403, ctx.Tr("repo.model_noright"))
return
}
log.Info("save model start.")
JobId := ctx.Query("JobId")
VersionName := ctx.Query("VersionName")
name := ctx.Query("Name")
version := ctx.Query("Version")
label := ctx.Query("Label")
description := ctx.Query("Description")
engine := ctx.QueryInt("Engine")
JobId := ctx.Query("jobId")
VersionName := ctx.Query("versionName")
name := ctx.Query("name")
version := ctx.Query("version")
label := ctx.Query("label")
description := ctx.Query("description")
engine := ctx.QueryInt("engine")
modelSelectedFile := ctx.Query("modelSelectedFile")
log.Info("engine=" + fmt.Sprint(engine) + " modelSelectedFile=" + modelSelectedFile)

re := map[string]string{
"code": "-1",
}
if JobId == "" || VersionName == "" {
ctx.Error(500, fmt.Sprintf("JobId or VersionName is null."))
re["msg"] = "JobId or VersionName is null."
ctx.JSON(200, re)
return
}
if modelSelectedFile == "" {
ctx.Error(500, fmt.Sprintf("Not selected model file."))
re["msg"] = "Not selected model file."
ctx.JSON(200, re)
return
}

if name == "" || version == "" {
ctx.Error(500, fmt.Sprintf("name or version is null."))
re["msg"] = "name or version is null."
ctx.JSON(200, re)
return
}

err := saveModelByParameters(JobId, VersionName, name, version, label, description, engine, ctx)

id, err := saveModelByParameters(JobId, VersionName, name, version, label, description, engine, ctx)
if err != nil {
log.Info("save model error." + err.Error())
ctx.Error(500, fmt.Sprintf("save model error. %v", err))
return
re["msg"] = err.Error()
} else {
re["code"] = "0"
re["id"] = id
}
ctx.Status(200)
ctx.JSON(200, re)
log.Info("save model end.")
}

@@ -288,16 +432,74 @@ func downloadModelFromCloudBrainOne(modelUUID string, jobName string, parentDir
return "", 0, nil
}
}
func DeleteModelFile(ctx *context.Context) {
log.Info("delete model start.")
id := ctx.Query("id")
fileName := ctx.Query("fileName")
model, err := models.QueryModelById(id)
if err == nil {
if model.ModelType == MODEL_LOCAL_TYPE {
if model.Type == models.TypeCloudBrainOne {
bucketName := setting.Attachment.Minio.Bucket
objectName := model.Path[len(bucketName)+1:] + fileName
log.Info("delete bucket=" + bucketName + " path=" + objectName)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
totalSize := storage.MinioGetFilesSize(bucketName, []string{objectName})
err := storage.Attachments.DeleteDir(objectName)
if err != nil {
log.Info("Failed to delete model. id=" + id)
re := map[string]string{
"code": "-1",
}
re["msg"] = err.Error()
ctx.JSON(200, re)
return
} else {
log.Info("delete minio file size is:" + fmt.Sprint(totalSize))
models.ModifyModelSize(id, model.Size-totalSize)
}
}
} else if model.Type == models.TypeCloudBrainTwo {
bucketName := setting.Bucket
objectName := model.Path[len(setting.Bucket)+1:] + fileName
log.Info("delete bucket=" + setting.Bucket + " path=" + objectName)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
totalSize := storage.ObsGetFilesSize(bucketName, []string{objectName})
err := storage.ObsRemoveObject(bucketName, objectName)
if err != nil {
log.Info("Failed to delete model. id=" + id)
re := map[string]string{
"code": "-1",
}
re["msg"] = err.Error()
ctx.JSON(200, re)
return
} else {
log.Info("delete obs file size is:" + fmt.Sprint(totalSize))
models.ModifyModelSize(id, model.Size-totalSize)
}
}
}
}
}
ctx.JSON(200, map[string]string{
"code": "0",
})
}

func DeleteModel(ctx *context.Context) {
log.Info("delete model start.")
id := ctx.Query("ID")
id := ctx.Query("id")
err := deleteModelByID(ctx, id)
if err != nil {
ctx.JSON(500, err.Error())
re := map[string]string{
"code": "-1",
}
re["msg"] = err.Error()
ctx.JSON(200, re)
} else {
ctx.JSON(200, map[string]string{
"result_code": "0",
"code": "0",
})
}
}
@@ -309,14 +511,28 @@ func deleteModelByID(ctx *context.Context, id string) error {
return errors.New(ctx.Tr("repo.model_noright"))
}
if err == nil {
log.Info("bucket=" + setting.Bucket + " path=" + model.Path)
if strings.HasPrefix(model.Path, setting.Bucket+"/"+Model_prefix) {
err := storage.ObsRemoveObject(setting.Bucket, model.Path[len(setting.Bucket)+1:])
if err != nil {
log.Info("Failed to delete model. id=" + id)
return err

if model.Type == models.TypeCloudBrainOne {
bucketName := setting.Attachment.Minio.Bucket
log.Info("bucket=" + bucketName + " path=" + model.Path)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
err := storage.Attachments.DeleteDir(model.Path[len(bucketName)+1:])
if err != nil {
log.Info("Failed to delete model. id=" + id)
return err
}
}
} else if model.Type == models.TypeCloudBrainTwo {
log.Info("bucket=" + setting.Bucket + " path=" + model.Path)
if strings.HasPrefix(model.Path, setting.Bucket+"/"+Model_prefix) {
err := storage.ObsRemoveObject(setting.Bucket, model.Path[len(setting.Bucket)+1:])
if err != nil {
log.Info("Failed to delete model. id=" + id)
return err
}
}
}

err = models.DeleteModelById(id)
if err == nil { //find a model to change new
aimodels := models.QueryModelByName(model.Name, model.RepoId)
@@ -354,7 +570,7 @@ func QueryModelByParameters(repoId int64, page int) ([]*models.AiModelManage, in

func DownloadMultiModelFile(ctx *context.Context) {
log.Info("DownloadMultiModelFile start.")
id := ctx.Query("ID")
id := ctx.Query("id")
log.Info("id=" + id)
task, err := models.QueryModelById(id)
if err != nil {
@@ -487,7 +703,10 @@ func downloadFromCloudBrainTwo(path string, task *models.AiModelManage, ctx *con

func QueryTrainJobVersionList(ctx *context.Context) {
log.Info("query train job version list. start.")
JobID := ctx.Query("JobID")
JobID := ctx.Query("jobId")
if JobID == "" {
JobID = ctx.Query("JobId")
}

VersionListTasks, count, err := models.QueryModelTrainJobVersionList(JobID)

@@ -515,20 +734,33 @@ func QueryTrainJobList(ctx *context.Context) {

}

func QueryTrainModelList(ctx *context.Context) {
log.Info("query train job list. start.")
jobName := ctx.Query("jobName")
taskType := ctx.QueryInt("type")
VersionName := ctx.Query("VersionName")
func QueryTrainModelFileById(ctx *context.Context) ([]storage.FileInfo, error) {
JobID := ctx.Query("jobId")
VersionListTasks, count, err := models.QueryModelTrainJobVersionList(JobID)
if err == nil {
if count == 1 {
task := VersionListTasks[0]
jobName := task.JobName
taskType := task.Type
VersionName := task.VersionName
modelDbResult, err := getModelFromObjectSave(jobName, taskType, VersionName)
return modelDbResult, err
}
}
log.Info("get TypeCloudBrainTwo TrainJobListModel failed:", err)
return nil, errors.New("Not found task.")
}

func getModelFromObjectSave(jobName string, taskType int, VersionName string) ([]storage.FileInfo, error) {
if taskType == models.TypeCloudBrainTwo {
objectkey := path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, VersionName) + "/"
modelDbResult, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, objectkey)
log.Info("bucket=" + setting.Bucket + " objectkey=" + objectkey)
if err != nil {
log.Info("get TypeCloudBrainTwo TrainJobListModel failed:", err)
return nil, err
} else {
ctx.JSON(200, modelDbResult)
return
return modelDbResult, nil
}
} else if taskType == models.TypeCloudBrainOne {
modelSrcPrefix := setting.CBCodePathPrefix + jobName + "/model/"
@@ -536,12 +768,30 @@ func QueryTrainModelList(ctx *context.Context) {
modelDbResult, err := storage.GetAllObjectByBucketAndPrefixMinio(bucketName, modelSrcPrefix)
if err != nil {
log.Info("get TypeCloudBrainOne TrainJobListModel failed:", err)
return nil, err
} else {
ctx.JSON(200, modelDbResult)
return
return modelDbResult, nil
}
}
ctx.JSON(200, "")
return nil, errors.New("Not support.")
}
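
getModelFromObjectSave builds the OBS listing key for the CloudBrain Two branch as path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, VersionName) plus a trailing slash. A small sketch with hypothetical values standing in for the two settings shows the resulting key shape:

package main

import (
	"fmt"
	"path"
)

func main() {
	// Hypothetical values standing in for setting.TrainJobModelPath and setting.OutPutPath.
	trainJobModelPath := "job/"
	outPutPath := "output/"
	jobName := "trainjob-abc123"
	versionName := "V0001"

	// Same construction as the CloudBrain Two branch of getModelFromObjectSave.
	objectKey := path.Join(trainJobModelPath, jobName, outPutPath, versionName) + "/"
	fmt.Println(objectKey) // job/trainjob-abc123/output/V0001/
}

For the CloudBrain One branch the listing prefix is setting.CBCodePathPrefix + jobName + "/model/" against the minio bucket instead.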

func QueryTrainModelList(ctx *context.Context) {
log.Info("query train job list. start.")
jobName := ctx.Query("jobName")
taskType := ctx.QueryInt("type")
VersionName := ctx.Query("versionName")
if VersionName == "" {
VersionName = ctx.Query("VersionName")
}
modelDbResult, err := getModelFromObjectSave(jobName, taskType, VersionName)
if err != nil {
log.Info("get TypeCloudBrainTwo TrainJobListModel failed:", err)
ctx.JSON(200, "")
} else {
ctx.JSON(200, modelDbResult)
return
}
}

func DownloadSingleModelFile(ctx *context.Context) {
@@ -612,7 +862,7 @@ func DownloadSingleModelFile(ctx *context.Context) {
}

func ShowModelInfo(ctx *context.Context) {
ctx.Data["ID"] = ctx.Query("ID")
ctx.Data["ID"] = ctx.Query("id")
ctx.Data["name"] = ctx.Query("name")
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)
@@ -620,6 +870,19 @@ func ShowModelInfo(ctx *context.Context) {
ctx.HTML(200, tplModelInfo)
}

func QueryModelById(ctx *context.Context) {
id := ctx.Query("id")
model, err := models.QueryModelById(id)
if err == nil {
model.IsCanOper = isOper(ctx, model.UserId)
model.IsCanDelete = isCanDelete(ctx, model.UserId)
removeIpInfo(model)
ctx.JSON(http.StatusOK, model)
} else {
ctx.JSON(http.StatusNotFound, nil)
}
}

func ShowSingleModel(ctx *context.Context) {
name := ctx.Query("name")

@@ -828,30 +1091,59 @@ func ModifyModel(id string, description string) error {

func ModifyModelInfo(ctx *context.Context) {
log.Info("modify model start.")
id := ctx.Query("ID")
description := ctx.Query("Description")

id := ctx.Query("id")
re := map[string]string{
"code": "-1",
}
task, err := models.QueryModelById(id)
if err != nil {
re["msg"] = err.Error()
log.Error("no such model!", err.Error())
ctx.ServerError("no such model:", err)
ctx.JSON(200, re)
return
}
if !isOper(ctx, task.UserId) {
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
//ctx.ServerError("no right.", errors.New(ctx.Tr("repo.model_noright")))
re["msg"] = "No right to operation."
ctx.JSON(200, re)
return
}
if task.ModelType == MODEL_LOCAL_TYPE {
name := ctx.Query("name")
label := ctx.Query("label")
description := ctx.Query("description")
engine := ctx.QueryInt("engine")
aimodels := models.QueryModelByName(name, task.RepoId)
if aimodels != nil && len(aimodels) > 0 {
if len(aimodels) == 1 {
if aimodels[0].ID != task.ID {
re["msg"] = ctx.Tr("repo.model.manage.create_error")
ctx.JSON(200, re)
return
}
} else {
re["msg"] = ctx.Tr("repo.model.manage.create_error")
ctx.JSON(200, re)
return
}
}
err = models.ModifyLocalModel(id, name, label, description, engine)

err = ModifyModel(id, description)
} else {
label := ctx.Query("label")
description := ctx.Query("description")
engine := task.Engine
name := task.Name
err = models.ModifyLocalModel(id, name, label, description, int(engine))
}

if err != nil {
log.Info("modify error," + err.Error())
ctx.ServerError("error.", err)
re["msg"] = err.Error()
ctx.JSON(200, re)
return
} else {
ctx.JSON(200, "success")
re["code"] = "0"
ctx.JSON(200, re)
}

}

func QueryModelListForPredict(ctx *context.Context) {
@@ -894,28 +1186,36 @@ func QueryModelListForPredict(ctx *context.Context) {
}

func QueryModelFileForPredict(ctx *context.Context) {
id := ctx.Query("ID")
id := ctx.Query("id")
if id == "" {
id = ctx.Query("ID")
}
ctx.JSON(http.StatusOK, QueryModelFileByID(id))
}

func QueryModelFileByID(id string) []storage.FileInfo {
model, err := models.QueryModelById(id)
if err == nil {
if model.Type == models.TypeCloudBrainTwo {
prefix := model.Path[len(setting.Bucket)+1:]
fileinfos, _ := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, prefix)
ctx.JSON(http.StatusOK, fileinfos)
return fileinfos
} else if model.Type == models.TypeCloudBrainOne {
prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:]
fileinfos, _ := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, prefix)
ctx.JSON(http.StatusOK, fileinfos)
return fileinfos
}
} else {
log.Error("no such model!", err.Error())
ctx.ServerError("no such model:", err)
return
}
return nil
}

func QueryOneLevelModelFile(ctx *context.Context) {
id := ctx.Query("ID")
id := ctx.Query("id")
if id == "" {
id = ctx.Query("ID")
}
parentDir := ctx.Query("parentDir")
model, err := models.QueryModelById(id)
if err != nil {
@@ -941,3 +1241,25 @@ func QueryOneLevelModelFile(ctx *context.Context) {
ctx.JSON(http.StatusOK, fileinfos)
}
}

func CreateLocalModel(ctx *context.Context) {
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)

ctx.HTML(200, tplCreateLocalModelInfo)
}

func CreateLocalModelForUpload(ctx *context.Context) {
ctx.Data["uuid"] = ctx.Query("uuid")
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)
ctx.Data["max_model_size"] = setting.MaxModelSize * MODEL_MAX_SIZE
ctx.HTML(200, tplCreateLocalForUploadModelInfo)
}

func CreateOnlineModel(ctx *context.Context) {
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)

ctx.HTML(200, tplCreateOnlineModelInfo)
}

+ 2
- 2
routers/repo/aisafety.go View File

@@ -804,7 +804,7 @@ func createForNPU(ctx *context.Context, jobName string) error {
JobType: string(models.JobTypeModelSafety),
}

err = modelarts.GenerateInferenceJob(ctx, req)
_, err = modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
return err
@@ -901,7 +901,7 @@ func createForGPU(ctx *context.Context, jobName string) error {
LabelName: evaluationIndex,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
return err
}


+ 18
- 17
routers/repo/attachment.go View File

@@ -11,6 +11,7 @@ import (
"fmt"
"mime/multipart"
"net/http"
"path"
"strconv"
"strings"

@@ -311,7 +312,8 @@ func GetAttachment(ctx *context.Context) {
url = setting.PROXYURL + "/obs_proxy_download?uuid=" + attach.UUID + "&file_name=" + attach.Name
log.Info("return url=" + url)
} else {
url, err = storage.ObsGetPreSignedUrl(attach.UUID, attach.Name)
objectName := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(attach.UUID[0:1], attach.UUID[1:2], attach.UUID, attach.Name)), "/")
url, err = storage.ObsGetPreSignedUrl(objectName, attach.Name)
if err != nil {
ctx.ServerError("ObsGetPreSignedUrl", err)
return
@@ -415,7 +417,7 @@ func AddAttachment(ctx *context.Context) {
uuid := ctx.Query("uuid")
has := false
if typeCloudBrain == models.TypeCloudBrainOne {
has, err = storage.Attachments.HasObject(models.AttachmentRelativePath(uuid))
has, err = storage.Attachments.HasObject(setting.Attachment.Minio.BasePath + models.AttachmentRelativePath(uuid))
if err != nil {
ctx.ServerError("HasObject", err)
return
@@ -557,7 +559,7 @@ func GetSuccessChunks(ctx *context.Context) {

isExist := false
if typeCloudBrain == models.TypeCloudBrainOne {
isExist, err = storage.Attachments.HasObject(models.AttachmentRelativePath(fileChunk.UUID))
isExist, err = storage.Attachments.HasObject(setting.Attachment.Minio.BasePath + models.AttachmentRelativePath(fileChunk.UUID))
if err != nil {
ctx.ServerError("HasObject failed", err)
return
@@ -593,12 +595,12 @@ func GetSuccessChunks(ctx *context.Context) {
}

if typeCloudBrain == models.TypeCloudBrainOne {
chunks, err = storage.GetPartInfos(fileChunk.UUID, fileChunk.UploadID)
chunks, err = storage.GetPartInfos(strings.TrimPrefix(path.Join(setting.Attachment.Minio.BasePath, path.Join(fileChunk.UUID[0:1], fileChunk.UUID[1:2], fileChunk.UUID)), "/"), fileChunk.UploadID)
if err != nil {
log.Error("GetPartInfos failed:%v", err.Error())
}
} else {
chunks, err = storage.GetObsPartInfos(fileChunk.UUID, fileChunk.UploadID, fileName)
chunks, err = storage.GetObsPartInfos(strings.TrimPrefix(path.Join(setting.BasePath, path.Join(fileChunk.UUID[0:1], fileChunk.UUID[1:2], fileChunk.UUID, fileName)), "/"), fileChunk.UploadID)
if err != nil {
log.Error("GetObsPartInfos failed:%v", err.Error())
}
@@ -699,13 +701,13 @@ func NewMultipart(ctx *context.Context) {
uuid := gouuid.NewV4().String()
var uploadID string
if typeCloudBrain == models.TypeCloudBrainOne {
uploadID, err = storage.NewMultiPartUpload(uuid)
uploadID, err = storage.NewMultiPartUpload(strings.TrimPrefix(path.Join(setting.Attachment.Minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/"))
if err != nil {
ctx.ServerError("NewMultipart", err)
return
}
} else {
uploadID, err = storage.NewObsMultiPartUpload(uuid, fileName)
uploadID, err = storage.NewObsMultiPartUpload(strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/"))
if err != nil {
ctx.ServerError("NewObsMultiPartUpload", err)
return
@@ -749,8 +751,8 @@ func PutOBSProxyUpload(ctx *context.Context) {
ctx.Error(500, fmt.Sprintf("FormFile: %v", RequestBody))
return
}
err := storage.ObsMultiPartUpload(uuid, uploadID, partNumber, fileName, RequestBody.ReadCloser())
objectName := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
err := storage.ObsMultiPartUpload(objectName, uploadID, partNumber, fileName, RequestBody.ReadCloser())
if err != nil {
log.Info("upload error.")
}
@@ -759,8 +761,8 @@ func PutOBSProxyUpload(ctx *context.Context) {
func GetOBSProxyDownload(ctx *context.Context) {
uuid := ctx.Query("uuid")
fileName := ctx.Query("file_name")
body, err := storage.ObsDownload(uuid, fileName)
objectName := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
body, err := storage.ObsDownloadAFile(setting.Bucket, objectName)
if err != nil {
log.Info("upload error.")
} else {
@@ -805,7 +807,7 @@ func GetMultipartUploadUrl(ctx *context.Context) {
return
}

url, err = storage.GenMultiPartSignedUrl(uuid, uploadID, partNumber, size)
url, err = storage.GenMultiPartSignedUrl(strings.TrimPrefix(path.Join(setting.Attachment.Minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/"), uploadID, partNumber, size)
if err != nil {
ctx.Error(500, fmt.Sprintf("GenMultiPartSignedUrl failed: %v", err))
return
@@ -815,7 +817,7 @@ func GetMultipartUploadUrl(ctx *context.Context) {
url = setting.PROXYURL + "/obs_proxy_multipart?uuid=" + uuid + "&uploadId=" + uploadID + "&partNumber=" + fmt.Sprint(partNumber) + "&file_name=" + fileName
log.Info("return url=" + url)
} else {
url, err = storage.ObsGenMultiPartSignedUrl(uuid, uploadID, partNumber, fileName)
url, err = storage.ObsGenMultiPartSignedUrl(strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/"), uploadID, partNumber)
if err != nil {
ctx.Error(500, fmt.Sprintf("ObsGenMultiPartSignedUrl failed: %v", err))
return
@@ -823,7 +825,6 @@ func GetMultipartUploadUrl(ctx *context.Context) {
log.Info("url=" + url)
}
}

ctx.JSON(200, map[string]string{
"url": url,
})
@@ -855,13 +856,13 @@ func CompleteMultipart(ctx *context.Context) {
}

if typeCloudBrain == models.TypeCloudBrainOne {
_, err = storage.CompleteMultiPartUpload(uuid, uploadID, fileChunk.TotalChunks)
_, err = storage.CompleteMultiPartUpload(strings.TrimPrefix(path.Join(setting.Attachment.Minio.BasePath, path.Join(fileChunk.UUID[0:1], fileChunk.UUID[1:2], fileChunk.UUID)), "/"), uploadID, fileChunk.TotalChunks)
if err != nil {
ctx.Error(500, fmt.Sprintf("CompleteMultiPartUpload failed: %v", err))
return
}
} else {
err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName, fileChunk.TotalChunks)
err = storage.CompleteObsMultiPartUpload(strings.TrimPrefix(path.Join(setting.BasePath, path.Join(fileChunk.UUID[0:1], fileChunk.UUID[1:2], fileChunk.UUID, fileName)), "/"), uploadID, fileChunk.TotalChunks)
if err != nil {
ctx.Error(500, fmt.Sprintf("CompleteObsMultiPartUpload failed: %v", err))
return
@@ -1013,7 +1014,7 @@ func queryDatasets(ctx *context.Context, attachs []*models.AttachmentUsername) {
}

for _, attch := range attachs {
has, err := storage.Attachments.HasObject(models.AttachmentRelativePath(attch.UUID))
has, err := storage.Attachments.HasObject(setting.Attachment.Minio.BasePath + models.AttachmentRelativePath(attch.UUID))
if err != nil || !has {
continue
}
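
The expression repeated through these hunks derives the sharded object name from the attachment uuid: the configured base path, the first character of the uuid, the second character, the uuid itself, and (for OBS) the file name, with any leading slash trimmed. A minimal standalone sketch with a made-up uuid and base path:

package main

import (
	"fmt"
	"path"
	"strings"
)

// objectName reproduces the pattern used above; basePath stands in for
// setting.BasePath (OBS) or setting.Attachment.Minio.BasePath (minio).
func objectName(basePath, uuid, fileName string) string {
	return strings.TrimPrefix(path.Join(basePath, uuid[0:1], uuid[1:2], uuid, fileName), "/")
}

func main() {
	uuid := "ab12cd34-0000-0000-0000-000000000000" // made-up attachment uuid
	fmt.Println(objectName("/attachment", uuid, "data.zip"))
	// attachment/a/b/ab12cd34-0000-0000-0000-000000000000/data.zip
	fmt.Println(objectName("/attachment", uuid, "")) // minio form, no file name
	// attachment/a/b/ab12cd34-0000-0000-0000-000000000000
}

The minio variants omit the file name; passing an empty string here reproduces that, since path.Join ignores empty elements.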


+ 323
- 0
routers/repo/attachment_model.go View File

@@ -0,0 +1,323 @@
package repo

import (
"fmt"
"path"
"strconv"
"strings"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/minio_ext"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/upload"
gouuid "github.com/satori/go.uuid"
)

func GetModelChunks(ctx *context.Context) {
fileMD5 := ctx.Query("md5")
typeCloudBrain := ctx.QueryInt("type")
fileName := ctx.Query("file_name")
scene := ctx.Query("scene")
modeluuid := ctx.Query("modeluuid")
log.Info("scene=" + scene + " typeCloudBrain=" + fmt.Sprint(typeCloudBrain))
var chunks string

err := checkTypeCloudBrain(typeCloudBrain)
if err != nil {
ctx.ServerError("checkTypeCloudBrain failed", err)
return
}

fileChunk, err := models.GetModelFileChunkByMD5AndUser(fileMD5, ctx.User.ID, typeCloudBrain, modeluuid)
if err != nil {
if models.IsErrFileChunkNotExist(err) {
ctx.JSON(200, map[string]string{
"uuid": "",
"uploaded": "0",
"uploadID": "",
"chunks": "",
})
} else {
ctx.ServerError("GetFileChunkByMD5", err)
}
return
}

isExist := false
if typeCloudBrain == models.TypeCloudBrainOne {
isExist, err = storage.Attachments.HasObject(fileChunk.ObjectName)
if isExist {
log.Info("The file is exist in minio. has uploaded.path=" + fileChunk.ObjectName)
} else {
log.Info("The file is not exist in minio..")
}
if err != nil {
ctx.ServerError("HasObject failed", err)
return
}
} else {
isExist, err = storage.ObsHasObject(fileChunk.ObjectName)
if isExist {
log.Info("The file is exist in obs. has uploaded. path=" + fileChunk.ObjectName)
} else {
log.Info("The file is not exist in obs.")
}
if err != nil {
ctx.ServerError("ObsHasObject failed", err)
return
}
}

if isExist {
if fileChunk.IsUploaded == models.FileNotUploaded {
log.Info("the file has been uploaded but not recorded")
fileChunk.IsUploaded = models.FileUploaded
if err = models.UpdateModelFileChunk(fileChunk); err != nil {
log.Error("UpdateFileChunk failed:", err.Error())
}
}
modelname := ""
model, err := models.QueryModelById(modeluuid)
if err == nil && model != nil {
modelname = model.Name
}
ctx.JSON(200, map[string]string{
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
"uploadID": fileChunk.UploadID,
"chunks": string(chunks),
"attachID": "0",
"modeluuid": modeluuid,
"fileName": fileName,
"modelName": modelname,
})
} else {
if fileChunk.IsUploaded == models.FileUploaded {
log.Info("the file has been recorded but not uploaded")
fileChunk.IsUploaded = models.FileNotUploaded
if err = models.UpdateModelFileChunk(fileChunk); err != nil {
log.Error("UpdateFileChunk failed:", err.Error())
}
}

if typeCloudBrain == models.TypeCloudBrainOne {
chunks, err = storage.GetPartInfos(fileChunk.ObjectName, fileChunk.UploadID)
if err != nil {
log.Error("GetPartInfos failed:%v", err.Error())
}
} else {
chunks, err = storage.GetObsPartInfos(fileChunk.ObjectName, fileChunk.UploadID)
if err != nil {
log.Error("GetObsPartInfos failed:%v", err.Error())
}
}
if err != nil {
models.DeleteModelFileChunk(fileChunk)
ctx.JSON(200, map[string]string{
"uuid": "",
"uploaded": "0",
"uploadID": "",
"chunks": "",
})
} else {
ctx.JSON(200, map[string]string{
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
"uploadID": fileChunk.UploadID,
"chunks": string(chunks),
"attachID": "0",
"datasetID": "0",
"fileName": "",
"datasetName": "",
})
}
}
}

func getObjectName(filename string, modeluuid string) string {
return strings.TrimPrefix(path.Join(Model_prefix, path.Join(modeluuid[0:1], modeluuid[1:2], modeluuid, filename)), "/")
}

func NewModelMultipart(ctx *context.Context) {
if !setting.Attachment.Enabled {
ctx.Error(404, "attachment is not enabled")
return
}
fileName := ctx.Query("file_name")
modeluuid := ctx.Query("modeluuid")

err := upload.VerifyFileType(ctx.Query("fileType"), strings.Split(setting.Attachment.AllowedTypes, ","))
if err != nil {
ctx.Error(400, err.Error())
return
}

typeCloudBrain := ctx.QueryInt("type")
err = checkTypeCloudBrain(typeCloudBrain)
if err != nil {
ctx.ServerError("checkTypeCloudBrain failed", err)
return
}

if setting.Attachment.StoreType == storage.MinioStorageType {
totalChunkCounts := ctx.QueryInt("totalChunkCounts")
if totalChunkCounts > minio_ext.MaxPartsCount {
ctx.Error(400, fmt.Sprintf("chunk counts(%d) is too much", totalChunkCounts))
return
}

fileSize := ctx.QueryInt64("size")
if fileSize > minio_ext.MaxMultipartPutObjectSize {
ctx.Error(400, fmt.Sprintf("file size(%d) is too big", fileSize))
return
}

uuid := gouuid.NewV4().String()
var uploadID string
var objectName string
if typeCloudBrain == models.TypeCloudBrainOne {
objectName = strings.TrimPrefix(path.Join(Model_prefix, path.Join(modeluuid[0:1], modeluuid[1:2], modeluuid, fileName)), "/")
uploadID, err = storage.NewMultiPartUpload(objectName)
if err != nil {
ctx.ServerError("NewMultipart", err)
return
}
} else {

objectName = strings.TrimPrefix(path.Join(Model_prefix, path.Join(modeluuid[0:1], modeluuid[1:2], modeluuid, fileName)), "/")
uploadID, err = storage.NewObsMultiPartUpload(objectName)
if err != nil {
ctx.ServerError("NewObsMultiPartUpload", err)
return
}
}

_, err = models.InsertModelFileChunk(&models.ModelFileChunk{
UUID: uuid,
UserID: ctx.User.ID,
UploadID: uploadID,
Md5: ctx.Query("md5"),
Size: fileSize,
ObjectName: objectName,
ModelUUID: modeluuid,
TotalChunks: totalChunkCounts,
Type: typeCloudBrain,
})

if err != nil {
ctx.Error(500, fmt.Sprintf("InsertFileChunk: %v", err))
return
}

ctx.JSON(200, map[string]string{
"uuid": uuid,
"uploadID": uploadID,
})
} else {
ctx.Error(404, "storage type is not enabled")
return
}
}

func GetModelMultipartUploadUrl(ctx *context.Context) {
uuid := ctx.Query("uuid")
uploadID := ctx.Query("uploadID")
partNumber := ctx.QueryInt("chunkNumber")
size := ctx.QueryInt64("size")
typeCloudBrain := ctx.QueryInt("type")
err := checkTypeCloudBrain(typeCloudBrain)
if err != nil {
ctx.ServerError("checkTypeCloudBrain failed", err)
return
}
fileChunk, err := models.GetModelFileChunkByUUID(uuid)
if err != nil {
if models.IsErrFileChunkNotExist(err) {
ctx.Error(404)
} else {
ctx.ServerError("GetFileChunkByUUID", err)
}
return
}
url := ""
if typeCloudBrain == models.TypeCloudBrainOne {
if size > minio_ext.MinPartSize {
ctx.Error(400, fmt.Sprintf("chunk size(%d) is too big", size))
return
}
url, err = storage.GenMultiPartSignedUrl(fileChunk.ObjectName, uploadID, partNumber, size)
if err != nil {
ctx.Error(500, fmt.Sprintf("GenMultiPartSignedUrl failed: %v", err))
return
}
} else {
url, err = storage.ObsGenMultiPartSignedUrl(fileChunk.ObjectName, uploadID, partNumber)
if err != nil {
ctx.Error(500, fmt.Sprintf("ObsGenMultiPartSignedUrl failed: %v", err))
return
}
log.Info("url=" + url)

}

ctx.JSON(200, map[string]string{
"url": url,
})
}

func CompleteModelMultipart(ctx *context.Context) {
uuid := ctx.Query("uuid")
uploadID := ctx.Query("uploadID")
typeCloudBrain := ctx.QueryInt("type")
modeluuid := ctx.Query("modeluuid")
log.Warn("uuid:" + uuid)
log.Warn("modeluuid:" + modeluuid)
log.Warn("typeCloudBrain:" + strconv.Itoa(typeCloudBrain))

err := checkTypeCloudBrain(typeCloudBrain)
if err != nil {
ctx.ServerError("checkTypeCloudBrain failed", err)
return
}
fileChunk, err := models.GetModelFileChunkByUUID(uuid)
if err != nil {
if models.IsErrFileChunkNotExist(err) {
ctx.Error(404)
} else {
ctx.ServerError("GetFileChunkByUUID", err)
}
return
}

if typeCloudBrain == models.TypeCloudBrainOne {
_, err = storage.CompleteMultiPartUpload(fileChunk.ObjectName, uploadID, fileChunk.TotalChunks)
if err != nil {
ctx.Error(500, fmt.Sprintf("CompleteMultiPartUpload failed: %v", err))
return
}
} else {
err = storage.CompleteObsMultiPartUpload(fileChunk.ObjectName, uploadID, fileChunk.TotalChunks)
if err != nil {
ctx.Error(500, fmt.Sprintf("CompleteObsMultiPartUpload failed: %v", err))
return
}
}

fileChunk.IsUploaded = models.FileUploaded

err = models.UpdateModelFileChunk(fileChunk)
if err != nil {
ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err))
return
}
//update the stored model size after the upload completes
UpdateModelSize(modeluuid)

ctx.JSON(200, map[string]string{
"result_code": "0",
})

}
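
Taken together, the handlers in this new file implement a resumable, chunked model upload: the client asks GetModelChunks whether the md5 is already known, calls NewModelMultipart to obtain a uuid and uploadID, fetches a signed URL per chunk from GetModelMultipartUploadUrl, PUTs each part to that URL, and finally calls CompleteModelMultipart. A hedged client-side sketch of that sequence follows; the endpoint paths, parameter values and the "type" constant are assumptions for illustration only.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// getJSON issues a GET and decodes the map-shaped JSON body used by these handlers.
func getJSON(u string) (map[string]string, error) {
	resp, err := http.Get(u)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	out := map[string]string{}
	return out, json.NewDecoder(resp.Body).Decode(&out)
}

// uploadModelFile sketches the chunked upload flow; route names are hypothetical
// and reading/splitting a real file into chunks is omitted for brevity.
func uploadModelFile(base, modeluuid, fileName, md5 string, chunks [][]byte) error {
	// 1. Start a multipart upload (NewModelMultipart).
	q := url.Values{
		"file_name":        {fileName},
		"modeluuid":        {modeluuid},
		"md5":              {md5},
		"type":             {"0"}, // assumed to match models.TypeCloudBrainOne
		"totalChunkCounts": {fmt.Sprint(len(chunks))},
		"size":             {"0"},
		"fileType":         {"application/zip"},
	}
	start, err := getJSON(base + "/attachments/model/new_multipart?" + q.Encode())
	if err != nil {
		return err
	}
	uuid, uploadID := start["uuid"], start["uploadID"]

	// 2. For each chunk, ask for a signed URL (GetModelMultipartUploadUrl) and PUT the bytes.
	for i, chunk := range chunks {
		p := url.Values{
			"uuid":        {uuid},
			"uploadID":    {uploadID},
			"chunkNumber": {fmt.Sprint(i + 1)},
			"size":        {fmt.Sprint(len(chunk))},
			"type":        {"0"},
		}
		part, err := getJSON(base + "/attachments/model/get_multipart_url?" + p.Encode())
		if err != nil {
			return err
		}
		req, err := http.NewRequest(http.MethodPut, part["url"], bytes.NewReader(chunk))
		if err != nil {
			return err
		}
		if _, err := http.DefaultClient.Do(req); err != nil {
			return err
		}
	}

	// 3. Finish the upload (CompleteModelMultipart); server-side this also triggers UpdateModelSize.
	c := url.Values{"uuid": {uuid}, "uploadID": {uploadID}, "type": {"0"}, "modeluuid": {modeluuid}}
	_, err = getJSON(base + "/attachments/model/complete_multipart?" + c.Encode())
	return err
}

func main() {
	err := uploadModelFile("https://example.com", "12ab34cd-0000-0000-0000-000000000000",
		"weights.bin", "d41d8cd98f00b204e9800998ecf8427e", nil)
	fmt.Println(err)
}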

+ 26
- 47
routers/repo/cloudbrain.go View File

@@ -2,7 +2,6 @@ package repo

import (
"bufio"
"code.gitea.io/gitea/modules/urfs_client/urchin"
"encoding/json"
"errors"
"fmt"
@@ -16,6 +15,8 @@ import (
"time"
"unicode/utf8"

"code.gitea.io/gitea/modules/urfs_client/urchin"

"code.gitea.io/gitea/modules/dataset"

"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"
@@ -398,7 +399,7 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {

}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tpl, &form)
@@ -584,7 +585,7 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tpl, &form)
@@ -1845,59 +1846,37 @@ func SyncCloudbrainStatus() {
continue
}
if task.Type == models.TypeCloudBrainOne {
result, err := cloudbrain.GetJob(task.JobID)

task, err = cloudbrainTask.SyncCloudBrainOneStatus(task)
if err != nil {
log.Error("GetJob(%s) failed:%v", task.JobName, err)
log.Error("Sync cloud brain one (%s) failed:%v", task.JobName, err)
continue
}

if result != nil {
jobRes, _ := models.ConvertToJobResultPayload(result.Payload)
taskRoles := jobRes.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
oldStatus := task.Status
task.Status = taskRes.TaskStatuses[0].State
if task.Status != string(models.JobWaiting) {
models.ParseAndSetDurationFromCloudBrainOne(jobRes, task)
if task.Status != string(models.JobWaiting) {
if task.Duration >= setting.MaxDuration && task.JobType == string(models.JobTypeDebug) {
log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName)
err = cloudbrain.StopJob(task.JobID)
if err != nil {
log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
oldStatus := task.Status
task.Status = string(models.JobStopped)
if task.EndTime == 0 {
task.EndTime = timeutil.TimeStampNow()
}
task.ComputeAndSetDuration()
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
}

var maxDuration int64
if task.JobType == string(models.JobTypeBenchmark) {
maxDuration = setting.BenchmarkMaxDuration
} else if task.JobType == string(models.JobTypeSnn4imagenet) || task.JobType == string(models.JobTypeBrainScore) {
maxDuration = setting.ModelBenchmarkMaxDuration
} else {
maxDuration = setting.MaxDuration
}

if task.Duration >= maxDuration && task.JobType != string(models.JobTypeTrain) {
log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName)
err = cloudbrain.StopJob(task.JobID)
if err != nil {
log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
task.Status = string(models.JobStopped)
if task.EndTime == 0 {
task.EndTime = timeutil.TimeStampNow()
}
task.ComputeAndSetDuration()
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
}

}
} else if task.Type == models.TypeCloudBrainTwo {
if task.JobType == string(models.JobTypeDebug) {
@@ -2509,7 +2488,7 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tplCloudBrainBenchmarkNew, &form)
@@ -2663,7 +2642,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tpl, &form)


+ 160
- 62
routers/repo/cloudbrain_statistic.go View File

@@ -1,38 +1,87 @@
package repo

import (
"net/http"
"strings"
"time"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
cloudbrainService "code.gitea.io/gitea/services/cloudbrain"
)

func CloudbrainDurationStatisticHour() {

dateTime := time.Now().Format("2006-01-02 15:04:05")
dayTime := time.Now().Format("2006-01-02")
var statisticTime time.Time
var count int64
recordBeginTime, _ := time.ParseInLocation("2006-01-02 15:04:05", setting.Grampus.UsageRateBeginTime, time.Local)
recordDurationUpdateTime, err := models.GetDurationRecordUpdateTime()
if err != nil {
log.Error("Can not get GetDurationRecordBeginTime", err)
return
}
if recordDurationUpdateTime == nil {
statisticTime = recordBeginTime
} else {
statisticTime = time.Unix(int64(recordDurationUpdateTime[0].DateTime), 0)
}
now := time.Now()
currentTime := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, now.Location())

for statisticTime.Before(currentTime) || statisticTime.Equal(currentTime) {
countEach := summaryDurationStat(statisticTime)
count += countEach
statisticTime = statisticTime.Add(+1 * time.Hour)
}
log.Info("summaryDurationStat count: %v", count)
}
func UpdateDurationStatisticHistoryData() int64 {
var count int64
recordBeginTime, _ := time.ParseInLocation("2006-01-02 15:04:05", setting.Grampus.UsageRateBeginTime, time.Local)
now := time.Now()
currentTime := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, now.Location())
statisticTime := recordBeginTime.Add(+1 * time.Hour)

for statisticTime.Before(currentTime) || statisticTime.Equal(currentTime) {
countEach := summaryDurationStat(statisticTime)
count += countEach
statisticTime = statisticTime.Add(+1 * time.Hour)
}
return count
}

m, _ := time.ParseDuration("-1h")
beginTime := currentTime.Add(m).Unix()
endTime := currentTime.Unix()
hourTime := currentTime.Add(m).Hour()
//statisticTime is the top of the current hour; for example, if the current time is 2019-01-01 12:01:01, statisticTime is 2019-01-01 12:00:00
func summaryDurationStat(statisticTime time.Time) int64 {
var count int64
dateTime := timeutil.TimeStamp(statisticTime.Add(-1 * time.Hour).Unix())
beginTime := statisticTime.Add(-1 * time.Hour).Unix()
dayTime := statisticTime.Add(-1 * time.Hour).Format("2006-01-02")
hourTime := statisticTime.Add(-1 * time.Hour).Hour()
endTime := statisticTime.Unix()

ciTasks, err := models.GetCloudbrainByTime(beginTime, endTime)
if err != nil {
log.Info("GetCloudbrainByTime err: %v", err)
return
return 0
}
specMap := make(map[string]*models.Specification)
cloudbrainMap := make(map[string]*models.Cloudbrain)
models.LoadSpecs4CloudbrainInfo(ciTasks)

for _, cloudbrain := range ciTasks {
if _, ok := specMap[cloudbrain.Cloudbrain.Spec.AiCenterCode+"/"+cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
if cloudbrain.Cloudbrain.Spec != nil {
specMap[cloudbrain.Cloudbrain.Spec.AiCenterCode+"/"+cloudbrain.Cloudbrain.Spec.AccCardType] = cloudbrain.Cloudbrain.Spec
if cloudbrain.Cloudbrain.StartTime == 0 {
cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix
}
if cloudbrain.Cloudbrain.EndTime == 0 {
cloudbrain.Cloudbrain.EndTime = cloudbrain.Cloudbrain.UpdatedUnix
}
cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain)
if cloudbrain.Cloudbrain.Spec != nil {
if _, ok := cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
if cloudbrain.Cloudbrain.Spec != nil {
cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType] = &cloudbrain.Cloudbrain
}
}
}
}
@@ -42,69 +91,96 @@ func CloudbrainDurationStatisticHour() {
resourceQueues, err := models.GetCanUseCardInfo()
if err != nil {
log.Info("GetCanUseCardInfo err: %v", err)
return
return 0
}

cardsTotalDurationMap := make(map[string]int)
for _, resourceQueue := range resourceQueues {
cardsTotalDurationMap[resourceQueue.Cluster+"/"+resourceQueue.AiCenterName+"/"+resourceQueue.AiCenterCode+"/"+resourceQueue.AccCardType+"/"+resourceQueue.ComputeResource] = resourceQueue.CardsTotalNum * 1 * 60 * 60
if _, ok := cardsTotalDurationMap[resourceQueue.Cluster+"/"+resourceQueue.AiCenterCode+"/"+resourceQueue.AccCardType]; !ok {
cardsTotalDurationMap[resourceQueue.Cluster+"/"+resourceQueue.AiCenterCode+"/"+resourceQueue.AccCardType] = resourceQueue.CardsTotalNum * 1 * 60 * 60
} else {
cardsTotalDurationMap[resourceQueue.Cluster+"/"+resourceQueue.AiCenterCode+"/"+resourceQueue.AccCardType] += resourceQueue.CardsTotalNum * 1 * 60 * 60
}
}

for centerCode, CardTypeInfo := range cloudBrainCenterCodeAndCardTypeInfo {
for cardType, cardDuration := range CardTypeInfo {
spec := specMap[centerCode+"/"+cardType]
if spec != nil {
if err := models.DeleteCloudbrainDurationStatisticHour(dayTime, hourTime, centerCode, cardType); err != nil {
log.Error("DeleteCloudbrainDurationStatisticHour failed: %v", err.Error())
return
for centerCode, CardTypes := range cloudBrainCenterCodeAndCardTypeInfo {
for cardType, cardDuration := range CardTypes {
cloudbrainTable := cloudbrainMap[centerCode+"/"+cardType]
if cloudbrainTable != nil {
if _, err := models.GetDurationStatisticByDate(dayTime, hourTime, centerCode, cardType); err == nil {
if err := models.DeleteCloudbrainDurationStatisticHour(dayTime, hourTime, centerCode, cardType); err != nil {
log.Error("DeleteCloudbrainDurationStatisticHour failed: %v", err.Error())
return 0
}
}
if _, ok := cardsTotalDurationMap[spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource]; !ok {
cardsTotalDurationMap[spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource] = 0

if _, ok := cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType]; !ok {
cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType] = 0
}
cloudbrainDurationStat := models.CloudbrainDurationStatistic{
DateTime: dateTime,
DayTime: dayTime,
HourTime: hourTime,
Cluster: spec.Cluster,
AiCenterName: spec.AiCenterName,
Cluster: cloudbrainTable.Cluster,
AiCenterName: GetAiCenterNameByCode(centerCode, "zh-CN"),
AiCenterCode: centerCode,
AccCardType: cardType,
ComputeResource: spec.ComputeResource,
CardsUseDuration: cardDuration,
CardsTotalDuration: cardsTotalDurationMap[spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource],
CardsTotalDuration: cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType],
CreatedUnix: timeutil.TimeStampNow(),
}
if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil {
log.Error("Insert cloudbrainDurationStat failed: %v", err.Error())
}
delete(cardsTotalDurationMap, spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource)
count++
delete(cardsTotalDurationMap, cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType)
}
}
}

for key, cardsTotalDuration := range cardsTotalDurationMap {
if err := models.DeleteCloudbrainDurationStatisticHour(dayTime, hourTime, strings.Split(key, "/")[2], strings.Split(key, "/")[3]); err != nil {
log.Error("DeleteCloudbrainDurationStatisticHour failed: %v", err.Error())
return
if _, err := models.GetDurationStatisticByDate(dayTime, hourTime, strings.Split(key, "/")[1], strings.Split(key, "/")[2]); err == nil {
if err := models.DeleteCloudbrainDurationStatisticHour(dayTime, hourTime, strings.Split(key, "/")[1], strings.Split(key, "/")[2]); err != nil {
log.Error("DeleteCloudbrainDurationStatisticHour failed: %v", err.Error())
return 0
}
}
cloudbrainDurationStat := models.CloudbrainDurationStatistic{
DateTime: dateTime,
DayTime: dayTime,
HourTime: hourTime,
Cluster: strings.Split(key, "/")[0],
AiCenterName: strings.Split(key, "/")[1],
AiCenterCode: strings.Split(key, "/")[2],
AccCardType: strings.Split(key, "/")[3],
ComputeResource: strings.Split(key, "/")[4],
AiCenterName: GetAiCenterNameByCode(strings.Split(key, "/")[1], "zh-CN"),
AiCenterCode: strings.Split(key, "/")[1],
AccCardType: strings.Split(key, "/")[2],
CardsUseDuration: 0,
CardsTotalDuration: cardsTotalDuration,
CardsTotalNum: cardsTotalDuration / 1 / 60 / 60,
CreatedUnix: timeutil.TimeStampNow(),
}
if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil {
log.Error("Insert cloudbrainDurationStat failed: %v", err.Error())
}
count++
}

log.Info("finish summary cloudbrainDurationStat")
return count
}
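
summaryDurationStat aggregates the hour that ends at statisticTime: the record is keyed by the day and hour of statisticTime minus one hour, and usage is counted in the window [statisticTime-1h, statisticTime). CloudbrainDurationStatisticHour then walks those hour boundaries from the last recorded hour (or the configured begin time) up to the current time truncated to the hour. A small sketch of the windowing, independent of the database calls; the three-hour backlog is a made-up starting point:

package main

import (
	"fmt"
	"time"
)

func main() {
	now := time.Now()
	// Truncate to the top of the current hour, as CloudbrainDurationStatisticHour does.
	current := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, now.Location())
	// Pretend the last recorded statistic hour was three hours ago (made-up backlog).
	last := current.Add(-3 * time.Hour)

	for t := last; !t.After(current); t = t.Add(time.Hour) {
		begin := t.Add(-time.Hour) // the hour being summarized
		fmt.Printf("dayTime=%s hourTime=%d window=[%d,%d)\n",
			begin.Format("2006-01-02"), begin.Hour(), begin.Unix(), t.Unix())
	}
}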

func GetAiCenterNameByCode(centerCode string, language string) string {
var aiCenterName string
aiCenterInfo := cloudbrainService.GetAiCenterInfoByCenterCode(centerCode)
if aiCenterInfo != nil {
if language == "zh-CN" {
aiCenterName = aiCenterInfo.Content
} else {
aiCenterName = aiCenterInfo.ContentEN
}
} else {
aiCenterName = centerCode
}
return aiCenterName
}

func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, beginTime int64, endTime int64) map[string]map[string]int {
@@ -112,7 +188,7 @@ func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, be
var AccCardsNum int
cloudBrainCenterCodeAndCardType := make(map[string]map[string]int)
for _, cloudbrain := range ciTasks {
cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain)
if cloudbrain.Cloudbrain.StartTime == 0 {
cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix
}
@@ -129,41 +205,63 @@ func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, be
} else {
AccCardsNum = cloudbrain.Cloudbrain.Spec.AccCardsNum
}
if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode]; !ok {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode] = make(map[string]int)
if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter]; !ok {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter] = make(map[string]int)
}

if cloudbrain.Cloudbrain.Status == string(models.ModelArtsRunning) {
if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
if int64(cloudbrain.Cloudbrain.StartTime) < beginTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
if cloudbrain.Cloudbrain.Spec != nil {
if cloudbrain.Cloudbrain.Status == string(models.ModelArtsRunning) {
if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
if int64(cloudbrain.Cloudbrain.StartTime) < beginTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
} else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) < endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
} else if int64(cloudbrain.Cloudbrain.StartTime) >= endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = 0
}
} else {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
if int64(cloudbrain.Cloudbrain.StartTime) < beginTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
} else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) < endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
} else if int64(cloudbrain.Cloudbrain.StartTime) >= endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += 0
}
}
} else {
if int64(cloudbrain.Cloudbrain.StartTime) < beginTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime))
} else if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
} else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime))
} else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
}
} else {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime))
} else if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
} else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime))
} else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
}
}
}
} else {
if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
if int64(cloudbrain.Cloudbrain.StartTime) < beginTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime))
} else {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime))
}
} else {
if int64(cloudbrain.Cloudbrain.StartTime) < beginTime {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime))
} else {
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime))
}
}

}
}

return cloudBrainCenterCodeAndCardType
}

func CloudbrainUpdateHistoryData(ctx *context.Context) {
err := models.DeleteCloudbrainDurationStatistic()
count := UpdateDurationStatisticHistoryData()
ctx.JSON(http.StatusOK, map[string]interface{}{
"message": 0,
"count": count,
"err": err,
})
}

+ 2
- 318
routers/repo/dataset.go View File

@@ -47,8 +47,8 @@ func newFilterPrivateAttachments(ctx *context.Context, list []*models.Attachment
permission := false
if !permission && ctx.User != nil {
isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
isInRepoTeam,_:=repo.IsInRepoTeam(ctx.User.ID)
if isCollaborator ||isInRepoTeam {
isInRepoTeam, _ := repo.IsInRepoTeam(ctx.User.ID)
if isCollaborator || isInRepoTeam {
log.Info("Collaborator user may visit the attach.")
permission = true
}
@@ -349,96 +349,6 @@ func DatasetAction(ctx *context.Context) {

}

func CurrentRepoDataset(ctx *context.Context) {
page := ctx.QueryInt("page")
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")

repo := ctx.Repo.Repository
var datasetIDs []int64
dataset, err := models.GetDatasetByRepo(repo)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err)))
return
}
datasetIDs = append(datasetIDs, dataset.ID)
datasets, count, err := models.Attachments(&models.AttachmentsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.DatasetPagingNum,
},
Keyword: keyword,
NeedDatasetIDs: true,
DatasetIDs: datasetIDs,
Type: cloudbrainType,
NeedIsPrivate: false,
JustNeedZipFile: true,
NeedRepoInfo: true,
})
if err != nil {
ctx.ServerError("datasets", err)
return
}

data, err := json.Marshal(datasets)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})
}

func MyDatasets(ctx *context.Context) {
page := ctx.QueryInt("page")
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")

uploaderID := ctx.User.ID
datasets, count, err := models.Attachments(&models.AttachmentsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.DatasetPagingNum,
},
Keyword: keyword,
NeedDatasetIDs: false,
UploaderID: uploaderID,
Type: cloudbrainType,
NeedIsPrivate: false,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
})
if err != nil {
ctx.ServerError("datasets", err)
return
}

data, err := json.Marshal(datasets)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})
}

func datasetMultiple(ctx *context.Context, opts *models.SearchDatasetOptions) {
page := ctx.QueryInt("page")
keyword := strings.Trim(ctx.Query("q"), " ")
@@ -593,180 +503,6 @@ func ReferenceDatasetData(ctx *context.Context) {

}

func PublicDataset(ctx *context.Context) {
page := ctx.QueryInt("page")
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")

datasets, count, err := models.Attachments(&models.AttachmentsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.DatasetPagingNum,
},
Keyword: keyword,
NeedDatasetIDs: false,
NeedIsPrivate: true,
IsPrivate: false,
Type: cloudbrainType,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
})
if err != nil {
ctx.ServerError("datasets", err)
return
}

data, err := json.Marshal(datasets)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})
}

func MyFavoriteDataset(ctx *context.Context) {
UserId := ctx.User.ID
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")
var NotColDatasetIDs []int64
var IsColDatasetIDs []int64
datasetStars, err := models.GetDatasetStarByUser(ctx.User)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetStarByUser failed", err)))
log.Error("GetDatasetStarByUser failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
//If the dataset has been deleted, it will not be counted
for _, datasetStar := range datasetStars {
IsExist, repo, dataset, err := IsDatasetStarExist(datasetStar)
if err != nil {
log.Error("IsDatasetStarExist error:", err.Error())
}
if IsExist {
DatasetIsCollaborator := DatasetIsCollaborator(ctx, dataset)
if repo.OwnerID == ctx.User.ID || DatasetIsCollaborator {
IsColDatasetIDs = append(IsColDatasetIDs, datasetStar.DatasetID)
} else {
NotColDatasetIDs = append(NotColDatasetIDs, datasetStar.DatasetID)
}
}
}

NotColDatasets, NotColcount, err := models.Attachments(&models.AttachmentsOptions{
Keyword: keyword,
NeedDatasetIDs: true,
DatasetIDs: NotColDatasetIDs,
NeedIsPrivate: true,
IsPrivate: false,
Type: cloudbrainType,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
UserId: UserId,
})
if err != nil {
ctx.ServerError("datasets", err)
return
}
//If is collaborator, there is no need to determine whether the dataset is private or public
IsColDatasets, IsColcount, err := models.Attachments(&models.AttachmentsOptions{
Keyword: keyword,
NeedDatasetIDs: true,
DatasetIDs: IsColDatasetIDs,
NeedIsPrivate: false,
Type: cloudbrainType,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
UserId: UserId,
})
if err != nil {
ctx.ServerError("datasets", err)
return
}
for _, NotColDataset := range NotColDatasets {
IsColDatasets = append(IsColDatasets, NotColDataset)
}
datasets := IsColDatasets
count := NotColcount + IsColcount
sort.Slice(datasets, func(i, j int) bool {
return datasets[i].Attachment.CreatedUnix > datasets[j].Attachment.CreatedUnix
})

page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}
pagesize := ctx.QueryInt("pagesize")
if pagesize <= 0 {
pagesize = 5
}
pageDatasetsInfo := getPageDatasets(datasets, page, pagesize)
if pageDatasetsInfo == nil {
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": "[]",
"count": strconv.FormatInt(count, 10),
})
return
}
data, err := json.Marshal(pageDatasetsInfo)
log.Info("data:", data)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})

}
func getPageDatasets(AttachmentInfos []*models.AttachmentInfo, page int, pagesize int) []*models.AttachmentInfo {
begin := (page - 1) * pagesize
end := (page) * pagesize

if begin > len(AttachmentInfos)-1 {
return nil
}
if end > len(AttachmentInfos)-1 {
return AttachmentInfos[begin:]
} else {
return AttachmentInfos[begin:end]
}

}
func getTotalPage(total int64, pageSize int) int {

another := 0
if int(total)%pageSize != 0 {
another = 1
}
return int(total)/pageSize + another

}

func GetDatasetStatus(ctx *context.Context) {

var (
@@ -791,55 +527,3 @@ func GetDatasetStatus(ctx *context.Context) {
"AttachmentStatus": fmt.Sprint(attachment.DecompressState),
})
}
func DatasetIsCollaborator(ctx *context.Context, dataset *models.Dataset) bool {
repo, err := models.GetRepositoryByID(dataset.RepoID)
if err != nil {
log.Error("query repo error:", err.Error())
} else {
repo.GetOwner()
if ctx.User != nil {
if repo.Owner.IsOrganization() {
org := repo.Owner
org.Teams, err = org.GetUserTeams(ctx.User.ID)
if err != nil {
log.Error("GetUserTeams error:", err.Error())
return false
}
if org.IsUserPartOfOrg(ctx.User.ID) {
for _, t := range org.Teams {
if t.IsMember(ctx.User.ID) && t.HasRepository(repo.ID) {
return true
}
}
isOwner, _ := models.IsOrganizationOwner(repo.OwnerID, ctx.User.ID)
if isOwner {
return isOwner
}
return false
}
}

isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
if isCollaborator {
return true
}
}
}

return false
}
func IsDatasetStarExist(datasetStar *models.DatasetStar) (bool, *models.Repository, *models.Dataset, error) {
dataset, err := models.GetDatasetByID(datasetStar.DatasetID)
if err != nil {
log.Error("query dataset error:", err.Error())
return false, nil, nil, err
} else {
repo, err := models.GetRepositoryByID(dataset.RepoID)
if err != nil {
log.Error("GetRepositoryByID error:", err.Error())
return false, nil, nil, err
}
return true, repo, dataset, nil
}

}

+ 73
- 62
routers/repo/grampus.go View File

@@ -1,7 +1,6 @@
package repo

import (
"code.gitea.io/gitea/modules/urfs_client/urchin"
"encoding/json"
"errors"
"fmt"
@@ -13,6 +12,9 @@ import (
"strings"
"time"

"code.gitea.io/gitea/modules/urfs_client/urchin"
"code.gitea.io/gitea/routers/response"

"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"

"code.gitea.io/gitea/modules/dataset"
@@ -474,7 +476,7 @@ func grampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain

}

err = grampus.GenerateTrainJob(ctx, req)
_, err = grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error(), ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
@@ -509,28 +511,6 @@ func GrampusTrainJobVersionCreate(ctx *context.Context, form auth.CreateGrampusT

}

func checkSpecialPool(ctx *context.Context, resourceType string) string {
grampus.InitSpecialPool()
if grampus.SpecialPools != nil {
for _, pool := range grampus.SpecialPools.Pools {

if pool.IsExclusive && pool.Type == resourceType {

org, _ := models.GetOrgByName(pool.Org)
if org != nil {
isOrgMember, _ := models.IsOrganizationMember(org.ID, ctx.User.ID)
if !isOrgMember {
return ctx.Tr("repo.grampus.no_operate_right")
}
}
}

}

}
return ""
}

func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrainJobForm) {
ctx.Data["IsCreate"] = true
grampusTrainJobNpuCreate(ctx, form)
@@ -733,7 +713,7 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
req.PreTrainModelPath = preTrainModelPath
}

err = grampus.GenerateTrainJob(ctx, req)
_, err = grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
@@ -885,10 +865,10 @@ func GrampusTrainJobShow(ctx *context.Context) {
}
}
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob failed:" + err.Error())
}
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob failed:" + err.Error())
}
}
}
@@ -960,15 +940,14 @@ func GrampusGetLog(ctx *context.Context) {
content, err := grampus.GetTrainJobLog(job.JobID)
if err != nil {
log.Error("GetTrainJobLog failed: %v", err, ctx.Data["MsgID"])
ctx.ServerError(err.Error(), err)
ctx.JSON(http.StatusOK, map[string]interface{}{
"JobName": job.JobName,
"Content": "",
"CanLogDownload": false,
})
return
}
var canLogDownload bool
if err != nil {
canLogDownload = false
} else {
canLogDownload = true
}
canLogDownload := err == nil && job.IsUserHasRight(ctx.User)
ctx.JSON(http.StatusOK, map[string]interface{}{
"JobName": job.JobName,
"Content": content,
@@ -978,6 +957,28 @@ func GrampusGetLog(ctx *context.Context) {
return
}

func GrampusMetrics(ctx *context.Context) {
jobID := ctx.Params(":jobid")
job, err := models.GetCloudbrainByJobID(jobID)
if err != nil {
log.Error("GetCloudbrainByJobID failed: %v", err, ctx.Data["MsgID"])
ctx.ServerError(err.Error(), err)
return
}

result, err := grampus.GetGrampusMetrics(job.JobID)
if err != nil {
log.Error("GetTrainJobLog failed: %v", err, ctx.Data["MsgID"])
}
ctx.JSON(http.StatusOK, map[string]interface{}{
"JobID": jobID,
"Interval": result.Interval,
"MetricsInfo": result.MetricsInfo,
})

return
}

func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName, pretrainModelPath, pretrainModelFileName, modelRemoteObsUrl string) (string, error) {
var command string

@@ -1003,7 +1004,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo
if processorType == grampus.ProcessorTypeNPU {
//no need to process
} else if processorType == grampus.ProcessorTypeGPU {
unZipDatasetCommand := generateDatasetUnzipCommand(datasetName)
unZipDatasetCommand := cloudbrainTask.GenerateDatasetUnzipCommand(datasetName)
commandUnzip := "cd " + workDir + "code;unzip -q master.zip;rm -f master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand
command += commandUnzip
}
@@ -1077,31 +1078,6 @@ func processPretrainModelParameter(pretrainModelPath string, pretrainModelFileNa
return commandDownloadTemp
}

func generateDatasetUnzipCommand(datasetName string) string {
var unZipDatasetCommand string

datasetNameArray := strings.Split(datasetName, ";")
if len(datasetNameArray) == 1 { //single dataset
unZipDatasetCommand = "unzip -q '" + datasetName + "';"
if strings.HasSuffix(datasetNameArray[0], ".tar.gz") {
unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';"
}
unZipDatasetCommand += "rm -f '" + datasetName + "';"

} else { //multiple datasets
for _, datasetNameTemp := range datasetNameArray {
if strings.HasSuffix(datasetNameTemp, ".tar.gz") {
unZipDatasetCommand = unZipDatasetCommand + "tar -zxvf '" + datasetNameTemp + "';"
} else {
unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';"
}
unZipDatasetCommand += "rm -f '" + datasetNameTemp + "';"
}

}
return unZipDatasetCommand
}

func downloadZipCode(ctx *context.Context, codePath, branchName string) error {
archiveType := git.ZIP
archivePath := codePath
@@ -1149,3 +1125,38 @@ func downloadZipCode(ctx *context.Context, codePath, branchName string) error {

return nil
}
func HandleTaskWithAiCenter(ctx *context.Context) {
log.Info("HandleTaskWithAiCenter start")
updateCounts := 0
cloudBrains, err := models.GetC2NetWithAiCenterWrongJob()
if err != nil {
log.Error("GetC2NetWithAiCenterWrongJob failed:" + err.Error())
return
}
if len(cloudBrains) == 0 {
log.Info("HandleC2NetWithAiCenterWrongJob:no task need handle")
return
}
cloudBrainCounts := len(cloudBrains)
for _, task := range cloudBrains {
result, err := grampus.GetJob(task.JobID)
if err != nil {
log.Error("GetJob failed:" + err.Error())
continue
}
if len(result.JobInfo.Tasks) != 0 {
if len(result.JobInfo.Tasks[0].CenterID) == 1 && len(result.JobInfo.Tasks[0].CenterName) == 1 {
task.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob failed:" + err.Error())
}
updateCounts++
}
}
r := make(map[string]interface{}, 0)
r["cloudBrainCounts"] = cloudBrainCounts
r["updateCounts"] = updateCounts
ctx.JSON(http.StatusOK, response.SuccessWithData(r))
}
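
The dataset-unzip command builder above was replaced by the shared cloudbrainTask.GenerateDatasetUnzipCommand helper. As a hedged sketch (dataset names are hypothetical, and it assumes the relocated helper keeps the removed function's semantics), the expected output looks like this:

package main

import (
	"fmt"

	"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"
)

func main() {
	// single zip dataset: plain unzip, then remove the archive
	fmt.Println(cloudbrainTask.GenerateDatasetUnzipCommand("cifar10.zip"))
	// unzip -q 'cifar10.zip';rm -f 'cifar10.zip';

	// multiple datasets separated by ';': each archive is unpacked into its own directory
	fmt.Println(cloudbrainTask.GenerateDatasetUnzipCommand("a.zip;b.tar.gz"))
	// unzip -q 'a.zip' -d './a';rm -f 'a.zip';tar -zxvf 'b.tar.gz';rm -f 'b.tar.gz';
}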

+ 2
- 2
routers/repo/modelarts.go View File

@@ -1230,7 +1230,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
return
}

err = modelarts.GenerateTrainJob(ctx, req)
_, err = modelarts.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
trainJobNewDataPrepare(ctx)
@@ -2205,7 +2205,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
req.UserCommand = userCommand
req.UserImageUrl = userImageUrl

err = modelarts.GenerateInferenceJob(ctx, req)
_, err = modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
inferenceJobErrorNewDataPrepare(ctx, form)


+ 30
- 0
routers/response/api_response.go View File

@@ -0,0 +1,30 @@
package response

type AiforgeOuterResponse struct {
Code int `json:"code"`
Msg string `json:"msg"`
Data interface{} `json:"data"`
}

func OuterSuccess() *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS}
}

func OuterError(code int, msg string) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: code, Msg: msg}
}

func OuterServerError(msg string) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: msg}
}

func OuterBizError(err *BizError) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: err.Code, Msg: err.Err}
}

func OuterSuccessWithData(data interface{}) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS, Data: data}
}
func OuterErrorWithData(code int, msg string, data interface{}) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: code, Msg: msg, Data: data}
}
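
A hedged usage sketch for the new outer-response helpers (the handler and its data loader are made up for illustration, and the usual context, net/http and response imports are assumed):

func demoOuterHandler(ctx *context.Context) {
	data, err := loadOuterData() // hypothetical loader
	if err != nil {
		ctx.JSON(http.StatusOK, response.OuterServerError(err.Error()))
		return
	}
	ctx.JSON(http.StatusOK, response.OuterSuccessWithData(data))
}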

+ 5
- 1
routers/response/response.go View File

@@ -24,10 +24,14 @@ func ServerError(msg string) *AiforgeResponse {
return &AiforgeResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: msg}
}

func ResponseError(err *BizError) *AiforgeResponse {
func ResponseBizError(err *BizError) *AiforgeResponse {
return &AiforgeResponse{Code: err.Code, Msg: err.Err}
}

func ResponseError(err error) *AiforgeResponse {
return &AiforgeResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: err.Error()}
}

func SuccessWithData(data interface{}) *AiforgeResponse {
return &AiforgeResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS, Data: data}
}
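
A short hedged sketch of the split introduced above: predefined BizError values keep their own codes, while plain Go errors fall back to the default error code (the handler and query parameter are hypothetical, and context, net/http and strconv imports are assumed):

func demoBizHandler(ctx *context.Context) {
	id := ctx.Query("id")
	if id == "" {
		ctx.JSON(http.StatusOK, response.ResponseBizError(response.PARAM_ERROR)) // code 9001, "param error"
		return
	}
	if _, err := strconv.ParseInt(id, 10, 64); err != nil {
		ctx.JSON(http.StatusOK, response.ResponseError(err)) // RESPONSE_CODE_ERROR_DEFAULT with err.Error()
		return
	}
	ctx.JSON(http.StatusOK, response.SuccessWithData(map[string]interface{}{"id": id}))
}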


+ 2
- 2
routers/response/response_list.go View File

@@ -1,6 +1,7 @@
package response

//repo response
var PARAM_ERROR = &BizError{Code: 9001, Err: "param error"}

var RESOURCE_QUEUE_NOT_AVAILABLE = &BizError{Code: 1001, Err: "resource queue not available"}
var SPECIFICATION_NOT_EXIST = &BizError{Code: 1002, Err: "specification not exist"}
var SPECIFICATION_NOT_AVAILABLE = &BizError{Code: 1003, Err: "specification not available"}
@@ -11,4 +12,3 @@ var BADGES_STILL_HAS_USERS = &BizError{Code: 1005, Err: "Please delete users of
//common response
var SYSTEM_ERROR = &BizError{Code: 9009, Err: "System error.Please try again later"}
var INSUFFICIENT_PERMISSION = &BizError{Code: 9003, Err: "insufficient permissions"}
var PARAM_ERROR = &BizError{Code: 9001, Err: "param error permissions"}

+ 20
- 4
routers/routes/routes.go View File

@@ -645,6 +645,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/specification", func() {
m.Get("", admin.GetSpecificationPage)
m.Get("/list", admin.GetResourceSpecificationList)
m.Get("/list/all", admin.GetAllResourceSpecificationList)
m.Get("/scenes/:id", admin.GetResourceSpecificationScenes)
m.Post("/grampus/sync", admin.SyncGrampusSpecs)
m.Post("/add", binding.Bind(models.ResourceSpecificationReq{}), admin.AddResourceSpecification)
@@ -728,6 +729,13 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/complete_multipart", repo.CompleteMultipart)
})

m.Group("/attachments/model", func() {
m.Get("/get_chunks", repo.GetModelChunks)
m.Get("/new_multipart", repo.NewModelMultipart)
m.Get("/get_multipart_url", repo.GetModelMultipartUploadUrl)
m.Post("/complete_multipart", repo.CompleteModelMultipart)
})

m.Group("/attachments", func() {
m.Get("/public/query", repo.QueryAllPublicDataset)
m.Get("/private/:username", repo.QueryPrivateDataset)
@@ -1127,10 +1135,6 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/edit/:id", reqRepoDatasetWriter, repo.EditDataset)
m.Post("/reference_datasets", reqRepoDatasetWriterJson, bindIgnErr(auth.ReferenceDatasetForm{}), repo.ReferenceDatasetPost)
m.Post("/edit", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost)
m.Get("/current_repo", repo.CurrentRepoDataset)
m.Get("/my_datasets", repo.MyDatasets)
m.Get("/public_datasets", repo.PublicDataset)
m.Get("/my_favorite", repo.MyFavoriteDataset)

m.Get("/current_repo_m", repo.CurrentRepoDatasetMultiple)
m.Get("/my_datasets_m", repo.MyDatasetsMultiple)
@@ -1232,6 +1236,12 @@ func RegisterRoutes(m *macaron.Macaron) {
})
}, context.RepoRef())
m.Group("/modelmanage", func() {
m.Get("/create_local_model_1", repo.CreateLocalModel)
m.Get("/create_local_model_2", repo.CreateLocalModelForUpload)
m.Get("/create_online_model", repo.CreateOnlineModel)
m.Post("/create_local_model", repo.SaveLocalModel)
m.Delete("/delete_model_file", repo.DeleteModelFile)

m.Post("/create_model", repo.SaveModel)
m.Post("/create_model_convert", reqWechatBind, reqRepoModelManageWriter, repo.SaveModelConvert)
m.Post("/create_new_model", repo.SaveNewNameModel)
@@ -1491,6 +1501,12 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/record/list", point.GetPointRecordList)
}, reqSignIn)

m.Group("/resources", func() {
m.Group("/queue", func() {
m.Get("/centers", admin.GetResourceAiCenters)
})
})

if setting.API.EnableSwagger {
m.Get("/swagger.v1.json", templates.JSONRenderer(), routers.SwaggerV1Json)
}


+ 4
- 0
routers/user/home.go View File

@@ -23,6 +23,8 @@ import (
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers/repo"
cloudbrainService "code.gitea.io/gitea/services/cloudbrain"
issue_service "code.gitea.io/gitea/services/issue"
pull_service "code.gitea.io/gitea/services/pull"

@@ -837,6 +839,8 @@ func Cloudbrains(ctx *context.Context) {
}
models.LoadSpecs4CloudbrainInfo(ciTasks)
for i, _ := range ciTasks {
ciTasks[i] = cloudbrainService.UpdateCloudbrainAiCenter(ciTasks[i])
ciTasks[i].Cloudbrain.AiCenter = repo.GetAiCenterNameByCode(ciTasks[i].Cloudbrain.AiCenter, ctx.Language())
ciTasks[i].CanDebug = true
ciTasks[i].CanDel = true
ciTasks[i].Cloudbrain.ComputeResource = ciTasks[i].ComputeResource


+ 2
- 2
routers/user/notification.go View File

@@ -132,11 +132,11 @@ func getNotifications(c *context.Context) {
}

c.Data["Title"] = c.Tr("notifications")
//c.Data["Keyword"] = keyword
c.Data["Type"] = keyword
c.Data["Status"] = status
c.Data["Notifications"] = notifications

pager.SetDefaultParams(c)
pager.AddParam(c, "q", "Type")
c.Data["Page"] = pager
}



+ 12
- 12
services/cloudbrain/cloudbrainTask/count.go View File

@@ -14,28 +14,28 @@ type StatusInfo struct {
ComputeResource string
}

var cloudbrainOneNotFinalStatuses = []string{string(models.JobWaiting), string(models.JobRunning)}
var cloudbrainTwoNotFinalStatuses = []string{string(models.ModelArtsTrainJobInit), string(models.ModelArtsTrainJobImageCreating), string(models.ModelArtsTrainJobSubmitTrying), string(models.ModelArtsTrainJobWaiting), string(models.ModelArtsTrainJobRunning), string(models.ModelArtsTrainJobScaling), string(models.ModelArtsTrainJobCheckInit), string(models.ModelArtsTrainJobCheckRunning), string(models.ModelArtsTrainJobCheckRunningCompleted)}
var grampusTwoNotFinalStatuses = []string{models.GrampusStatusWaiting, models.GrampusStatusRunning}
var CloudbrainOneNotFinalStatuses = []string{string(models.JobWaiting), string(models.JobRunning)}
var CloudbrainTwoNotFinalStatuses = []string{string(models.ModelArtsTrainJobInit), string(models.ModelArtsTrainJobImageCreating), string(models.ModelArtsTrainJobSubmitTrying), string(models.ModelArtsTrainJobWaiting), string(models.ModelArtsTrainJobRunning), string(models.ModelArtsTrainJobScaling), string(models.ModelArtsTrainJobCheckInit), string(models.ModelArtsTrainJobCheckRunning), string(models.ModelArtsTrainJobCheckRunningCompleted)}
var GrampusNotFinalStatuses = []string{models.GrampusStatusWaiting, models.GrampusStatusRunning}
var StatusInfoDict = map[string]StatusInfo{string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeDebug},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeInference) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeInference},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeBenchmark) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeBenchmark, models.JobTypeBrainScore, models.JobTypeSnn4imagenet},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): {
CloudBrainTypes: []int{models.TypeCloudBrainTwo, models.TypeCDCenter},
@@ -45,22 +45,22 @@ var StatusInfoDict = map[string]StatusInfo{string(models.JobTypeDebug) + "-" + s
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): {
CloudBrainTypes: []int{models.TypeCloudBrainTwo},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: cloudbrainTwoNotFinalStatuses,
NotFinalStatuses: CloudbrainTwoNotFinalStatuses,
ComputeResource: models.NPUResource,
}, string(models.JobTypeInference) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): {
CloudBrainTypes: []int{models.TypeCloudBrainTwo},
JobType: []models.JobType{models.JobTypeInference},
NotFinalStatuses: cloudbrainTwoNotFinalStatuses,
NotFinalStatuses: CloudbrainTwoNotFinalStatuses,
ComputeResource: models.NPUResource,
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.GPUResource: {
CloudBrainTypes: []int{models.TypeC2Net},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: grampusTwoNotFinalStatuses,
NotFinalStatuses: GrampusNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.NPUResource: {
CloudBrainTypes: []int{models.TypeC2Net},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: grampusTwoNotFinalStatuses,
NotFinalStatuses: GrampusNotFinalStatuses,
ComputeResource: models.NPUResource,
}}

@@ -71,7 +71,7 @@ func GetNotFinalStatusTaskCount(uid int64, cloudbrainType int, jobType string, c
}

key := jobNewType + "-" + strconv.Itoa(cloudbrainType)
if len(computeResource) > 0 {
if len(computeResource) > 0 && cloudbrainType == models.TypeC2Net {
key = key + "-" + computeResource[0]
}
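
A hedged sketch of the key lookup after this change: only C2Net (Grampus) entries carry a compute-resource suffix, so the extra cloudbrainType guard keeps cloud brain one and two lookups on the plain jobType-type key (fragment assumed to run inside the cloudbrainTask package):

// matches the C2Net GPU training entry of StatusInfoDict above
key := string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.GPUResource
if info, ok := StatusInfoDict[key]; ok {
	log.Info("%s -> %v", info.ComputeResource, info.NotFinalStatuses) // GPU resource, GrampusNotFinalStatuses
}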



+ 631
- 0
services/cloudbrain/cloudbrainTask/inference.go View File

@@ -0,0 +1,631 @@
package cloudbrainTask

import (
"bufio"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path"
"strconv"
"strings"
"unicode/utf8"

"code.gitea.io/gitea/modules/modelarts"

"code.gitea.io/gitea/modules/git"

api "code.gitea.io/gitea/modules/structs"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/cloudbrain/resource"
"code.gitea.io/gitea/services/reward/point/account"
)

const CLONE_FILE_PREFIX = "file:///"

func CloudBrainInferenceJobCreate(ctx *context.Context, option api.CreateTrainJobOption) {

displayJobName := option.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
image := strings.TrimSpace(option.Image)
uuid := option.Attachment
jobType := string(models.JobTypeInference)
codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
branchName := option.BranchName
bootFile := strings.TrimSpace(option.BootFile)
labelName := option.LabelName
repo := ctx.Repo.Repository

lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), jobType, displayJobName))
defer lock.UnLock()
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err")))
return
}

ckptUrl := setting.Attachment.Minio.RealPath + option.PreTrainModelUrl + option.CkptName
log.Info("ckpt url:" + ckptUrl)
command, err := getInferenceJobCommand(option)
if err != nil {
log.Error("getTrainJobCommand failed: %v", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}

tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName)
if err == nil {
if len(tasks) != 0 {
log.Error("the job name did already exist", ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("the job name did already exist"))
return
}
} else {
if !models.IsErrJobNotExist(err) {
log.Error("system error, %v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
}
}

if !jobNamePattern.MatchString(displayJobName) {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_jobname_err")))
return
}

bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err, ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_bootfile_err")))
return
}

count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainOne, jobType)
if err != nil {
log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
} else {
if count >= 1 {
log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain.morethanonejob")))
return
}
}

if branchName == "" {
branchName = cloudbrain.DefaultBranchName
}
errStr := loadCodeAndMakeModelPath(repo, codePath, branchName, jobName, cloudbrain.ResultPath)
if errStr != "" {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr(errStr)))
return
}

commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName)

datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid)
if err != nil {
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.error.dataset_select")))
return
}
spec, err := resource.GetAndCheckSpec(ctx.User.ID, option.SpecId, models.FindSpecsOptions{
JobType: models.JobTypeInference,
ComputeResource: models.GPU,
Cluster: models.OpenICluster,
AiCenterCode: models.AICenterOfCloudBrainOne})
if err != nil || spec == nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Resource specification is not available"))
return
}
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance")))
return
}
req := cloudbrain.GenerateCloudBrainTaskReq{
Ctx: ctx,
DisplayJobName: displayJobName,
JobName: jobName,
Image: image,
Command: command,
Uuids: uuid,
DatasetNames: datasetNames,
DatasetInfos: datasetInfos,
CodePath: storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"),
ModelPath: setting.Attachment.Minio.RealPath + option.PreTrainModelUrl,
BenchmarkPath: storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"),
Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"),
BrainScorePath: storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"),
JobType: jobType,
Description: option.Description,
BranchName: branchName,
BootFile: option.BootFile,
Params: option.Params,
CommitID: commitID,
ResultPath: storage.GetMinioPath(jobName, cloudbrain.ResultPath+"/"),
ModelName: option.ModelName,
ModelVersion: option.ModelVersion,
CkptName: option.CkptName,
TrainUrl: option.PreTrainModelUrl,
LabelName: labelName,
Spec: spec,
}

jobId, err := cloudbrain.GenerateTask(req)
if err != nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
ctx.JSON(http.StatusOK, models.BaseMessageApi{Code: 0, Message: jobId})
}

func ModelArtsInferenceJobCreate(ctx *context.Context, option api.CreateTrainJobOption) {
ctx.Data["PageIsTrainJob"] = true
VersionOutputPath := modelarts.GetOutputPathByCount(modelarts.TotalVersionCount)
displayJobName := option.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
uuid := option.Attachment
description := option.Description
workServerNumber := option.WorkServerNumber
engineID, _ := strconv.Atoi(option.ImageID)
bootFile := strings.TrimSpace(option.BootFile)
params := option.Params
repo := ctx.Repo.Repository
codeLocalPath := setting.JobPath + jobName + modelarts.CodePath
codeObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.CodePath
resultObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.ResultPath + VersionOutputPath + "/"
logObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.LogPath + VersionOutputPath + "/"
//dataPath := "/" + setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/"
branchName := option.BranchName
EngineName := option.Image
LabelName := option.LabelName
isLatestVersion := modelarts.IsLatestVersion
VersionCount := modelarts.VersionCountOne
trainUrl := option.PreTrainModelUrl
modelName := option.ModelName
modelVersion := option.ModelVersion
ckptName := option.CkptName
ckptUrl := "/" + option.PreTrainModelUrl + option.CkptName

errStr := checkInferenceJobMultiNode(ctx.User.ID, option.WorkServerNumber)
if errStr != "" {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr(errStr)))
return
}

lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeInference), displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err")))
return
}
defer lock.UnLock()

count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainTwo, string(models.JobTypeInference))
if err != nil {
log.Error("GetCloudbrainInferenceJobCountByUserID failed:%v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
} else {
if count >= 1 {
log.Error("the user already has running or waiting inference task", ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("you have already a running or waiting inference task, can not create more"))
return
}
}

if err := paramCheckCreateInferenceJob(option); err != nil {
log.Error("paramCheckCreateInferenceJob failed:(%v)", err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}

bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_bootfile_err")))
return
}

//Determine whether the task name of the task in the project is duplicated
tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeInference), displayJobName)
if err == nil {
if len(tasks) != 0 {
log.Error("the job name did already exist", ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("the job name did already exist"))
return
}
} else {
if !models.IsErrJobNotExist(err) {
log.Error("system error, %v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
}
}

spec, err := resource.GetAndCheckSpec(ctx.User.ID, option.SpecId, models.FindSpecsOptions{
JobType: models.JobTypeInference,
ComputeResource: models.NPU,
Cluster: models.OpenICluster,
AiCenterCode: models.AICenterOfCloudBrainTwo})
if err != nil || spec == nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Resource specification not available"))
return
}
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d ", ctx.User.ID, spec.ID)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance")))
return
}

//todo: del the codeLocalPath
_, err = ioutil.ReadDir(codeLocalPath)
if err == nil {
os.RemoveAll(codeLocalPath)
}

gitRepo, _ := git.OpenRepository(repo.RepoPath())
commitID, _ := gitRepo.GetBranchCommitID(branchName)

if err := downloadCode(repo, codeLocalPath, branchName); err != nil {
log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

//todo: upload code (send to file_server to do this work?)
if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.ResultPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_result: %s (%v)", repo.FullName(), err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Failed to obsMkdir_result"))
return
}

if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.LogPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_log: %s (%v)", repo.FullName(), err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Failed to obsMkdir_log"))
return
}

if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil {
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

var parameters models.Parameters
param := make([]models.Parameter, 0)
param = append(param, models.Parameter{
Label: modelarts.ResultUrl,
Value: "s3:/" + resultObsPath,
}, models.Parameter{
Label: modelarts.CkptUrl,
Value: "s3:/" + ckptUrl,
})

datasUrlList, dataUrl, datasetNames, isMultiDataset, err := getDatasUrlListByUUIDS(uuid)
if err != nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
dataPath := dataUrl
jsondatas, err := json.Marshal(datasUrlList)
if err != nil {
log.Error("Failed to Marshal: %v", err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("json error:"+err.Error()))
return
}
if isMultiDataset {
param = append(param, models.Parameter{
Label: modelarts.MultiDataUrl,
Value: string(jsondatas),
})
}

existDeviceTarget := false
if len(params) != 0 {
err := json.Unmarshal([]byte(params), &parameters)
if err != nil {
log.Error("Failed to Unmarshal params: %s (%v)", params, err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("运行参数错误"))
return
}

for _, parameter := range parameters.Parameter {
if parameter.Label == modelarts.DeviceTarget {
existDeviceTarget = true
}
if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl {
param = append(param, models.Parameter{
Label: parameter.Label,
Value: parameter.Value,
})
}
}
}
if !existDeviceTarget {
param = append(param, models.Parameter{
Label: modelarts.DeviceTarget,
Value: modelarts.Ascend,
})
}

req := &modelarts.GenerateInferenceJobReq{
JobName: jobName,
DisplayJobName: displayJobName,
DataUrl: dataPath,
Description: description,
CodeObsPath: codeObsPath,
BootFileUrl: codeObsPath + bootFile,
BootFile: bootFile,
TrainUrl: trainUrl,
WorkServerNumber: workServerNumber,
EngineID: int64(engineID),
LogUrl: logObsPath,
PoolID: getPoolId(),
Uuid: uuid,
Parameters: param, //modelarts train parameters
CommitID: commitID,
BranchName: branchName,
Params: option.Params,
EngineName: EngineName,
LabelName: LabelName,
IsLatestVersion: isLatestVersion,
VersionCount: VersionCount,
TotalVersionCount: modelarts.TotalVersionCount,
ModelName: modelName,
ModelVersion: modelVersion,
CkptName: ckptName,
ResultUrl: resultObsPath,
Spec: spec,
DatasetName: datasetNames,
JobType: string(models.JobTypeInference),
}

jobId, err := modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
ctx.JSON(http.StatusOK, models.BaseMessageApi{Code: 0, Message: jobId})
}

func getDatasUrlListByUUIDS(uuidStr string) ([]models.Datasurl, string, string, bool, error) {
var isMultiDataset bool
var dataUrl string
var datasetNames string
var datasUrlList []models.Datasurl
uuids := strings.Split(uuidStr, ";")
if len(uuids) > setting.MaxDatasetNum {
log.Error("the dataset count(%d) exceed the limit", len(uuids))
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset count exceed the limit")
}

datasetInfos := make(map[string]models.DatasetInfo)
attachs, err := models.GetAttachmentsByUUIDs(uuids)
if err != nil || len(attachs) != len(uuids) {
log.Error("GetAttachmentsByUUIDs failed: %v", err)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("GetAttachmentsByUUIDs failed")
}

for i, tmpUuid := range uuids {
var attach *models.Attachment
for _, tmpAttach := range attachs {
if tmpAttach.UUID == tmpUuid {
attach = tmpAttach
break
}
}
if attach == nil {
log.Error("GetAttachmentsByUUIDs failed: %v", err)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("GetAttachmentsByUUIDs failed")
}
fileName := strings.TrimSuffix(strings.TrimSuffix(strings.TrimSuffix(attach.Name, ".zip"), ".tar.gz"), ".tgz")
for _, datasetInfo := range datasetInfos {
if fileName == datasetInfo.Name {
log.Error("the dataset name is same: %v", attach.Name)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset name is same")
}
}
if len(attachs) <= 1 {
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/"
isMultiDataset = false
} else {
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attachs[0].UUID[0:1], attachs[0].UUID[1:2]) + "/" + attachs[0].UUID + attachs[0].UUID + "/"
datasetUrl := "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/"
datasUrlList = append(datasUrlList, models.Datasurl{
DatasetUrl: datasetUrl,
DatasetName: fileName,
})
isMultiDataset = true
}

if i == 0 {
datasetNames = attach.Name
} else {
datasetNames += ";" + attach.Name
}
}

return datasUrlList, dataUrl, datasetNames, isMultiDataset, nil
}
func checkInferenceJobMultiNode(userId int64, serverNum int) string {
if serverNum == 1 {
return ""
}

return "repo.modelarts.no_node_right"

}

func paramCheckCreateInferenceJob(option api.CreateTrainJobOption) error {
if !strings.HasSuffix(strings.TrimSpace(option.BootFile), ".py") {
log.Error("the boot file(%s) must be a python file", strings.TrimSpace(option.BootFile))
return errors.New("启动文件必须是python文件")
}

if option.ModelName == "" {
log.Error("the ModelName(%d) must not be nil", option.ModelName)
return errors.New("模型名称不能为空")
}
if option.ModelVersion == "" {
log.Error("the ModelVersion(%d) must not be nil", option.ModelVersion)
return errors.New("模型版本不能为空")
}
if option.CkptName == "" {
log.Error("the CkptName(%d) must not be nil", option.CkptName)
return errors.New("权重文件不能为空")
}
if option.BranchName == "" {
log.Error("the Branch(%d) must not be nil", option.BranchName)
return errors.New("分支名不能为空")
}

if utf8.RuneCountInString(option.Description) > 255 {
log.Error("the Description length(%d) must not more than 255", option.Description)
return errors.New("描述字符不能超过255个字符")
}

return nil
}

func loadCodeAndMakeModelPath(repo *models.Repository, codePath string, branchName string, jobName string, resultPath string) string {
err := downloadCode(repo, codePath, branchName)
if err != nil {
return "cloudbrain.load_code_failed"
}

err = uploadCodeToMinio(codePath+"/", jobName, cloudbrain.CodeMountPath+"/")
if err != nil {
return "cloudbrain.load_code_failed"
}

modelPath := setting.JobPath + jobName + resultPath + "/"
err = mkModelPath(modelPath)
if err != nil {
return "cloudbrain.load_code_failed"
}
err = uploadCodeToMinio(modelPath, jobName, resultPath+"/")
if err != nil {
return "cloudbrain.load_code_failed"
}

return ""
}

func downloadCode(repo *models.Repository, codePath, branchName string) error {
//add "file:///" prefix to make the depth valid
if err := git.Clone(CLONE_FILE_PREFIX+repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName, Depth: 1}); err != nil {
log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err)
return err
}

configFile, err := os.OpenFile(codePath+"/.git/config", os.O_RDWR, 0666)
if err != nil {
log.Error("open file(%s) failed:%v", codePath+"/,git/config", err)
return err
}

defer configFile.Close()

pos := int64(0)
reader := bufio.NewReader(configFile)
for {
line, err := reader.ReadString('\n')
if err != nil {
if err == io.EOF {
log.Error("not find the remote-url")
return nil
} else {
log.Error("read error: %v", err)
return err
}
}

if strings.Contains(line, "url") && strings.Contains(line, ".git") {
originUrl := "\turl = " + repo.CloneLink().HTTPS + "\n"
if len(line) > len(originUrl) {
originUrl += strings.Repeat(" ", len(line)-len(originUrl))
}
bytes := []byte(originUrl)
_, err := configFile.WriteAt(bytes, pos)
if err != nil {
log.Error("WriteAt failed:%v", err)
return err
}
break
}

pos += int64(len(line))
}

return nil
}

func getInferenceJobCommand(option api.CreateTrainJobOption) (string, error) {
var command string
bootFile := strings.TrimSpace(option.BootFile)
params := option.Params

if !strings.HasSuffix(bootFile, ".py") {
log.Error("bootFile(%s) format error", bootFile)
return command, errors.New("bootFile format error")
}

var parameters models.Parameters
var param string
if len(params) != 0 {
err := json.Unmarshal([]byte(params), &parameters)
if err != nil {
log.Error("Failed to Unmarshal params: %s (%v)", params, err)
return command, err
}

for _, parameter := range parameters.Parameter {
param += " --" + parameter.Label + "=" + parameter.Value
}
}

param += " --modelname" + "=" + option.CkptName

command += "python /code/" + bootFile + param + " > " + cloudbrain.ResultPath + "/" + option.DisplayJobName + "-" + cloudbrain.LogFile

return command, nil
}
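
A hedged example of the command string getInferenceJobCommand assembles (all option values are hypothetical, the sketch lives inside this package, and the params JSON is assumed to match the models.Parameters field names used above):

opt := api.CreateTrainJobOption{
	DisplayJobName: "demo-infer",
	BootFile:       "inference.py",
	CkptName:       "model.ckpt",
	Params:         `{"parameter": [{"label": "batch_size", "value": "8"}]}`,
}
cmd, _ := getInferenceJobCommand(opt)
// cmd == "python /code/inference.py --batch_size=8 --modelname=model.ckpt > " +
//        cloudbrain.ResultPath + "/demo-infer-" + cloudbrain.LogFile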

+ 83
- 0
services/cloudbrain/cloudbrainTask/sync_status.go View File

@@ -0,0 +1,83 @@
package cloudbrainTask

import (
"net/http"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/notification"
"code.gitea.io/gitea/modules/setting"
)

var noteBookOKMap = make(map[int64]int, 20)

//the notebook is reported as ready to browse only after its url has responded successfully successfulCount times.
const successfulCount = 3

func SyncCloudBrainOneStatus(task *models.Cloudbrain) (*models.Cloudbrain, error) {
jobResult, err := cloudbrain.GetJob(task.JobID)
if err != nil {

log.Error("GetJob failed:", err)

return task, err
}
result, err := models.ConvertToJobResultPayload(jobResult.Payload)
if err != nil {
log.Error("ConvertToJobResultPayload failed:", err)
return task, err
}
oldStatus := task.Status

if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
taskRoles := result.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))

task.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
task.ContainerID = taskRes.TaskStatuses[0].ContainerID
}

if (result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobRunning)) ||
task.Status == string(models.JobRunning) || (result.JobStatus.State == string(models.JobRunning) && isNoteBookReady(task)) {

models.ParseAndSetDurationFromCloudBrainOne(result, task)
task.Status = result.JobStatus.State
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob failed:", err)
return task, err
}
}
return task, nil

}

func isNoteBookReady(task *models.Cloudbrain) bool {
if task.JobType != string(models.JobTypeDebug) {
return true
}
noteBookUrl := setting.DebugServerHost + "jpylab_" + task.JobID + "_" + task.SubTaskName
r := httplib.Get(noteBookUrl)
res, err := r.Response()
if err != nil {
return false
}
if res.StatusCode == http.StatusOK {
count := noteBookOKMap[task.ID]
if count < successfulCount-1 {
noteBookOKMap[task.ID] = count + 1
return false
} else {
delete(noteBookOKMap, task.ID)
return true
}

}
return false

}

+ 1210
- 0
services/cloudbrain/cloudbrainTask/train.go
File diff suppressed because it is too large
View File


+ 1
- 1
services/cloudbrain/resource/resource_queue.go View File

@@ -16,7 +16,7 @@ func AddResourceQueue(req models.ResourceQueueReq) error {
}

func UpdateResourceQueue(queueId int64, req models.ResourceQueueReq) error {
if _, err := models.UpdateResourceQueueById(queueId, models.ResourceQueue{
if _, err := models.UpdateResourceCardsTotalNum(queueId, models.ResourceQueue{
CardsTotalNum: req.CardsTotalNum,
Remark: req.Remark,
}); err != nil {


+ 62
- 7
services/cloudbrain/resource/resource_specification.go View File

@@ -1,20 +1,23 @@
package resource

import (
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"time"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/convert"
"code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/admin/operate_log"
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"time"
)

func AddResourceSpecification(doerId int64, req models.ResourceSpecificationReq) error {
@@ -127,10 +130,49 @@ func GetResourceSpecificationList(opts models.SearchResourceSpecificationOptions
if err != nil {
return nil, err
}

return models.NewResourceSpecAndQueueListRes(n, r), nil
}

//GetAllDistinctResourceSpecification returns specifications and queues de-duplicated by SourceSpecId
//totalSize is always 0 here
func GetAllDistinctResourceSpecification(opts models.SearchResourceSpecificationOptions) (*models.ResourceSpecAndQueueListRes, error) {
opts.Page = 0
opts.PageSize = 1000
opts.OrderBy = models.SearchSpecOrder4Standard
_, r, err := models.SearchResourceSpecification(opts)
if err != nil {
return nil, err
}
nr := distinctResourceSpecAndQueue(r)
return models.NewResourceSpecAndQueueListRes(0, nr), nil
}

func distinctResourceSpecAndQueue(r []models.ResourceSpecAndQueue) []models.ResourceSpecAndQueue {
specs := make([]models.ResourceSpecAndQueue, 0, len(r))
sourceSpecIdMap := make(map[string]models.ResourceSpecAndQueue, 0)
for i := 0; i < len(r); i++ {
spec := r[i]
if spec.SourceSpecId == "" {
specs = append(specs, spec)
continue
}
if _, has := sourceSpecIdMap[spec.SourceSpecId]; has {
//prefer to use on-shelf spec
if sourceSpecIdMap[spec.SourceSpecId].Status != spec.Status && spec.Status == models.SpecOnShelf {
for k, v := range specs {
if v.ResourceSpecification.ID == sourceSpecIdMap[spec.SourceSpecId].ResourceSpecification.ID {
specs[k] = spec
}
}
}
continue
}
specs = append(specs, spec)
sourceSpecIdMap[spec.SourceSpecId] = spec
}
return specs
}
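// Illustration of the rule above (hypothetical values): for two specs sharing SourceSpecId
// "modelarts.kat1.xlarge", where the spec recorded first is off-shelf and a later one is
// on-shelf, the on-shelf spec replaces the earlier entry in the returned slice; specs with
// an empty SourceSpecId are always kept as-is.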

func GetResourceSpecificationScenes(specId int64) ([]models.ResourceSceneBriefRes, error) {
r, err := models.GetSpecScenes(specId)
if err != nil {
@@ -197,6 +239,7 @@ func AddSpecOperateLog(doerId int64, operateType string, newValue, oldValue *mod
}

func FindAvailableSpecs(userId int64, opts models.FindSpecsOptions) ([]*models.Specification, error) {
opts.SpecStatus = models.SpecOnShelf
r, err := models.FindSpecs(opts)
if err != nil {
log.Error("FindAvailableSpecs error.%v", err)
@@ -210,6 +253,18 @@ func FindAvailableSpecs(userId int64, opts models.FindSpecsOptions) ([]*models.S
return specs, err
}

func FindAvailableSpecs4Show(userId int64, opts models.FindSpecsOptions) ([]*api.SpecificationShow, error) {
specs, err := FindAvailableSpecs(userId, opts)
if err != nil {
return nil, err
}
result := make([]*api.SpecificationShow, len(specs))
for i, v := range specs {
result[i] = convert.ToSpecification(v)
}
return result, nil
}

func filterExclusiveSpecs(r []*models.Specification, userId int64) []*models.Specification {
specs := make([]*models.Specification, 0, len(r))
specMap := make(map[int64]string, 0)


+ 46
- 7
services/cloudbrain/util.go View File

@@ -1,27 +1,29 @@
package cloudbrain

import (
"strings"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/setting"
"strings"
)

func GetAiCenterShow(aiCenter string,ctx *context.Context) string{
func GetAiCenterShow(aiCenter string, ctx *context.Context) string {
aiCenterInfo := strings.Split(aiCenter, "+")

if len(aiCenterInfo) == 2{
if setting.C2NetMapInfo!=nil {
if info,ok:=setting.C2NetMapInfo[aiCenterInfo[0]];ok {
if len(aiCenterInfo) == 2 {
if setting.C2NetMapInfo != nil {
if info, ok := setting.C2NetMapInfo[aiCenterInfo[0]]; ok {
if ctx.Language() == "zh-CN" {
return info.Content
} else {
return info.ContentEN
}
}else{
} else {
return aiCenterInfo[1]
}

}else{
} else {
return aiCenterInfo[1]
}

@@ -29,5 +31,42 @@ func GetAiCenterShow(aiCenter string,ctx *context.Context) string{

return ""

}

func GetAiCenterInfoByCenterCode(aiCenterCode string) *setting.C2NetSequenceInfo {
if setting.AiCenterCodeAndNameMapInfo != nil {
if info, ok := setting.AiCenterCodeAndNameMapInfo[aiCenterCode]; ok {
return info
} else {
return nil
}
} else {
return nil
}
}

func getAiCenterCode(aiCenter string) string {
aiCenterInfo := strings.Split(aiCenter, "+")
return aiCenterInfo[0]
}

func UpdateCloudbrainAiCenter(cloudbrain *models.CloudbrainInfo) *models.CloudbrainInfo {
if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainOne {
cloudbrain.Cloudbrain.AiCenter = models.AICenterOfCloudBrainOne
cloudbrain.Cloudbrain.Cluster = models.OpenICluster
}
if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainTwo {
cloudbrain.Cloudbrain.AiCenter = models.AICenterOfCloudBrainTwo
cloudbrain.Cloudbrain.Cluster = models.OpenICluster
}
if cloudbrain.Cloudbrain.Type == models.TypeCDCenter {
cloudbrain.Cloudbrain.AiCenter = models.AICenterOfChengdu
cloudbrain.Cloudbrain.Cluster = models.OpenICluster
}
if cloudbrain.Cloudbrain.Type == models.TypeC2Net {
cloudbrain.Cloudbrain.AiCenter = getAiCenterCode(cloudbrain.Cloudbrain.AiCenter)
cloudbrain.Cloudbrain.Cluster = models.C2NetCluster
}

return cloudbrain
}
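
A hedged sketch (values hypothetical) of what the helper above does for C2Net records before they are rendered in list pages:

// assuming task is a *models.CloudbrainInfo for a C2Net job whose stored AiCenter is "c2net_sz+SZ AI Center"
task = UpdateCloudbrainAiCenter(task)
// task.Cloudbrain.AiCenter == "c2net_sz"
// task.Cloudbrain.Cluster == models.C2NetCluster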

+ 6
- 6
templates/admin/cloudbrain/list.tmpl View File

@@ -170,7 +170,7 @@
</div>
<!-- 智算中心 -->
<div class="one wide column text center nowrap" style="width:8% !important;">
<span style="font-size: 12px;" class="aicenter_{{.DisplayJobName}}_{{$JobID}}">{{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}}</span>
<span style="font-size: 12px;" class="aicenter_{{.DisplayJobName}}_{{$JobID}}" title="{{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}}">{{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}}</span>
</div>
<!-- XPU类型 -->
<div class="one wide column text center nowrap" style="width:8% !important;">
@@ -184,16 +184,16 @@
spanEl.setAttribute('title', cardType);
spanEl.innerText = cardType;
var cluster = spec.Cluster || '--';
var cluster = {{.Cluster}} || '--';
var clusterName = document.querySelector('.cloudbrain_debug').dataset['cluster' + cluster[0] + cluster.toLocaleLowerCase().slice(1)] || '--';
spanEl = document.querySelector('.cluster_{{.DisplayJobName}}_{{$JobID}}');
spanEl.setAttribute('title', cluster);
spanEl.innerText = clusterName;

var aiCenter = spec.AiCenterName || '--';
spanEl = document.querySelector('.aicenter_{{.DisplayJobName}}_{{$JobID}}');
spanEl.setAttribute('title', aiCenter);
spanEl.innerText = aiCenter;
// var aiCenter = spec.AiCenterName || '--';
// spanEl = document.querySelector('.aicenter_{{.DisplayJobName}}_{{$JobID}}');
// spanEl.setAttribute('title', aiCenter);
// spanEl.innerText = aiCenter;
})();
</script>
<!-- 创建者 -->


+ 4
- 6
templates/admin/cloudbrain/search.tmpl View File

@@ -71,22 +71,20 @@
document.addEventListener('DOMContentLoaded', function() {
$.ajax({
type: "GET",
url: "/api/v1/cloudbrain/get_center_info",
url: "/api/v1/cloudbrainboard/cloudbrain/resource_queues",
dataType: "json",
data: {},
success: function (res) {
var data = res || [];
var data = res.resourceQueues || [];
var aiCenterSelEl = $('#aiCenter-sel');
var itemEl = aiCenterSelEl.find('.menu .item').eq(0);
var selectAiCenterCode = aiCenterSelEl.find('.default').attr('aicenter');
var selectAiCenterName = '';
var lang = document.querySelector('html').getAttribute('lang') || 'en-US';
var except = ['', 'more'];
for (var i = 0, iLen = data.length; i < iLen; i++) {
var dataI = data[i];
var aiCenterCode = dataI.name;
if (except.indexOf(aiCenterCode) >= 0) continue;
var aiCenterName = lang === 'en-US' ? dataI.content_en : dataI.content;
var aiCenterCode = dataI.AiCenterCode;
var aiCenterName = dataI.AiCenterName;
var itemClone = itemEl.clone();
var oHref = itemClone.attr('href');
var oId = itemClone.attr('id');


+ 4
- 6
templates/admin/cloudbrain/search_dashboard.tmpl View File

@@ -85,22 +85,20 @@
document.addEventListener('DOMContentLoaded', function() {
$.ajax({
type: "GET",
url: "/api/v1/cloudbrain/get_center_info",
url: "/api/v1/cloudbrainboard/cloudbrain/resource_queues",
dataType: "json",
data: {},
success: function (res) {
var data = res || [];
var data = res.resourceQueues || [];
var aiCenterSelEl = $('#aiCenter-sel');
var itemEl = aiCenterSelEl.find('.menu .item').eq(0);
var selectAiCenterCode = aiCenterSelEl.find('.default').attr('aicenter');
var selectAiCenterName = '';
var lang = document.querySelector('html').getAttribute('lang') || 'en-US';
var except = ['', 'more'];
for (var i = 0, iLen = data.length; i < iLen; i++) {
var dataI = data[i];
var aiCenterCode = dataI.name;
if (except.indexOf(aiCenterCode) >= 0) continue;
var aiCenterName = lang === 'en-US' ? dataI.content_en : dataI.content;
var aiCenterCode = dataI.AiCenterCode;
var aiCenterName = dataI.AiCenterName;
var itemClone = itemEl.clone();
var oHref = itemClone.attr('href');
var oId = itemClone.attr('id');


+ 21
- 2
templates/base/footer_content.tmpl View File

@@ -24,11 +24,30 @@
<div class="text">{{.LangName}}</div>
<div class="menu">
{{range .AllLangs}}
<a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="{{if eq $.Lang .Lang}}#{{else}}{{$.Link}}?lang={{.Lang}}{{end}}">{{.Name}}</a>
<!-- <a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="{{if eq $.Lang .Lang}}#{{else}}{{$.Link}}?lang={{.Lang}}{{end}}">{{.Name}}</a> -->
<a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="javascript:;" olang="{{$.Lang}}" lang="{{.Lang}}" >{{.Name}}</a>
{{end}}
</div>
</div>

<script>
;(function() {
document.addEventListener('DOMContentLoaded', function() {
$('.ui.language .menu .item').on('click', function() {
var lang = $(this).attr('lang');
var oLang = $(this).attr('olang');
if (oLang === lang) return;
var origin = window.location.origin;
var pathname = window.location.pathname;
var search = window.location.search;
var hash = window.location.hash;
var oHref = window.location.href;
var urlSearchParams = new URLSearchParams(search);
urlSearchParams.set('lang', lang);
window.location.href = origin + pathname + '?' + urlSearchParams.toString() + hash;
});
});
})();
</script>
<a href="https://git.openi.org.cn/zeizei/OpenI_Learning" class="item" target="_blank"><i class="compass icon" ></i> {{.i18n.Tr "custom.Platform_Tutorial"}}</a>
{{if .EnableSwagger}}<a href="/api/swagger" class="item"><i class="plug icon"></i> API</a>{{end}}
{{if .IsSigned}}


+ 21
- 1
templates/base/footer_content_fluid.tmpl View File

@@ -22,10 +22,30 @@
<div class="text">{{.LangName}}</div>
<div class="menu">
{{range .AllLangs}}
<a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="{{if eq $.Lang .Lang}}#{{else}}{{$.Link}}?lang={{.Lang}}{{end}}">{{.Name}}</a>
<!--<a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="{{if eq $.Lang .Lang}}#{{else}}{{$.Link}}?lang={{.Lang}}{{end}}">{{.Name}}</a>-->
<a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="javascript:;" olang="{{$.Lang}}" lang="{{.Lang}}" >{{.Name}}</a>
{{end}}
</div>
</div>
<script>
;(function() {
document.addEventListener('DOMContentLoaded', function() {
$('.ui.language .menu .item').on('click', function() {
var lang = $(this).attr('lang');
var oLang = $(this).attr('olang');
if (oLang === lang) return;
var origin = window.location.origin;
var pathname = window.location.pathname;
var search = window.location.search;
var hash = window.location.hash;
var oHref = window.location.href;
var urlSearchParams = new URLSearchParams(search);
urlSearchParams.set('lang', lang);
window.location.href = origin + pathname + '?' + urlSearchParams.toString() + hash;
});
});
})();
</script>
<a href="https://git.openi.org.cn/zeizei/OpenI_Learning" class="item" target="_blank"><i class="compass icon"></i> {{.i18n.Tr "custom.Platform_Tutorial"}} </a>
{{if .EnableSwagger}}<a href="/api/swagger" class="item"><i class="plug icon" ></i> API</a>{{end}}
{{if .IsSigned}}
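
For reference, the language-switch handler added to both footer templates above amounts to the following standalone helper (a minimal sketch; the switchLanguage name is illustrative and not part of the PR):

function switchLanguage(currentLang, lang) {
    if (currentLang === lang) return; // already on the requested language, nothing to do
    var params = new URLSearchParams(window.location.search);
    params.set('lang', lang);
    // Keep origin, path and hash; only the lang query parameter changes.
    window.location.href = window.location.origin + window.location.pathname +
        '?' + params.toString() + window.location.hash;
}

// e.g. switchLanguage('zh-CN', 'en-US') reloads the current page with lang=en-US in the query string.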


+ 2
- 2
templates/base/head_navbar.tmpl

@@ -38,7 +38,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui simple dropdown item" id='dropdown_explore'>
@@ -78,7 +78,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>


+ 2
- 2
templates/base/head_navbar_fluid.tmpl

@@ -38,7 +38,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_explore'>
@@ -77,7 +77,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_PageHome'>


+ 2
- 2
templates/base/head_navbar_home.tmpl

@@ -30,7 +30,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_explore'>
@@ -70,7 +70,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_PageHome'>


+ 2
- 2
templates/base/head_navbar_pro.tmpl

@@ -40,7 +40,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_explore'>
@@ -80,7 +80,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_PageHome'>


+ 1
- 1
templates/custom/max_log.tmpl

@@ -29,7 +29,7 @@
<div class="ui message message-max{{.VersionName}}" style="display: none;">
<div id="header"></div>
</div>
<div class="log-scroll-max" id="log-max{{.VersionName}}" data-version="{{.VersionName}}" style="overflow: auto;max-height: 100%;">
<div class="log-scroll-max" id="log-max{{.VersionName}}" data-version="{{.VersionName}}" style="overflow: auto;max-height: 100%;height: 100%">
<div class="ui inverted active dimmer">
<div class="ui loader"></div>
</div>


+ 2
- 4
templates/repo/cloudbrain/inference/new.tmpl

@@ -331,9 +331,7 @@
$('#model_name_version').empty()
let html = ''
nameMap[value].forEach(element => {
let {TrainTaskInfo} = element
TrainTaskInfo = JSON.parse(TrainTaskInfo)
html += `<div class="item" data-label="${element.Label}" data-id="${element.ID}" data-value="${element.Path}">${element.Version}</div>`
html += `<div class="item" data-label="${element.label}" data-id="${element.id}" data-value="${element.path}">${element.version}</div>`
});
$('#model_name_version').append(html)
$("#select_model_version").removeClass("loading")
@@ -387,7 +385,7 @@
}
function loadCheckpointList(value){
return new Promise((resolve,reject)=>{
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{ID:value}, (data) => {
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{id:value}, (data) => {
resolve(data)
})
})


+ 0
- 71
templates/repo/cloudbrain/inference/show.tmpl

@@ -262,8 +262,6 @@
<div class="ui pointing secondary menu" style="border-bottom: 1px solid rgba(34,36,38,.15);">
<a class="active item"
data-tab="first">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>
<a class="item" data-tab="second"
onclick="javascript:parseInfo()">{{$.i18n.Tr "repo.cloudbrain.runinfo"}}</a>
<a class="item log_bottom" data-tab="third"
data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a>
<a class="item load-model-file" data-tab="four"
@@ -515,25 +513,6 @@

</div>
</div>

<div class="ui tab" data-tab="second">
<div>
<div class="ui message message{{.VersionName}}" style="display: none;">
<div id="header"></div>
</div>
<div class="ui attached"
style="height: 390px !important; overflow: auto;">
<input type="hidden" id="json_value" value="{{$.result.JobStatus.AppExitDiagnostics}}">
<input type="hidden" id="ExitDiagnostics" value="{{$.ExitDiagnostics}}">
<span id="info_display" class="info_text">

</span>
</div>

</div>

</div>

<div class="ui tab" data-tab="third">
<div class="file-info">
<a id="{{.VersionName}}-log-down"
@@ -616,56 +595,6 @@
$(document).ready(function () {
$('.secondary.menu .item').tab();
});

let userName
let repoPath
let jobID
let downlaodFlag = {{ $.canDownload }}
let taskID = {{ $.task.ID }}
let realJobName = {{ $.task.JobName }}
function parseInfo() {
let jsonValue = document.getElementById("json_value").value;
let jsonObj = JSON.parse(jsonValue);
let podRoleName = jsonObj["podRoleName"];
let html = "";
if (podRoleName != null) {
let task0 = podRoleName["task1-0"];
let podEvents = jsonObj["podEvents"];
let podEventArray = podEvents[task0];
if (podEventArray != null) {
for (var i = 0; i < podEventArray.length; i++) {
if (podEventArray[i]["reason"] != "") {
html += "<p><b>[" + podEventArray[i]["reason"] + "]</b></p>";
html += "<p>" + podEventArray[i]["message"] + "</p>";
html += "<p>" + podEventArray[i]["action"] + "</p>";
}
}
}
let extras = jsonObj["extras"];
if (extras != null) {
for (var i = 0; i < extras.length; i++) {
if (extras[i]["reason"] != "") {
html += "<p><b>[" + extras[i]["reason"] + "]</b></p>";
html += "<p>" + extras[i]["message"] + "</p>";
html += "<p>" + extras[i]["action"] + "</p>";
}
}
}
}

let string = document.getElementById("ExitDiagnostics").value;
string = string.replace(/\r\n/g, "<br>")
string = string.replace(/\n/g, "<br>");
string = string.replace(/(\r\n)|(\n)/g, '<br>');

if (string != "") {
html += "<p><b>[ExitDiagnostics]</b></p>";
html += "<p>" + string + "</p>";
}

document.getElementById("info_display").innerHTML = html;
}

;(function() {
var SPEC = {{ .Spec }};
var showPoint = false;


+ 31
- 161
templates/repo/cloudbrain/trainjob/show.tmpl

@@ -284,15 +284,10 @@
<div class="content-pad">
<div class="ui pointing secondary menu" style="border-bottom: 1px solid rgba(34,36,38,.15);">
<a class="active item"
data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>
<a class="item" data-tab="second{{$k}}"
onclick="javascript:parseInfo()">{{$.i18n.Tr "repo.cloudbrain.runinfo"}}</a>
data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>

<a class="item log_bottom" data-tab="third{{$k}}"
data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a>

data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a>
<a class="item load-model-file" data-tab="four{{$k}}" data-gpu-flag="true" data-download-flag="{{$.canDownload}}" data-path="{{$.RepoLink}}/cloudbrain/train-job/{{.JobID}}/model_list" data-version="{{.VersionName}}" data-parents="" data-filename="" data-init="init" >{{$.i18n.Tr "repo.model_download"}}</a>
</div>
<div class="ui tab active" data-tab="first{{$k}}">
@@ -430,9 +425,6 @@
</div>
</td>
</tr>


<tr class="ti-no-ng-animate">
<td class="ti-no-ng-animate ti-text-form-label text-width80">
{{$.i18n.Tr "repo.modelarts.train_job.run_parameter"}}
@@ -504,25 +496,6 @@

</div>
</div>

<div class="ui tab" data-tab="second{{$k}}">
<div>
<div class="ui message message{{.VersionName}}" style="display: none;">
<div id="header"></div>
</div>
<div class="ui attached log" id="log_state{{.VersionName}}"
style="height: 390px !important; overflow: auto;">
<input type="hidden" id="json_value" value="{{$.result.JobStatus.AppExitDiagnostics}}">
<input type="hidden" id="ExitDiagnostics" value="{{$.ExitDiagnostics}}">
<span id="info_display" class="info_text">

</span>
</div>

</div>

</div>

<div class="ui tab" data-tab="third{{$k}}">
<div class="file-info">
<a id="{{.VersionName}}-log-down"
@@ -565,29 +538,22 @@
<input type="hidden" name="init_log" value>
<pre id="log_file{{.VersionName}}"></pre>
</div>

</div>

</div>

<div class="ui tab" data-tab="four{{$k}}">
<input type="hidden" name="model{{.VersionName}}" value="-1">
<input type="hidden" name="modelback{{.VersionName}}" value="-1">
<div class='ui breadcrumb model_file_bread' id='file_breadcrumb{{.VersionName}}'>
<div class="active section">result</div>
<div class="divider"> / </div>

</div>
<div id="dir_list{{.VersionName}}">

</div>
<div style="display:flex;align-items: center;justify-content: end;color: #f2711c;">
<i class="ri-error-warning-line" style="margin-right:0.5rem;"></i>
<span>{{$.i18n.Tr "repo.file_limit_100"}}</span>
</div>
</div>

</div>
</div>
</div>
@@ -633,24 +599,24 @@

<div class="required inline field">
<label>{{.i18n.Tr "repo.modelarts.train_job"}}</label>
<input type="hidden" class="width83" id="JobId" name="JobId" readonly required>
<input type="hidden" id="VersionName" name="VersionName" value="V0001">
<input type="hidden" class="width83" id="jobId" name="jobId" readonly required>
<input type="hidden" id="versionName" name="versionName" value="V0001">
<input style="width: 45%;" id="JobName" readonly required>
</div>

<div class="required inline field" id="modelname">
<label>{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<input style="width: 45%;" id="name" name="Name" required maxlength="25"
<input style="width: 45%;" id="name" name="name" required maxlength="25"
onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
<div class="required inline field" id="verionname">
<label>{{.i18n.Tr "repo.modelconvert.modelversion"}}</label>
<input style="width: 45%;" id="version" name="Version" value="" readonly required maxlength="255">
<input style="width: 45%;" id="version" name="version" value="" readonly required maxlength="255">
</div>
<div class="unite min_title inline field required">
<label>{{.i18n.Tr "repo.model.manage.engine"}}</label>
<div class="ui dropdown selection search width70" id="choice_Engine">
<input type="hidden" id="Engine" name="Engine" required>
<input type="hidden" id="engine" name="engine" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.engine"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-Engine">
@@ -677,12 +643,12 @@
</div>
<div class="inline field">
<label>{{.i18n.Tr "repo.model.manage.modellabel"}}</label>
<input style="width: 83%;margin-left: 7px;" id="label" name="Label" maxlength="255"
<input style="width: 83%;margin-left: 7px;" id="label" name="label" maxlength="255"
placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
<div class="inline field">
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}}</label>
<textarea style="width: 83%;margin-left: 7px;" id="Description" name="Description" rows="3"
<textarea style="width: 83%;margin-left: 7px;" id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -700,11 +666,8 @@
<button class="ui button cancel">{{.i18n.Tr "repo.cloudbrain.cancel"}}</button>
</div>
</div>


</div>
</div>

</div>
{{template "base/footer" .}}
<script type="text/javascript" src="/self/ztree/js/jquery.ztree.core.js"></script>
@@ -712,7 +675,15 @@
<script src="{{StaticUrlPrefix}}/js/specsuse.js?v={{MD5 AppVer}}" type="text/javascript"></script>

<script>
var setting = {
var userName;
var repoPath;
$(document).ready(function(){
var url = window.location.href;
var urlArr = url.split('/')
userName = urlArr.slice(-5)[0]
repoPath = urlArr.slice(-4)[0]
});
var setting = {
check: {
enable: true,
chkboxType: {"Y":"ps", "N":"ps"}
@@ -850,23 +821,19 @@
.modal({
centered: false,
onShow: function () {
$('input[name="Version"]').addClass('model_disabled')
// $('input[name="JobId"]').text(obj.JobName)
$('input[name="version"]').addClass('model_disabled')
$('#JobName').val(obj.DisplayJobName).addClass('model_disabled')
$('input[name="JobId"]').val(obj.JobID)
$('input[name="VersionName"]').val("V0001")
$('input[name="jobId"]').val(obj.JobID)
$('input[name="versionName"]').val("V0001")
$('#choice_Engine .default.text').text("PyTorch");
$('#choice_Engine input[name="Engine"]').val(0)
$('#choice_Engine input[name="engine"]').val(0)
$('#choice_Engine .default.text').css({ "color": "rgb(0, 0, 0,0.87)" })
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
createModelName();
loadSelectedModelFile(obj);
},
onHide: function () {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
$('.ui.dimmer').css({ "background-color": "" })
$('.ui.error.message').text()
$('.ui.error.message').css('display', 'none')
@@ -887,8 +854,14 @@
type: 'POST',
data: data,
success: function (res) {
$('input[name="Engine_name"]').val("");
$('input[name="Engine"]').val("");
$('input[name="engine_name"]').val("");
$('input[name="engine"]').val("");
$('input[name="jobId"]').val("");
$('input[name="label"]').val("");
$('input[name="description"]').val("");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
location.href = `/${userName}/${repoPath}/modelmanage/show_model`
$('.ui.modal.second').modal('hide')
},
@@ -920,110 +893,7 @@
$('.secondary.menu .item').tab();
});

let userName
let repoPath
let jobID
let downlaodFlag = {{ $.canDownload }}
let taskID = {{ $.task.ID }}
let realJobName = {{ $.task.JobName }}
$(document).ready(function () {
let url = window.location.href;
let urlArr = url.split('/')
userName = urlArr.slice(-5)[0]
repoPath = urlArr.slice(-4)[0]
jobID = urlArr.slice(-1)[0]
})
function stopBubbling(e) {
e = window.event || e;
if (e.stopPropagation) {
e.stopPropagation(); // stop the event from bubbling up
} else {
e.cancelBubble = true; // IE compatibility
}
}

function loadLog(version_name) {
document.getElementById("mask").style.display = "block"
let startLine = $('input[name=end_line]').val();
if(startLine==""){
startLine=0;
}
let endLine = $('input[name=end_line]').val();
if(endLine==""){
endLine = 50;
}
$.get(`/${userName}/${repoPath}/cloudbrain/train-job/${jobID}/get_log?endLine=${endLine}&startLine=${startLine}`, (data) => {
$('input[name=end_line]').val(data.EndLine)
$('input[name=start_line]').val(data.StartLine)
$(`#log_file${version_name}`).text(data.Content)
document.getElementById("mask").style.display = "none"
}).fail(function (err) {
console.log(err);
document.getElementById("mask").style.display = "none"
});
}

function refreshStatus(version_name) {
$.get(`/api/v1/repos/${userName}/${repoPath}/cloudbrain/${taskID}?version_name=${versionname}`, (data) => {
// header status and duration
//$(`#${version_name}-duration-span`).text(data.JobDuration)
$(`#${version_name}-status-span span`).text(data.JobStatus)
$(`#${version_name}-status-span i`).attr("class", data.JobStatus)
// detail status and duration
//$('#'+version_name+'-duration').text(data.JobDuration)
$('#' + version_name + '-status').text(data.JobStatus)
loadLog(version_name)


}).fail(function (err) {
console.log(err);
});
stopBubbling(arguments.callee.caller.arguments[0])
}

function parseInfo() {
let jsonValue = document.getElementById("json_value").value;
let jsonObj = JSON.parse(jsonValue);
let podRoleName = jsonObj["podRoleName"];
let html = "";
if (podRoleName != null) {
let task0 = podRoleName["task1-0"];
let podEvents = jsonObj["podEvents"];
let podEventArray = podEvents[task0];
if (podEventArray != null) {
for (var i = 0; i < podEventArray.length; i++) {
if (podEventArray[i]["reason"] != "") {
html += "<p><b>[" + podEventArray[i]["reason"] + "]</b></p>";
html += "<p>" + podEventArray[i]["message"] + "</p>";
html += "<p>" + podEventArray[i]["action"] + "</p>";
}
}
}
let extras = jsonObj["extras"];
if (extras != null) {
for (var i = 0; i < extras.length; i++) {
if (extras[i]["reason"] != "") {
html += "<p><b>[" + extras[i]["reason"] + "]</b></p>";
html += "<p>" + extras[i]["message"] + "</p>";
html += "<p>" + extras[i]["action"] + "</p>";
}
}
}
}

let string = document.getElementById("ExitDiagnostics").value;
string = string.replace(/\r\n/g, "<br>")
string = string.replace(/\n/g, "<br>");
string = string.replace(/(\r\n)|(\n)/g, '<br>');

if (string != "") {
html += "<p><b>[ExitDiagnostics]</b></p>";
html += "<p>" + string + "</p>";
}

document.getElementById("info_display").innerHTML = html;
}

;(function() {
var SPEC = {{ .Spec }};
var showPoint = false;


+ 32
- 25
templates/repo/grampus/trainjob/show.tmpl

@@ -238,11 +238,8 @@
<span>
<div style="float: right;">
{{$.CsrfTokenHtml}}
</div>
<div class="ac-display-inblock title_text acc-margin-bottom">

<span class="cti-mgRight-sm">{{TimeSinceUnix1 .CreatedUnix}}</span>
<span class="cti-mgRight-sm">
{{$.i18n.Tr "repo.modelarts.current_version"}}:{{.VersionName}}</span>
@@ -260,7 +257,6 @@
<span class="refresh-status" data-tooltip="刷新" style="cursor: pointer;" data-inverted="" data-version="{{.VersionName}}">
<i class="redo icon redo-color"></i>
</span>

</div>
<div style="float: right;">
{{if and ($.canDownload) (ne .Status "WAITING") ($.Permission.CanWrite $.UnitTypeModelManage) }}
@@ -269,7 +265,6 @@
{{else}}
<a class="ti-action-menu-item disabled" id="{{.VersionName}}-create-model">{{$.i18n.Tr "repo.modelarts.create_model"}}</a>
{{end}}

</div>
</span>
</span>
@@ -282,6 +277,9 @@

<a class="active item" data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>
<a class="item log_bottom" data-tab="second{{$k}}" data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a>
{{ if eq $.Spec.ComputeResource "NPU"}}
<a class="item metric_chart" data-tab="four{{$k}}" data-version="{{.VersionName}}" data-path="{{$.RepoRelPath}}/grampus/train-job/{{.JobID}}/metrics">{{$.i18n.Tr "cloudbrain.resource_use"}}</a>
{{end}}
<a class="item load-model-file" data-tab="third{{$k}}" data-download-flag="{{$.canDownload}}" data-path="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/model_list" data-version="{{.VersionName}}" data-parents="" data-filename="" data-init="init" >{{$.i18n.Tr "repo.model_download"}}</a>
</div>
<div class="ui tab active" data-tab="first{{$k}}">
@@ -564,6 +562,14 @@

</div>

</div>
<div class="ui tab" data-tab="four{{$k}}" style="position: relative;">
<i class="ri-refresh-line metric_chart"
style="position: absolute;right: 25%;color:#3291f8;z-index:99;cursor: pointer;"
data-version="{{.VersionName}}"></i>
<div id="metric-{{.VersionName}}" style="height: 260px;width: 870px;">
</div>
</div>
<div class="ui tab" data-tab="third{{$k}}">
<input type="hidden" name="model{{.VersionName}}" value="-1">
@@ -624,24 +630,24 @@

<div class="required inline field">
<label>{{.i18n.Tr "repo.modelarts.train_job"}}</label>
<input type="hidden" class="width83" id="JobId" name="JobId" readonly required>
<input type="hidden" id="VersionName" name="VersionName" value="V0001">
<input type="hidden" class="width83" id="jobId" name="jobId" readonly required>
<input type="hidden" id="versionName" name="versionName" value="V0001">
<input style="width: 45%;" id="JobName" readonly required>
</div>

<div class="required inline field" id="modelname">
<label>{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<input style="width: 45%;" id="name" name="Name" required maxlength="25"
<input style="width: 45%;" id="name" name="name" required maxlength="25"
onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
<div class="required inline field" id="verionname">
<label>{{.i18n.Tr "repo.modelconvert.modelversion"}}</label>
<input style="width: 45%;" id="version" name="Version" value="" readonly required maxlength="255">
<input style="width: 45%;" id="version" name="version" value="" readonly required maxlength="255">
</div>
<div class="unite min_title inline field required">
<label>{{.i18n.Tr "repo.model.manage.engine"}}</label>
<div class="ui dropdown selection search width70" id="choice_Engine">
<input type="hidden" id="Engine" name="Engine" required>
<input type="hidden" id="engine" name="engine" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.engine"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-Engine">
@@ -669,12 +675,12 @@
</div>
<div class="inline field">
<label>{{.i18n.Tr "repo.model.manage.modellabel"}}</label>
<input style="width: 83%;margin-left: 7px;" id="label" name="Label" maxlength="255"
<input style="width: 83%;margin-left: 7px;" id="label" name="label" maxlength="255"
placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
<div class="inline field">
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}}</label>
<textarea style="width: 83%;margin-left: 7px;" id="Description" name="Description" rows="3"
<textarea style="width: 83%;margin-left: 7px;" id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -762,7 +768,6 @@
function showMenu() {
var cityObj = $("#modelSelectedFile");
var cityOffset = $("#modelSelectedFile").offset();
//$("#menuContent").css({left:cityOffset.left + "px", top:cityOffset.top + cityObj.outerHeight() + "px"}).slideDown("fast");
$("#menuContent").slideDown("fast");
$("body").bind("mousedown", onBodyDown);
}
@@ -861,11 +866,10 @@
.modal({
centered: false,
onShow: function () {
$('input[name="Version"]').addClass('model_disabled')
// $('input[name="JobId"]').text(obj.JobName)
$('input[name="version"]').addClass('model_disabled')
$('#JobName').val(obj.DisplayJobName).addClass('model_disabled')
$('input[name="JobId"]').val(obj.JobID)
$('input[name="VersionName"]').val("V0001")
$('input[name="jobId"]').val(obj.JobID)
$('input[name="versionName"]').val("V0001")
if(obj.ComputeResource=="NPU"){
if (obj.EngineName != null && obj.EngineName != "") {
@@ -873,16 +877,16 @@
srcEngine = srcEngine.trim().toLowerCase();
if (srcEngine == 'tensorflow') {
$('#choice_Engine .default.text').text("TensorFlow");
$('#choice_Engine input[name="Engine"]').val(1)
$('#choice_Engine input[name="engine"]').val(1)
}
if (srcEngine == 'mindspore') {
$('#choice_Engine .default.text').text("MindSpore");
$('#choice_Engine input[name="Engine"]').val(2)
$('#choice_Engine input[name="engine"]').val(2)
}
}
}else{
$('#choice_Engine .default.text').text("PyTorch");
$('#choice_Engine input[name="Engine"]').val(0)
$('#choice_Engine input[name="engine"]').val(0)
}
$('#choice_Engine .default.text').css({ "color": "rgb(0, 0, 0,0.87)" })
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
@@ -890,9 +894,6 @@
loadSelectedModelFile(obj);
},
onHide: function () {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
$('.ui.dimmer').css({ "background-color": "" })
$('.ui.error.message').text()
$('.ui.error.message').css('display', 'none')
@@ -914,8 +915,14 @@
type: 'POST',
data: data,
success: function (res) {
$('input[name="Engine_name"]').val("");
$('input[name="Engine"]').val("");
$('input[name="engine_name"]').val("");
$('input[name="engine"]').val("");
$('input[name="jobId"]').val("");
$('input[name="label"]').val("");
$('input[name="description"]').val("");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
location.href = `/${userName}/${repoPath}/modelmanage/show_model`
$('.ui.modal.second').modal('hide')
},


+ 2
- 4
templates/repo/modelarts/inferencejob/new.tmpl

@@ -362,9 +362,7 @@
$('#model_name_version').empty()
let html = ''
nameMap[value].forEach(element => {
let {TrainTaskInfo} = element
TrainTaskInfo = JSON.parse(TrainTaskInfo)
html += `<div class="item" data-label="${element.Label}" data-id="${element.ID}" data-value="${element.Path}">${element.Version}</div>`
html += `<div class="item" data-label="${element.label}" data-id="${element.id}" data-value="${element.path}">${element.version}</div>`
});
$('#model_name_version').append(html)
$("#select_model_version").removeClass("loading")
@@ -418,7 +416,7 @@
}
function loadCheckpointList(value){
return new Promise((resolve,reject)=>{
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{ID:value}, (data) => {
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{id:value}, (data) => {
resolve(data)
})
})


+ 25
- 24
templates/repo/modelarts/trainjob/show.tmpl

@@ -321,7 +321,7 @@
data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>
<a class="item log_bottom" data-tab="second{{$k}}"
data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a>
<a class="item metric_chart" data-tab="four{{$k}}" data-version="{{.VersionName}}">{{$.i18n.Tr "cloudbrain.resource_use"}}</a>
<a class="item metric_chart" data-tab="four{{$k}}" data-version="{{.VersionName}}" data-path="{{$.RepoRelPath}}/modelarts/train-job/{{.JobID}}/metric_statistics?version_name={{.VersionName}}&statistic_type=each&metrics=">{{$.i18n.Tr "cloudbrain.resource_use"}}</a>
<a class="item load-model-file" data-tab="third{{$k}}" data-download-flag="{{$.canDownload}}" data-path="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/model_list" data-version="{{.VersionName}}" data-parents="" data-filename="" data-init="init" >{{$.i18n.Tr "repo.model_download"}}</a>
</div>
<div class="ui tab active" data-tab="first{{$k}}">
@@ -662,29 +662,29 @@
<div class="two inline fields ">
<div class="required ten wide field">
<label>{{.i18n.Tr "repo.modelarts.train_job"}}</label>&nbsp;
<input type="hidden" class="width83" id="JobId" name="JobId" readonly required>
<input type="hidden" class="width83" id="jobId" name="jobId" readonly required>
<input class="width83" id="JobName" readonly required>

</div>
<div class="required six widde field">
<label>{{.i18n.Tr "repo.model.manage.version"}}</label>
<input class="width70" id="VersionName" name="VersionName" readonly required>
<input class="width70" id="versionName" name="versionName" readonly required>
</div>
</div>

<div class="required inline field" id="modelname">
<label>{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<input style="width: 45%;" id="name" name="Name" required maxlength="25"
<input style="width: 45%;" id="name" name="name" required maxlength="25"
onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
<div class="required inline field" id="verionname">
<label>{{.i18n.Tr "repo.modelconvert.modelversion"}}</label>
<input style="width: 45%;" id="version" name="Version" value="" readonly required maxlength="255">
<input style="width: 45%;" id="version" name="version" value="" readonly required maxlength="255">
</div>
<div class="unite min_title inline field required">
<label>{{.i18n.Tr "repo.model.manage.engine"}}</label>
<input type="hidden" id="Engine" name="Engine" required>
<input style="width: 45%;" id="Engine_name" name="Engine_name" readonly required maxlength="255">
<input type="hidden" id="engine" name="engine" required>
<input style="width: 45%;" id="engine_name" name="engine_name" readonly required maxlength="255">
</div>
<div class="unite min_title inline fields required">
<div class="field required">
@@ -699,12 +699,12 @@
</div>
<div class="inline field">
<label>{{.i18n.Tr "repo.model.manage.modellabel"}}</label>
<input style="width: 83%;margin-left: 7px;" id="label" name="Label" maxlength="255"
<input style="width: 83%;margin-left: 7px;" id="label" name="label" maxlength="255"
placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
<div class="inline field">
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}}</label>
<textarea style="width: 83%;margin-left: 7px;" id="Description" name="Description" rows="3"
<textarea style="width: 83%;margin-left: 7px;" id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -726,13 +726,12 @@

</div>
</div>
</div>
{{template "base/footer" .}}

<script type="text/javascript" src="/self/ztree/js/jquery.ztree.core.js"></script>
<script type="text/javascript" src="/self/ztree/js/jquery.ztree.excheck.js"></script>
<script>
<script>
var setting = {
check: {
enable: true,
@@ -899,27 +898,23 @@
.modal({
centered: false,
onShow: function () {
$('input[name="Version"]').addClass('model_disabled')
// $('input[name="JobId"]').text(obj.JobName)
$('input[name="version"]').addClass('model_disabled')
$('#JobName').val(obj.DisplayJobName).addClass('model_disabled')
$('input[name="JobId"]').val(obj.JobID)
$('input[name="VersionName"]').val(obj.VersionName).addClass('model_disabled')
$('input[name="jobId"]').val(obj.JobID)
$('input[name="versionName"]').val(obj.VersionName).addClass('model_disabled')
if(obj.EngineID ==122 || obj.EngineID ==35 || obj.EngineID ==-1 || obj.EngineID ==37){
$('input[name="Engine_name"]').val("MindSpore").addClass('model_disabled');
$('input[name="Engine"]').val(2);
$('input[name="engine_name"]').val("MindSpore").addClass('model_disabled');
$('input[name="engine"]').val(2);
}
if(obj.EngineID ==121 || obj.EngineID ==38){
$('input[name="Engine_name"]').val("TensorFlow").addClass('model_disabled');
$('input[name="Engine"]').val(1);
$('input[name="engine_name"]').val("TensorFlow").addClass('model_disabled');
$('input[name="engine"]').val(1);
}
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
createModelName();
loadSelectedModelFile(obj);
},
onHide: function () {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
$('.ui.dimmer').css({ "background-color": "" })
$('.ui.error.message').text()
$('.ui.error.message').css('display', 'none')
@@ -940,8 +935,14 @@
type: 'POST',
data: data,
success: function (res) {
$('input[name="Engine_name"]').val("");
$('input[name="Engine"]').val("");
$('input[name="engine_name"]').val("");
$('input[name="engine"]').val("");
$('input[name="jobId"]').val("");
$('input[name="label"]').val("");
$('input[name="description"]').val("");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
location.href = `/${userName}/${repoPath}/modelmanage/show_model`
$('.ui.modal.second').modal('hide')
},


+ 39
- 38
templates/repo/modelmanage/convertIndex.tmpl

@@ -93,7 +93,7 @@
<div class="ui grid stackable item">
<div class="row">
<div class="three wide column padding0">
<a class="title" href="{{$.RepoLink}}/modelmanage/show_model_convert_info?ID={{.ID}}" title="{{.Name}}" style="font-size: 14px;">
<a class="title" href="{{$.RepoLink}}/modelmanage/show_model_convert_info?id={{.ID}}" title="{{.Name}}" style="font-size: 14px;">
<span class="fitted" style="width: 90%;vertical-align: middle;">{{.Name}}</span>
</a>
</div>
@@ -141,7 +141,7 @@
</form>

{{if .IsCanOper}}
<a id="ai-download-{{.ID}}" href="{{$.Repository.HTMLURL}}/modelmanage/download_model_convert/{{.ID}}?AllDownload=true&a=1" class='ui basic {{if eq .Status "SUCCEEDED" "COMPLETED"}}blue {{else}}disabled {{end}}button' style="border-radius: .28571429rem;">
<a id="ai-download-{{.ID}}" href="{{$.Repository.HTMLURL}}/modelmanage/download_model_convert/{{.ID}}?allDownload=true&a=1" class='ui basic {{if eq .Status "SUCCEEDED" "COMPLETED"}}blue {{else}}disabled {{end}}button' style="border-radius: .28571429rem;">
{{$.i18n.Tr "repo.modelconvert.download"}}
</a>
{{else}}
@@ -233,7 +233,7 @@
</div>
<div class="ui dropdown selection search eight wide field" id="choice_version">
<input type="hidden" id="ModelVersion" name="ModelVersion" required>
<input type="hidden" id="modelVersion" name="modelVersion" required>
<div class="default text">{{$.i18n.Tr "repo.modelconvert.selectversion"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="model-version">
@@ -246,7 +246,7 @@
<label for="choice_file">{{$.i18n.Tr "repo.model.manage.modelfile"}}</label>
</div>
<div class="ui dropdown selection search eight wide field" id="choice_file">
<input type="hidden" id="ModelFile" name="ModelFile" required>
<input type="hidden" id="modelFile" name="modelFile" required>
<div class="default text">{{$.i18n.Tr "repo.modelconvert.selectmodelfile"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="model-file">
@@ -260,10 +260,10 @@
</div>
<div class="unite min_title inline fields required">
<div class="three wide field right aligned">
<label for="SrcEngine">{{$.i18n.Tr "repo.modelconvert.srcengine"}}</label>
<label for="srcEngine">{{$.i18n.Tr "repo.modelconvert.srcengine"}}</label>
</div>
<select id="SrcEngine" class="ui search dropdown eight wide field" placeholder="" style='color:#000000;' name="SrcEngine" onchange="javascript:srcEngineChanged()">
<select id="srcEngine" class="ui search dropdown eight wide field" placeholder="" style='color:#000000;' name="srcEngine" onchange="javascript:srcEngineChanged()">
</select>
</div>
@@ -289,30 +289,30 @@
<div class="unite min_title inline fields required">
<div class="three wide field right aligned">
<label for="DestFormat">{{$.i18n.Tr "repo.modelconvert.outputformat"}}</label>
<label for="destFormat">{{$.i18n.Tr "repo.modelconvert.outputformat"}}</label>
</div>
<select id="DestFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="DestFormat">
<select id="destFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="destFormat">
</select>
</div>
<div class="unite min_title inline fields">
<div class="three wide field right aligned">
<label for="NetOutputFormat">{{$.i18n.Tr "repo.modelconvert.netoutputdata"}}&nbsp;&nbsp;</label>
<label for="netOutputFormat">{{$.i18n.Tr "repo.modelconvert.netoutputdata"}}&nbsp;&nbsp;</label>
</div>
<select id="NetOutputFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="NetOutputFormat">
<select id="netOutputFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="netOutputFormat">

</select>
</div>
<div class="unite min_title inline fields">
<div class="three wide field right aligned">
<label for="Description">{{$.i18n.Tr "repo.modelconvert.taskdesc"}}&nbsp;&nbsp;</label>
<label for="description">{{$.i18n.Tr "repo.modelconvert.taskdesc"}}&nbsp;&nbsp;</label>
</div>
<div class="twelve wide field">
<textarea id="Description" name="Description" rows="1" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}' onchange="this.value=this.value.substring(0, 255)" onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 256)"></textarea>
<textarea id="description" name="description" rows="1" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}' onchange="this.value=this.value.substring(0, 255)" onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 256)"></textarea>
</div>
</div>
<div class="unite min_title inline field">
@@ -364,9 +364,9 @@
$("#task_name").removeClass("error")
}

data['desc']= $('#Description').val()
data['modelId'] = $('#ModelVersion').val()
data['SrcEngine'] = $('#SrcEngine').val();
data['desc']= $('#description').val()
data['modelId'] = $('#modelVersion').val()
data['srcEngine'] = $('#srcEngine').val();
data['inputshape']= $('#inputshape').val();

if(inputshapeNotValid(data['inputshape'])){
@@ -379,10 +379,10 @@
}

data['inputdataformat']= $('#inputdataformat').val();
data['DestFormat'] = $('#DestFormat').val();
data['NetOutputFormat']= $('#NetOutputFormat').val();
data['ModelFile'] = $('#ModelFile').val();
if(data['ModelFile']==""){
data['destFormat'] = $('#destFormat').val();
data['netOutputFormat']= $('#netOutputFormat').val();
data['modelFile'] = $('#modelFile').val();
if(data['modelFile']==""){
$('.ui.error.message').text("{{.i18n.Tr "repo.modelconvert.modelfileempty"}}")
$('.ui.error.message').css('display','block')
$("#ModelFile_Div").addClass("error")
@@ -392,11 +392,11 @@
}
$.post(`${repolink}/modelmanage/create_model_convert`,data,(result) => {
console.log("result=" + result);
if(result.result_code ==0){
if(result.code ==0){
$('.ui.modal.second').modal('hide');
window.location.reload();
}else{
$('.ui.error.message').text(result.message)
$('.ui.error.message').text(result.msg)
$('.ui.error.message').css('display','block')
}
})
@@ -456,7 +456,7 @@
$('#choice_version').dropdown({
onChange:function(value){
console.log("model version:" + value);
$('#choice_version input[name="ModelVersion"]').val(value)
$('#choice_version input[name="modelVersion"]').val(value)
loadModelFile(value);
}
})
@@ -464,26 +464,26 @@
$('#choice_file').dropdown({
onChange:function(value){
console.log("model file:" + value);
$('#choice_file input[name="ModelFile"]').val(value)
$('#choice_file input[name="modelFile"]').val(value)
}
})

})

function srcEngineChanged(){
var ele = window.document.getElementById("SrcEngine");
var ele = window.document.getElementById("srcEngine");
var index=ele.selectedIndex;
var options=ele.options;
var option = options[index];
console.log("SrcEngine value=" + option);
console.log("srcEngine value=" + option);
let destFormatHtml = "<option name=\"ONNX\" value=\"0\">ONNX</option>";
let netOutputFormatHtml = "<option name=\"FP32\" value=\"0\">FP32</option>";
if(option==null || option =="undefined" || option.value == 0){
destFormatHtml += "<option name=\"TensorRT\" value=\"1\">TensorRT</option>"
netOutputFormatHtml += "<option name=\"FP16\" value=\"1\">FP16</option>";
}
$('#DestFormat').html(destFormatHtml);
$('#NetOutputFormat').html(netOutputFormatHtml);
$('#destFormat').html(destFormatHtml);
$('#netOutputFormat').html(netOutputFormatHtml);
}
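// Example (illustrative): with srcEngine set to PyTorch (value 0) the function above offers
// ONNX/TensorRT as output formats and FP32/FP16 as net output formats; for any other source
// engine only ONNX and FP32 remain selectable.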
function loadModelList(){
@@ -509,7 +509,7 @@
if(modelId ==null || modelId ==""){
console.log("modelId is null");
}else{
$.get(`${repolink}/modelmanage/query_modelfile_for_predict?ID=${modelId}`, (data) => {
$.get(`${repolink}/modelmanage/query_modelfile_for_predict?id=${modelId}`, (data) => {
const n_length = data.length
let file_html=''
let firstFileName =''
@@ -526,7 +526,7 @@
}
$("#model-file").append(file_html)
$('#choice_file .default.text').text(firstFileName)
$('#choice_file input[name="ModelFile"]').val(firstFileName)
$('#choice_file input[name="modelFile"]').val(firstFileName)
})

}
@@ -550,19 +550,19 @@
n_length = versionList.length
let train_html=''
for (let i=0;i<n_length;i++){
train_html += `<div class="item" data-value="${versionList[i].ID}">${versionList[i].Version}</div>`
train_html += `<div class="item" data-value="${versionList[i].id}">${versionList[i].version}</div>`
train_html += '</div>'
}
$("#model-version").append(train_html)
$('#choice_version .default.text').text(versionList[0].Version)
$('#choice_version input[name="ModelVersion"]').val(versionList[0].ID)
loadModelFile(versionList[0].ID);
$('#choice_version .default.text').text(versionList[0].version)
$('#choice_version input[name="modelVersion"]').val(versionList[0].id)
loadModelFile(versionList[0].id);
}
setEngineValue(value);
}
function setEngineValue(value){
$('#SrcEngine').dropdown('clear');
$('#srcEngine').dropdown('clear');
console.log("setEngineValue value=" + value);
let html = ""
html +="<option name=\"PyTorch\" " + getSelected(0,value) + " value=\"0\">PyTorch</option>";
@@ -570,7 +570,8 @@
html +="<option name=\"MindSpore\" " + getSelected(2,value) + " value=\"2\">MindSpore</option>";
html +="<option name=\"PaddlePaddle\" " + getSelected(4,value) + " value=\"4\">PaddlePaddle</option>";
html +="<option name=\"MXNet\" " + getSelected(6,value) + " value=\"6\">MXNet</option>";
$('#SrcEngine').html(html);

$('#srcEngine').html(html);
srcEngineChanged();
}
function getSelected(engineOption, modelName){
@@ -580,13 +581,13 @@
let nameMap = modelData.nameMap
let versionList = nameMap[modelName]
if(versionList != null && versionList.length >0){
if(versionList[0].Engine == engineOption){
if(versionList[0].engine == engineOption){
return "selected=\"selected\"";
}else{
if((versionList[0].Engine==122 || versionList[0].Engine==37) && engineOption==2){
if((versionList[0].engine==122 || versionList[0].engine==37) && engineOption==2){
return "selected=\"selected\"";
}
if((versionList[0].Engine==121 || versionList[0].Engine==38) && engineOption==1){
if((versionList[0].engine==121 || versionList[0].engine==38) && engineOption==1){
return "selected=\"selected\"";
}
}


+ 10
- 0
templates/repo/modelmanage/create_local_1.tmpl

@@ -0,0 +1,10 @@
{{template "base/head" .}}
<link rel="stylesheet" href="{{StaticUrlPrefix}}/css/vp-modelmanage-local-create-1.css?v={{MD5 AppVer}}" />
<div class="repository release dataset-list view">
{{template "repo/header" .}}
<div class="ui container">
<div id="__vue-root"></div>
</div>
</div>
<script src="{{StaticUrlPrefix}}/js/vp-modelmanage-local-create-1.js?v={{MD5 AppVer}}"></script>
{{template "base/footer" .}}

+ 11
- 0
templates/repo/modelmanage/create_local_2.tmpl

@@ -0,0 +1,11 @@
{{template "base/head" .}}
<link rel="stylesheet" href="{{StaticUrlPrefix}}/css/vp-modelmanage-local-create-2.css?v={{MD5 AppVer}}" />
<div class="repository release dataset-list view">
{{template "repo/header" .}}
<script>var MAX_MODEL_SIZE = {{ .max_model_size }};</script>
<div class="ui container">
<div id="__vue-root"></div>
</div>
</div>
<script src="{{StaticUrlPrefix}}/js/vp-modelmanage-local-create-2.js?v={{MD5 AppVer}}"></script>
{{template "base/footer" .}}

+ 581
- 0
templates/repo/modelmanage/create_online.tmpl

@@ -0,0 +1,581 @@
{{template "base/head" .}}
<link rel="stylesheet" href="/self/ztree/css/zTreeStyle/zTreeStyle.css" type="text/css">
<style>
#newmodel .header {
height: 45px;
border: 1px solid #d4d4d5;
border-radius: 5px 5px 0 0;
font-size: 14px;
background: #f0f0f0;
display: flex;
align-items: center;
}
#newmodel .content {
margin-top: -1px;
border: 1px solid #d4d4d5;
border-top: none;
}
.inline.fields .right.aligned label{
width: 100% !important;
text-align: right;
}
.inline .ui.dropdown .text {
color: rgba(0, 0, 0, .87) !important;
max-width: 360px;
}
.newtext{
margin-left: 12px !important
}
.menuContent{
position: absolute;
background: #ffffff;
left: 0;
right: 26px;
top: 36px;
z-index:999;
border: 1px solid #96c8da;
border-top: 0;
border-bottom-right-radius: 4px;
border-bottom-left-radius: 4px;
box-shadow: 0 2px 3px 0 rgb(34 36 38 / 15%);
}
</style>
<div id="mask">
<div id="loadingPage">
<div class="rect1"></div>
<div class="rect2"></div>
<div class="rect3"></div>
<div class="rect4"></div>
<div class="rect5"></div>
</div>
</div>
{{$repository := .Repository.ID}}
<div class="repository release dataset-list view">
{{template "repo/header" .}}
<div class="ui container">
<div id="newmodel">
<div class="ui second">
<div class="header" style="padding: 1rem;background-color: rgba(240, 240, 240, 100);">
<h4 id="model_header">{{.i18n.Tr "repo.model.manage.import_online_model"}}</h4>
</div>
<div class="content content-padding">
<form id="formId" class="ui form dirty">
<input class="ays-ignore" type="hidden" name="initModel" value="{{$.MODEL_COUNT}}">
<div class="ui error message"></div>
<input class="ays-ignore" type="hidden" name="_csrf" value="">
<div class="inline fields">
<div class="required two wide field right aligned">
<label for="jobId">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</label>
</div>
<div class="required thirteen wide inline field">
<div class="ui dropdown selection search loading" id="choice_model">
<input class="ays-ignore" type="hidden" id="jobId" name="jobId" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-name">
</div>
</div>
<label for="versionName">{{.i18n.Tr "repo.model.manage.version"}}</label>
<span>&nbsp;</span>
<div class="ui dropdown selection search" id="choice_version">
<input class="ays-ignore" type="hidden" id="versionName" name="versionName" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.version"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-version">

</div>
</div>
</div>
</div>
<div class="required inline fields" id="modelname">
<div class="two wide field right aligned">
<label for="name">{{.i18n.Tr "repo.model.manage.model_name"}}</label>
</div>
<div class="eight wide field">
<input class="ays-ignore" id="name" name="name" required maxlength="25" onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
</div>
<div class="required inline fields" id="verionName" style="display:none;">
<div class="two wide field right aligned">
<label for="version">{{.i18n.Tr "repo.model.manage.version"}}</label>
</div>
<div class="eight wide field">
<input class="ays-ignore" id="version" name="version" value="" readonly required maxlength="255">
</div>
</div>
<div class="unite min_title inline fields required">
<div class="two wide field right aligned">
<label for="Engine">{{.i18n.Tr "repo.model.manage.engine"}}</label>
</div>
<div class="ui ten wide field dropdown selection" id="choice_Engine">
<input class="ays-ignore" type="hidden" id="engine" name="engine" required>
<div class="default text newtext">{{.i18n.Tr "repo.model.manage.select.engine"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-Engine">

</div>
</div>
</div>
<div class="unite min_title inline fields required">
<div class="two wide field right aligned">
<label for="modelSelectedFile">{{.i18n.Tr "repo.model.manage.modelfile"}}</label>
</div>
<div class="thirteen wide field" style="position:relative">
<input class="ays-ignore" id="modelSelectedFile" type="text" readonly required onclick="showMenu();" name="modelSelectedFile" >
<div id="menuContent" class="menuContent" style="display:none;">
<ul id="treeDemo" class="ztree"></ul>
</div>
</div>
</div>
<div class="inline fields">
<div class="two wide field right aligned">
<label for="Label">{{.i18n.Tr "repo.model.manage.modellabel"}} &nbsp</label>
</div>
<div class="thirteen wide field">
<input class="ays-ignore" id="label" name="label" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
</div>
<div class="inline fields">
<div class="two wide field right aligned">
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}} &nbsp</label>
</div>
<div class="thirteen wide field">
<textarea id="description" class="ays-ignore" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
onkeyup="this.value=this.value.substring(0, 256)"></textarea>
</div>
</div>
</form>
<div class="inline field" style="margin-left:140px;margin-top:28px;">
<button id="submitId" type="button" class="ui create_train_job green button" onclick="submitSaveModel()"
style="">
{{.i18n.Tr "repo.model.manage.sava_model"}}
</button>
<button style="margin-left:0px;" class="ui button cancel" onclick="backToModelListPage()">{{.i18n.Tr "repo.cloudbrain.cancel"}}</button>
</div>
</div>
</div>
</div>
</div>
</div>
{{template "base/footer" .}}
<script type="text/javascript" src="/self/ztree/js/jquery.ztree.core.js"></script>
<script type="text/javascript" src="/self/ztree/js/jquery.ztree.excheck.js"></script>
<script>
;(function() {
var setting = {
check: {
enable: true,
chkboxType: {"Y":"ps", "N":"ps"}
},
view: {
dblClickExpand: false
},
callback: {
beforeClick: beforeClick,
onCheck: onCheck
}
};

function beforeClick(treeId, treeNode) {
var zTree = $.fn.zTree.getZTreeObj("treeDemo");
zTree.checkNode(treeNode, !treeNode.checked, null, true);
return false;
}
function onCheck(e, treeId, treeNode) {
var zTree = $.fn.zTree.getZTreeObj("treeDemo"),
nodes = zTree.getCheckedNodes(true),
v = "";
for (var i=0, l=nodes.length; i<l; i++) {
if(nodes[i].isParent){
continue;
}
var pathNodes = nodes[i].getPath();
var path ="";
for(var j=0;j<pathNodes.length;j++){
if(j ==0){
path += pathNodes[j].name;
}else{
path += "/" + pathNodes[j].name;
}
}
v += path + ";";
}
if (v.length > 0 ) v = v.substring(0, v.length-1);
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", v);
}
function showMenu() {
var cityObj = $("#modelSelectedFile");
var cityOffset = $("#modelSelectedFile").offset();
// $("#menuContent").css({left:cityOffset.left + "px", top:cityOffset.top + cityObj.outerHeight() + "px"}).slideDown("fast");
$("#menuContent").slideDown("fast");

$("body").bind("mousedown", onBodyDown);
}
window.showMenu = showMenu;

function hideMenu() {
$("#menuContent").fadeOut("fast");
$("body").unbind("mousedown", onBodyDown);
}
function onBodyDown(event) {
if (!(event.target.id == "menuBtn" || event.target.id == "modelSelectedFile" || event.target.id == "menuContent" || $(event.target).parents("#menuContent").length>0)) {
hideMenu();
}
}

$(document).ready(function(){
//$.fn.zTree.init($("#treeDemo"), setting, zNodes);
});
let repolink = {{.RepoLink }}
let repoId = {{ $repository }}
const { _AppSubUrl, _StaticUrlPrefix, csrf } = window.config;
$('input[name="_csrf"]').val(csrf)
let modelData;
function createModelName() {
let repoName = location.pathname.split('/')[2]
let modelName = repoName + '_model_' + Math.random().toString(36).substr(2, 4)
$('#name').val(modelName)
$('#version').val("0.0.1")
}
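// Example (illustrative): on /owner/myrepo/modelmanage/create_online_model this pre-fills a
// name like 'myrepo_model_x4f2' (random 4-character suffix) and the default version '0.0.1'.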
let dirKey="isOnlyDir--:&";
/*
function showcreate(obj) {
$('.ui.modal.second')
.modal({
centered: false,
onShow: function () {
$('input[name="version"]').addClass('model_disabled')
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
$("#job-name").empty()
createModelName()
loadTrainList()
},
onHide: function () {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
$('#choice_model').dropdown('clear')
$('#choice_version').dropdown('clear')
$('#choice_Engine').dropdown('clear')
$('.ui.dimmer').css({ "background-color": "" })
$('.ui.error.message').text()
$('.ui.error.message').css('display', 'none')

}
})
.modal('show')
}
*/
$('input[name="version"]').addClass('model_disabled')
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
$("#job-name").empty()
createModelName()
loadTrainList()

$(function () {
$('#choice_model').dropdown({
onChange: function (value) {
$("#choice_version").addClass("loading")
$('#choice_version').dropdown('clear')
$("#job-version").empty()
loadTrainVersion(value)
}
})

$('#choice_version').dropdown({
onChange: function (value) {
console.log("model version:" + value);
if (modelData != null) {
for (var i = 0; i < modelData.length; i++) {
if (modelData[i].VersionName == value) {
setEngine(modelData[i]);
loadModelFile(modelData[i]);
break;
}
}
}
}
})
});

function versionAdd(version) {
let versionArray = version.split('.')
if (versionArray[2] == '9') {
if (versionArray[1] == '9') {
versionArray[0] = String(Number(versionArray[0]) + 1)
versionArray[1] = '0'
} else {
versionArray[1] = String(Number(versionArray[1]) + 1)
}
versionArray[2] = '0'
} else {
versionArray[2] = String(Number(versionArray[2]) + 1)
}
return versionArray.join('.')
}
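// Example (illustrative): versionAdd('0.0.3') returns '0.0.4', and versionAdd('0.0.9') rolls the
// patch digit over to give '0.1.0'.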

function loadTrainList() {
$.get(`${repolink}/modelmanage/query_train_job?repoId=${repoId}`, (data) => {

const n_length = data.length
if(n_length > 0){
let train_html = ''
for (let i = 0; i < n_length; i++) {
train_html += `<div class="item" data-value="${data[i].JobID}">${data[i].DisplayJobName}</div>`
train_html += '</div>'
}
$("#job-name").append(train_html)
$("#choice_model").removeClass("loading")
$('#choice_model .default.text').text(data[0].DisplayJobName)
$('#choice_model input[name="jobId"]').val(data[0].JobID)
loadTrainVersion()
}else{
$("#choice_model").removeClass("loading")
}
})
}

function loadTrainVersion(value) {
let tmp = $('#choice_model input[name="jobId"]').val();
let jobId = !value ? $('#choice_model input[name="jobId"]').val() : value
$.get(`${repolink}/modelmanage/query_train_job_version?jobId=${jobId}`, (data) => {
const n_length = data.length
let train_html = '';
modelData = data;
for (let i = 0; i < n_length; i++) {
var VersionName = data[i].VersionName || 'V0001';
train_html += `<div class="item" data-value="${VersionName}">${VersionName}</div>`
train_html += '</div>'
}
if (data.length) {
$("#job-version").append(train_html)
$("#choice_version").removeClass("loading")
var versionName = data[0].VersionName;
if (versionName == null || versionName == "") {
versionName = "V0001";
}
$('#choice_version .default.text').text(versionName)
$('#choice_version input[name="versionName"]').val(versionName)
setEngine(data[0])
loadModelFile(data[0])
}

})
}

function loadModelFile(trainJob){
console.log("trainJob=", trainJob);
$('#choice_file').dropdown('clear')
$("#model-file").empty()
if(trainJob ==null || trainJob ==""){
console.log("trainJob is null");
}else{
let type = trainJob.Type;
if(type == 2){
if(trainJob.ComputeResource=="NPU"){
type=1;
}else{
type=0;
}
}
$.get(`${repolink}/modelmanage/query_train_model?jobName=${trainJob.JobName}&type=${type}&versionName=${trainJob.VersionName}`, (data) => {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
const n_length = data.length
let file_html=''
let firstFileName =''
var zNodes=[];
var nodesMap={};
for (let i=0;i<n_length;i++){
var parentNodeMap = nodesMap;
var fileSplits = data[i].FileName.split("/");
for(let j=0;j < fileSplits.length;j++){
if(fileSplits[j] == ""){
break;
}
if(parentNodeMap[fileSplits[j]] == null){
parentNodeMap[fileSplits[j]] = {};
}
parentNodeMap = parentNodeMap[fileSplits[j]];
}
}
for (let i=0;i<n_length;i++){
var parentNodeMap = nodesMap;
var fileSplits = data[i].FileName.split("/");
for(let j=0;j < fileSplits.length;j++){
if(fileSplits[j] == ""){
if(data[i].FileName[data[i].FileName.length -1] =="/"){
if(Object.keys(parentNodeMap).length ==0){
parentNodeMap[dirKey]="true";
}
}
break;
}
parentNodeMap = parentNodeMap[fileSplits[j]];
}
}
convertToNode(zNodes,nodesMap);
$.fn.zTree.init($("#treeDemo"), setting, zNodes);
})
}
}

function convertToNode(nodeList,nodesMap){
var keyList = Object.keys(nodesMap);
keyList.sort(function(a,b){
return a-b;
});
var isFirst = true;
for(var i=0; i<keyList.length;i++){
var node = {};
node["name"] = keyList[i];
nodeList.push(node);
if(nodesMap[keyList[i]] != null && Object.keys(nodesMap[keyList[i]]).length >0){
if(nodesMap[keyList[i]][dirKey] != null){
node["open"] = false;
node["isParent"] = true;
}else{
node["children"]=[];
if(isFirst){
node["open"] = true;
isFirst= false;
}
convertToNode(node["children"],nodesMap[keyList[i]]);
}
}
}
}
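// Example (illustrative): model files ['ckpt/model.pth', 'config.json'] become zTree nodes
// [{name:'ckpt', open:true, children:[{name:'model.pth'}]}, {name:'config.json'}]; entries that
// ended with '/' in the listing carry the dirKey marker and are rendered as empty parent folders.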

function setEngine(trainJob) {
console.log("trainJob=", trainJob);
$('#choice_Engine').dropdown('clear')
$("#job-Engine").empty()
if (trainJob.EngineName != null && trainJob.EngineName != "") {
srcEngine = trainJob.EngineName.split('-')[0]
srcEngine = srcEngine.trim().toLowerCase();
let selectedText = "PyTorch";
let selectedValue = 0;
let itemHtml = "<option class=\"item\" data-value=\"0\">PyTorch</option>";
if (srcEngine == 'tensorflow') {
selectedText = "TensorFlow";
selectedValue = 1;
itemHtml += "<option class=\"active item\" data-value=\"1\">TensorFlow</option>";
} else {
itemHtml += "<option class=\"item\" data-value=\"1\">TensorFlow</option>";
}
if (srcEngine == 'mindspore') {
selectedText = "MindSpore";
selectedValue = 2;
itemHtml += "<option class=\"active item\" data-value=\"2\">MindSpore</option>";
} else {
itemHtml += "<option class=\"item\" data-value=\"2\">MindSpore</option>";
}
itemHtml += "<option class=\"item\" data-value=\"4\">PaddlePaddle</option>"
itemHtml += "<option class=\"item\" data-value=\"5\">OneFlow</option>"
itemHtml += "<option class=\"item\" data-value=\"6\">MXNet</option>"
itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"

$('#choice_Engine .default.text').text(selectedText)
$('#choice_Engine input[name="engine"]').val(selectedValue)
$("#job-Engine").append(itemHtml);
$("#choice_Engine").removeClass('disabled');
} else {
let itemHtml = "<option class=\"active item\" data-value=\"0\">PyTorch</option>";
itemHtml += "<option class=\"item\" data-value=\"1\">TensorFlow</option>"
itemHtml += "<option class=\"item\" data-value=\"2\">MindSpore</option>"
itemHtml += "<option class=\"item\" data-value=\"4\">PaddlePaddle</option>"
itemHtml += "<option class=\"item\" data-value=\"5\">OneFlow</option>"
itemHtml += "<option class=\"item\" data-value=\"6\">MXNet</option>"
itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"
$('#choice_Engine .default.text').text("PyTorch");
$('#choice_Engine input[name="engine"]').val(0)
$("#job-Engine").append(itemHtml);
$("#choice_Engine").removeClass('disabled');
}
}
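// For reference (derived from the <option> values above and shown as an
// illustrative lookup, not code used by this page): the hidden "engine" field
// submits 0 PyTorch, 1 TensorFlow, 2 MindSpore, 3 Other, 4 PaddlePaddle,
// 5 OneFlow, 6 MXNet; setEngine() preselects it from the training job's
// EngineName prefix and defaults to PyTorch.
// var ENGINE_VALUES = { pytorch: 0, tensorflow: 1, mindspore: 2, other: 3,
//                       paddlepaddle: 4, oneflow: 5, mxnet: 6 };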

function check() {
let jobid = document.getElementById("jobId").value;
let versionname = document.getElementById("versionName").value;
let name = document.getElementById("name").value;
let version = document.getElementById("version").value;
let modelSelectedFile = document.getElementById("modelSelectedFile").value;
if (name == "") {
$("#modelname").closest('.required').addClass("error");
return false;
} else {
$("#modelname").closest('.required').removeClass("error");
}
if (versionname == "") {
$("#verionname").closest('.required').addClass("error");
return false;
} else {
$("#verionname").closest('.required').removeClass("error");
}
if (jobid == "") {
$("#jobId").closest('.required').addClass("error");
return false;
} else {
$("#jobId").closest('.required').removeClass("error");
}
if (modelSelectedFile == "") {
$("#modelSelectedFile").closest('.required').addClass("error");
return false;
} else {
$("#modelSelectedFile").closest('.required').removeClass("error");
}
if (versionname == "") {
$("#versionName").closest('.required').addClass("error");
return false;
} else {
$("#versionName").closest('.required').removeClass("error");
}
return true;
}

function submitSaveModel() {
let flag = check();
if (!flag) return false;
$(".ui.error.message").hide();
let cName = $("input[name='name']").val();
let version = $("input[name='version']").val();
let data = $("#formId").serialize();
const initModel = $("input[name='initModel']").val();
let url_href = location.href.split("create_online_model")[0] + 'create_new_model';
$("#mask").css({ display: "block", "z-index": "9999" });
$.ajax({
url: url_href,
type: "POST",
data: data,
success: function (res) {
backToModelListPage();
},
error: function (xhr) {
// Runs only when the request fails (status code is not 200); the loading mask is hidden in complete() below.
$(".ui.error.message").text(xhr.responseText);
$(".ui.error.message").show();
},
complete: function (xhr) {
$("#mask").css({ display: "none", "z-index": "1" });
},
});
}
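// Sketch of the request this handler sends (field names come from the form
// inputs in this template; values below are placeholders): the serialized
// #formId data is POSTed to <repolink>/modelmanage/create_new_model, e.g.
//   jobId=...&versionName=V0001&name=my-model&version=...&engine=0
//   &label=...&description=...&modelSelectedFile=...
// and on success backToModelListPage() returns to .../modelmanage/show_model.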

function backToModelListPage() {
let url_href = location.href.split("create_online_model")[0] + 'show_model';
window.location.href = url_href;
}
window.submitSaveModel = submitSaveModel;
window.backToModelListPage = backToModelListPage;
})();
</script>

+53 -28  templates/repo/modelmanage/index.tmpl

@@ -25,6 +25,23 @@
border-bottom-left-radius: 4px;
box-shadow: 0 2px 3px 0 rgb(34 36 38 / 15%);
}
.m-blue-btn {
background-color: rgb(22, 132, 252) !important;
}
.m-blue-btn:hover {
background-color: #66b1ff !important;
color: #fff;
}

.m-blue-btn:focus {
background-color: #66b1ff !important;
color: #fff;
}

.m-blue-btn:active {
background-color: #3a8ee6 !important;
color: #fff;
}
</style>
<link rel="stylesheet" href="/self/ztree/css/zTreeStyle/zTreeStyle.css" type="text/css">

@@ -57,8 +74,10 @@
</div>
<div class="column right aligned">
<!-- -->
<a class="ui button {{if .Permission.CanWrite $.UnitTypeModelManage}} blue m-blue-btn {{else}} disabled {{end}}"
href="{{.RepoLink}}/modelmanage/create_local_model_1">{{$.i18n.Tr "repo.model.manage.import_local_model"}}</a>
<a class="ui button {{if .Permission.CanWrite $.UnitTypeModelManage}} green {{else}} disabled {{end}}"
onclick="showcreate(this)">{{$.i18n.Tr "repo.model.manage.import_new_model"}}</a>
href="{{.RepoLink}}/modelmanage/create_online_model">{{$.i18n.Tr "repo.model.manage.import_online_model"}}</a>
</div>
</div>
{{if eq $.MODEL_COUNT 0}}
@@ -66,6 +85,7 @@
<div class="ui icon header bgtask-header-pic"></div>
<div class="bgtask-content-header">{{$.i18n.Tr "repo.model.manage.notcreatemodel"}}</div>
<div class="bgtask-content">
<!--
{{if $.RepoIsEmpty}}
<div class="bgtask-content-txt">{{$.i18n.Tr "repo.model.manage.init1"}}<a href="{{.RepoLink}}">{{$.i18n.Tr "repo.model.manage.init2"}}</a></div>
{{end}}
@@ -73,6 +93,8 @@
<div class="bgtask-content-txt">{{$.i18n.Tr "repo.model.manage.createtrainjob_tip"}}<a
href="{{.RepoLink}}/modelarts/train-job">&nbsp;{{$.i18n.Tr "repo.model.manage.createtrainjob"}}</a></div>
{{end}}
-->
<div class="bgtask-content-txt">{{$.i18n.Tr "repo.model.manage.createmodel_tip"}}<a href="{{.RepoLink}}/modelarts/train-job">&nbsp;{{$.i18n.Tr "repo.model.manage.createtrainjob"}}</a></div>
<div class="bgtask-content-txt">{{$.i18n.Tr "repo.platform_instructions1"}}<a href="https://git.openi.org.cn/zeizei/OpenI_Learning">&nbsp;{{$.i18n.Tr "repo.platform_instructions2"}}&nbsp;</a>{{$.i18n.Tr "repo.platform_instructions3"}}</div>

</div>
@@ -138,20 +160,20 @@
<input type="hidden" name="_csrf" value="">
<div class="inline fields">
<div class="required two wide field right aligned">
<label for="JobId">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</label>
<label for="jobId">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</label>
</div>
<div class="required thirteen wide inline field">
<div class="ui dropdown selection search loading" id="choice_model">
<input type="hidden" id="JobId" name="JobId" required>
<input type="hidden" id="jobId" name="jobId" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-name">
</div>
</div>
<label for="VersionName">{{.i18n.Tr "repo.model.manage.version"}}</label>
<label for="versionName">{{.i18n.Tr "repo.model.manage.version"}}</label>
<span>&nbsp;</span>
<div class="ui dropdown selection search" id="choice_version">
<input type="hidden" id="VersionName" name="VersionName" required>
<input type="hidden" id="versionName" name="versionName" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.version"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-version">
@@ -162,18 +184,18 @@
</div>
<div class="required inline fields" id="modelname">
<div class="two wide field right aligned">
<label for="Name">{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<label for="name">{{.i18n.Tr "repo.model.manage.model_name"}}</label>
</div>
<div class="eight wide field">
<input id="name" name="Name" required maxlength="25" onkeyup="this.value=this.value.replace(/[, ]/g,'')">
<input id="name" name="name" required maxlength="25" onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
</div>
<div class="required inline fields" id="verionname">
<div class="required inline fields" id="verionName">
<div class="two wide field right aligned">
<label for="Version">{{.i18n.Tr "repo.model.manage.version"}}</label>
<label for="version">{{.i18n.Tr "repo.model.manage.version"}}</label>
</div>
<div class="eight wide field">
<input id="version" name="Version" value="" readonly required maxlength="255">
<input id="version" name="version" value="" readonly required maxlength="255">
</div>
</div>

@@ -182,7 +204,7 @@
<label for="Engine">{{.i18n.Tr "repo.model.manage.engine"}}</label>
</div>
<div class="ui ten wide field dropdown selection search" id="choice_Engine">
<input type="hidden" id="Engine" name="Engine" required>
<input type="hidden" id="engine" name="engine" required>
<div class="default text newtext">{{.i18n.Tr "repo.model.manage.select.engine"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-Engine">
@@ -209,7 +231,7 @@
<label for="Label">{{.i18n.Tr "repo.model.manage.modellabel"}} &nbsp</label>
</div>
<div class="thirteen wide field">
<input id="label" name="Label" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
<input id="label" name="label" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
</div>
<div class="inline fields">
@@ -217,7 +239,7 @@
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}} &nbsp</label>
</div>
<div class="thirteen wide field">
<textarea id="Description" name="Description" rows="3"
<textarea id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -331,7 +353,7 @@
centered: false,
onShow: function () {
$('#model_header').text({{.i18n.Tr "repo.model.manage.import_new_model"}})
$('input[name="Version"]').addClass('model_disabled')
$('input[name="version"]').addClass('model_disabled')
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
$("#job-name").empty()
createModelName()
@@ -368,7 +390,7 @@
console.log("model version:" + value);
if (modelData != null) {
for (var i = 0; i < modelData.length; i++) {
if (modelData[i].VersionName == value) {
if (modelData[i].versionName == value) {
setEngine(modelData[i])
loadModelFile(modelData[i])
break;
@@ -406,7 +428,7 @@
$("#job-name").append(train_html)
$("#choice_model").removeClass("loading")
$('#choice_model .default.text').text(data[0].DisplayJobName)
$('#choice_model input[name="JobId"]').val(data[0].JobID)
$('#choice_model input[name="jobId"]').val(data[0].JobID)
loadTrainVersion()
}else{
$("#choice_model").removeClass("loading")
@@ -414,13 +436,15 @@
})
}
function loadTrainVersion(value) {
let JobID = !value ? $('#choice_model input[name="JobId"]').val() : value
$.get(`${repolink}/modelmanage/query_train_job_version?JobID=${JobID}`, (data) => {
let tmp = $('#choice_model input[name="jobId"]').val();
let jobId = !value ? $('#choice_model input[name="jobId"]').val() : value
$.get(`${repolink}/modelmanage/query_train_job_version?jobId=${jobId}`, (data) => {
const n_length = data.length
let train_html = '';
modelData = data;
for (let i = 0; i < n_length; i++) {
train_html += `<div class="item" data-value="${data[i].VersionName}">${data[i].VersionName}</div>`
var VersionName = data[i].VersionName || 'V0001';
train_html += `<div class="item" data-value="${VersionName}">${VersionName}</div>`
train_html += '</div>'
}
if (data.length) {
@@ -431,7 +455,7 @@
versionName = "V0001";
}
$('#choice_version .default.text').text(versionName)
$('#choice_version input[name="VersionName"]').val(versionName)
$('#choice_version input[name="versionName"]').val(versionName)
setEngine(data[0])
loadModelFile(data[0])
}
@@ -453,7 +477,9 @@
type=0;
}
}
$.get(`${repolink}/modelmanage/query_train_model?jobName=${trainJob.JobName}&type=${type}&VersionName=${trainJob.VersionName}`, (data) => {
$.get(`${repolink}/modelmanage/query_train_model?jobName=${trainJob.JobName}&type=${type}&versionName=${trainJob.VersionName}`, (data) => {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
const n_length = data.length
let file_html=''
let firstFileName =''
@@ -518,12 +544,12 @@
}
}
}
function setEngine(modelVersion) {
console.log("modelVersion=" + modelVersion);
function setEngine(trainJob) {
console.log("trainJob=" + trainJob);
$('#choice_Engine').dropdown('clear')
$("#job-Engine").empty()
if (modelVersion.EngineName != null && modelVersion.EngineName != "") {
srcEngine = modelVersion.EngineName.split('-')[0]
if (trainJob.EngineName != null && trainJob.EngineName != "") {
srcEngine = trainJob.EngineName.split('-')[0]
srcEngine = srcEngine.trim().toLowerCase();
let selectedText = "PyTorch";
let selectedValue = 0;
@@ -548,7 +574,7 @@
itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"

$('#choice_Engine .default.text').text(selectedText)
$('#choice_Engine input[name="Engine"]').val(selectedValue)
$('#choice_Engine input[name="engine"]').val(selectedValue)
$("#job-Engine").append(itemHtml);
$("#choice_Engine").removeClass('disabled');
} else {
@@ -560,10 +586,9 @@
itemHtml += "<option class=\"item\" data-value=\"6\">MXNet</option>"
itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"
$('#choice_Engine .default.text').text("PyTorch");
$('#choice_Engine input[name="Engine"]').val(0)
$('#choice_Engine input[name="engine"]').val(0)
$("#job-Engine").append(itemHtml);
$("#choice_Engine").removeClass('disabled');
}
}
</script>

+5 -530  templates/repo/modelmanage/showinfo.tmpl

@@ -1,535 +1,10 @@
{{template "base/head" .}}
<div class="repository">
<link rel="stylesheet" href="{{StaticUrlPrefix}}/css/vp-modelmanage-common-detail.css?v={{MD5 AppVer}}" />
<div class="repository release dataset-list view">
{{template "repo/header" .}}
<style>
.model_header_text{
font-size: 14px;
color: #101010;
font-weight: bold;
}
.ti_form{
text-align: left;
max-width: 100%;
vertical-align: middle;
}
.ti-text-form-label {
padding-bottom: 20px;
padding-right: 20px;
color: #8a8e99;
font-size: 14px;
white-space: nowrap !important;
width: 80px;
line-height: 30px;
}
.ti-text-form-content {
line-height: 30px;
padding-bottom: 20px;
width: 100%;
}
.change-version{
min-width: auto !important;
border: 1px solid rgba(187, 187, 187, 100) !important;
border-radius: .38571429rem !important;
margin-left: 1.5em;
}
.title-word-elipsis{
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
width: 30%;
}
.word-elipsis{
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
padding-right: 80px;
}
.half-table{
width: 50%;
float: left;
}
.text-width80 {
width: 100px;
line-height: 30px;
}
.tableStyle{
width:100%;
table-layout: fixed;
}
.iword-elipsis{
display: inline-block;
width: 80%;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
</style>
<div class="ui container">
<h4 class="ui header" id="vertical-segment">
<!-- <a href="javascript:window.history.back();"><i class="arrow left icon"></i>返回</a> -->
<div class="ui breadcrumb">
<a class="section" href="{{$.RepoLink}}/modelmanage/show_model">
{{$.i18n.Tr "repo.model.manage.model_manage"}}
</a>
<div class="divider"> / </div>
<div class="active section">{{.name}}</div>
</div>
<select class="ui dropdown tiny change-version" id="dropdown" onchange="changeInfo(this.value)">
</select>
</h4>
<div id="showInfo" style="border:1px solid #e2e2e2;padding: 20px 60px;margin-top:24px">
<div class="ui pointing secondary menu" style="border-bottom: 1px solid rgba(34,36,38,.15);">
<a class="active item" data-tab="first">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>
<a class="item" data-tab="second">{{$.i18n.Tr "repo.model_download"}}</a>
</div>
<div class="ui tab active" data-tab="first">
<div class="half-table">
<span class="model_header_text">{{$.i18n.Tr "repo.model.manage.baseinfo"}}</span>
<table class="tableStyle" style="margin-top:20px;">
<tbody>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.model_name"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="ModelName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.version"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Version" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.migrate_items_labels"}}</td>
<td class="ti-text-form-content">
<div id="Label" style="overflow: hidden;width: 95%;">
</div>
</td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.model_size"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Size" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.createtime"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="CreateTime" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.description"}}</td>
<td class="ti-text-form-content" >
<div id="edit-td" style="display:flex">
<span id="Description" title="" class="iword-elipsis"></span>
<i id="edit-pencil" data-id="" data-desc="" class="pencil alternate icon" style="cursor:pointer;vertical-align: top;" id="editor" onclick="editorFn(this)"></i>
</div>
</td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job"}}</td>
<td class="ti-text-form-content word-elipsis">
<a id="DisplayJobNameHref" class="title" style="font-size: 14px;" target="_blank">
<span id="DisplayJobName" class="fitted" style="width: 90%;vertical-align: middle;"></span>
</a>
</td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.code_version"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="CodeBranch" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.start_file"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="BootFile" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.train_dataset"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="DatasetName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.run_parameter"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Parameters" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.AI_Engine"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="EngineName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.standard"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="FlavorName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.compute_node"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="WorkServerNumber" title=""></span></td>
</tr>
</tbody>
</table>
</div>
<div class="half-table">
<span class="model_header_text">{{$.i18n.Tr "repo.model.manage.model_accuracy"}}</span>
<table class="tableStyle" style="margin-top:20px;">
<tbody>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.Accuracy"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Accuracy" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">F1</td>
<td class="ti-text-form-content word-elipsis"><span id="F1" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.Precision"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Precision" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.Recall"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Recall" title=""></span></td>
</tr>
</tbody>
</table>
</div>
<div style="clear: both;"></div>
</div>
<div class="ui tab" data-tab="second">
<input type="hidden" name="model" value="-1">
<input type="hidden" name="modelback" value="-1">
<div class='ui breadcrumb model_file_bread' id='file_breadcrumb'>
<div class="active section"></div>
<div class="divider"> / </div>
</div>
<div id="dir_list">
</div>
</div>
</div>
</div>
<div id="__vue-root"></div>
</div>
</div>
<script src="{{StaticUrlPrefix}}/js/vp-modelmanage-common-detail.js?v={{MD5 AppVer}}"></script>
{{template "base/footer" .}}
<script>
let url = location.href.split('show_model')[0]
let trainJobUrl =url.split('modelmanage')[0]
let ID = location.search.split('?name=').pop()
$(document).ready(function(){
$('.secondary.menu .item').tab();
});
$(document).ready(loadInfo);
function changeInfo(version){
$.get(`${url}show_model_info_api?name=${ID}`,(data)=>{
let versionData = data.filter((item)=>{
return item.Version === version
})
let returnArray = []
returnArray = transObj(versionData)
let [initObj,initModelAcc,id] = returnArray
editorCancel('','')
renderInfo(initObj,initModelAcc,id)
loadModelFile(versionData[0].ID,versionData[0].Version,'','','init')
})
}
function loadInfo(){
$.get(`${url}show_model_info_api?name=${ID}`,(data)=>{
let html = ''
for (let i=0;i<data.length;i++){
if(!data[i].IsCanOper){
$("#edit-pencil").css("display","none")
}
html += `<option value="${data[i].Version}">${data[i].Version}</option>`
}
$('#dropdown').append(html)
let returnArray = []
returnArray = transObj(data)
let [initObj,initModelAcc,id] = returnArray
renderInfo(initObj,initModelAcc,id)
loadModelFile(data[0].ID,data[0].Version,'','','init')
})
}
function getEngineName(model){
if(model.Engine == 0){
return "PyTorch";
}else if(model.Engine == 1 || model.Engine == 121 || model.Engine == 38){
return "TensorFlow";
}else if(model.Engine == 2 || model.Engine == 122 || model.Engine == 35 || model.Engine == 37){
return "MindSpore";
}else if(model.Engine == 3){
return "Other";
}else if(model.Engine == 4){
return "PaddlePaddle";
}else if(model.Engine == 5){
return "OneFlow";
}else if(model.Engine == 6){
return "MXNet";
}
else{
return "Other"
}
}
function transObj(data){
let {ID,Name,Version,Label,Size,Description,CreatedUnix,Accuracy,CodeBranch,CodeCommitID,TrainTaskInfo} = data[0]
let modelAcc = JSON.parse(Accuracy)
TrainTaskInfo = JSON.parse(TrainTaskInfo)
// Parameters = JSON.parse(Parameters)
let {Parameters} = TrainTaskInfo
let EngineName = getEngineName(data[0])
Parameters = JSON.parse(Parameters)
Parameters = Parameters.parameter.length === 0 ? '--':Parameters.parameter
let size = tranSize(Size)
let time = transTime(CreatedUnix)
let initObj = {
ModelName:Name || '--',
Version:Version,
Label:Label || '--',
Size:size,
CreateTime:time,
Description:Description || '--',
CodeBranch:CodeBranch || '--',
CodeCommitID:CodeCommitID || '--',
BootFile:TrainTaskInfo.BootFile || '--',
DatasetName:TrainTaskInfo.DatasetName || '--',
Parameters:TrainTaskInfo.Parameters || '--',
FlavorName:TrainTaskInfo.FlavorName || '--',
WorkServerNumber:TrainTaskInfo.WorkServerNumber || '1',
Parameters:Parameters,
EngineName:EngineName,
DisplayJobName:TrainTaskInfo.DisplayJobName || '--',
TrainJobVersionName:TrainTaskInfo.VersionName || '',
CloudBrainJobID:TrainTaskInfo.JobID|| '',
CloudBrainType:TrainTaskInfo.Type,
}
let initModelAcc = {
Accuracy: modelAcc.Accuracy || '--',
F1: modelAcc.F1 || '--',
Precision:modelAcc.Precision || '--',
Recall: modelAcc.Recall || '--'
}
return [initObj,initModelAcc,ID]
}
function transTime(time){
let date = new Date(time * 1000);// 10-digit (second) timestamps need *1000; 13-digit (millisecond) ones do not
let Y = date.getFullYear() + '-';
let M = (date.getMonth()+1 < 10 ? '0'+(date.getMonth()+1):date.getMonth()+1) + '-';
let D = (date.getDate()< 10 ? '0'+date.getDate():date.getDate())+ ' ';
let h = (date.getHours() < 10 ? '0'+date.getHours():date.getHours())+ ':';
let m = (date.getMinutes() < 10 ? '0'+date.getMinutes():date.getMinutes()) + ':';
let s = date.getSeconds() < 10 ? '0'+date.getSeconds():date.getSeconds();
return Y+M+D+h+m+s;
}
function tranSize(value){
if(null==value||value==''){
return "0 Bytes";
}
var unitArr = new Array("Bytes","KB","MB","GB","TB","PB","EB","ZB","YB");
var index=0;
var srcsize = parseFloat(value);
index=Math.floor(Math.log(srcsize)/Math.log(1024));
var size =srcsize/Math.pow(1024,index);
size=size.toFixed(2);// number of decimal places to keep
return size+unitArr[index];
}
function editorFn(context){
let id= context.dataset.id
let text = context.dataset.desc
let textValue = text.replace(/enter;/g,'\r\n')
$('#edit-td').replaceWith(`<div id='edit-div' style='width:80%;display: inline-block;'><textarea id='textarea-value' value='' rows='3' maxlength='255' style='width:80%;white-space: nowrap;' id='edit-text'>${textValue}</textarea><i class='check icon' style='color: #50d4ab;' onclick='editorSure("${text}","${id}")'></i><i class='times icon' style='color: #f66f6a;' onclick='editorCancel("${text}","${id}")'></i></div>`);
}
function editorCancel(text,id){
let objkey = text.replace(/enter;/g,'\r\n')
$('#edit-div').replaceWith(`<div id="edit-td" style="display:flex;"><span id="Description" title="${objkey}" class="iword-elipsis">${objkey}</span><i id="edit-pencil" data-id="${id}" data-desc="${text}" class="pencil alternate icon" style="cursor:pointer;vertical-align: top;" id="editor" onclick="editorFn(this)"></div>`)
}
function editorSure(text,id){
let description=$('#textarea-value').val()
let sourcetext = $('#textarea-value').val().replace(/\n/g,'enter;')
let data = {
ID:id,
Description:description
}
$.ajax({
url:`${url}modify_model`,
type:'PUT',
data:data
}).done((res)=>{
$('#edit-div').replaceWith(`<div id="edit-td" style="display:flex;"><span id="Description" title="${description}" class="iword-elipsis">${description}</span><i id="edit-pencil" data-id="${id}" data-desc="${sourcetext}" class="pencil alternate icon" style="cursor:pointer;vertical-align: top;" id="editor" onclick="editorFn(this)"></div>`)
})
}
function renderInfo(obj,accObj,id){
for(let key in obj){
if(key==="Description"){
let descriptionText=obj[key].replace(/\r\n|\n/g,'enter;')
$(`#${key}`).text(obj[key])
$(`#${key}`).attr("title",obj[key])
$('#edit-pencil').attr("data-id",id)
$('#edit-pencil').attr("data-desc",descriptionText)
}
else if(key==="Label"){
$('#Label').empty()
if(obj[key]==='--'){
$('#Label').text(obj[key])
}else{
let labelArray = obj[key].trim().replace(/ +/g,' ').split(' ')
let html=''
for(let i=0;i<labelArray.length;i++){
html += `<a class="ui label" title="${labelArray[i]}">${labelArray[i]}</a>`
}
$('#Label').append(html)
}
}
else if(key==="CodeCommitID"){
let codeCommit = obj[key].slice(0,10)
let html = `<a style="margin-left:1rem" class="ui label" title="${codeCommit}">${codeCommit}</a>`
$('#CodeBranch').append(html)

}
else if(key==="DisplayJobName"){
let type=obj["CloudBrainType"]
let href=""
if(type==1){
href=trainJobUrl + "modelarts/train-job/" + obj["CloudBrainJobID"]
}else if(type==0){
href=trainJobUrl + "cloudbrain/train-job/" + obj["CloudBrainJobID"]
}else if(type==2){
href=trainJobUrl + "grampus/train-job/" + obj["CloudBrainJobID"]
}
$(`#DisplayJobNameHref`).attr("href",href)
$(`#DisplayJobNameHref`).attr("title",obj[key])
$(`#${key}`).text(obj[key])

let versionName = obj["TrainJobVersionName"]
if(versionName!=""){
let html = `<span style="margin-left:1rem" class="ui label">${versionName}</span>`
$('#DisplayJobName').append(html)
}
}
else if(key==="Parameters"){
if(obj[key]==='--'){
$(`#${key}`).text(obj[key])
}else{
const parameterArray = obj[key].map(element => {
let labelValue = `${element.label}=${element.value}`
return labelValue
});
const parameter = parameterArray.join('; ')
$(`#${key}`).text(parameter)
$(`#${key}`).attr("title",parameter)
}
}
else{
$(`#${key}`).text(obj[key])
$(`#${key}`).attr("title",obj[key])
}
}
for(let key in accObj){
$(`#${key}`).text(accObj[key])
$(`#${key}`).attr("title",accObj[key])
}
}

function loadModelFile(ID,version_name,parents,filename,init){
$.get(`${url}query_onelevel_modelfile?ID=${ID}&parentDir=${parents}`, (data) => {
$('#dir_list').empty()
renderDir(data,ID,version_name)
if(init==="init"){
$('input[name=model]').val("")
$('input[name=modelback]').val(version_name)
$('#file_breadcrumb').empty()
let htmlBread = ""
htmlBread += `<div class='active section'>${version_name}</div>`
htmlBread += "<div class='divider'> / </div>"
$('#file_breadcrumb').append(htmlBread)
}else{
renderBrend(ID,version_name,parents,filename,init)
}
})
}
function renderSize(value){
if(null==value||value==''){
return "0 Bytes";
}
var unitArr = new Array("Bytes","KB","MB","GB","TB","PB","EB","ZB","YB");
var index=0;
var srcsize = parseFloat(value);
index=Math.floor(Math.log(srcsize)/Math.log(1024));
var size =srcsize/Math.pow(1024,index);
size=size.toFixed(2);// number of decimal places to keep
return size+unitArr[index];
}

function renderBrend(ID,version_name,parents,filename,init){
if(init=="folder"){
let htmlBrend = ""
let sectionName=$('#file_breadcrumb .active.section').text()
let parents1 = $('input[name=model]').val()
let filename1 = $('input[name=modelback]').val()
if(parents1===""){
$('#file_breadcrumb .active.section').replaceWith(`<a class='section' onclick="loadModelFile('${ID}','${version_name}','${parents1}','','init')">${sectionName}</a>`)
}else{
$('#file_breadcrumb .active.section').replaceWith(`<a class='section' onclick="loadModelFile('${ID}','${version_name}','${parents1}','${filename1}')">${sectionName}</a>`)
}
htmlBrend += `<div class='active section'>${filename}</div>`
htmlBrend += "<div class='divider'> / </div>"
$('#file_breadcrumb').append(htmlBrend)
$('input[name=model]').val(parents)
$('input[name=modelback]').val(filename)
}else{
$('input[name=model]').val(parents)
$('input[name=modelback]').val(filename)
let selectEle = $('#file_breadcrumb a.section').filter(
(index, item) => {
return item.text == filename;
}
);
selectEle.nextAll().remove();
selectEle.after("<div class='divider'> / </div>");
selectEle.replaceWith(`<div class='active section'>${filename}</div>`);
}
}
function renderDir(data,ID,version_name){
let html=""
html += "<div class='ui grid' style='margin:0;'>"
html += "<div class='row' style='padding: 0;'>"
html += "<div class='ui sixteen wide column' style='padding:1rem;'>"
html += "<div class='dir list'>"
html += "<table id='repo-files-table' class='ui single line table pad20'>"
html += '<tbody>'
for(let i=0;i<data.length;i++){
let dirs_size = renderSize(data[i].Size)
html += "<tr>"
html += "<td class='name six wid'>"
html += "<span class='truncate'>"
html += "<span class='octicon octicon-file-directory'>"
html += "</span>"
if(data[i].IsDir){
html += `<a onclick="loadModelFile('${ID}','${version_name}','${data[i].ParenDir}','${data[i].FileName}','folder')">`
html += "<span class='fitted'><i class='folder icon' width='16' height='16' aria-hidden='true'></i>" + data[i].FileName + "</span>"
}else{
html += `<a href="${url}${ID}/downloadsingle?parentDir=${data[i].ParenDir}&fileName=${data[i].FileName}">`
html += "<span class='fitted'><i class='file icon' width='16' height='16' aria-hidden='true'></i>" + data[i].FileName + "</span>"
}
html += '</a>'
html += "</span>"
html += "</td>"
html += "<td class='message seven wide'>"
if(data[i].IsDir){
html += "<span class='truncate has-emoji'></span>"
}else{
html += "<span class='truncate has-emoji'>"+ `${dirs_size}` + "</span>"
}
html += "</td>"

html += "<td class='text right age three wide'>"
html += "<span class='truncate has-emoji'>" + data[i].ModTime + "</span>"
html += "</td>"
html += "</tr>"
}
html += "</tbody>"
html += "</table>"
html += "</div>"
html += "</div>"
html += "</div>"
html += "</div>"
$('#dir_list').append(html)
}
</script>

+3 -1  templates/repo/modelsafety/show.tmpl

@@ -861,8 +861,10 @@
$('td.ti-text-form-content.spec div').text(specStr);
SPEC && $('td.ti-text-form-content.resorce_type div').text(getListValueWithKey(ACC_CARD_TYPE, SPEC.AccCardType));
}
var oLogHref = $('#-log-down').attr('href');
var repoPath = {{$.RepoRelPath}};
var oLogHref = `/api/v1/repos/${repoPath}/cloudbrain`;
$('#-log-down').attr('href', oLogHref + `/${res.ID}/download_log_file`);
$('.full-log-dialog').attr('data-href', oLogHref + `/${res.ID}/download_log_file`);
if (res.ResultJson) {
try {
resultData = JSON.parse(res.ResultJson);


+2 -1  templates/repo/view_file.tmpl

@@ -1,4 +1,5 @@
<div class="{{TabSizeClass .Editorconfig .FileName}} non-diff-file-content">

<div class="{{TabSizeClass .Editorconfig .FileName}} non-diff-file-content gallery">
<h4 class="file-header ui top attached header">
<div class="file-header-left">
{{if .ReadmeInList}}


+7 -7  templates/user/dashboard/cloudbrains.tmpl

@@ -65,7 +65,7 @@
</div>
</div>
</div>
{{range .Tasks}}
{{range .Tasks}}
{{if .Repo}}
<div class="ui grid stackable item">
<div class="row">
@@ -154,7 +154,7 @@
<!-- AI computing center -->
<div class="one wide column text center nowrap" style="width:8% !important;">
<span style="font-size: 12px;" class="aicenter_{{.DisplayJobName}}_{{$JobID}}">{{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}}</span>
<span style="font-size: 12px;" class="aicenter_{{.DisplayJobName}}_{{$JobID}}" title="{{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}}">{{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}}</span>
</div>
<!-- XPU type -->
<div class="one wide column text center nowrap" style="width:10% !important;">
@@ -168,16 +168,16 @@
spanEl.setAttribute('title', cardType);
spanEl.innerText = cardType;
var cluster = spec.Cluster || '--';
var cluster = {{.Cluster}} || '--';
var clusterName = document.querySelector('.cloudbrain_debug').dataset['cluster' + cluster[0] + cluster.toLocaleLowerCase().slice(1)] || '--';
spanEl = document.querySelector('.cluster_{{.DisplayJobName}}_{{$JobID}}');
spanEl.setAttribute('title', cluster);
spanEl.innerText = clusterName;

var aiCenter = spec.AiCenterName || '--';
spanEl = document.querySelector('.aicenter_{{.DisplayJobName}}_{{$JobID}}');
spanEl.setAttribute('title', aiCenter);
spanEl.innerText = aiCenter;
// var aiCenter = spec.AiCenterName || '--';
// spanEl = document.querySelector('.aicenter_{{.DisplayJobName}}_{{$JobID}}');
// spanEl.setAttribute('title', aiCenter);
// spanEl.innerText = aiCenter;
})();
</script>
<!-- Project -->


+1 -1  templates/user/dashboard/navbar.tmpl

@@ -12,7 +12,7 @@
{{.i18n.Tr "home.switch_dashboard_context"}}
</div>
<div class="scrolling menu items">
<a class="{{if eq .ContextUser.ID .SignedUser.ID}}active selected{{end}} item" href="{{AppSubUrl}}/{{if .PageIsIssues}}issues{{else if .PageIsPulls}}pulls{{else if .PageIsMilestonesDashboard}}milestones{{end}}">
<a class="{{if eq .ContextUser.ID .SignedUser.ID}}active selected{{end}} item" href="{{AppSubUrl}}/{{if .PageIsIssues}}issues{{else if .PageIsPulls}}pulls{{else if .PageIsMilestonesDashboard}}milestones{{else}}dashboard{{end}}">
<img class="ui avatar image" src="{{.SignedUser.RelAvatarLink}}" width="28" height="28">
{{.SignedUser.Name}}
</a>


+124 -91  web_src/js/components/Model.vue

@@ -13,117 +13,118 @@
:header-cell-style="tableHeaderStyle"
>
<el-table-column
prop="Name"
prop="name"
:label="i18n.model_name"
align="left"
min-width="17%"
min-width="20%"
>
<template slot-scope="scope">
<div class="expand-icon" v-if="scope.row.hasChildren === false">
<i class="el-icon-arrow-right"></i>
</div>
<!-- <i class="el-icon-time"></i> -->
<span v-if="!scope.row.Children" :class="scope.row.modelType == '1' ? 'm-local' : 'm-online'">{{ scope.row.modelType == '1' ? i18n.local : i18n.online }}</span>
<a
class="text-over"
:href="showinfoHref + scope.row.Name"
:title="scope.row.Name"
>{{ scope.row.Name }}</a
:href="showinfoHref + encodeURIComponent(scope.row.name)"
:title="scope.row.name"
>{{ scope.row.name }}</a
>
</template>
</el-table-column>
<el-table-column
prop="Status"
prop="status"
:label="i18n.model_status"
align="center"
min-width="6.5%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.Status_title">
<i style="vertical-align: middle" :class="scope.row.Status"></i
<span class="text-over" :title="scope.row.status_title">
<i style="vertical-align: middle" :class="scope.row.status"></i
></span>
</template>
</el-table-column>
<el-table-column
prop="Version"
prop="version"
:label="i18n.model_version"
align="center"
min-width="6%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.Version">{{
scope.row.Version
<span class="text-over" :title="scope.row.version">{{
scope.row.version
}}</span>
</template>
</el-table-column>
<el-table-column
prop="VersionCount"
prop="versionCount"
:label="i18n.model_version_num"
align="center"
min-width="7%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.VersionCount">{{
scope.row.VersionCount
<span class="text-over" :title="scope.row.versionCount">{{
scope.row.versionCount
}}</span>
</template>
</el-table-column>

<el-table-column
prop="Size"
prop="size"
:label="i18n.model_size"
align="center"
min-width="10%"
>
<template slot-scope="scope">
<span class="text-over">{{ renderSize(scope.row.Size) }}</span>
<span class="text-over">{{ renderSize(scope.row.size) }}</span>
</template>
</el-table-column>
<el-table-column
prop="EngineName"
prop="engineName"
:label="i18n.model_egine"
align="center"
min-width="8%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.EngineName">{{
scope.row.EngineName
<span class="text-over" :title="scope.row.engineName">{{
scope.row.engineName
}}</span>
</template>
</el-table-column>
<el-table-column
prop="ComputeResource"
prop="computeResource"
:label="i18n.model_compute_resource"
align="center"
min-width="8%"
>
<template slot-scope="scope">
<span class="text-over">{{ scope.row.ComputeResource }}</span>
<span class="text-over">{{ scope.row.computeResource }}</span>
</template>
</el-table-column>
<el-table-column
prop="CreatedUnix"
prop="createdUnix"
:label="i18n.model_create_time"
align="center"
min-width="13.75%"
>
<template slot-scope="scope">
{{ transTime(scope.row.CreatedUnix) }}
{{ transTime(scope.row.createdUnix) }}
</template>
</el-table-column>
<el-table-column
prop="UserName"
prop="userName"
:label="i18n.model_creator"
align="center"
min-width="6.75%"
>
<template slot-scope="scope">
<a
:href="!scope.row.UserName ? '#' : '/' + scope.row.UserName"
:title="scope.row.UserName || defaultAvatarName"
:href="!scope.row.userName ? '#' : '/' + scope.row.userName"
:title="scope.row.userName || defaultAvatarName"
>
<img
class="ui avatar image"
:src="scope.row.UserRelAvatarLink || defaultAvatar"
:src="scope.row.userRelAvatarLink || defaultAvatar"
/>
</a>
</template>
@@ -131,37 +132,41 @@

<el-table-column
:label="i18n.model_operation"
min-width="17%"
min-width="15%"
align="center"
>
<template slot-scope="scope">
<div class="space-around">
<a
<div class="space-around" >
<!--<a
:style="{
visibility: !scope.row.Children ? 'visible' : 'hidden',
}"
:class="{ disabled: !scope.row.IsCanOper }"
:class="{ disabled: !scope.row.isCanOper }"
@click="
showcreateVue(
scope.row.Name,
scope.row.Version,
scope.row.Label
scope.row.name,
scope.row.version,
scope.row.label
)
"
>{{ i18n.model_create_new_ver }}</a
>
<a
:href="loadhref + scope.row.ID"
:class="{ disabled: !scope.row.IsCanOper }"
>{{ i18n.model_download }}</a
>
<a
:class="{ disabled: !scope.row.IsCanDelete }"
>-->
<a class="op-btn"
v-show="scope.row.modelType == 1"
:href="url + 'create_local_model_1?type=1&name=' + encodeURIComponent(scope.row.name) + '&id=' + scope.row.id"
:class="{ disabled: !scope.row.isCanOper }"
>{{ i18n.modify }}</a>
<a class="op-btn" v-show="scope.row.modelType != 1" style="color:transparent;cursor:default;" >{{ i18n.modify }}</a>
<a class="op-btn"
:href="loadhref + scope.row.id"
:class="{ disabled: !scope.row.isCanOper }"
>{{ i18n.model_download }}</a>
<a class="op-btn"
:class="{ disabled: !scope.row.isCanDelete }"
@click="
deleteModel(scope.row.ID, scope.row.cName, scope.row.rowKey)
deleteModel(scope.row.id, scope.row.cName, scope.row.rowKey)
"
>{{ i18n.model_delete }}</a
>
>{{ i18n.model_delete }}</a>
</div>
</template>
</el-table-column>
@@ -219,17 +224,18 @@ export default {
},
})
.then((res) => {
let TrainTaskInfo;
let trainTaskInfo;
let tableData;
tableData = res.data;
for (let i = 0; i < tableData.length; i++) {
TrainTaskInfo = JSON.parse(tableData[i].TrainTaskInfo);
tableData[i].EngineName = this.getEngineName(tableData[i]);
tableData[i].ComputeResource = TrainTaskInfo.ComputeResource;
tableData[i].cName = tableData[i].Name;
tableData[i].rowKey = tableData[i].ID + Math.random();
tableData[i].Name = "";
tableData[i].VersionCount = "";
trainTaskInfo = JSON.parse(tableData[i].trainTaskInfo || '{}');
tableData[i].engineName = this.getEngineName(tableData[i]);
// tableData[i].computeResource = trainTaskInfo.ComputeResource;
tableData[i].computeResource = tableData[i].type == '0' ? 'CPU/GPU' : 'NPU';
tableData[i].cName = tableData[i].name;
tableData[i].rowKey = tableData[i].id + Math.random();
tableData[i].name = "";
tableData[i].versionCount = "";
tableData[i].Children = true;
}
resolve(tableData || []);
@@ -258,10 +264,10 @@ export default {
centered: false,
onShow: function () {
$("#model_header").text(title);
$('input[name="Name"]').addClass("model_disabled");
$('input[name="Name"]').attr("readonly", "readonly");
$('input[name="name"]').addClass("model_disabled");
$('input[name="name"]').attr("readonly", "readonly");
$('input[name="modelSelectedFile"]').attr("readonly", "readonly");
$('input[name="Version"]').addClass("model_disabled");
$('input[name="version"]').addClass("model_disabled");
$(".ui.dimmer").css({
"background-color": "rgb(136, 136, 136,0.7)",
});
@@ -274,8 +280,8 @@ export default {
},
onHide: function () {
document.getElementById("formId").reset();
$('input[name="Name"]').removeClass("model_disabled");
$('input[name="Name"]').removeAttr("readonly");
$('input[name="name"]').removeClass("model_disabled");
$('input[name="name"]').removeAttr("readonly");
$('input[name="modelSelectedFile"]').removeAttr("readonly");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
@@ -290,8 +296,8 @@ export default {
.modal("show");
},
check() {
let jobid = document.getElementById("JobId").value;
let versionname = document.getElementById("VersionName").value;
let jobid = document.getElementById("jobId").value;
let versionname = document.getElementById("versionName").value;
let name = document.getElementById("name").value;
let version = document.getElementById("version").value;
let modelSelectedFile =
@@ -333,8 +339,8 @@ export default {
let context = this;
let flag = this.check();
if (flag) {
let cName = $("input[name='Name']").val();
let version = $("input[name='Version']").val();
let cName = $("input[name='name']").val();
let version = $("input[name='version']").val();
let data = $("#formId").serialize();
const initModel = $("input[name='initModel']").val();
let url_href =
@@ -387,7 +393,7 @@ export default {
let childrenIndex = store.states.lazyTreeNodeMap[
parentRow.rowKey
].findIndex((child) => child.rowKey == row.rowKey);
parentRow.VersionCount = parentRow.VersionCount - 1;
parentRow.versionCount = parentRow.versionCount - 1;
const parent = store.states.lazyTreeNodeMap[parentRow.rowKey];
if (parent.length === 1) {
this.getModelList();
@@ -398,7 +404,7 @@ export default {
}
},
deleteModel(id, name, rowKey) {
let row = { cName: name, ID: id, rowKey: rowKey };
let row = { cName: name, id: id, rowKey: rowKey };
let _this = this;
let flag = 1;
$(".ui.basic.modal.first")
@@ -410,7 +416,7 @@ export default {
_this.$axios
.delete(_this.url + "delete_model", {
params: {
ID: id,
id: id,
},
})
.then((res) => {
@@ -442,21 +448,21 @@ export default {
.modal("show");
},
getEngineName(model) {
if (model.Engine == 0) {
if (model.engine == 0) {
return "PyTorch";
} else if (model.Engine == 1 || model.Engine == 121) {
} else if (model.engine == 1 || model.engine == 121) {
return "TensorFlow";
} else if (
model.Engine == 2 ||
model.Engine == 122 ||
model.Engine == 35
model.engine == 2 ||
model.engine == 122 ||
model.engine == 35
) {
return "MindSpore";
} else if (model.Engine == 4) {
} else if (model.engine == 4) {
return "PaddlePaddle";
} else if (model.Engine == 5) {
} else if (model.engine == 5) {
return "OneFlow";
} else if (model.Engine == 6) {
} else if (model.engine == 6) {
return "MXNet";
} else {
return "Other";
@@ -474,40 +480,40 @@ export default {
try {
this.loadNodeMap.clear();
this.$axios
.get(location.href + "_api", {
.get(this.url + "show_model_api", {
params: this.params,
})
.then((res) => {
$(".ui.grid").removeAttr("style");
$("#loadContainer").removeClass("loader");
let TrainTaskInfo;
let trainTaskInfo;
this.tableData = res.data.data;
for (let i = 0; i < this.tableData.length; i++) {
TrainTaskInfo = JSON.parse(this.tableData[i].TrainTaskInfo);
this.tableData[i].cName = this.tableData[i].Name;
this.tableData[i].rowKey = this.tableData[i].ID + Math.random();
this.tableData[i].EngineName = this.getEngineName(
trainTaskInfo = JSON.parse(this.tableData[i].trainTaskInfo || '{}');
this.tableData[i].cName = this.tableData[i].name;
this.tableData[i].rowKey = this.tableData[i].id + Math.random();
this.tableData[i].engineName = this.getEngineName(
this.tableData[i]
);
this.tableData[i].ComputeResource = TrainTaskInfo.ComputeResource;
this.tableData[i].hasChildren =
res.data.data[i].VersionCount === 1 ? false : true;
if (this.tableData[i].Status !== 1) {
// this.tableData[i].computeResource = trainTaskInfo.ComputeResource;
this.tableData[i].computeResource = this.tableData[i].type == '0' ? 'CPU/GPU' : 'NPU';
this.tableData[i].hasChildren = res.data.data[i].versionCount === 1 ? false : true;
if (this.tableData[i].status !== 1) {
countStatus++;
}

switch (this.tableData[i].Status) {
switch (this.tableData[i].status) {
case 1:
this.tableData[i].Status = "WAITING";
this.tableData[i].Status_title = this.i18n.model_wait;
this.tableData[i].status = "WAITING";
this.tableData[i].status_title = this.i18n.model_wait;
break;
case 2:
this.tableData[i].Status = "FAILED";
this.tableData[i].Status_title = this.tableData[i].StatusDesc;
this.tableData[i].status = "FAILED";
this.tableData[i].status_title = this.tableData[i].statusDesc;
break;
default:
this.tableData[i].Status = "SUCCEEDED";
this.tableData[i].Status_title = this.i18n.model_success;
this.tableData[i].status = "SUCCEEDED";
this.tableData[i].status_title = this.i18n.model_success;
break;
}
}
@@ -531,7 +537,7 @@ export default {
},
computed: {
loadhref() {
return this.url + "downloadall?ID=";
return this.url + "downloadall?id=";
},
showinfoHref() {
return this.url + "show_model_info?name=";
@@ -615,6 +621,24 @@ export default {
white-space: nowrap;
}

.m-local {
background-color: rgb(22, 132, 252);
color: white;
padding: 2px 3px;
border-radius: 4px;
font-size: 12px;
margin-right: 2px;
}

.m-online {
background-color: rgb(91, 185, 115);
color: white;
padding: 2px 3px;
border-radius: 4px;
font-size: 12px;
margin-right: 2px;
}

.el-icon-arrow-right {
font-family: element-icons !important;
speak: none;
@@ -677,6 +701,15 @@ export default {
justify-content: space-around;
}

.op-btn-c {
text-align: right;
padding-right: 20px;
}

.op-btn {
margin: 0 0 0 5px;
}

.disabled {
cursor: default;
pointer-events: none;


+1 -1  web_src/js/components/basic/editDialog.vue

@@ -1,6 +1,6 @@
<template>

<el-dialog :close-on-click-modal="!deleteLoading" v-dlg-drag :title="dialogTitle" :visible.sync="deleteDialog">
<el-dialog :close-on-click-modal="!deleteLoading" :title="dialogTitle" :visible.sync="deleteDialog">
<div class="message-box__content">


+6 -6  web_src/js/components/images/Images.vue

@@ -120,13 +120,13 @@
</template>
</el-table-column>
<el-table-column
prop="createdUnix"
prop="updatedUnix"
label="创建时间"
align="center"
min-width="14%"
>
<template slot-scope="scope">
{{ scope.row.createdUnix | transformTimestamp }}
{{ scope.row.updatedUnix | transformTimestamp }}
</template>
</el-table-column>
<el-table-column align="center" min-width="21%" label="操作">
@@ -369,13 +369,13 @@
</template>
</el-table-column>
<el-table-column
prop="createdUnix"
prop="updatedUnix"
label="创建时间"
align="center"
min-width="14%"
>
<template slot-scope="scope">
{{ scope.row.createdUnix | transformTimestamp }}
{{ scope.row.updatedUnix | transformTimestamp }}
</template>
</el-table-column>
<el-table-column align="center" min-width="21%" label="操作">
@@ -595,13 +595,13 @@
</template>
</el-table-column>
<el-table-column
prop="createdUnix"
prop="updatedUnix"
label="创建时间"
align="center"
min-width="14%"
>
<template slot-scope="scope">
{{ scope.row.createdUnix | transformTimestamp }}
{{ scope.row.updatedUnix | transformTimestamp }}
</template>
</el-table-column>
<el-table-column align="center" min-width="21%" label="操作">


+2 -2  web_src/js/components/images/adminImages.vue

@@ -75,9 +75,9 @@
</a>
</template>
</el-table-column>
<el-table-column prop="createdUnix" label="创建时间" align="center" min-width="13%">
<el-table-column prop="updatedUnix" label="创建时间" align="center" min-width="13%">
<template slot-scope="scope">
{{scope.row.createdUnix | transformTimestamp}}
{{scope.row.updatedUnix | transformTimestamp}}
</template>
</el-table-column>
<el-table-column align="center" min-width="23%" label="操作">


+4 -4  web_src/js/features/cloudbrainShow.js

@@ -799,9 +799,9 @@ export default async function initCloudrainSow() {
if (value) {
let html = "";
nameMap[value].forEach((element) => {
let { TrainTaskInfo } = element;
TrainTaskInfo = JSON.parse(TrainTaskInfo);
html += `<div class="item" data-label="${element.Label}" data-id="${element.ID}" data-value="${element.Path}">${element.Version}</div>`;
//let { trainTaskInfo } = element;
//trainTaskInfo = JSON.parse(trainTaskInfo);
html += `<div class="item" data-label="${element.label}" data-id="${element.id}" data-value="${element.path}">${element.version}</div>`;
});
$("#model_name_version").append(html);
const initVersionText = $(
@@ -937,7 +937,7 @@ export default async function initCloudrainSow() {
return new Promise((resolve, reject) => {
$.get(
`${RepoLink}/modelmanage/query_modelfile_for_predict`,
{ ID: value },
{ id: value },
(data) => {
resolve(data);
}


+1 -1  web_src/js/features/cloudrbanin.js

@@ -398,7 +398,7 @@ export default async function initCloudrain() {
$(`#${jobName}`).popup("toggle");
} else {
let versionData = data.filter((item) => {
return item.Version === versionName;
return item.version === versionName;
});
if (versionData.length == 0) {
$(`#${jobName}`).popup("toggle");


+6 -0  web_src/js/features/i18nVue.js

@@ -105,6 +105,9 @@ export const i18nVue = {
file_sync_fail:"文件同步失败",
no_file_to_download:"没有文件可以下载",
task_not_finished:"任务还未结束,稍后再来看看",
local:"本地",
online:"线上",
modify:"修改",
},
US: {
computer_vision: "computer vision",
@@ -216,5 +219,8 @@ export const i18nVue = {
file_sync_fail:"File synchronization failed",
no_file_to_download:"No files can be downloaded",
task_not_finished:"Task not finished yet, please wait",
local:"Local",
online:"Online",
modify:"Modify",
},
};

Some files were not shown because too many files changed in this diff
