
Merge remote-tracking branch 'origin/V20221116' into zouap

tags/v1.22.11.3^2
zouap 3 years ago
commit 6bd0565c50
100 changed files with 7667 additions and 21442 deletions
  1. +10 -10  README.md
  2. +90 -58  models/ai_model_manage.go
  3. +2 -140  models/attachment.go
  4. +21 -0  models/base_message.go
  5. +15 -1  models/cloudbrain.go
  6. +2 -2  models/dataset.go
  7. +75 -0  models/file_chunk.go
  8. +2 -0  models/models.go
  9. +2 -2  models/repo.go
  10. +3 -3  models/resource_queue.go
  11. +2 -2  models/resource_scene.go
  12. +21 -8  models/resource_specification.go
  13. +377 -227  models/user_business_analysis.go
  14. +200 -0  models/user_business_struct.go
  15. +20 -23  modules/auth/modelarts.go
  16. +6 -6  modules/cloudbrain/cloudbrain.go
  17. +111 -0  modules/convert/cloudbrain.go
  18. +4 -4  modules/grampus/grampus.go
  19. +26 -0  modules/grampus/resty.go
  20. +13 -13  modules/modelarts/modelarts.go
  21. +7 -7  modules/setting/setting.go
  22. +2 -2  modules/storage/minio.go
  23. +26 -10  modules/storage/minio_ext.go
  24. +24 -25  modules/storage/obs.go
  25. +45 -0  modules/structs/attachment.go
  26. +84 -0  modules/structs/cloudbrain.go
  27. +7 -0  modules/structs/tagger.go
  28. +6 -2  options/locale/locale_en-US.ini
  29. +6 -2  options/locale/locale_zh-CN.ini
  30. +17 -19346  package-lock.json
  31. +22 -1  routers/admin/resources.go
  32. +62 -0  routers/api/v1/api.go
  33. +25 -0  routers/api/v1/repo/attachments.go
  34. +97 -37  routers/api/v1/repo/cloudbrain.go
  35. +123 -0  routers/api/v1/repo/datasets.go
  36. +141 -0  routers/api/v1/repo/images.go
  37. +71 -0  routers/api/v1/repo/mlops.go
  38. +16 -40  routers/api/v1/repo/modelarts.go
  39. +115 -0  routers/api/v1/repo/modelmanage.go
  40. +36 -0  routers/api/v1/repo/spec.go
  41. +3 -1  routers/private/internal.go
  42. +44 -23  routers/repo/ai_model_convert.go
  43. +402 -80  routers/repo/ai_model_manage.go
  44. +2 -2  routers/repo/aisafety.go
  45. +18 -17  routers/repo/attachment.go
  46. +323 -0  routers/repo/attachment_model.go
  47. +26 -47  routers/repo/cloudbrain.go
  48. +2 -318  routers/repo/dataset.go
  49. +73 -62  routers/repo/grampus.go
  50. +2 -2  routers/repo/modelarts.go
  51. +30 -0  routers/response/api_response.go
  52. +5 -1  routers/response/response.go
  53. +2 -2  routers/response/response_list.go
  54. +20 -4  routers/routes/routes.go
  55. +2 -2  routers/user/notification.go
  56. +12 -12  services/cloudbrain/cloudbrainTask/count.go
  57. +631 -0  services/cloudbrain/cloudbrainTask/inference.go
  58. +83 -0  services/cloudbrain/cloudbrainTask/sync_status.go
  59. +1210 -0  services/cloudbrain/cloudbrainTask/train.go
  60. +1 -1  services/cloudbrain/resource/resource_queue.go
  61. +62 -7  services/cloudbrain/resource/resource_specification.go
  62. +21 -2  templates/base/footer_content.tmpl
  63. +21 -1  templates/base/footer_content_fluid.tmpl
  64. +2 -2  templates/base/head_navbar.tmpl
  65. +2 -2  templates/base/head_navbar_fluid.tmpl
  66. +2 -2  templates/base/head_navbar_home.tmpl
  67. +2 -2  templates/base/head_navbar_pro.tmpl
  68. +1 -1  templates/custom/max_log.tmpl
  69. +2 -4  templates/repo/cloudbrain/inference/new.tmpl
  70. +30 -101  templates/repo/cloudbrain/trainjob/show.tmpl
  71. +32 -25  templates/repo/grampus/trainjob/show.tmpl
  72. +2 -4  templates/repo/modelarts/inferencejob/new.tmpl
  73. +25 -24  templates/repo/modelarts/trainjob/show.tmpl
  74. +39 -38  templates/repo/modelmanage/convertIndex.tmpl
  75. +10 -0  templates/repo/modelmanage/create_local_1.tmpl
  76. +11 -0  templates/repo/modelmanage/create_local_2.tmpl
  77. +581 -0  templates/repo/modelmanage/create_online.tmpl
  78. +53 -28  templates/repo/modelmanage/index.tmpl
  79. +5 -530  templates/repo/modelmanage/showinfo.tmpl
  80. +3 -1  templates/repo/modelsafety/show.tmpl
  81. +2 -1  templates/repo/view_file.tmpl
  82. +1 -1  templates/user/dashboard/navbar.tmpl
  83. +124 -91  web_src/js/components/Model.vue
  84. +1 -1  web_src/js/components/basic/editDialog.vue
  85. +6 -6  web_src/js/components/images/Images.vue
  86. +2 -2  web_src/js/components/images/adminImages.vue
  87. +4 -4  web_src/js/features/cloudbrainShow.js
  88. +1 -1  web_src/js/features/cloudrbanin.js
  89. +6 -0  web_src/js/features/i18nVue.js
  90. +41 -10  web_src/js/index.js
  91. +2 -0  web_src/js/vendor/fancybox.esm.js
  92. +1 -0  web_src/less/index.less
  93. +791 -0  web_src/less/vendor/fancyapp.less
  94. +106 -0  web_src/vuepages/apis/modules/modelmanage.js
  95. +13 -0  web_src/vuepages/apis/modules/resources.js
  96. +3 -0  web_src/vuepages/const/index.js
  97. +62 -1  web_src/vuepages/langs/config/en-US.js
  98. +61 -0  web_src/vuepages/langs/config/zh-CN.js
  99. +3 -5  web_src/vuepages/pages/model/tuomin/index.vue
  100. +706 -0  web_src/vuepages/pages/modelmanage/common/modelmanage-common-detail.vue

+10 -10  README.md

@@ -2,7 +2,7 @@

<h1><img src="public/img/favicon.png" alt="logo" width="30" height="30">AiForge - 启智AI开发协作平台</h1>

[![release](https://img.shields.io/badge/release-1.21.11.1-blue)](https://git.openi.org.cn/OpenI/aiforge/releases/latest)
[![release](https://img.shields.io/badge/release-1.21.11.1-blue)](https://openi.pcl.ac.cn/OpenI/aiforge/releases/latest)
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)


@@ -10,7 +10,7 @@

启智AI开发协作平台是一个在线Web应用,旨在为人工智能算法、模型开发提供在线协同工作环境,它提供了<b>代码托管、数据集管理与共享、免费云端算力资源支持(GPU/NPU)、共享镜像</b>等功能。

[启智AI开发协作平台](https://git.openi.org.cn) 是使用本项目构建的在线服务,您可以直接点击链接访问试用。
[启智AI开发协作平台](https://openi.pcl.ac.cn) 是使用本项目构建的在线服务,您可以直接点击链接访问试用。

本项目是基于[Gitea](https://github.com/go-gitea/gitea)发展而来的,我们对其进行了Fork并基于此扩展了人工智能开发中需要的功能,如数据集管理和模型训练等。对于和代码托管相关的功能,您可以参考[Gitea的文档](https://docs.gitea.io/zh-cn/)。

@@ -20,7 +20,7 @@
后端服务涵盖了AI模型开发流水线,包括代码协同开发、数据管理、模型调试、训练、推理和部署等(*目前尚未支持模型部署*)。在不同的开发阶段,我们还将提供丰富的开发工具供用户使用,如数据标注、数据筛选、模型转换、模型压缩、代码检测等。我们也欢迎社区提供更多丰富的工具接入,提高利用平台进行开发的效率。
![系统架构图](assets/架构图.png)
## 在线服务使用
本项目的在线服务平台的详细使用帮助文档,可参阅本项目[百科](https://git.openi.org.cn/OpenI/aiforge/wiki)内容。
本项目的在线服务平台的详细使用帮助文档,可参阅本项目[百科](https://openi.pcl.ac.cn/OpenI/aiforge/wiki)内容。
- 如何创建账号
- 如何创建组织及管理成员权限
- 如何创建项目仓库
@@ -39,22 +39,22 @@
[从源代码安装说明](https://docs.gitea.io/zh-cn/install-from-source/)

## 授权许可
本项目采用 MIT 开源授权许可证,完整的授权说明已放置在 [LICENSE](https://git.openi.org.cn/OpenI/aiforge/src/branch/develop/LICENSE) 文件中。
本项目采用 MIT 开源授权许可证,完整的授权说明已放置在 [LICENSE](https://openi.pcl.ac.cn/OpenI/aiforge/src/branch/develop/LICENSE) 文件中。


## 需要帮助?
如果您在使用或者开发过程中遇到问题,可以在以下渠道咨询:
- 点击[这里](https://git.openi.org.cn/OpenI/aiforge/issues)在线提交问题(点击页面右上角绿色按钮**创建任务**)
- 点击[这里](https://openi.pcl.ac.cn/OpenI/aiforge/issues)在线提交问题(点击页面右上角绿色按钮**创建任务**)
- 加入微信群实时交流,获得进一步的支持
<img src="https://git.openi.org.cn/OpenI/aiforge/wiki/raw/img/wechatgroup.jpg" width=200px />
<img src="https://openi.pcl.ac.cn/OpenI/aiforge/wiki/raw/img/wechatgroup.jpg" width=200px />

## 启智社区小白训练营:
- 结合案例给大家详细讲解如何使用社区平台,帮助无技术背景的小白成长为启智社区达人 (https://git.openi.org.cn/zeizei/OpenI_Learning)
- 结合案例给大家详细讲解如何使用社区平台,帮助无技术背景的小白成长为启智社区达人 (https://openi.pcl.ac.cn/zeizei/OpenI_Learning)

## 平台引用
如果本平台对您的科研工作提供了帮助,可在论文致谢中加入:
英文版:```Thanks for the support provided by OpenI Community (https://git.openi.org.cn).```
中文版:```感谢启智社区提供的技术支持(https://git.openi.org.cn)。```
英文版:```Thanks for the support provided by OpenI Community (https://openi.pcl.ac.cn).```
中文版:```感谢启智社区提供的技术支持(https://openi.pcl.ac.cn)。```

如果您的成果中引用了本平台,也欢迎在下述开源项目中提交您的成果信息:
https://git.openi.org.cn/OpenIOSSG/references
https://openi.pcl.ac.cn/OpenIOSSG/references

+90 -58  models/ai_model_manage.go

@@ -12,67 +12,68 @@ import (
)

type AiModelManage struct {
ID string `xorm:"pk"`
Name string `xorm:"INDEX NOT NULL"`
Version string `xorm:"NOT NULL"`
VersionCount int `xorm:"NOT NULL DEFAULT 0"`
New int `xorm:"NOT NULL"`
Type int `xorm:"NOT NULL"`
Size int64 `xorm:"NOT NULL"`
Description string `xorm:"varchar(2000)"`
Label string `xorm:"varchar(1000)"`
Path string `xorm:"varchar(400) NOT NULL"`
DownloadCount int `xorm:"NOT NULL DEFAULT 0"`
Engine int64 `xorm:"NOT NULL DEFAULT 0"`
Status int `xorm:"NOT NULL DEFAULT 0"`
StatusDesc string `xorm:"varchar(500)"`
Accuracy string `xorm:"varchar(1000)"`
AttachmentId string `xorm:"NULL"`
RepoId int64 `xorm:"INDEX NULL"`
CodeBranch string `xorm:"varchar(400) NULL"`
CodeCommitID string `xorm:"NULL"`
UserId int64 `xorm:"NOT NULL"`
UserName string
UserRelAvatarLink string
TrainTaskInfo string `xorm:"text NULL"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
IsCanOper bool
IsCanDelete bool
ID string `xorm:"pk" json:"id"`
Name string `xorm:"INDEX NOT NULL" json:"name"`
ModelType int `xorm:"NULL" json:"modelType"`
Version string `xorm:"NOT NULL" json:"version"`
VersionCount int `xorm:"NOT NULL DEFAULT 0" json:"versionCount"`
New int `xorm:"NOT NULL" json:"new"`
Type int `xorm:"NOT NULL" json:"type"`
Size int64 `xorm:"NOT NULL" json:"size"`
Description string `xorm:"varchar(2000)" json:"description"`
Label string `xorm:"varchar(1000)" json:"label"`
Path string `xorm:"varchar(400) NOT NULL" json:"path"`
DownloadCount int `xorm:"NOT NULL DEFAULT 0" json:"downloadCount"`
Engine int64 `xorm:"NOT NULL DEFAULT 0" json:"engine"`
Status int `xorm:"NOT NULL DEFAULT 0" json:"status"`
StatusDesc string `xorm:"varchar(500)" json:"statusDesc"`
Accuracy string `xorm:"varchar(1000)" json:"accuracy"`
AttachmentId string `xorm:"NULL" json:"attachmentId"`
RepoId int64 `xorm:"INDEX NULL" json:"repoId"`
CodeBranch string `xorm:"varchar(400) NULL" json:"codeBranch"`
CodeCommitID string `xorm:"NULL" json:"codeCommitID"`
UserId int64 `xorm:"NOT NULL" json:"userId"`
UserName string `json:"userName"`
UserRelAvatarLink string `json:"userRelAvatarLink"`
TrainTaskInfo string `xorm:"text NULL" json:"trainTaskInfo"`
CreatedUnix timeutil.TimeStamp `xorm:"created" json:"createdUnix"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated" json:"updatedUnix"`
IsCanOper bool `json:"isCanOper"`
IsCanDelete bool `json:"isCanDelete"`
}

type AiModelConvert struct {
ID string `xorm:"pk"`
Name string `xorm:"INDEX NOT NULL"`
Status string `xorm:"NULL"`
StatusResult string `xorm:"NULL"`
SrcEngine int `xorm:"NOT NULL DEFAULT 0"`
RepoId int64 `xorm:"INDEX NULL"`
ModelId string `xorm:"NOT NULL"`
ModelName string `xorm:"NULL"`
ModelVersion string `xorm:"NOT NULL"`
ModelPath string `xorm:"NULL"`
DestFormat int `xorm:"NOT NULL DEFAULT 0"`
NetOutputFormat int `xorm:"NULL"`
UserId int64 `xorm:"NOT NULL"`
CloudBrainTaskId string `xorm:"NULL"`
ModelArtsVersionId string `xorm:"NULL"`
ContainerID string
ContainerIp string
RunTime int64 `xorm:"NULL"`
TrainJobDuration string
InputShape string `xorm:"varchar(2000)"`
InputDataFormat string `xorm:"NOT NULL"`
Description string `xorm:"varchar(2000)"`
Path string `xorm:"varchar(400) NOT NULL"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
StartTime timeutil.TimeStamp
EndTime timeutil.TimeStamp
UserName string
UserRelAvatarLink string
IsCanOper bool
IsCanDelete bool
ID string `xorm:"pk" json:"id"`
Name string `xorm:"INDEX NOT NULL" json:"name"`
Status string `xorm:"NULL" json:"status"`
StatusResult string `xorm:"NULL" json:"statusResult"`
SrcEngine int `xorm:"NOT NULL DEFAULT 0" json:"srcEngine"`
RepoId int64 `xorm:"INDEX NULL" json:"repoId"`
ModelId string `xorm:"NOT NULL" json:"modelId"`
ModelName string `xorm:"NULL" json:"modelName"`
ModelVersion string `xorm:"NOT NULL" json:"modelVersion"`
ModelPath string `xorm:"NULL" json:"modelPath"`
DestFormat int `xorm:"NOT NULL DEFAULT 0" json:"destFormat"`
NetOutputFormat int `xorm:"NULL" json:"netOutputFormat"`
UserId int64 `xorm:"NOT NULL" json:"userId"`
CloudBrainTaskId string `xorm:"NULL" json:"cloudBrainTaskId"`
ModelArtsVersionId string `xorm:"NULL" json:"modelArtsVersionId"`
ContainerID string `json:"containerID"`
ContainerIp string `json:"containerIp"`
RunTime int64 `xorm:"NULL" json:"runTime"`
TrainJobDuration string `json:"trainJobDuration"`
InputShape string `xorm:"varchar(2000)" json:"inputShape"`
InputDataFormat string `xorm:"NOT NULL" json:"inputDataFormat"`
Description string `xorm:"varchar(2000)" json:"description"`
Path string `xorm:"varchar(400) NOT NULL" json:"path"`
CreatedUnix timeutil.TimeStamp `xorm:"created" json:"createdUnix"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated" json:"updatedUnix"`
StartTime timeutil.TimeStamp `json:"startTime"`
EndTime timeutil.TimeStamp `json:"endTime"`
UserName string `json:"userName"`
UserRelAvatarLink string `json:"userRelAvatarLink"`
IsCanOper bool `json:"isCanOper"`
IsCanDelete bool `json:"isCanDelete"`
}

type AiModelQueryOptions struct {
@@ -287,6 +288,37 @@ func ModifyModelDescription(id string, description string) error {
return nil
}

func ModifyLocalModel(id string, name, label, description string, engine int) error {
var sess *xorm.Session
sess = x.ID(id)
defer sess.Close()
re, err := sess.Cols("name", "label", "description", "engine").Update(&AiModelManage{
Description: description,
Name: name,
Label: label,
Engine: int64(engine),
})
if err != nil {
return err
}
log.Info("success to update description from db.re=" + fmt.Sprint((re)))
return nil
}

func ModifyModelSize(id string, size int64) error {
var sess *xorm.Session
sess = x.ID(id)
defer sess.Close()
re, err := sess.Cols("size").Update(&AiModelManage{
Size: size,
})
if err != nil {
return err
}
log.Info("success to update size from db.re=" + fmt.Sprint((re)))
return nil
}

func ModifyModelStatus(id string, modelSize int64, status int, modelPath string, statusDesc string) error {
var sess *xorm.Session
sess = x.ID(id)


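Besides the JSON tags added to both structs above, this file gains two narrow update helpers, ModifyLocalModel and ModifyModelSize. A minimal caller-side sketch, assuming the repository's usual code.gitea.io/gitea import paths (the wrapper function, placeholder values and error handling below are illustrative, not part of the commit):

package repo_sketch

import (
	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/log"
)

// updateLocalModelMeta is an illustrative caller: it renames a locally
// uploaded model and then refreshes its recorded size.
func updateLocalModelMeta(id string) error {
	// name/label/description and the engine code 0 are placeholder values.
	if err := models.ModifyLocalModel(id, "resnet50-v2", "cv baseline", "retrained checkpoint", 0); err != nil {
		log.Error("ModifyLocalModel failed: %v", err)
		return err
	}
	return models.ModifyModelSize(id, 200*1024*1024)
}
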
+2 -140  models/attachment.go

@@ -61,30 +61,6 @@ type AttachmentUsername struct {
Name string
}

type AttachmentInfo struct {
Attachment `xorm:"extends"`
Repo *Repository `xorm:"extends"`
RelAvatarLink string `xorm:"extends"`
UserName string `xorm:"extends"`
Recommend bool `xorm:"-"`
}

type AttachmentsOptions struct {
ListOptions
DatasetIDs []int64
DecompressState int
Type int
UploaderID int64
NeedDatasetIDs bool
NeedIsPrivate bool
IsPrivate bool
JustNeedZipFile bool
NeedRepoInfo bool
Keyword string
RecommendOnly bool
UserId int64
}

func (a *Attachment) AfterUpdate() {
if a.DatasetID > 0 {
datasetIsPublicCount, err := x.Where("dataset_id = ? AND is_private = ?", a.DatasetID, false).Count(new(Attachment))
@@ -158,7 +134,8 @@ func (a *Attachment) S3DownloadURL() string {
if a.Type == TypeCloudBrainOne {
url, _ = storage.Attachments.PresignedGetURL(setting.Attachment.Minio.BasePath+AttachmentRelativePath(a.UUID), a.Name)
} else if a.Type == TypeCloudBrainTwo {
url, _ = storage.ObsGetPreSignedUrl(a.UUID, a.Name)
objectName := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(a.UUID[0:1], a.UUID[1:2], a.UUID, a.Name)), "/")
url, _ = storage.ObsGetPreSignedUrl(objectName, a.Name)
}

return url
@@ -493,19 +470,6 @@ func getPrivateAttachments(e Engine, userID int64) ([]*AttachmentUsername, error
return attachments, nil
}

func getAllUserAttachments(e Engine, userID int64) ([]*AttachmentUsername, error) {
attachments := make([]*AttachmentUsername, 0, 10)
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id "+
"= `user`.id").Where("decompress_state= ? and attachment.type = ? and (uploader_id= ? or is_private = ?)", DecompressStateDone, TypeCloudBrainOne, userID, false).Find(&attachments); err != nil {
return nil, err
}
return attachments, nil
}

func GetAllUserAttachments(userID int64) ([]*AttachmentUsername, error) {
return getAllUserAttachments(x, userID)
}

func getModelArtsUserAttachments(e Engine, userID int64) ([]*AttachmentUsername, error) {
attachments := make([]*AttachmentUsername, 0, 10)
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id "+
@@ -587,7 +551,6 @@ func AttachmentsByDatasetOption(datasets []int64, opts *SearchDatasetOptions) ([
)
}


attachments := make([]*Attachment, 0)
if err := sess.Table(&Attachment{}).Where(cond).Desc("id").
Find(&attachments); err != nil {
@@ -601,107 +564,6 @@ func GetAllAttachmentSize() (int64, error) {
return x.SumInt(&Attachment{}, "size")
}

func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) {
sess := x.NewSession()
defer sess.Close()

var cond = builder.NewCond()
if opts.NeedDatasetIDs {
cond = cond.And(
builder.In("attachment.dataset_id", opts.DatasetIDs),
)
}

if opts.UploaderID > 0 {
cond = cond.And(
builder.Eq{"attachment.uploader_id": opts.UploaderID},
)
}

if (opts.Type) >= 0 {
cond = cond.And(
builder.Eq{"attachment.type": opts.Type},
)
}

if opts.NeedIsPrivate {
cond = cond.And(
builder.Eq{"attachment.is_private": opts.IsPrivate},
)
}
if opts.RecommendOnly {
cond = cond.And(builder.In("attachment.id", builder.Select("attachment.id").
From("attachment").
Join("INNER", "dataset", "attachment.dataset_id = dataset.id and dataset.recommend=true")))
}

if opts.JustNeedZipFile {
var DecompressState []int32
DecompressState = append(DecompressState, DecompressStateDone, DecompressStateIng, DecompressStateFailed)
cond = cond.And(
builder.In("attachment.decompress_state", DecompressState),
)
}

var count int64
var err error
if len(opts.Keyword) == 0 {
count, err = sess.Where(cond).Count(new(Attachment))
} else {
lowerKeyWord := strings.ToLower(opts.Keyword)

cond = cond.And(builder.Or(builder.Like{"LOWER(attachment.name)", lowerKeyWord}, builder.Like{"LOWER(attachment.description)", lowerKeyWord}))
count, err = sess.Table(&Attachment{}).Where(cond).Count(new(AttachmentInfo))

}

if err != nil {
return nil, 0, fmt.Errorf("Count: %v", err)
}

if opts.Page >= 0 && opts.PageSize > 0 {
var start int
if opts.Page == 0 {
start = 0
} else {
start = (opts.Page - 1) * opts.PageSize
}
sess.Limit(opts.PageSize, start)
}

sess.OrderBy("attachment.created_unix DESC")
attachments := make([]*AttachmentInfo, 0, setting.UI.DatasetPagingNum)
if err := sess.Table(&Attachment{}).Where(cond).
Find(&attachments); err != nil {
return nil, 0, fmt.Errorf("Find: %v", err)
}

if opts.NeedRepoInfo {
for _, attachment := range attachments {
dataset, err := GetDatasetByID(attachment.DatasetID)
if err != nil {
return nil, 0, fmt.Errorf("GetDatasetByID failed error: %v", err)
}
attachment.Recommend = dataset.Recommend
repo, err := GetRepositoryByID(dataset.RepoID)
if err == nil {
attachment.Repo = repo
} else {
return nil, 0, fmt.Errorf("GetRepositoryByID failed error: %v", err)
}
user, err := GetUserByID(attachment.UploaderID)
if err == nil {
attachment.RelAvatarLink = user.RelAvatarLink()
attachment.UserName = user.Name
} else {
return nil, 0, fmt.Errorf("GetUserByID failed error: %v", err)
}
}
}

return attachments, count, nil
}

func GetAllDatasetContributorByDatasetId(datasetId int64) ([]*User, error) {
r := make([]*User, 0)
if err := x.Select("distinct(public.user.*)").Table("attachment").Join("LEFT", "user", "public.user.ID = attachment.uploader_id").Where("attachment.dataset_id = ?", datasetId).Find(&r); err != nil {


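The S3DownloadURL change above no longer presigns the bare UUID for CloudBrain-two attachments; it builds the full OBS object key under the configured base path first. A small stand-alone sketch of that key layout, with basePath standing in for setting.BasePath (the helper itself is illustrative; the real logic stays inline in S3DownloadURL):

package storage_sketch

import (
	"path"
	"strings"
)

// obsObjectName mirrors the key layout used by S3DownloadURL above:
// <base path>/<uuid[0]>/<uuid[1]>/<uuid>/<file name>, with a leading slash trimmed.
func obsObjectName(basePath, uuid, name string) string {
	return strings.TrimPrefix(path.Join(basePath, path.Join(uuid[0:1], uuid[1:2], uuid, name)), "/")
}

// Example: obsObjectName("attachment", "c0ffee42", "train.zip")
// yields "attachment/c/0/c0ffee42/train.zip".
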
+21 -0  models/base_message.go

@@ -14,3 +14,24 @@ func BaseErrorMessage(message string) BaseMessage {
1, message,
}
}

type BaseMessageApi struct {
Code int `json:"code"`
Message string `json:"message"`
}

var BaseOKMessageApi = BaseMessageApi{
0, "",
}

func BaseErrorMessageApi(message string) BaseMessageApi {
return BaseMessageApi{
1, message,
}
}

type BaseMessageWithDataApi struct {
Code int `json:"code"`
Message string `json:"message"`
Data interface{} `json:"data"`
}

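The three API-flavoured types mirror the existing BaseMessage pair but carry explicit JSON tags, so REST handlers can return them directly. A hedged sketch of how a handler might map an error or payload onto them (the helper itself is illustrative, not from this commit):

package response_sketch

import "code.gitea.io/gitea/models"

// apiResult maps an error or a payload onto the new API message types:
// code 0 means success, code 1 carries the error message.
func apiResult(err error, data interface{}) interface{} {
	if err != nil {
		return models.BaseErrorMessageApi(err.Error())
	}
	if data != nil {
		return models.BaseMessageWithDataApi{Code: 0, Data: data}
	}
	return models.BaseOKMessageApi
}
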
+15 -1  models/cloudbrain.go

@@ -291,6 +291,13 @@ func (task *Cloudbrain) IsRunning() bool {
status == string(JobRunning) || status == GrampusStatusRunning
}

func (task *Cloudbrain) IsUserHasRight(user *User) bool {
if user == nil {
return false
}
return user.IsAdmin || user.ID == task.UserID
}

func ConvertDurationToStr(duration int64) string {
if duration <= 0 {
return DURATION_STR_ZERO
@@ -2030,10 +2037,17 @@ func GetStoppedJobWithNoStartTimeEndTime() ([]*Cloudbrain, error) {
cloudbrains := make([]*Cloudbrain, 0)
return cloudbrains, x.SQL("select * from cloudbrain where status in (?,?,?,?,?,?,?) and (start_time is null or end_time is null) limit 100", ModelArtsTrainJobCompleted, ModelArtsTrainJobFailed, ModelArtsTrainJobKilled, ModelArtsStopped, JobStopped, JobFailed, JobSucceeded).Find(&cloudbrains)
}
func GetC2NetWithAiCenterWrongJob() ([]*Cloudbrain, error) {
cloudbrains := make([]*Cloudbrain, 0)
return cloudbrains, x.
In("status", ModelArtsTrainJobFailed, ModelArtsTrainJobKilled, ModelArtsStopped, JobStopped, JobFailed).
Where("type = ?", TypeC2Net).
Find(&cloudbrains)
}

func GetModelSafetyTestTask() ([]*Cloudbrain, error) {
cloudbrains := make([]*Cloudbrain, 0)
sess := x.Where("job_type = ?", string(JobTypeModelSafety))
sess := x.Where("job_type=?", string(JobTypeModelSafety))
err := sess.Find(&cloudbrains)
return cloudbrains, err
}


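IsUserHasRight centralises the owner-or-admin check for cloudbrain tasks. An illustrative guard built on it (the wrapper and error text are not from this commit):

package permission_sketch

import (
	"errors"

	"code.gitea.io/gitea/models"
)

// checkTaskOperator: only the task owner or an admin may operate on a
// cloudbrain task, which is exactly what IsUserHasRight encodes.
func checkTaskOperator(task *models.Cloudbrain, user *models.User) error {
	if task == nil || !task.IsUserHasRight(user) {
		return errors.New("insufficient permission to operate on this task")
	}
	return nil
}
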
+2 -2  models/dataset.go

@@ -22,8 +22,8 @@ const (

type Dataset struct {
ID int64 `xorm:"pk autoincr"`
Title string `xorm:"INDEX NOT NULL"`
Status int32 `xorm:"INDEX"` // normal_private: 0, pulbic: 1, is_delete: 2
Title string `xorm:"INDEX NOT NULL""`
Status int32 `xorm:"INDEX""` // normal_private: 0, pulbic: 1, is_delete: 2
Category string
Description string `xorm:"TEXT"`
DownloadTimes int64


+75 -0  models/file_chunk.go

@@ -28,6 +28,23 @@ type FileChunk struct {
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}

type ModelFileChunk struct {
ID int64 `xorm:"pk autoincr"`
UUID string `xorm:"INDEX"`
Md5 string `xorm:"INDEX"`
ModelUUID string `xorm:"INDEX"`
ObjectName string `xorm:"DEFAULT ''"`
IsUploaded int `xorm:"DEFAULT 0"` // not uploaded: 0, uploaded: 1
UploadID string `xorm:"UNIQUE"` //minio upload id
TotalChunks int
Size int64
UserID int64 `xorm:"INDEX"`
Type int `xorm:"INDEX DEFAULT 0"`
CompletedParts []string `xorm:"DEFAULT ''"` // chunkNumber+etag eg: ,1-asqwewqe21312312.2-123hjkas
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}

// GetFileChunkByMD5 returns fileChunk by given id
func GetFileChunkByMD5(md5 string) (*FileChunk, error) {
return getFileChunkByMD5(x, md5)
@@ -49,6 +66,21 @@ func GetFileChunkByMD5AndUser(md5 string, userID int64, typeCloudBrain int) (*Fi
return getFileChunkByMD5AndUser(x, md5, userID, typeCloudBrain)
}

func GetModelFileChunkByMD5AndUser(md5 string, userID int64, typeCloudBrain int, uuid string) (*ModelFileChunk, error) {
return getModelFileChunkByMD5AndUser(x, md5, userID, typeCloudBrain, uuid)
}

func getModelFileChunkByMD5AndUser(e Engine, md5 string, userID int64, typeCloudBrain int, uuid string) (*ModelFileChunk, error) {
fileChunk := new(ModelFileChunk)

if has, err := e.Where("md5 = ? and user_id = ? and type = ? and model_uuid= ?", md5, userID, typeCloudBrain, uuid).Get(fileChunk); err != nil {
return nil, err
} else if !has {
return nil, ErrFileChunkNotExist{md5, ""}
}
return fileChunk, nil
}

func getFileChunkByMD5AndUser(e Engine, md5 string, userID int64, typeCloudBrain int) (*FileChunk, error) {
fileChunk := new(FileChunk)

@@ -76,6 +108,21 @@ func getFileChunkByUUID(e Engine, uuid string) (*FileChunk, error) {
return fileChunk, nil
}

func GetModelFileChunkByUUID(uuid string) (*ModelFileChunk, error) {
return getModelFileChunkByUUID(x, uuid)
}

func getModelFileChunkByUUID(e Engine, uuid string) (*ModelFileChunk, error) {
fileChunk := new(ModelFileChunk)

if has, err := e.Where("uuid = ?", uuid).Get(fileChunk); err != nil {
return nil, err
} else if !has {
return nil, ErrFileChunkNotExist{"", uuid}
}
return fileChunk, nil
}

// InsertFileChunk insert a record into file_chunk.
func InsertFileChunk(fileChunk *FileChunk) (_ *FileChunk, err error) {
if _, err := x.Insert(fileChunk); err != nil {
@@ -85,6 +132,14 @@ func InsertFileChunk(fileChunk *FileChunk) (_ *FileChunk, err error) {
return fileChunk, nil
}

// InsertFileChunk insert a record into file_chunk.
func InsertModelFileChunk(fileChunk *ModelFileChunk) (_ *ModelFileChunk, err error) {
if _, err := x.Insert(fileChunk); err != nil {
return nil, err
}
return fileChunk, nil
}

func DeleteFileChunkById(uuid string) (*FileChunk, error) {
return deleteFileChunkById(x, uuid)
}
@@ -106,6 +161,17 @@ func deleteFileChunkById(e Engine, uuid string) (*FileChunk, error) {
}
}

func UpdateModelFileChunk(fileChunk *ModelFileChunk) error {
return updateModelFileChunk(x, fileChunk)
}

func updateModelFileChunk(e Engine, fileChunk *ModelFileChunk) error {
var sess *xorm.Session
sess = e.Where("uuid = ?", fileChunk.UUID)
_, err := sess.Cols("is_uploaded").Update(fileChunk)
return err
}

// UpdateFileChunk updates the given file_chunk in database
func UpdateFileChunk(fileChunk *FileChunk) error {
return updateFileChunk(x, fileChunk)
@@ -127,3 +193,12 @@ func deleteFileChunk(e Engine, fileChunk *FileChunk) error {
_, err := e.ID(fileChunk.ID).Delete(fileChunk)
return err
}

func DeleteModelFileChunk(fileChunk *ModelFileChunk) error {
return deleteModelFileChunk(x, fileChunk)
}

func deleteModelFileChunk(e Engine, fileChunk *ModelFileChunk) error {
_, err := e.ID(fileChunk.ID).Delete(fileChunk)
return err
}

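ModelFileChunk and its helpers parallel the existing FileChunk flow but are additionally keyed by the model UUID, so a resumable model upload is scoped per model. A hedged sketch of the lookup-or-create step an upload handler might perform (everything except the models.* calls is illustrative):

package upload_sketch

import "code.gitea.io/gitea/models"

// findOrCreateModelChunk resumes a chunked model upload: reuse the record for
// (md5, user, scene, model uuid) if it exists, otherwise insert a fresh one.
func findOrCreateModelChunk(md5 string, userID int64, scene int, modelUUID, fileUUID, uploadID string, totalChunks int, size int64) (*models.ModelFileChunk, error) {
	if chunk, err := models.GetModelFileChunkByMD5AndUser(md5, userID, scene, modelUUID); err == nil {
		return chunk, nil // resume the earlier upload
	}
	// A real handler would distinguish ErrFileChunkNotExist from other lookup
	// failures here; this sketch simply falls through to creating a new record.
	return models.InsertModelFileChunk(&models.ModelFileChunk{
		UUID:        fileUUID,
		Md5:         md5,
		ModelUUID:   modelUUID,
		UploadID:    uploadID,
		TotalChunks: totalChunks,
		Size:        size,
		UserID:      userID,
		Type:        scene,
	})
}
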
+2 -0  models/models.go

@@ -136,6 +136,7 @@ func init() {
new(ImageTopic),
new(ImageTopicRelation),
new(FileChunk),
new(ModelFileChunk),
new(BlockChain),
new(RecommendOrg),
new(AiModelManage),
@@ -185,6 +186,7 @@ func init() {
new(UserAnalysisPara),
new(Invitation),
new(CloudbrainDurationStatistic),
new(UserSummaryCurrentYear),
)

gonicNames := []string{"SSL", "UID"}


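Appending ModelFileChunk and UserSummaryCurrentYear to this slice is what makes xorm create and migrate their tables at start-up along with every other registered model. A stripped-down sketch of that mechanism (the real engine wiring lives elsewhere in this package; this is only the idea):

package models_sketch

import "xorm.io/xorm"

// syncTables shows, in isolation, what adding a struct to the tables slice
// buys: Sync2 creates or migrates a table for every registered model,
// ModelFileChunk and UserSummaryCurrentYear included.
func syncTables(engine *xorm.Engine, tables ...interface{}) error {
	return engine.Sync2(tables...)
}
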
+2 -2  models/repo.go

@@ -223,10 +223,10 @@ type Repository struct {
BlockChainStatus RepoBlockChainStatus `xorm:"NOT NULL DEFAULT 0"`

// git clone and git pull total count
CloneCnt int64 `xorm:"NOT NULL DEFAULT 0"`
CloneCnt int64 `xorm:"NOT NULL DEFAULT 0" json:"clone_cnt"`

// only git clone total count
GitCloneCnt int64 `xorm:"NOT NULL DEFAULT 0"`
GitCloneCnt int64 `xorm:"NOT NULL DEFAULT 0" json:"git_clone_cnt"`

CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`


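With json tags on the two counters, serialized repositories expose snake_case keys instead of Go's default field names. A tiny stand-alone example using a stand-in struct (the full Repository type has many more fields):

package main

import (
	"encoding/json"
	"fmt"
)

// cloneCounters mirrors only the two newly tagged fields of Repository,
// enough to show the keys that now appear in serialized output.
type cloneCounters struct {
	CloneCnt    int64 `json:"clone_cnt"`
	GitCloneCnt int64 `json:"git_clone_cnt"`
}

func main() {
	b, _ := json.Marshal(cloneCounters{CloneCnt: 12, GitCloneCnt: 7})
	fmt.Println(string(b)) // {"clone_cnt":12,"git_clone_cnt":7}
}
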
+3 -3  models/resource_queue.go

@@ -143,6 +143,9 @@ func InsertResourceQueue(queue ResourceQueue) (int64, error) {
func UpdateResourceQueueById(queueId int64, queue ResourceQueue) (int64, error) {
return x.ID(queueId).Update(&queue)
}
func UpdateResourceCardsTotalNum(queueId int64, queue ResourceQueue) (int64, error) {
return x.ID(queueId).Cols("cards_total_num", "remark").Update(&queue)
}

func SearchResourceQueue(opts SearchResourceQueueOptions) (int64, []ResourceQueue, error) {
var cond = builder.NewCond()
@@ -313,9 +316,6 @@ func SyncGrampusQueues(updateList []ResourceQueue, insertList []ResourceQueue, e
if _, err = sess.In("id", deleteSpcIds).Update(&ResourceSpecification{Status: SpecOffShelf}); err != nil {
return err
}
if _, err = sess.In("spec_id", deleteSpcIds).Delete(&ResourceSceneSpec{}); err != nil {
return err
}
}

}


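UpdateResourceCardsTotalNum writes only cards_total_num and remark, so callers cannot accidentally overwrite other queue columns. An illustrative call, assuming the struct fields backing those columns are named CardsTotalNum and Remark (the wrapper is not from this commit):

package admin_sketch

import "code.gitea.io/gitea/models"

// adjustQueueCards bumps the card total for one queue and records why.
func adjustQueueCards(queueID int64, total int, remark string) error {
	_, err := models.UpdateResourceCardsTotalNum(queueID, models.ResourceQueue{
		CardsTotalNum: total,  // assumed field name for cards_total_num
		Remark:        remark, // assumed field name for remark
	})
	return err
}
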
+2 -2  models/resource_scene.go

@@ -116,7 +116,7 @@ func InsertResourceScene(r ResourceSceneReq) error {

//check
specs := make([]ResourceSpecification, 0)
cond := builder.In("id", r.SpecIds).And(builder.Eq{"status": SpecOnShelf})
cond := builder.In("id", r.SpecIds)
if err := sess.Where(cond).Find(&specs); err != nil {
return err
}
@@ -175,7 +175,7 @@ func UpdateResourceScene(r ResourceSceneReq) error {
}
//check specification
specs := make([]ResourceSpecification, 0)
cond := builder.In("id", r.SpecIds).And(builder.Eq{"status": SpecOnShelf})
cond := builder.In("id", r.SpecIds)
if err := sess.Where(cond).Find(&specs); err != nil {
return err
}


+21 -8  models/resource_specification.go

@@ -12,6 +12,13 @@ const (
SpecOffShelf
)

type SearchSpecOrderBy int

const (
SearchSpecOrderById SearchSpecOrderBy = iota
SearchSpecOrder4Standard
)

type ResourceSpecification struct {
ID int64 `xorm:"pk autoincr"`
QueueId int64 `xorm:"INDEX"`
@@ -85,6 +92,7 @@ type SearchResourceSpecificationOptions struct {
Status int
Cluster string
AvailableCode int
OrderBy SearchSpecOrderBy
}

type SearchResourceBriefSpecificationOptions struct {
@@ -168,6 +176,7 @@ type FindSpecsOptions struct {
UseShareMemGiB bool
//if true,find specs no matter used or not used in scene. if false,only find specs used in scene
RequestAll bool
SpecStatus int
}

type Specification struct {
@@ -232,10 +241,18 @@ func SearchResourceSpecification(opts SearchResourceSpecificationOptions) (int64
return 0, nil, err
}

var orderby = ""
switch opts.OrderBy {
case SearchSpecOrder4Standard:
orderby = "resource_queue.compute_resource asc,resource_queue.acc_card_type asc,resource_specification.acc_cards_num asc,resource_specification.cpu_cores asc,resource_specification.mem_gi_b asc,resource_specification.share_mem_gi_b asc"
default:
orderby = "resource_specification.id desc"
}

r := make([]ResourceSpecAndQueue, 0)
err = x.Where(cond).
Join("INNER", "resource_queue", "resource_queue.ID = resource_specification.queue_id").
Desc("resource_specification.id").
OrderBy(orderby).
Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).
Unscoped().Find(&r)
if err != nil {
@@ -269,10 +286,6 @@ func ResourceSpecOffShelf(id int64) (int64, error) {
}
sess.Close()
}()
//delete scene spec relation
if _, err = sess.Where("spec_id = ?", id).Delete(&ResourceSceneSpec{}); err != nil {
return 0, err
}

param := ResourceSpecification{
Status: SpecOffShelf,
@@ -317,9 +330,6 @@ func SyncGrampusSpecs(updateList []ResourceSpecification, insertList []ResourceS
if _, err = sess.Cols("status", "is_available").In("id", deleteIds).Update(&ResourceSpecification{Status: SpecOffShelf, IsAvailable: false}); err != nil {
return err
}
if _, err = sess.In("spec_id", deleteIds).Delete(&ResourceSceneSpec{}); err != nil {
return err
}
}

//update exists specs
@@ -384,6 +394,9 @@ func FindSpecs(opts FindSpecsOptions) ([]*Specification, error) {
if opts.UseShareMemGiB {
cond = cond.And(builder.Eq{"resource_specification.share_mem_gi_b": opts.ShareMemGiB})
}
if opts.SpecStatus > 0 {
cond = cond.And(builder.Eq{"resource_specification.status": opts.SpecStatus})
}
r := make([]*Specification, 0)
s := x.Where(cond).
Join("INNER", "resource_queue", "resource_queue.id = resource_specification.queue_id")


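The new SearchSpecOrderBy option lets callers request the standardized ordering (compute resource, card type, then ascending card/CPU/memory sizes) instead of the default newest-first listing. A hedged sketch of a search that uses it (how Page and PageSize are exposed on the options struct is an assumption drawn from the query code above):

package spec_sketch

import "code.gitea.io/gitea/models"

// listSpecsStandardOrder asks for the standardized ordering added above
// instead of the default id-descending order.
func listSpecsStandardOrder(page, pageSize int) (int64, []models.ResourceSpecAndQueue, error) {
	opts := models.SearchResourceSpecificationOptions{
		OrderBy: models.SearchSpecOrder4Standard,
	}
	opts.Page = page         // assumed to be reachable via embedded list options,
	opts.PageSize = pageSize // as SearchResourceSpecification's Limit call implies
	return models.SearchResourceSpecification(opts)
}
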
+377 -227  models/user_business_analysis.go

@@ -3,12 +3,15 @@ package models
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"sort"
"strconv"
"strings"
"time"

"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/builder"
"xorm.io/xorm"
@@ -19,185 +22,6 @@ const (
BATCH_INSERT_SIZE = 50
)

type UserBusinessAnalysisAll struct {
ID int64 `xorm:"pk"`

CountDate int64 `xorm:"pk"`

//action :ActionMergePullRequest // 11
CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCommitRepo // 5
CommitCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCreateIssue // 10
IssueCount int `xorm:"NOT NULL DEFAULT 0"`

//comment table current date
CommentCount int `xorm:"NOT NULL DEFAULT 0"`

//watch table current date
FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//star table current date
StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//follow table
WatchedCount int `xorm:"NOT NULL DEFAULT 0"`

// user table
GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`

//
CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`

//attachement table
CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`

//0
CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`

//issue, issueassignees
SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`

//baike
EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`

//user
RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

//repo
CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//login count, from elk
LoginCount int `xorm:"NOT NULL DEFAULT 0"`

//openi index
OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`

//user
Email string `xorm:"NOT NULL"`

//user
Name string `xorm:"NOT NULL"`

DataDate string `xorm:"NULL"`

//cloudbraintask
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`

Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysis struct {
ID int64 `xorm:"pk"`
DataDate string `xorm:"pk"`
CountDate int64 `xorm:"NULL"`

//action :ActionMergePullRequest // 11
CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCommitRepo // 5
CommitCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCreateIssue // 6
IssueCount int `xorm:"NOT NULL DEFAULT 0"`

//comment table current date
CommentCount int `xorm:"NOT NULL DEFAULT 0"`

//watch table current date
FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//star table current date
StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//follow table
WatchedCount int `xorm:"NOT NULL DEFAULT 0"`

// user table
GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`

//
CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`

//attachement table
CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`

//0
CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`

//issue, issueassignees
SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`

//baike
EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`

//user
RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

//repo
CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//login count, from elk
LoginCount int `xorm:"NOT NULL DEFAULT 0"`

//openi index
OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`

//user
Email string `xorm:"NOT NULL"`

//user
Name string `xorm:"NOT NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`

Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisQueryOptions struct {
ListOptions
UserName string
@@ -499,7 +323,7 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi
DataDate := currentTimeNow.Format("2006-01-02 15:04")

CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
CommitCountMap, _ := queryCommitAction(start_unix, end_unix, 5)
IssueCountMap := queryCreateIssue(start_unix, end_unix)

CommentCountMap := queryComment(start_unix, end_unix)
@@ -517,16 +341,16 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi
CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
}
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

@@ -752,7 +576,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
startTime := currentTimeNow.AddDate(0, 0, -1)

CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
CommitCountMap, mostActiveMap := queryCommitAction(start_unix, end_unix, 5)
IssueCountMap := queryCreateIssue(start_unix, end_unix)

CommentCountMap := queryComment(start_unix, end_unix)
@@ -764,13 +588,13 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
log.Info("query commit code errr.")
} else {
log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
//CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
//log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
}
//CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
CommitDatasetSizeMap, CommitDatasetNumMap, dataSetDownloadMap := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
CreateRepoCountMap, DetailInfoMap, MostDownloadMap := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)

OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix)
@@ -778,14 +602,19 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
RecommendDataset, CreatedDataset := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

InvitationMap := queryUserInvitationCount(start_unix, end_unix)

DataDate := currentTimeNow.Format("2006-01-02") + " 00:01"

bonusMap := make(map[string]map[string]int)
if tableName == "user_business_analysis_current_year" {
bonusMap = getBonusMap()
log.Info("truncate all data from table:user_summary_current_year ")
statictisSess.Exec("TRUNCATE TABLE user_summary_current_year")
}
cond := "type != 1 and is_active=true"
count, err := sess.Where(cond).Count(new(User))
if err != nil {
@@ -883,6 +712,37 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
userMetrics["TotalHasActivityUser"] = getMapKeyStringValue("TotalHasActivityUser", userMetrics) + 1
}
}
if tableName == "user_business_analysis_current_year" {
//年度数据
subTime := time.Now().UTC().Sub(dateRecordAll.RegistDate.AsTime().UTC())
mostActiveDay := ""
if userInfo, ok := mostActiveMap[dateRecordAll.ID]; ok {
mostActiveDay = getMostActiveJson(userInfo)
}
scoreMap := make(map[string]float64)
repoInfo := getRepoDetailInfo(DetailInfoMap, dateRecordAll.ID, MostDownloadMap)
dataSetInfo, datasetscore := getDataSetInfo(dateRecordAll.ID, CreatedDataset, dataSetDownloadMap, CommitDatasetNumMap, CollectedDataset)
scoreMap["datasetscore"] = datasetscore
codeInfo, codescore := getCodeInfo(dateRecordAll)
scoreMap["codescore"] = codescore
cloudBrainInfo := getCloudBrainInfo(dateRecordAll, CloudBrainTaskItemMap, scoreMap)
playARoll := getPlayARoll(bonusMap, dateRecordAll.Name, scoreMap)
re := &UserSummaryCurrentYear{
ID: dateRecordAll.ID,
Name: dateRecordAll.Name,
Email: dateRecordAll.Email,
Phone: dateRecordAll.Phone,
RegistDate: dateRecordAll.RegistDate,
DateCount: int(subTime.Hours()) / 24,
MostActiveDay: mostActiveDay,
RepoInfo: repoInfo,
DataSetInfo: dataSetInfo,
CodeInfo: codeInfo,
CloudBrainInfo: cloudBrainInfo,
PlayARoll: playARoll,
}
statictisSess.Insert(re)
}
}
if len(dateRecordBatch) > 0 {
err := insertTable(dateRecordBatch, tableName, statictisSess)
@@ -890,6 +750,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
if err != nil {
log.Info("insert all data failed." + err.Error())
}

}
indexTotal += PAGE_SIZE
if indexTotal >= count {
@@ -911,6 +772,204 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
log.Info("refresh data finished.tableName=" + tableName + " total record:" + fmt.Sprint(insertCount))
}

func getBonusMap() map[string]map[string]int {
bonusMap := make(map[string]map[string]int)
url := setting.RecommentRepoAddr + "bonus/record.txt"
content, err := GetContentFromPromote(url)
if err == nil {
filenames := strings.Split(content, "\n")
for i := 0; i < len(filenames); i++ {
url = setting.RecommentRepoAddr + "bonus/" + filenames[i]
csvContent, err1 := GetContentFromPromote(url)
if err1 == nil {
//read csv
lines := strings.Split(csvContent, "\n")
for j := 1; j < len(lines); j++ {
aLine := strings.Split(lines[j], ",")
if len(aLine) < 7 {
continue
}
userName := aLine[1]
//email := lines[2]
record, ok := bonusMap[userName]
if !ok {
record = make(map[string]int)
}
record["times"] = getMapKeyStringValue("times", record) + getIntValue(aLine[3])
record["total_bonus"] = getMapKeyStringValue("total_bonus", record) + getIntValue(aLine[4])
record["total_cardtime"] = getMapKeyStringValue("total_cardtime", record) + getIntValue(aLine[5])
record["total_giveup"] = getMapKeyStringValue("total_giveup", record) + getIntValue(aLine[6])
}
}
}
}
return bonusMap
}

func getIntValue(val string) int {
i, err := strconv.Atoi(val)
if err == nil {
return i
}
return 0
}

func getPlayARoll(bonusMap map[string]map[string]int, userName string, scoreMap map[string]float64) string {
bonusInfo := make(map[string]string)
record, ok := bonusMap[userName]
if ok {
rollscore := 0.0
bonusInfo["times"] = fmt.Sprint(record["times"])
if record["times"] >= 4 {
rollscore = float64(record["times"]) / float64(4)
}
scoreMap["rollscore"] = rollscore
bonusInfo["total_bonus"] = fmt.Sprint(record["total_bonus"])
bonusInfo["total_cardtime"] = fmt.Sprint(record["total_cardtime"])
bonusInfo["total_giveup"] = fmt.Sprint(record["total_giveup"])
bonusInfoJson, _ := json.Marshal(bonusInfo)
return string(bonusInfoJson)
} else {
return ""
}
}

func getCloudBrainInfo(dateRecordAll UserBusinessAnalysisAll, CloudBrainTaskItemMap map[string]int, scoreMap map[string]float64) string {
trainscore := 0.0
debugscore := 0.0
runtime := 0.0
if dateRecordAll.CloudBrainTaskNum > 0 {
cloudBrainInfo := make(map[string]string)
cloudBrainInfo["create_task_num"] = fmt.Sprint(dateRecordAll.CloudBrainTaskNum)
cloudBrainInfo["debug_task_num"] = fmt.Sprint(dateRecordAll.GpuDebugJob + dateRecordAll.NpuDebugJob)
if dateRecordAll.GpuDebugJob+dateRecordAll.NpuDebugJob >= 50 {
debugscore = float64(dateRecordAll.GpuDebugJob+dateRecordAll.NpuDebugJob) / float64(50)
}
cloudBrainInfo["train_task_num"] = fmt.Sprint(dateRecordAll.GpuTrainJob + dateRecordAll.NpuTrainJob)
if dateRecordAll.GpuTrainJob+dateRecordAll.NpuTrainJob >= 50 {
trainscore = float64(dateRecordAll.GpuTrainJob+dateRecordAll.NpuTrainJob) / float64(50)
}
cloudBrainInfo["inference_task_num"] = fmt.Sprint(dateRecordAll.NpuInferenceJob + CloudBrainTaskItemMap[fmt.Sprint(dateRecordAll.ID)+"_GpuInferenceJob"])
cloudBrainInfo["card_runtime"] = fmt.Sprint(dateRecordAll.CloudBrainRunTime)
if dateRecordAll.CloudBrainRunTime >= 100 {
runtime = float64(dateRecordAll.CloudBrainRunTime) / float64(100)
}
cloudBrainInfo["card_runtime_money"] = fmt.Sprint(dateRecordAll.CloudBrainRunTime * 5)
cloudBrainInfo["CloudBrainOne"] = fmt.Sprint(CloudBrainTaskItemMap[fmt.Sprint(dateRecordAll.ID)+"_CloudBrainOne"])
cloudBrainInfo["CloudBrainTwo"] = fmt.Sprint(CloudBrainTaskItemMap[fmt.Sprint(dateRecordAll.ID)+"_CloudBrainTwo"])
cloudBrainInfo["C2Net"] = fmt.Sprint(CloudBrainTaskItemMap[fmt.Sprint(dateRecordAll.ID)+"_C2Net"])

cloudBrainInfoJson, _ := json.Marshal(cloudBrainInfo)
scoreMap["trainscore"] = trainscore
scoreMap["debugscore"] = debugscore
scoreMap["runtime"] = runtime
return string(cloudBrainInfoJson)
} else {
scoreMap["trainscore"] = trainscore
scoreMap["debugscore"] = debugscore
scoreMap["runtime"] = runtime
return ""
}
}

func getCodeInfo(dateRecordAll UserBusinessAnalysisAll) (string, float64) {
if dateRecordAll.CommitCount > 0 {
codeInfo := make(map[string]string)
codeInfo["commit_count"] = fmt.Sprint(dateRecordAll.CommitCount)
codeInfo["commit_line"] = fmt.Sprint(dateRecordAll.CommitCodeSize)
score := 0.0
score = float64(dateRecordAll.CommitCodeSize) / float64(dateRecordAll.CommitCount) / float64(20000)
if score < (float64(dateRecordAll.CommitCount) / float64(100)) {
score = float64(dateRecordAll.CommitCount) / float64(100)
}
codeInfo["score"] = fmt.Sprintf("%.2f", score)

codeInfoJson, _ := json.Marshal(codeInfo)
return string(codeInfoJson), score
} else {
return "", 0
}
}

func getDataSetInfo(userId int64, CreatedDataset map[int64]int, dataSetDownloadMap map[int64]int, CommitDatasetNumMap map[int64]int, CollectedDataset map[int64]int) (string, float64) {
datasetInfo := make(map[string]string)
score := 0.0
if create_count, ok := CreatedDataset[userId]; ok {
datasetInfo["create_count"] = fmt.Sprint(create_count)
score = float64(create_count) / 10
}
if upload_count, ok := CommitDatasetNumMap[userId]; ok {
datasetInfo["upload_file_count"] = fmt.Sprint(upload_count)
}
if download_count, ok := dataSetDownloadMap[userId]; ok {
datasetInfo["download_count"] = fmt.Sprint(download_count)
}
if cllected_count, ok := CollectedDataset[userId]; ok {
datasetInfo["cllected_count"] = fmt.Sprint(cllected_count)
}

if len(datasetInfo) > 0 {
datasetInfoJson, _ := json.Marshal(datasetInfo)
return string(datasetInfoJson), score
} else {
return "", score
}
}

func getRepoDetailInfo(repoDetailInfoMap map[string]int, userId int64, mostDownload map[int64]string) string {
repoDetailInfo := make(map[string]string)
if total, ok := repoDetailInfoMap[fmt.Sprint(userId)+"_total"]; ok {
repoDetailInfo["repo_total"] = fmt.Sprint(total)
}
if private, ok := repoDetailInfoMap[fmt.Sprint(userId)+"_is_private"]; ok {
repoDetailInfo["repo_is_private"] = fmt.Sprint(private)
}
if public, ok := repoDetailInfoMap[fmt.Sprint(userId)+"_is_public"]; ok {
repoDetailInfo["repo_is_public"] = fmt.Sprint(public)
}
if download, ok := repoDetailInfoMap[fmt.Sprint(userId)+"_total_download"]; ok {
repoDetailInfo["repo_total_download"] = fmt.Sprint(download)
}
if mostdownload, ok := repoDetailInfoMap[fmt.Sprint(userId)+"_most_download"]; ok {
repoDetailInfo["repo_most_download_count"] = fmt.Sprint(mostdownload)
}
if mostdownloadName, ok := mostDownload[userId]; ok {
repoDetailInfo["repo_most_download_name"] = mostdownloadName
}
if len(repoDetailInfo) > 0 {
repoDetailInfoJson, _ := json.Marshal(repoDetailInfo)
return string(repoDetailInfoJson)
} else {
return ""
}
}

func getMostActiveJson(userInfo map[string]int) string {
mostActiveMap := make(map[string]string)
if day, ok := userInfo["hour_day"]; ok {
hour := userInfo["hour_hour"]
month := userInfo["hour_month"]
year := userInfo["hour_year"]
delete(userInfo, "hour_day")
delete(userInfo, "hour_hour")
delete(userInfo, "hour_month")
delete(userInfo, "hour_year")
mostActiveMap["before_dawn"] = fmt.Sprint(year) + "/" + fmt.Sprint(month) + "/" + fmt.Sprint(day) + " " + fmt.Sprint(hour)
}
max := 0
max_day := ""
for key, value := range userInfo {
if value > max {
max = value
max_day = key
}
}
mostActiveMap["most_active_day"] = max_day
mostActiveMap["most_active_num"] = fmt.Sprint(max)
mostActiveMapJson, _ := json.Marshal(mostActiveMap)
return string(mostActiveMapJson)
}

func updateUserIndex(tableName string, statictisSess *xorm.Session, userId int64, userIndex float64) {
updateSql := "UPDATE public." + tableName + " set user_index=" + fmt.Sprint(userIndex*100) + " where id=" + fmt.Sprint(userId)
statictisSess.Exec(updateSql)
@@ -997,7 +1056,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,

DataDate := CountDate.Format("2006-01-02")
CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
CommitCountMap, _ := queryCommitAction(start_unix, end_unix, 5)
IssueCountMap := queryCreateIssue(start_unix, end_unix)

CommentCountMap := queryComment(start_unix, end_unix)
@@ -1010,19 +1069,19 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
log.Info("query commit code errr.")
} else {
//log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
//CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
//log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
}
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

@@ -1490,41 +1549,65 @@ func queryPullRequest(start_unix int64, end_unix int64) map[int64]int {
return resultMap
}

func queryCommitAction(start_unix int64, end_unix int64, actionType int64) map[int64]int {
func queryCommitAction(start_unix int64, end_unix int64, actionType int64) (map[int64]int, map[int64]map[string]int) {
sess := x.NewSession()
defer sess.Close()
resultMap := make(map[int64]int)
cond := "user_id=act_user_id and op_type=" + fmt.Sprint(actionType) + " and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
mostActiveMap := make(map[int64]map[string]int)
cond := "user_id=act_user_id and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)

count, err := sess.Where(cond).Count(new(Action))
if err != nil {
log.Info("query action error. return.")
return resultMap
return resultMap, mostActiveMap
}

var indexTotal int64
indexTotal = 0
for {
sess.Select("id,user_id,op_type,act_user_id").Table("action").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
sess.Select("id,user_id,op_type,act_user_id,created_unix").Table("action").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
actionList := make([]*Action, 0)
sess.Find(&actionList)

log.Info("query action size=" + fmt.Sprint(len(actionList)))
for _, actionRecord := range actionList {
if _, ok := resultMap[actionRecord.UserID]; !ok {
resultMap[actionRecord.UserID] = 1
if int64(actionRecord.OpType) == actionType {
if _, ok := resultMap[actionRecord.UserID]; !ok {
resultMap[actionRecord.UserID] = 1
} else {
resultMap[actionRecord.UserID] += 1
}
}
key := getDate(actionRecord.CreatedUnix)
if _, ok := mostActiveMap[actionRecord.UserID]; !ok {
tmpMap := make(map[string]int)
tmpMap[key] = 1
mostActiveMap[actionRecord.UserID] = tmpMap
} else {
resultMap[actionRecord.UserID] += 1
mostActiveMap[actionRecord.UserID][key] = getMapKeyStringValue(key, mostActiveMap[actionRecord.UserID]) + 1
}
utcTime := actionRecord.CreatedUnix.AsTime()
hour := utcTime.Hour()
if hour >= 0 && hour <= 5 {
key = "hour_hour"
if getMapKeyStringValue(key, mostActiveMap[actionRecord.UserID]) < hour {
mostActiveMap[actionRecord.UserID][key] = hour
mostActiveMap[actionRecord.UserID]["hour_day"] = utcTime.Day()
mostActiveMap[actionRecord.UserID]["hour_month"] = int(utcTime.Month())
mostActiveMap[actionRecord.UserID]["hour_year"] = utcTime.Year()
}
}
}

indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}

return resultMap
return resultMap, mostActiveMap
}
func getDate(createTime timeutil.TimeStamp) string {
return createTime.Format("2006-01-02")
}

func queryCreateIssue(start_unix int64, end_unix int64) map[int64]int {
@@ -1714,15 +1797,16 @@ func queryFollow(start_unix int64, end_unix int64) (map[int64]int, map[int64]int
return resultMap, resultFocusedByOtherMap
}

func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int {
func queryRecommedDataSet(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
sess := x.NewSession()
defer sess.Close()
userIdDdatasetMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and recommend=true"
userIdRecommentDatasetMap := make(map[int64]int)
userIdCreateDatasetMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
count, err := sess.Where(cond).Count(new(Dataset))
if err != nil {
log.Info("query recommend dataset error. return.")
return userIdDdatasetMap
return userIdRecommentDatasetMap, userIdCreateDatasetMap
}
var indexTotal int64
indexTotal = 0
@@ -1732,18 +1816,21 @@ func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int {
sess.Find(&datasetList)
log.Info("query datasetList size=" + fmt.Sprint(len(datasetList)))
for _, datasetRecord := range datasetList {
if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok {
userIdDdatasetMap[datasetRecord.UserID] = 1
} else {
userIdDdatasetMap[datasetRecord.UserID] += 1
if datasetRecord.Recommend {
if _, ok := userIdRecommentDatasetMap[datasetRecord.UserID]; !ok {
userIdRecommentDatasetMap[datasetRecord.UserID] = 1
} else {
userIdRecommentDatasetMap[datasetRecord.UserID] += 1
}
}
userIdCreateDatasetMap[datasetRecord.UserID] = getMapValue(datasetRecord.UserID, userIdCreateDatasetMap) + 1
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return userIdDdatasetMap
return userIdRecommentDatasetMap, userIdCreateDatasetMap
}

func queryAllDataSet() (map[int64]int64, map[int64]int64) {
@@ -1922,22 +2009,23 @@ func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64
return imageCollect, imageCollected
}

func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int, map[int64]int) {
sess := x.NewSession()
defer sess.Close()
resultSizeMap := make(map[int64]int)
resultNumMap := make(map[int64]int)
resultDownloadMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)

count, err := sess.Where(cond).Count(new(Attachment))
if err != nil {
log.Info("query attachment error. return.")
return resultSizeMap, resultNumMap
return resultSizeMap, resultNumMap, resultDownloadMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,uploader_id,size").Table("attachment").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
sess.Select("id,uploader_id,size,download_count").Table("attachment").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
attachmentList := make([]*Attachment, 0)
sess.Find(&attachmentList)

@@ -1946,9 +2034,11 @@ func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int6
if _, ok := resultSizeMap[attachRecord.UploaderID]; !ok {
resultSizeMap[attachRecord.UploaderID] = int(attachRecord.Size / (1024 * 1024)) //MB
resultNumMap[attachRecord.UploaderID] = 1
resultDownloadMap[attachRecord.UploaderID] = int(attachRecord.DownloadCount)
} else {
resultSizeMap[attachRecord.UploaderID] += int(attachRecord.Size / (1024 * 1024)) //MB
resultNumMap[attachRecord.UploaderID] += 1
resultDownloadMap[attachRecord.UploaderID] += int(attachRecord.DownloadCount)
}
}

@@ -1958,32 +2048,50 @@ func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int6
}
}

return resultSizeMap, resultNumMap
return resultSizeMap, resultNumMap, resultDownloadMap
}

func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int {
func queryUserCreateRepo(start_unix int64, end_unix int64) (map[int64]int, map[string]int, map[int64]string) {
sess := x.NewSession()
defer sess.Close()
resultMap := make(map[int64]int)

detailInfoMap := make(map[string]int)
mostDownloadMap := make(map[int64]string)

cond := "is_fork=false and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
count, err := sess.Where(cond).Count(new(Repository))
if err != nil {
log.Info("query Repository error. return.")
return resultMap
return resultMap, detailInfoMap, mostDownloadMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,owner_id,name").Table("repository").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
sess.Select("id,owner_id,name,is_private,clone_cnt").Table("repository").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
repoList := make([]*Repository, 0)
sess.Find(&repoList)
log.Info("query Repository size=" + fmt.Sprint(len(repoList)))
for _, repoRecord := range repoList {
if _, ok := resultMap[repoRecord.OwnerID]; !ok {
resultMap[repoRecord.OwnerID] = 1
resultMap[repoRecord.OwnerID] = getMapValue(repoRecord.OwnerID, resultMap) + 1

key := fmt.Sprint(repoRecord.OwnerID) + "_total"
detailInfoMap[key] = getMapKeyStringValue(key, detailInfoMap) + 1

if repoRecord.IsPrivate {
key := fmt.Sprint(repoRecord.OwnerID) + "_is_private"
detailInfoMap[key] = getMapKeyStringValue(key, detailInfoMap) + 1
} else {
resultMap[repoRecord.OwnerID] += 1
key := fmt.Sprint(repoRecord.OwnerID) + "_is_public"
detailInfoMap[key] = getMapKeyStringValue(key, detailInfoMap) + 1
}
key = fmt.Sprint(repoRecord.OwnerID) + "_total_download"
detailInfoMap[key] = getMapKeyStringValue(key, detailInfoMap) + int(repoRecord.CloneCnt)

key = fmt.Sprint(repoRecord.OwnerID) + "_most_download"
if int(repoRecord.CloneCnt) > getMapKeyStringValue(key, detailInfoMap) {
detailInfoMap[key] = int(repoRecord.CloneCnt)
mostDownloadMap[repoRecord.OwnerID] = repoRecord.DisplayName()
}
}
indexTotal += PAGE_SIZE
@@ -1992,7 +2100,7 @@ func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int {
}
}

return resultMap
return resultMap, detailInfoMap, mostDownloadMap
}

func queryUserRepoOpenIIndex(start_unix int64, end_unix int64) map[int64]float64 {
@@ -2180,6 +2288,7 @@ func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[s
setMapKey("CloudBrainRunTime", cloudTaskRecord.UserID, int(cloudTaskRecord.Duration), resultItemMap)
}
if cloudTaskRecord.Type == 1 { //npu
setMapKey("CloudBrainTwo", cloudTaskRecord.UserID, 1, resultItemMap)
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("NpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else if cloudTaskRecord.JobType == "INFERENCE" {
@@ -2187,14 +2296,32 @@ func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[s
} else {
setMapKey("NpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
} else { //type=0 gpu
} else if cloudTaskRecord.Type == 0 { //type=0 gpu
setMapKey("CloudBrainOne", cloudTaskRecord.UserID, 1, resultItemMap)
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("GpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else if cloudTaskRecord.JobType == "INFERENCE" {
setMapKey("GpuInferenceJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else if cloudTaskRecord.JobType == "BENCHMARK" {
setMapKey("GpuBenchMarkJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else {
setMapKey("GpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
} else if cloudTaskRecord.Type == 2 {
setMapKey("C2Net", cloudTaskRecord.UserID, 1, resultItemMap)
if cloudTaskRecord.ComputeResource == NPUResource {
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("NpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else {
setMapKey("NpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
} else if cloudTaskRecord.ComputeResource == GPUResource {
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("GpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else {
setMapKey("GpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
}
}
}
indexTotal += PAGE_SIZE
@@ -2274,3 +2401,26 @@ func subMonth(t1, t2 time.Time) (month int) {
}
return month
}

func GetContentFromPromote(url string) (string, error) {
defer func() {
if err := recover(); err != nil {
log.Info("recover from panic in GetContentFromPromote: %v", err)
}
}()
resp, err := http.Get(url)
if err != nil {
log.Info("Get organizations url error=" + err.Error())
return "", err
}
if resp.StatusCode != 200 {
resp.Body.Close()
log.Info("Get organizations url failed, status code=" + fmt.Sprint(resp.StatusCode))
return "", fmt.Errorf("get organizations url failed, status code %d", resp.StatusCode)
}

bytes, err := ioutil.ReadAll(resp.Body)
resp.Body.Close()
if err != nil {
log.Info("Get organizations url error=" + err.Error())
return "", err
}
allLineStr := string(bytes)
return allLineStr, nil
}
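A minimal caller sketch for the helper above; the helper name getPromoteOrgNames and the line-splitting logic are illustrative assumptions (it presumes the promote endpoint returns one entry per line and that the strings package is imported), not part of this change:

// Illustrative caller: fetch promoted content and split it into trimmed, non-empty lines.
// Only GetContentFromPromote comes from this commit.
func getPromoteOrgNames(url string) ([]string, error) {
    content, err := GetContentFromPromote(url)
    if err != nil {
        return nil, err
    }
    names := make([]string, 0)
    for _, line := range strings.Split(content, "\n") {
        if trimmed := strings.TrimSpace(line); trimmed != "" {
            names = append(names, trimmed)
        }
    }
    return names, nil
}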

+ 200
- 0
models/user_business_struct.go View File

@@ -2,6 +2,27 @@ package models

import "code.gitea.io/gitea/modules/timeutil"

type UserSummaryCurrentYear struct {
ID int64 `xorm:"pk"`
Email string `xorm:"NOT NULL"`
//user
Name string `xorm:"NOT NULL"`
Phone string `xorm:"NULL"`
//user
RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

DateCount int `xorm:"NOT NULL DEFAULT 0"`
MostActiveDay string `xorm:" NULL "` //08.05
RepoInfo string `xorm:"varchar(1000)"` //created XX repositories (XX public, XX private), downloaded XXX times in total; the 《XXXXXXX》 repository received the most downloads (XXX)
DataSetInfo string `xorm:"varchar(500)"` //created XX datasets, uploaded XX dataset files, downloaded XX times and starred XX times in total
CodeInfo string `xorm:"varchar(500)"` //number of commits, total lines of code committed, latest commit time
CloudBrainInfo string `xorm:"varchar(1000)"` //created XX cloudbrain tasks: XX debug, XX train, XX inference; ran XXXX card-hours in total, saving xxxxx yuan
//of these free compute resources, XX% came from Pengcheng Cloud Brain 1, XX% from Pengcheng Cloud Brain 2 and XX% from the AI computing network (C2Net)
PlayARoll string `xorm:"varchar(500)"` //you joined the "我为开源打榜狂" campaign XX times, made the leaderboard XX times and earned a total of XXX yuan in community incentives

Label string `xorm:"varchar(500)"`
}

type UserBusinessAnalysisCurrentYear struct {
ID int64 `xorm:"pk"`
CountDate int64 `xorm:"pk"`
@@ -505,3 +526,182 @@ type UserMetrics struct {
ActivityUserJson string `xorm:"text NULL"` //list of activated users
CurrentDayRegistUser int `xorm:"NOT NULL DEFAULT 0"` //users registered on the current day
}

type UserBusinessAnalysisAll struct {
ID int64 `xorm:"pk"`

CountDate int64 `xorm:"pk"`

//action :ActionMergePullRequest // 11
CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCommitRepo // 5
CommitCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCreateIssue // 10
IssueCount int `xorm:"NOT NULL DEFAULT 0"`

//comment table current date
CommentCount int `xorm:"NOT NULL DEFAULT 0"`

//watch table current date
FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//star table current date
StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//follow table
WatchedCount int `xorm:"NOT NULL DEFAULT 0"`

// user table
GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`

//
CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`

//attachement table
CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`

//0
CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`

//issue, issueassignees
SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`

//baike
EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`

//user
RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

//repo
CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//login count, from elk
LoginCount int `xorm:"NOT NULL DEFAULT 0"`

//openi index
OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`

//user
Email string `xorm:"NOT NULL"`

//user
Name string `xorm:"NOT NULL"`

DataDate string `xorm:"NULL"`

//cloudbraintask
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`

Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysis struct {
ID int64 `xorm:"pk"`
DataDate string `xorm:"pk"`
CountDate int64 `xorm:"NULL"`

//action :ActionMergePullRequest // 11
CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCommitRepo // 5
CommitCount int `xorm:"NOT NULL DEFAULT 0"`

//action :ActionCreateIssue // 6
IssueCount int `xorm:"NOT NULL DEFAULT 0"`

//comment table current date
CommentCount int `xorm:"NOT NULL DEFAULT 0"`

//watch table current date
FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//star table current date
StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//follow table
WatchedCount int `xorm:"NOT NULL DEFAULT 0"`

// user table
GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`

//
CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`

//attachement table
CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`

//0
CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`

//issue, issueassignees
SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`

//baike
EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`

//user
RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`

//repo
CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`

//login count, from elk
LoginCount int `xorm:"NOT NULL DEFAULT 0"`

//openi index
OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`

//user
Email string `xorm:"NOT NULL"`

//user
Name string `xorm:"NOT NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`

Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
}
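These are plain xorm-mapped tables; a minimal sketch of how such a summary row would typically be registered and looked up (the engine wiring and helper names are assumptions for illustration, and the xorm import path depends on the vendored xorm version — this is not code from this commit):

// Sketch only: register the new table and fetch one user's yearly summary.
// x is assumed to be the package-level xorm engine the models package already uses.
func initUserSummaryCurrentYear(x *xorm.Engine) error {
    return x.Sync2(new(UserSummaryCurrentYear))
}

func getUserSummaryCurrentYear(x *xorm.Engine, userID int64) (*UserSummaryCurrentYear, bool, error) {
    summary := &UserSummaryCurrentYear{ID: userID}
    has, err := x.Get(summary)
    return summary, has, err
}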

+ 20
- 23
modules/auth/modelarts.go View File

@@ -57,29 +57,26 @@ type CreateModelArtsTrainJobForm struct {
}

type CreateModelArtsInferenceJobForm struct {
DisplayJobName string `form:"display_job_name" binding:"Required"`
JobName string `form:"job_name" binding:"Required"`
Attachment string `form:"attachment" binding:"Required"`
BootFile string `form:"boot_file" binding:"Required"`
WorkServerNumber int `form:"work_server_number" binding:"Required"`
EngineID int `form:"engine_id" binding:"Required"`
PoolID string `form:"pool_id" binding:"Required"`
Flavor string `form:"flavor" binding:"Required"`
Params string `form:"run_para_list" binding:"Required"`
Description string `form:"description"`
IsSaveParam string `form:"is_save_para"`
ParameterTemplateName string `form:"parameter_template_name"`
PrameterDescription string `form:"parameter_description"`
BranchName string `form:"branch_name" binding:"Required"`
VersionName string `form:"version_name" binding:"Required"`
FlavorName string `form:"flaver_names" binding:"Required"`
EngineName string `form:"engine_names" binding:"Required"`
LabelName string `form:"label_names" binding:"Required"`
TrainUrl string `form:"train_url" binding:"Required"`
ModelName string `form:"model_name" binding:"Required"`
ModelVersion string `form:"model_version" binding:"Required"`
CkptName string `form:"ckpt_name" binding:"Required"`
SpecId int64 `form:"spec_id" binding:"Required"`
DisplayJobName string `form:"display_job_name" binding:"Required"`
JobName string `form:"job_name" binding:"Required"`
Attachment string `form:"attachment" binding:"Required"`
BootFile string `form:"boot_file" binding:"Required"`
WorkServerNumber int `form:"work_server_number" binding:"Required"`
EngineID int `form:"engine_id" binding:"Required"`
PoolID string `form:"pool_id" binding:"Required"`
Flavor string `form:"flavor" binding:"Required"`
Params string `form:"run_para_list" binding:"Required"`
Description string `form:"description"`
BranchName string `form:"branch_name" binding:"Required"`
VersionName string `form:"version_name" binding:"Required"`
FlavorName string `form:"flaver_names" binding:"Required"`
EngineName string `form:"engine_names" binding:"Required"`
LabelName string `form:"label_names" binding:"Required"`
TrainUrl string `form:"train_url" binding:"Required"`
ModelName string `form:"model_name" binding:"Required"`
ModelVersion string `form:"model_version" binding:"Required"`
CkptName string `form:"ckpt_name" binding:"Required"`
SpecId int64 `form:"spec_id" binding:"Required"`
}

func (f *CreateModelArtsTrainJobForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {


+ 6
- 6
modules/cloudbrain/cloudbrain.go View File

@@ -228,7 +228,7 @@ func AdminOrImageCreaterRight(ctx *context.Context) {

}

func GenerateTask(req GenerateCloudBrainTaskReq) error {
func GenerateTask(req GenerateCloudBrainTaskReq) (string, error) {
var versionCount int
if req.JobType == string(models.JobTypeTrain) {
versionCount = 1
@@ -335,11 +335,11 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
})
if err != nil {
log.Error("CreateJob failed:", err.Error(), req.Ctx.Data["MsgID"])
return err
return "", err
}
if jobResult.Code != Success {
log.Error("CreateJob(%s) failed:%s", req.JobName, jobResult.Msg, req.Ctx.Data["MsgID"])
return errors.New(jobResult.Msg)
return "", errors.New(jobResult.Msg)
}

var jobID = jobResult.Payload["jobId"].(string)
@@ -380,13 +380,13 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
})

if err != nil {
return err
return "", err
}

task, err := models.GetCloudbrainByJobID(jobID)
if err != nil {
log.Error("GetCloudbrainByJobID failed: %v", err.Error())
return err
return "", err
}

stringId := strconv.FormatInt(task.ID, 10)
@@ -401,7 +401,7 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
notification.NotifyOtherTask(req.Ctx.User, req.Ctx.Repo.Repository, stringId, req.DisplayJobName, models.ActionCreateDebugGPUTask)
}

return nil
return jobID, nil
}

func IsBenchmarkJob(jobType string) bool {


+ 111
- 0
modules/convert/cloudbrain.go View File

@@ -0,0 +1,111 @@
package convert

import (
"code.gitea.io/gitea/models"
api "code.gitea.io/gitea/modules/structs"
)

func ToCloudBrain(task *models.Cloudbrain) *api.Cloudbrain {
return &api.Cloudbrain{
ID: task.ID,
JobID: task.JobID,
JobType: task.JobType,
Type: task.Type,
DisplayJobName: task.DisplayJobName,
Status: task.Status,
CreatedUnix: int64(task.CreatedUnix),
RepoID: task.RepoID,
Duration: task.Duration,
TrainJobDuration: task.TrainJobDuration,
ImageID: task.ImageID,
Image: task.Image,
Uuid: task.Uuid,
DatasetName: task.DatasetName,
ComputeResource: task.ComputeResource,
AiCenter: task.AiCenter,
BranchName: task.BranchName,
Parameters: task.Parameters,
BootFile: task.BootFile,
Description: task.Description,
ModelName: task.ModelName,

ModelVersion: task.ModelVersion,
CkptName: task.CkptName,

StartTime: int64(task.StartTime),
EndTime: int64(task.EndTime),

Spec: ToSpecification(task.Spec),
}
}
func ToAttachment(attachment *models.Attachment) *api.AttachmentShow {
return &api.AttachmentShow{
ID: attachment.ID,
UUID: attachment.UUID,
DatasetID: attachment.DatasetID,
ReleaseID: attachment.ReleaseID,
UploaderID: attachment.UploaderID,
CommentID: attachment.CommentID,
Name: attachment.Name,
Description: attachment.Description,
DownloadCount: attachment.DownloadCount,
UseNumber: attachment.UseNumber,
Size: attachment.Size,
IsPrivate: attachment.IsPrivate,
DecompressState: attachment.DecompressState,
Type: attachment.Type,
CreatedUnix: int64(attachment.CreatedUnix),
}
}

func ToDataset(dataset *models.Dataset) *api.Dataset {
var convertAttachments []*api.AttachmentShow
for _, attachment := range dataset.Attachments {
convertAttachments = append(convertAttachments, ToAttachment(attachment))
}
return &api.Dataset{
ID: dataset.ID,
Title: dataset.Title,
Status: dataset.Status,
Category: dataset.Category,
Description: dataset.Description,
DownloadTimes: dataset.DownloadTimes,
UseCount: dataset.UseCount,
NumStars: dataset.NumStars,
Recommend: dataset.Recommend,
License: dataset.License,
Task: dataset.Task,
ReleaseID: dataset.ReleaseID,
UserID: dataset.UserID,
RepoID: dataset.RepoID,
Repo: &api.RepositoryShow{
OwnerName: dataset.Repo.OwnerName,
Name: dataset.Repo.Name,
},
CreatedUnix: int64(dataset.CreatedUnix),
UpdatedUnix: int64(dataset.UpdatedUnix),
Attachments: convertAttachments,
}
}

func ToSpecification(s *models.Specification) *api.SpecificationShow {
return &api.SpecificationShow{
ID: s.ID,
AccCardsNum: s.AccCardsNum,
AccCardType: s.AccCardType,
CpuCores: s.CpuCores,
MemGiB: s.MemGiB,
GPUMemGiB: s.GPUMemGiB,
ShareMemGiB: s.ShareMemGiB,
ComputeResource: s.ComputeResource,
UnitPrice: s.UnitPrice,
}
}

func ToTagger(user *models.User) *api.Tagger {
return &api.Tagger{
Name: user.Name,
RelAvatarURL: user.RelAvatarLink(),
Email: user.Email,
}
}
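A small companion sketch built on the converter above, e.g. for list endpoints; ToCloudBrainList is a hypothetical helper and is not part of this diff:

// Hypothetical helper: convert a slice of tasks the same way ToCloudBrain converts one.
func ToCloudBrainList(tasks []*models.Cloudbrain) []*api.Cloudbrain {
    result := make([]*api.Cloudbrain, 0, len(tasks))
    for _, task := range tasks {
        result = append(result, ToCloudBrain(task))
    }
    return result
}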

+ 4
- 4
modules/grampus/grampus.go View File

@@ -102,7 +102,7 @@ func getDatasetGrampus(datasetInfos map[string]models.DatasetInfo) []models.Gram
return datasetGrampus
}

func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error) {
func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) {
createTime := timeutil.TimeStampNow()

centerID, centerName := getCentersParamter(ctx, req)
@@ -150,7 +150,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
})
if err != nil {
log.Error("createJob failed: %v", err.Error())
return err
return "", err
}

jobID := jobResult.JobInfo.JobID
@@ -191,7 +191,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error

if err != nil {
log.Error("CreateCloudbrain(%s) failed:%v", req.DisplayJobName, err.Error())
return err
return "", err
}

var actionType models.ActionType
@@ -202,7 +202,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
}
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, actionType)

return nil
return jobID, nil
}

func getCentersParamter(ctx *context.Context, req *GenerateTrainJobReq) ([]string, []string) {


+ 26
- 0
modules/grampus/resty.go View File

@@ -245,6 +245,32 @@ func GetTrainJobLog(jobID string) (string, error) {
return logContent, nil
}

func GetGrampusMetrics(jobID string) (models.GetTrainJobMetricStatisticResult, error) {
checkSetting()
client := getRestyClient()
var result models.GetTrainJobMetricStatisticResult
res, err := client.R().
SetAuthToken(TOKEN).
Get(HOST + urlTrainJob + "/" + jobID + "/task/0/replica/0/metrics")

if err != nil {
return result, fmt.Errorf("resty GetTrainJobLog: %v", err)
}
if err = json.Unmarshal([]byte(res.String()), &result); err != nil {
log.Error("GetGrampusMetrics json.Unmarshal failed(%s): %v", res.String(), err.Error())
return result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error())
}
if res.StatusCode() != http.StatusOK {
log.Error("Call GrampusMetrics failed(%d):%s(%s)", res.StatusCode(), result.ErrorCode, result.ErrorMsg)
return result, fmt.Errorf("Call GrampusMetrics failed(%d):%d(%s)", res.StatusCode(), result.ErrorCode, result.ErrorMsg)
}
if !result.IsSuccess {
log.Error("GetGrampusMetrics(%s) failed", jobID)
return result, fmt.Errorf("GetGrampusMetrics failed:%s", result.ErrorMsg)
}
return result, nil
}
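The route added later in this diff (m.Get("/metrics", repo_ext.GrampusMetrics)) calls into this function; a rough handler sketch under the assumption that it simply proxies the result as JSON and that the job id path parameter is :jobid — the real repo_ext.GrampusMetrics is not shown in this changeset:

// Rough sketch of a handler wiring GetGrampusMetrics into the /metrics route.
// Error handling and response shape here are assumptions, not the shipped handler.
func GrampusMetrics(ctx *context.Context) {
    jobID := ctx.Params(":jobid")
    result, err := grampus.GetGrampusMetrics(jobID)
    if err != nil {
        ctx.JSON(http.StatusOK, map[string]interface{}{"code": 1, "message": err.Error()})
        return
    }
    ctx.JSON(http.StatusOK, map[string]interface{}{"code": 0, "data": result})
}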

func StopJob(jobID string) (*models.GrampusStopJobResponse, error) {
checkSetting()
client := getRestyClient()


+ 13
- 13
modules/modelarts/modelarts.go View File

@@ -350,7 +350,7 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc
return nil
}

func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error) {
func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) {
createTime := timeutil.TimeStampNow()
var jobResult *models.CreateTrainJobResult
var createErr error
@@ -410,17 +410,17 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
})
if errTemp != nil {
log.Error("InsertCloudbrainTemp failed: %v", errTemp.Error())
return errTemp
return "", errTemp
}
}
return createErr
return "", createErr
}
jobId := strconv.FormatInt(jobResult.JobID, 10)
jobID := strconv.FormatInt(jobResult.JobID, 10)
createErr = models.CreateCloudbrain(&models.Cloudbrain{
Status: TransTrainJobStatus(jobResult.Status),
UserID: ctx.User.ID,
RepoID: ctx.Repo.Repository.ID,
JobID: jobId,
JobID: jobID,
JobName: req.JobName,
DisplayJobName: req.DisplayJobName,
JobType: string(models.JobTypeTrain),
@@ -458,10 +458,10 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error

if createErr != nil {
log.Error("CreateCloudbrain(%s) failed:%v", req.DisplayJobName, createErr.Error())
return createErr
return "", createErr
}
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobId, req.DisplayJobName, models.ActionCreateTrainTask)
return nil
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, models.ActionCreateTrainTask)
return jobID, nil
}

func GenerateModelConvertTrainJob(req *GenerateTrainJobReq) (*models.CreateTrainJobResult, error) {
@@ -682,7 +682,7 @@ func GetOutputPathByCount(TotalVersionCount int) (VersionOutputPath string) {
return VersionOutputPath
}

func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (err error) {
func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (jobId string, err error) {
createTime := timeutil.TimeStampNow()
var jobResult *models.CreateTrainJobResult
var createErr error
@@ -742,10 +742,10 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e
})
if err != nil {
log.Error("InsertCloudbrainTemp failed: %v", err.Error())
return err
return "", err
}
}
return err
return "", err
}

// attach, err := models.GetAttachmentByUUID(req.Uuid)
@@ -796,7 +796,7 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e

if err != nil {
log.Error("CreateCloudbrain(%s) failed:%v", req.JobName, err.Error())
return err
return "", err
}
if req.JobType == string(models.JobTypeModelSafety) {
task, err := models.GetCloudbrainByJobID(jobID)
@@ -807,7 +807,7 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, models.ActionCreateInferenceTask)
}

return nil
return jobID, nil
}

func GetNotebookImageName(imageId string) (string, error) {


+ 7
- 7
modules/setting/setting.go View File

@@ -609,9 +609,9 @@ var (
AiCenterInfo string
}{}

C2NetInfos *C2NetSqInfos
CenterInfos *AiCenterInfos
C2NetMapInfo map[string]*C2NetSequenceInfo
C2NetInfos *C2NetSqInfos
CenterInfos *AiCenterInfos
C2NetMapInfo map[string]*C2NetSequenceInfo

//elk config
ElkUrl string
@@ -1451,7 +1451,7 @@ func NewContext() {
MaxDuration = sec.Key("MAX_DURATION").MustInt64(14400)
TrainGpuTypes = sec.Key("TRAIN_GPU_TYPES").MustString("")
TrainResourceSpecs = sec.Key("TRAIN_RESOURCE_SPECS").MustString("")
MaxModelSize = sec.Key("MAX_MODEL_SIZE").MustFloat64(500)
MaxModelSize = sec.Key("MAX_MODEL_SIZE").MustFloat64(200)
InferenceGpuTypes = sec.Key("INFERENCE_GPU_TYPES").MustString("")
InferenceResourceSpecs = sec.Key("INFERENCE_RESOURCE_SPECS").MustString("")
SpecialPools = sec.Key("SPECIAL_POOL").MustString("")
@@ -1655,9 +1655,9 @@ func getGrampusConfig() {
if err := json.Unmarshal([]byte(Grampus.C2NetSequence), &C2NetInfos); err != nil {
log.Error("Unmarshal(C2NetSequence) failed:%v", err)
}
C2NetMapInfo=make(map[string]*C2NetSequenceInfo)
for _,value :=range C2NetInfos.C2NetSqInfo{
C2NetMapInfo[value.Name]=value
C2NetMapInfo = make(map[string]*C2NetSequenceInfo)
for _, value := range C2NetInfos.C2NetSqInfo {
C2NetMapInfo[value.Name] = value
}
}
Grampus.SyncScriptProject = sec.Key("SYNC_SCRIPT_PROJECT").MustString("script_for_grampus")


+ 2
- 2
modules/storage/minio.go View File

@@ -144,8 +144,8 @@ func (m *MinioStorage) HasObject(path string) (bool, error) {

// Indicate to our routine to exit cleanly upon return.
defer close(doneCh)
objectCh := m.client.ListObjects(m.bucket, m.buildMinioPath(path), false, doneCh)
//objectCh := m.client.ListObjects(m.bucket, m.buildMinioPath(path), false, doneCh)
objectCh := m.client.ListObjects(m.bucket, path, false, doneCh)
for object := range objectCh {
if object.Err != nil {
return hasObject, object.Err


+ 26
- 10
modules/storage/minio_ext.go View File

@@ -3,7 +3,6 @@ package storage
import (
"encoding/xml"
"errors"
"path"
"sort"
"strconv"
"strings"
@@ -101,7 +100,7 @@ func getClients() (*minio_ext.Client, *miniov6.Core, error) {
return client, core, nil
}

func GenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, partSize int64) (string, error) {
func GenMultiPartSignedUrl(objectName string, uploadId string, partNumber int, partSize int64) (string, error) {
minioClient, _, err := getClients()
if err != nil {
log.Error("getClients failed:", err.Error())
@@ -110,7 +109,7 @@ func GenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, partSiz

minio := setting.Attachment.Minio
bucketName := minio.Bucket
objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
//objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")

return minioClient.GenUploadPartSignedUrl(uploadId, bucketName, objectName, partNumber, partSize, PresignedUploadPartUrlExpireTime, setting.Attachment.Minio.Location)
}
@@ -268,6 +267,23 @@ func MinioCopyFiles(bucketName string, srcPath string, destPath string, Files []
return fileTotalSize, nil
}

func MinioCopyAFile(srcBucketName, srcObjectName, destBucketName, destObjectName string) (int64, error) {
_, core, err := getClients()
var fileTotalSize int64
fileTotalSize = 0
if err != nil {
log.Error("getClients failed:", err.Error())
return fileTotalSize, err
}
meta, err := core.StatObject(srcBucketName, srcObjectName, miniov6.StatObjectOptions{})
if err != nil {
log.Info("Get file error:" + err.Error())
return fileTotalSize, err
}
if _, err := core.CopyObject(srcBucketName, srcObjectName, destBucketName, destObjectName, meta.UserMetadata); err != nil {
log.Info("Copy file error:" + err.Error())
return fileTotalSize, err
}
fileTotalSize = meta.Size
return fileTotalSize, nil
}
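Because these multipart helpers now take a ready-made object name instead of a uuid, callers are expected to build the key themselves. A sketch of that, assuming the same basePath/uuid[0:1]/uuid[1:2]/uuid layout the commented-out lines used and that "path" and "strings" are imported where this lives (names below are illustrative):

// Sketch only: reproduce the old uuid-based key layout on the caller side and
// start a multipart upload through the new objectName-based API.
func startMultipartForUUID(uuid string) (uploadID string, objectName string, err error) {
    basePath := setting.Attachment.Minio.BasePath
    objectName = strings.TrimPrefix(path.Join(basePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
    uploadID, err = NewMultiPartUpload(objectName)
    return uploadID, objectName, err
}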

func MinioPathCopy(bucketName string, srcPath string, destPath string) (int64, error) {
_, core, err := getClients()
var fileTotalSize int64
@@ -301,7 +317,7 @@ func MinioPathCopy(bucketName string, srcPath string, destPath string) (int64, e
return fileTotalSize, nil
}

func NewMultiPartUpload(uuid string) (string, error) {
func NewMultiPartUpload(objectName string) (string, error) {
_, core, err := getClients()
if err != nil {
log.Error("getClients failed:", err.Error())
@@ -310,12 +326,12 @@ func NewMultiPartUpload(uuid string) (string, error) {

minio := setting.Attachment.Minio
bucketName := minio.Bucket
objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
//objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")

return core.NewMultipartUpload(bucketName, objectName, miniov6.PutObjectOptions{})
}

func CompleteMultiPartUpload(uuid string, uploadID string, totalChunks int) (string, error) {
func CompleteMultiPartUpload(objectName string, uploadID string, totalChunks int) (string, error) {
client, core, err := getClients()
if err != nil {
log.Error("getClients failed:", err.Error())
@@ -324,8 +340,8 @@ func CompleteMultiPartUpload(uuid string, uploadID string, totalChunks int) (str

minio := setting.Attachment.Minio
bucketName := minio.Bucket
objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
//objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
log.Info("bucketName=" + bucketName + " objectName=" + objectName + " uploadID=" + uploadID)
partInfos, err := client.ListObjectParts(bucketName, objectName, uploadID)
if err != nil {
log.Error("ListObjectParts failed:", err.Error())
@@ -351,7 +367,7 @@ func CompleteMultiPartUpload(uuid string, uploadID string, totalChunks int) (str
return core.CompleteMultipartUpload(bucketName, objectName, uploadID, complMultipartUpload.Parts)
}

func GetPartInfos(uuid string, uploadID string) (string, error) {
func GetPartInfos(objectName string, uploadID string) (string, error) {
minioClient, _, err := getClients()
if err != nil {
log.Error("getClients failed:", err.Error())
@@ -360,7 +376,7 @@ func GetPartInfos(uuid string, uploadID string) (string, error) {

minio := setting.Attachment.Minio
bucketName := minio.Bucket
objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
//objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")

partInfos, err := minioClient.ListObjectParts(bucketName, objectName, uploadID)
if err != nil {


+ 24
- 25
modules/storage/obs.go View File

@@ -90,17 +90,16 @@ func listAllParts(uuid, uploadID, key string) (output *obs.ListPartsOutput, err
} else {
continue
}

break
}

return output, nil
}

func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) {
key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
func GetObsPartInfos(objectName, uploadID string) (string, error) {
key := objectName
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")

allParts, err := listAllParts(uuid, uploadID, key)
allParts, err := listAllParts(objectName, uploadID, key)
if err != nil {
log.Error("listAllParts failed: %v", err)
return "", err
@@ -114,10 +113,11 @@ func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) {
return chunks, nil
}

func NewObsMultiPartUpload(uuid, fileName string) (string, error) {
func NewObsMultiPartUpload(objectName string) (string, error) {
input := &obs.InitiateMultipartUploadInput{}
input.Bucket = setting.Bucket
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Key = objectName
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")

output, err := ObsCli.InitiateMultipartUpload(input)
if err != nil {
@@ -128,13 +128,14 @@ func NewObsMultiPartUpload(uuid, fileName string) (string, error) {
return output.UploadId, nil
}

func CompleteObsMultiPartUpload(uuid, uploadID, fileName string, totalChunks int) error {
func CompleteObsMultiPartUpload(objectName, uploadID string, totalChunks int) error {
input := &obs.CompleteMultipartUploadInput{}
input.Bucket = setting.Bucket
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
//input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Key = objectName
input.UploadId = uploadID

allParts, err := listAllParts(uuid, uploadID, input.Key)
allParts, err := listAllParts(objectName, uploadID, input.Key)
if err != nil {
log.Error("listAllParts failed: %v", err)
return err
@@ -153,15 +154,16 @@ func CompleteObsMultiPartUpload(uuid, uploadID, fileName string, totalChunks int
return err
}

log.Info("uuid:%s, RequestId:%s", uuid, output.RequestId)
log.Info("uuid:%s, RequestId:%s", objectName, output.RequestId)

return nil
}

func ObsMultiPartUpload(uuid string, uploadId string, partNumber int, fileName string, putBody io.ReadCloser) error {
func ObsMultiPartUpload(objectName string, uploadId string, partNumber int, fileName string, putBody io.ReadCloser) error {
input := &obs.UploadPartInput{}
input.Bucket = setting.Bucket
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Key = objectName
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.UploadId = uploadId
input.PartNumber = partNumber
input.Body = putBody
@@ -241,11 +243,6 @@ func ObsDownloadAFile(bucket string, key string) (io.ReadCloser, error) {
}
}

func ObsDownload(uuid string, fileName string) (io.ReadCloser, error) {

return ObsDownloadAFile(setting.Bucket, strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/"))
}

func ObsModelDownload(JobName string, fileName string) (io.ReadCloser, error) {
input := &obs.GetObjectInput{}
input.Bucket = setting.Bucket
@@ -297,7 +294,7 @@ func ObsCopyManyFile(srcBucket string, srcPath string, destBucket string, destPa
log.Info("Get File error, error=" + err.Error())
continue
}
obsCopyFile(srcBucket, srcKey, destBucket, destKey)
ObsCopyFile(srcBucket, srcKey, destBucket, destKey)
fileTotalSize += out.ContentLength
}

@@ -321,7 +318,7 @@ func ObsCopyAllFile(srcBucket string, srcPath string, destBucket string, destPat
index++
for _, val := range output.Contents {
destKey := destPath + val.Key[length:]
obsCopyFile(srcBucket, val.Key, destBucket, destKey)
ObsCopyFile(srcBucket, val.Key, destBucket, destKey)
fileTotalSize += val.Size
}
if output.IsTruncated {
@@ -340,7 +337,7 @@ func ObsCopyAllFile(srcBucket string, srcPath string, destBucket string, destPat
return fileTotalSize, nil
}

func obsCopyFile(srcBucket string, srcKeyName string, destBucket string, destKeyName string) error {
func ObsCopyFile(srcBucket string, srcKeyName string, destBucket string, destKeyName string) error {
input := &obs.CopyObjectInput{}
input.Bucket = destBucket
input.Key = destKeyName
@@ -529,11 +526,12 @@ func GetObsListObject(jobName, outPutPath, parentDir, versionName string) ([]Fil
}
}

func ObsGenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, fileName string) (string, error) {
func ObsGenMultiPartSignedUrl(objectName string, uploadId string, partNumber int) (string, error) {

input := &obs.CreateSignedUrlInput{}
input.Bucket = setting.Bucket
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Key = objectName
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Expires = 60 * 60
input.Method = obs.HttpMethodPut

@@ -581,10 +579,11 @@ func GetObsCreateSignedUrl(jobName, parentDir, fileName string) (string, error)
return GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, parentDir, fileName), "/"))
}

func ObsGetPreSignedUrl(uuid, fileName string) (string, error) {
func ObsGetPreSignedUrl(objectName, fileName string) (string, error) {
input := &obs.CreateSignedUrlInput{}
input.Method = obs.HttpMethodGet
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Key = objectName
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.Bucket = setting.Bucket
input.Expires = 60 * 60



+ 45
- 0
modules/structs/attachment.go View File

@@ -27,3 +27,48 @@ type Attachment struct {
type EditAttachmentOptions struct {
Name string `json:"name"`
}

type Dataset struct {
ID int64 `json:"id"`
Title string `json:"title"`
Status int32 `json:"status"`
Category string `json:"category"`
Description string `json:"description"`
DownloadTimes int64 `json:"downloadTimes"`
UseCount int64 `json:"useCount"`
NumStars int `json:"numStars"`
Recommend bool `json:"recommend"`
License string `json:"license"`
Task string `json:"task"`
ReleaseID int64 `json:"releaseId"`
UserID int64 `json:"userId"`
RepoID int64 `json:"repoId"`
Repo *RepositoryShow `json:"repo"`
CreatedUnix int64 `json:"createdUnix"`
UpdatedUnix int64 `json:"updatedUnix"`

Attachments []*AttachmentShow `json:"attachments"`
}

type RepositoryShow struct {
OwnerName string `json:"ownerName"`
Name string `json:"name"`
}

type AttachmentShow struct {
ID int64 `json:"id"`
UUID string `json:"uuid"`
DatasetID int64 `json:"datasetId"`
ReleaseID int64 `json:"releaseId"`
UploaderID int64 `json:"uploaderId"`
CommentID int64 `json:"commentId"`
Name string `json:"name"`
Description string `json:"description"`
DownloadCount int64 `json:"downloadCount"`
UseNumber int64 `json:"useNumber"`
Size int64 `json:"size"`
IsPrivate bool `json:"isPrivate"`
DecompressState int32 `json:"decompressState"`
Type int `json:"type"`
CreatedUnix int64 `json:"createdUnix"`
}

+ 84
- 0
modules/structs/cloudbrain.go View File

@@ -0,0 +1,84 @@
package structs

type CreateGrampusTrainJobOption struct {
DisplayJobName string `json:"display_job_name" binding:"Required"`
JobName string `json:"job_name" binding:"Required" `
Attachment string `json:"attachment" binding:"Required"`
BootFile string `json:"boot_file" binding:"Required"`
ImageID string `json:"image_id" binding:"Required"`
Params string `json:"run_para_list" binding:"Required"`
Description string `json:"description"`
BranchName string `json:"branch_name" binding:"Required"`
EngineName string `json:"engine_name" binding:"Required"`
WorkServerNumber int `json:"work_server_number" binding:"Required"`
Image string `json:"image" binding:"Required"`
DatasetName string `json:"dataset_name" binding:"Required"`
ModelName string `json:"model_name"`
ModelVersion string `json:"model_version"`
CkptName string `json:"ckpt_name"`
LabelName string `json:"label_names"`
PreTrainModelUrl string `json:"pre_train_model_url"`
SpecId int64 `json:"spec_id" binding:"Required"`
}

type CreateTrainJobOption struct {
Type int `json:"type"`
DisplayJobName string `json:"display_job_name" binding:"Required"`
ImageID string `json:"image_id"`
Image string `json:"image" binding:"Required"`
Attachment string `json:"attachment" binding:"Required"`
DatasetName string `json:"dataset_name" binding:"Required"`
Description string `json:"description" `
BootFile string `json:"boot_file" binding:"Required"`
BranchName string `json:"branch_name" binding:"Required"`
Params string `json:"run_para_list" binding:"Required"`
WorkServerNumber int `json:"work_server_number"`
ModelName string `json:"model_name"`
ModelVersion string `json:"model_version"`
CkptName string `json:"ckpt_name"`
LabelName string `json:"label_names"`
PreTrainModelUrl string `json:"pre_train_model_url"`
SpecId int64 `json:"spec_id" binding:"Required"`
}

type Cloudbrain struct {
ID int64 `json:"id"`
JobID string `json:"job_id"`
JobType string `json:"job_type"`
Type int `json:"type"`
DisplayJobName string `json:"display_job_name"`
Status string `json:"status"`
CreatedUnix int64 `json:"created_unix"`
RepoID int64 `json:"repo_id"`
Duration int64 `json:"duration"` //run duration, in seconds
TrainJobDuration string `json:"train_job_duration"`
ImageID string `json:"image_id"` //grampus image_id
Image string `json:"image"`
Uuid string `json:"uuid"` //dataset id
DatasetName string `json:"dataset_name"`
ComputeResource string `json:"compute_resource"` //compute resource, e.g. npu
AiCenter string `json:"ai_center"` //grampus ai center: center_id+center_name
BranchName string `json:"branch_name"` //branch name
Parameters string `json:"parameters"` //param list passed to ModelArts
BootFile string `json:"boot_file"` //boot (entry) file
Description string `json:"description"` //description
ModelName string `json:"model_name"` //model name
ModelVersion string `json:"model_version"` //model version
CkptName string `json:"ckpt_name"` //checkpoint (weights) file name
StartTime int64 `json:"start_time"`
EndTime int64 `json:"end_time"`

Spec *SpecificationShow `json:"spec"`
}

type SpecificationShow struct {
ID int64 `json:"id"`
AccCardsNum int `json:"acc_cards_num"`
AccCardType string `json:"acc_card_type"`
CpuCores int `json:"cpu_cores"`
MemGiB float32 `json:"mem_gi_b"`
GPUMemGiB float32 `json:"gpu_mem_gi_b"`
ShareMemGiB float32 `json:"share_mem_gi_b"`
ComputeResource string `json:"compute_resource"`
UnitPrice int `json:"unit_price"`
}
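For reference, a sketch of building a request body for the new train-job create API from CreateTrainJobOption; all field values are placeholders, and the mapping of Type to a backend is only assumed from the CreateCloudBrain dispatch shown later in this diff:

// Illustrative only: marshal a CreateTrainJobOption for POST .../cloudbrain/train-job/create.
// Assumes "encoding/json" is imported; values are placeholders, not working settings.
func buildDemoTrainJobBody() ([]byte, error) {
    opt := CreateTrainJobOption{
        Type:           0, // assumed: 0 selects the cloudbrain one (GPU) backend
        DisplayJobName: "demo-train",
        Image:          "example.registry/train:latest", // placeholder image
        Attachment:     "dataset-uuid",                  // placeholder dataset uuid
        DatasetName:    "demo.zip",
        BootFile:       "train.py",
        BranchName:     "master",
        Params:         `{"parameter":[]}`,
        SpecId:         1,
    }
    return json.Marshal(opt)
}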

+ 7
- 0
modules/structs/tagger.go View File

@@ -0,0 +1,7 @@
package structs

type Tagger struct {
Name string `json:"name"`
Email string `json:"email"`
RelAvatarURL string `json:"relAvatarURL"`
}

+ 6
- 2
options/locale/locale_en-US.ini View File

@@ -617,6 +617,7 @@ organization = Organizations
uid = Uid
u2f = Security Keys
bind_wechat = Bind WeChat
no_wechat_bind = Cannot perform this operation, please bind your WeChat account first.
wechat_bind = WeChat Binding
bind_account_information = Bind account information
bind_time = Bind Time
@@ -1036,6 +1037,7 @@ cloudbrain.time.starttime=Start run time
cloudbrain.time.endtime=End run time
cloudbrain.datasetdownload=Dataset download url
model_manager = Model
model_experience = Model Experience
model_noright=You have no right to do the operation.
model_rename=Duplicate model name, please modify model name.

@@ -1266,12 +1268,14 @@ model.manage.model_accuracy = Model Accuracy
model.convert=Model Transformation
model.list=Model List
model.manage.create_new_convert_task=Create Model Transformation Task

model.manage.import_local_model=Import Local Model
model.manage.import_online_model=Import Online Model
model.manage.notcreatemodel=No model has been created
model.manage.init1=Code version: You have not initialized the code repository, please
model.manage.init2=initialize it first;
model.manage.createtrainjob_tip=Training task: you haven't created a training task, please create it first
model.manage.createtrainjob=Training task.
model.manage.createmodel_tip=You can import a local model or an online model. To import an online model, please first
model.manage.createtrainjob=Create training task.
model.manage.delete=Delete Model
model.manage.delete_confirm=Are you sure to delete this model? Once this model is deleted, it cannot be restored.
model.manage.select.trainjob=Select train task


+ 6
- 2
options/locale/locale_zh-CN.ini View File

@@ -622,6 +622,7 @@ organization=组织
uid=用户 ID
u2f=安全密钥
wechat_bind = 微信绑定
no_wechat_bind = 不能创建任务,请先绑定微信。
bind_wechat = 绑定微信
bind_account_information = 绑定账号信息
bind_time = 绑定时间
@@ -1036,6 +1037,7 @@ datasets.desc=数据集功能
cloudbrain_helper=使用GPU/NPU资源,开启Notebook、模型训练任务等

model_manager = 模型
model_experience = 模型体验
model_noright=您没有操作权限。
model_rename=模型名称重复,请修改模型名称

@@ -1281,12 +1283,14 @@ model.manage.model_accuracy = 模型精度
model.convert=模型转换任务
model.list=模型列表
model.manage.create_new_convert_task=创建模型转换任务

model.manage.import_local_model=导入本地模型
model.manage.import_online_model=导入线上模型
model.manage.notcreatemodel=未创建过模型
model.manage.init1=代码版本:您还没有初始化代码仓库,请先
model.manage.init2=创建代码版本;
model.manage.createtrainjob_tip=训练任务:您还没创建过训练任务,请先创建
model.manage.createtrainjob=训练任务。
model.manage.createmodel_tip=您可以导入本地模型或者导入线上模型。导入线上模型需先
model.manage.createtrainjob=创建训练任务。
model.manage.delete=删除模型
model.manage.delete_confirm=你确认删除该模型么?此模型一旦删除不可恢复。
model.manage.select.trainjob=选择训练任务


+ 17
- 19346
package-lock.json
File diff suppressed because it is too large
View File


+ 22
- 1
routers/admin/resources.go View File

@@ -127,6 +127,7 @@ func GetResourceSpecificationList(ctx *context.Context) {
Status: status,
Cluster: cluster,
AvailableCode: available,
OrderBy: models.SearchSpecOrderById,
})
if err != nil {
log.Error("GetResourceSpecificationList error.%v", err)
@@ -136,6 +137,26 @@ func GetResourceSpecificationList(ctx *context.Context) {
ctx.JSON(http.StatusOK, response.SuccessWithData(list))
}

func GetAllResourceSpecificationList(ctx *context.Context) {
queue := ctx.QueryInt64("queue")
status := ctx.QueryInt("status")
cluster := ctx.Query("cluster")
available := ctx.QueryInt("available")
list, err := resource.GetAllDistinctResourceSpecification(models.SearchResourceSpecificationOptions{
QueueId: queue,
Status: status,
Cluster: cluster,
AvailableCode: available,
})
if err != nil {
log.Error("GetResourceSpecificationList error.%v", err)
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}

ctx.JSON(http.StatusOK, response.SuccessWithData(list))
}

func GetResourceSpecificationScenes(ctx *context.Context) {
specId := ctx.ParamsInt64(":id")
list, err := resource.GetResourceSpecificationScenes(specId)
@@ -182,7 +203,7 @@ func UpdateResourceSpecification(ctx *context.Context, req models.ResourceSpecif

if err != nil {
log.Error("UpdateResourceSpecification error. %v", err)
ctx.JSON(http.StatusOK, response.ResponseError(err))
ctx.JSON(http.StatusOK, response.ResponseBizError(err))
return
}
ctx.JSON(http.StatusOK, response.Success())


+ 62
- 0
routers/api/v1/api.go View File

@@ -242,6 +242,15 @@ func reqRepoWriter(unitTypes ...models.UnitType) macaron.Handler {
}
}

func reqWeChat() macaron.Handler {
return func(ctx *context.Context) {
if setting.WechatAuthSwitch && ctx.User.WechatOpenId == "" {
ctx.JSON(http.StatusForbidden, models.BaseErrorMessageApi("settings.no_wechat_bind"))
return
}
}
}

// reqRepoReader user should have specific read permission or be a repo admin or a site admin
func reqRepoReader(unitType models.UnitType) macaron.Handler {
return func(ctx *context.Context) {
@@ -517,6 +526,25 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/markdown", bind(api.MarkdownOption{}), misc.Markdown)
m.Post("/markdown/raw", misc.MarkdownRaw)

m.Group("/images", func() {

m.Get("/public", repo.GetPublicImages)
m.Get("/custom", repo.GetCustomImages)
m.Get("/star", repo.GetStarImages)
m.Get("/npu", repo.GetNpuImages)

}, reqToken())

m.Group("/attachments", func() {

m.Get("/:uuid", repo.GetAttachment)
m.Get("/get_chunks", repo.GetSuccessChunks)
m.Get("/new_multipart", repo.NewMultipart)
m.Get("/get_multipart_url", repo.GetMultipartUploadUrl)
m.Post("/complete_multipart", repo.CompleteMultipart)

}, reqToken())

// Notifications
m.Group("/notifications", func() {
m.Combo("").
@@ -701,6 +729,13 @@ func RegisterRoutes(m *macaron.Macaron) {

m.Combo("/repositories/:id", reqToken()).Get(repo.GetByID)

m.Group("/datasets/:username/:reponame", func() {
m.Get("/current_repo", repo.CurrentRepoDatasetMultiple)
m.Get("/my_datasets", repo.MyDatasetsMultiple)
m.Get("/public_datasets", repo.PublicDatasetMultiple)
m.Get("/my_favorite", repo.MyFavoriteDatasetMultiple)
}, reqToken(), repoAssignment())

m.Group("/repos", func() {
m.Get("/search", repo.Search)

@@ -709,7 +744,13 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/migrate", reqToken(), bind(auth.MigrateRepoForm{}), repo.Migrate)
m.Post("/migrate/submit", reqToken(), bind(auth.MigrateRepoForm{}), repo.MigrateSubmit)

m.Group("/specification", func() {
m.Get("", repo.GetResourceSpec)
}, reqToken())

m.Group("/:username/:reponame", func() {
m.Get("/right", reqToken(), repo.GetRight)
m.Get("/tagger", reqToken(), repo.ListTagger)
m.Combo("").Get(reqAnyRepoReader(), repo.Get).
Delete(reqToken(), reqOwner(), repo.Delete).
Patch(reqToken(), reqAdmin(), bind(api.EditRepoOption{}), context.RepoRef(), repo.Edit)
@@ -938,21 +979,41 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/:id/log", repo.CloudbrainGetLog)
m.Get("/:id/download_log_file", repo.CloudbrainDownloadLogFile)
m.Group("/train-job", func() {

m.Post("/create", reqToken(), reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat(), context.ReferencesGitRepo(false), bind(api.CreateTrainJobOption{}), repo.CreateCloudBrain)

m.Group("/:jobid", func() {
m.Get("", repo.GetModelArtsTrainJobVersion)
m.Get("/detail", reqToken(), reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow)
m.Get("/model_list", repo.CloudBrainModelList)
m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.CloudBrainStop)
})
})
m.Group("/inference-job", func() {
m.Post("/create", reqToken(), reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat(), bind(api.CreateTrainJobOption{}), context.ReferencesGitRepo(false), repo.CreateCloudBrainInferenceTask)

m.Group("/:jobid", func() {
m.Get("", repo.GetCloudBrainInferenceJob)
m.Get("/detail", reqToken(), reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow)

m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.DelCloudBrainJob)
m.Get("/result_list", repo.InferencJobResultList)
})
})
}, reqRepoReader(models.UnitTypeCloudBrain))
m.Group("/modelmanage", func() {
m.Post("/create_new_model", repo.CreateNewModel)
m.Get("/show_model_api", repo.ShowModelManageApi)
m.Delete("/delete_model", repo.DeleteModel)
m.Get("/downloadall", repo.DownloadModel)
m.Get("/query_model_byId", repo.QueryModelById)
m.Get("/query_model_for_predict", repo.QueryModelListForPredict)
m.Get("/query_modelfile_for_predict", repo.QueryModelFileForPredict)
m.Get("/query_train_model", repo.QueryTrainModelList)
m.Post("/create_model_convert", repo.CreateModelConvert)
m.Get("/show_model_convert_page", repo.ShowModelConvertPage)
m.Get("/query_model_convert_byId", repo.QueryModelConvertById)

m.Get("/:id", repo.GetCloudbrainModelConvertTask)
m.Get("/:id/log", repo.CloudbrainForModelConvertGetLog)
m.Get("/:id/modelartlog", repo.TrainJobForModelConvertGetLog)
@@ -989,6 +1050,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("", repo.GetModelArtsTrainJobVersion)
m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.GrampusStopJob)
m.Get("/log", repo_ext.GrampusGetLog)
m.Get("/metrics", repo_ext.GrampusMetrics)
m.Get("/download_log", cloudbrain.AdminOrJobCreaterRightForTrain, repo_ext.GrampusDownloadLog)
})
})


+ 25
- 0
routers/api/v1/repo/attachments.go View File

@@ -0,0 +1,25 @@
package repo

import (
"code.gitea.io/gitea/modules/context"
routeRepo "code.gitea.io/gitea/routers/repo"
)

func GetSuccessChunks(ctx *context.APIContext) {
routeRepo.GetSuccessChunks(ctx.Context)
}

func NewMultipart(ctx *context.APIContext) {
routeRepo.NewMultipart(ctx.Context)
}
func GetMultipartUploadUrl(ctx *context.APIContext) {
routeRepo.GetMultipartUploadUrl(ctx.Context)
}

func CompleteMultipart(ctx *context.APIContext) {
routeRepo.CompleteMultipart(ctx.Context)

}
func GetAttachment(ctx *context.APIContext) {
routeRepo.GetAttachment(ctx.Context)
}

+ 97
- 37
routers/api/v1/repo/cloudbrain.go View File

@@ -16,6 +16,14 @@ import (
"strings"
"time"

cloudbrainService "code.gitea.io/gitea/services/cloudbrain"

"code.gitea.io/gitea/modules/convert"

"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"

api "code.gitea.io/gitea/modules/structs"

"code.gitea.io/gitea/modules/notification"

"code.gitea.io/gitea/modules/setting"
@@ -29,6 +37,77 @@ import (
routerRepo "code.gitea.io/gitea/routers/repo"
)

func CloudBrainShow(ctx *context.APIContext) {

task, err := models.GetCloudbrainByJobID(ctx.Params(":jobid"))

if err != nil {
log.Info("error:" + err.Error())
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("repo.cloudbrain_query_fail"))
return
}
cloudbrainTask.PrepareSpec4Show(task)
task.ContainerIp = ""
if cloudbrainTask.IsTaskNotStop(task) {
cloudbrainTask.SyncTaskStatus(task)
}

if task.TrainJobDuration == "" {
if task.Duration == 0 {
var duration int64
if task.Status == string(models.JobWaiting) {
duration = 0
} else if task.Status == string(models.JobRunning) {
duration = time.Now().Unix() - int64(task.CreatedUnix)
} else {
duration = int64(task.UpdatedUnix) - int64(task.CreatedUnix)
}
task.Duration = duration
}
task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
}
//to unify image output
if task.Type == models.TypeCloudBrainTwo || task.Type == models.TypeCDCenter {
task.ImageID = strconv.FormatInt(task.EngineID, 10)
task.Image = task.EngineName

} else if task.Type == models.TypeC2Net {
task.Image = task.EngineName
}
task.AiCenter = cloudbrainService.GetAiCenterShow(task.AiCenter, ctx.Context)

ctx.JSON(http.StatusOK, models.BaseMessageWithDataApi{Code: 0, Message: "", Data: convert.ToCloudBrain(task)})

}

func CreateCloudBrain(ctx *context.APIContext, option api.CreateTrainJobOption) {
if option.Type == cloudbrainTask.TaskTypeCloudbrainOne {
cloudbrainTask.CloudbrainOneTrainJobCreate(ctx.Context, option)
}
if option.Type == cloudbrainTask.TaskTypeModelArts {
cloudbrainTask.ModelArtsTrainJobNpuCreate(ctx.Context, option)
}

if option.Type == cloudbrainTask.TaskTypeGrampusGPU {
cloudbrainTask.GrampusTrainJobGpuCreate(ctx.Context, option)
}
if option.Type == cloudbrainTask.TaskTypeGrampusNPU {
cloudbrainTask.GrampusTrainJobNpuCreate(ctx.Context, option)
}

}

func CreateCloudBrainInferenceTask(ctx *context.APIContext, option api.CreateTrainJobOption) {

if option.Type == 0 {
cloudbrainTask.CloudBrainInferenceJobCreate(ctx.Context, option)
}
if option.Type == 1 {
cloudbrainTask.ModelArtsInferenceJobCreate(ctx.Context, option)
}

}

// cloudbrain get job task by jobid
func GetCloudbrainTask(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/cloudbrain/{jobid} cloudbrain jobTask
@@ -81,47 +160,22 @@ func GetCloudbrainTask(ctx *context.APIContext) {
"JobDuration": job.TrainJobDuration,
})
} else {
jobResult, err := cloudbrain.GetJob(job.JobID)
if err != nil {
ctx.NotFound(err)
log.Error("GetJob failed:", err)
return
}
result, _ := models.ConvertToJobResultPayload(jobResult.Payload)
jobAfter, err := cloudbrainTask.SyncCloudBrainOneStatus(job)

if err != nil {
ctx.NotFound(err)
log.Error("ConvertToJobResultPayload failed:", err)
log.Error("Sync cloud brain one status failed:", err)
return
}
oldStatus := job.Status
job.Status = result.JobStatus.State
taskRoles := result.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
job.ContainerID = taskRes.TaskStatuses[0].ContainerID
job.Status = taskRes.TaskStatuses[0].State
}

if result.JobStatus.State != string(models.JobWaiting) {
models.ParseAndSetDurationFromCloudBrainOne(result, job)
if oldStatus != job.Status {
notification.NotifyChangeCloudbrainStatus(job, oldStatus)
}
err = models.UpdateJob(job)
if err != nil {
log.Error("UpdateJob failed:", err)
}
}

ctx.JSON(http.StatusOK, map[string]interface{}{
"ID": ID,
"JobName": result.Config.JobName,
"JobStatus": result.JobStatus.State,
"SubState": result.JobStatus.SubState,
"CreatedTime": time.Unix(result.JobStatus.CreatedTime/1000, 0).Format("2006-01-02 15:04:05"),
"CompletedTime": time.Unix(result.JobStatus.CompletedTime/1000, 0).Format("2006-01-02 15:04:05"),
"JobDuration": job.TrainJobDuration,
"JobName": jobAfter.JobName,
"JobStatus": jobAfter.Status,
"SubState": "",
"CreatedTime": jobAfter.CreatedUnix.Format("2006-01-02 15:04:05"),
"CompletedTime": jobAfter.UpdatedUnix.Format("2006-01-02 15:04:05"),
"JobDuration": jobAfter.TrainJobDuration,
})
}
}
@@ -580,7 +634,7 @@ func CloudbrainGetLog(ctx *context.APIContext) {
endLine += 1
}
}
result = getLogFromModelDir(job.JobName, startLine, endLine, resultPath)
if result == nil {
log.Error("GetJobLog failed: %v", err, ctx.Data["MsgID"])
@@ -595,14 +649,20 @@ func CloudbrainGetLog(ctx *context.APIContext) {
if ctx.Data["existStr"] != nil && result["Lines"].(int) < 50 {
content = content + ctx.Data["existStr"].(string)
}
logFileName := result["FileName"]

//Logs can only be downloaded if the file exists
//and the current user is an administrator or the creator of the task
canLogDownload := logFileName != nil && logFileName != "" && job.IsUserHasRight(ctx.User)

re := map[string]interface{}{
"JobID": ID,
"LogFileName": result["FileName"],
"LogFileName": logFileName,
"StartLine": result["StartLine"],
"EndLine": result["EndLine"],
"Content": content,
"Lines": result["Lines"],
"CanLogDownload": result["FileName"] != "",
"CanLogDownload": canLogDownload,
"StartTime": job.StartTime,
}
//result := CloudbrainGetLogByJobId(job.JobID, job.JobName)


+ 123
- 0
routers/api/v1/repo/datasets.go View File

@@ -0,0 +1,123 @@
package repo

import (
"fmt"
"strings"

"code.gitea.io/gitea/modules/convert"

api "code.gitea.io/gitea/modules/structs"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)

func PublicDatasetMultiple(ctx *context.APIContext) {

opts := &models.SearchDatasetOptions{
PublicOnly: true,
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
}
datasetMultiple(ctx, opts)

}

func MyFavoriteDatasetMultiple(ctx *context.APIContext) {

opts := &models.SearchDatasetOptions{
StarByMe: true,
DatasetIDs: models.GetDatasetIdsStarByUser(ctx.User.ID),
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
}
datasetMultiple(ctx, opts)
}

func CurrentRepoDatasetMultiple(ctx *context.APIContext) {
datasetIds := models.GetDatasetIdsByRepoID(ctx.Repo.Repository.ID)
searchOrderBy := getSearchOrderByInValues(datasetIds)
opts := &models.SearchDatasetOptions{
RepoID: ctx.Repo.Repository.ID,
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
DatasetIDs: datasetIds,
SearchOrderBy: searchOrderBy,
}

datasetMultiple(ctx, opts)

}

func MyDatasetsMultiple(ctx *context.APIContext) {

opts := &models.SearchDatasetOptions{
UploadAttachmentByMe: true,
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
}
datasetMultiple(ctx, opts)

}
func datasetMultiple(ctx *context.APIContext, opts *models.SearchDatasetOptions) {
page := ctx.QueryInt("page")
if page < 1 {
page = 1
}
pageSize := ctx.QueryInt("pageSize")
if pageSize < 1 {
pageSize = setting.UI.DatasetPagingNum
}

keyword := strings.Trim(ctx.Query("q"), " ")
opts.Keyword = keyword
if opts.SearchOrderBy.String() == "" {
opts.SearchOrderBy = models.SearchOrderByRecentUpdated
}

opts.RecommendOnly = ctx.QueryBool("recommend")
opts.ListOptions = models.ListOptions{
Page: page,
PageSize: pageSize,
}
opts.JustNeedZipFile = true
opts.User = ctx.User

datasets, count, err := models.SearchDataset(opts)

if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]interface{}{
"code": 1,
"message": err.Error(),
"data": []*api.Dataset{},
"count": 0,
})
return
}
var convertDatasets []*api.Dataset
for _, dataset := range datasets {
convertDatasets = append(convertDatasets, convert.ToDataset(dataset))
}

ctx.JSON(200, map[string]interface{}{
"code": 0,
"message": "",
"data": convertDatasets,
"count": count,
})
}

func getSearchOrderByInValues(datasetIds []int64) models.SearchOrderBy {
if len(datasetIds) == 0 {
return ""
}
searchOrderBy := "CASE id "
for i, id := range datasetIds {
searchOrderBy += fmt.Sprintf(" WHEN %d THEN %d", id, i+1)
}
searchOrderBy += " ELSE 0 END"
return models.SearchOrderBy(searchOrderBy)
}
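getSearchOrderByInValues builds a SQL CASE fragment so SearchDataset returns datasets in the same order as the given id list; an empty slice yields "" and the caller falls back to models.SearchOrderByRecentUpdated. An illustrative use (the repoID variable is assumed, not part of this patch):

ids := []int64{5, 9, 2}
opts := &models.SearchDatasetOptions{
	RepoID:         repoID, // assumed variable
	DatasetIDs:     ids,
	NeedAttachment: true,
	SearchOrderBy:  getSearchOrderByInValues(ids),
	// -> "CASE id  WHEN 5 THEN 1 WHEN 9 THEN 2 WHEN 2 THEN 3 ELSE 0 END"
}
datasetMultiple(ctx, opts)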

+ 141
- 0
routers/api/v1/repo/images.go View File

@@ -0,0 +1,141 @@
package repo

import (
"encoding/json"
"net/http"
"strconv"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/setting"
)

type NPUImageINFO struct {
ID string `json:"id"`
Value string `json:"value"`
}

func GetPublicImages(ctx *context.APIContext) {
uid := getUID(ctx)
opts := models.SearchImageOptions{
IncludePublicOnly: true,
UID: uid,
Keyword: ctx.Query("q"),
Topics: ctx.Query("topic"),
IncludeOfficialOnly: ctx.QueryBool("recommend"),
SearchOrderBy: "type desc, num_stars desc,id desc",
Status: models.IMAGE_STATUS_SUCCESS,
CloudbrainType: ctx.QueryInt("cloudbrainType"),
}

getImages(ctx, &opts)

}

func GetCustomImages(ctx *context.APIContext) {
uid := getUID(ctx)
opts := models.SearchImageOptions{
UID: uid,
IncludeOwnerOnly: true,
Keyword: ctx.Query("q"),
Topics: ctx.Query("topic"),
Status: -1,
SearchOrderBy: "id desc",
}
getImages(ctx, &opts)

}
func GetStarImages(ctx *context.APIContext) {

uid := getUID(ctx)
opts := models.SearchImageOptions{
UID: uid,
IncludeStarByMe: true,
Keyword: ctx.Query("q"),
Topics: ctx.Query("topic"),
Status: models.IMAGE_STATUS_SUCCESS,
SearchOrderBy: "id desc",
}
getImages(ctx, &opts)

}

func GetNpuImages(ctx *context.APIContext) {
cloudbrainType := ctx.QueryInt("type")
if cloudbrainType == 0 { //modelarts
getModelArtsImages(ctx)
} else { //c2net
getC2netNpuImages(ctx)
}
}

func getModelArtsImages(ctx *context.APIContext) {

var versionInfos modelarts.VersionInfo
_ = json.Unmarshal([]byte(setting.EngineVersions), &versionInfos)
var npuImageInfos []NPUImageINFO
for _, info := range versionInfos.Version {
npuImageInfos = append(npuImageInfos, NPUImageINFO{
ID: strconv.Itoa(info.ID),
Value: info.Value,
})
}
ctx.JSON(http.StatusOK, npuImageInfos)

}

func getC2netNpuImages(ctx *context.APIContext) {
images, err := grampus.GetImages(grampus.ProcessorTypeNPU)
var npuImageInfos []NPUImageINFO
if err != nil {
log.Error("GetImages failed:", err.Error())
ctx.JSON(http.StatusOK, []NPUImageINFO{})
} else {
for _, info := range images.Infos {
npuImageInfos = append(npuImageInfos, NPUImageINFO{
ID: info.ID,
Value: info.Name,
})
}
ctx.JSON(http.StatusOK, npuImageInfos)
}
}
func getImages(ctx *context.APIContext, opts *models.SearchImageOptions) {
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}

pageSize := ctx.QueryInt("pageSize")
if pageSize <= 0 {
pageSize = 15
}
opts.ListOptions = models.ListOptions{
Page: page,
PageSize: pageSize,
}
imageList, total, err := models.SearchImage(opts)
if err != nil {
log.Error("Can not get images:%v", err)
ctx.JSON(http.StatusOK, models.ImagesPageResult{
Count: 0,
Images: []*models.Image{},
})
} else {
ctx.JSON(http.StatusOK, models.ImagesPageResult{
Count: total,
Images: imageList,
})
}
}

func getUID(ctx *context.APIContext) int64 {
var uid int64 = -1
if ctx.IsSigned {
uid = ctx.User.ID
}
return uid
}

+ 71
- 0
routers/api/v1/repo/mlops.go View File

@@ -0,0 +1,71 @@
package repo

import (
"net/http"

"code.gitea.io/gitea/models"

"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/convert"
"code.gitea.io/gitea/modules/log"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/routers/api/v1/utils"
)

// ListTagger returns the users that an annotation task can be assigned to
func ListTagger(ctx *context.APIContext) {

taggers := make([]*api.Tagger, 0)
userRemember := make(map[string]string)
collaborators, err := ctx.Repo.Repository.GetCollaborators(utils.GetListOptions(ctx))
if err != nil {
log.Warn("ListCollaborators", err)
ctx.JSON(http.StatusOK, taggers)
return
}
for _, collaborator := range collaborators {
taggers = append(taggers, convert.ToTagger(collaborator.User))
userRemember[collaborator.User.Name] = ""
}

teams, err := ctx.Repo.Repository.GetRepoTeams()
if err != nil {
log.Warn("ListTeams", err)
ctx.JSON(http.StatusOK, taggers)
return
}

for _, team := range teams {
team.GetMembers(&models.SearchMembersOptions{})
for _, user := range team.Members {
if _, ok := userRemember[user.Name]; !ok {
taggers = append(taggers, convert.ToTagger(user))
userRemember[user.Name] = ""
}
}
}
if !ctx.Repo.Owner.IsOrganization() {
if _, ok := userRemember[ctx.Repo.Owner.Name]; !ok {
taggers = append(taggers, convert.ToTagger(ctx.Repo.Owner))

}
}
ctx.JSON(http.StatusOK, taggers)

}
func GetRight(ctx *context.APIContext) {
right := "none"

if ctx.IsUserRepoReaderSpecific(models.UnitTypeCode) {
right = "read"
}

if ctx.IsUserRepoWriter([]models.UnitType{models.UnitTypeCode}) || ctx.IsUserRepoAdmin() {
right = "write"
}

ctx.JSON(http.StatusOK, map[string]string{
"right": right,
})

}

+ 16
- 40
routers/api/v1/repo/modelarts.go View File

@@ -12,6 +12,8 @@ import (
"strconv"
"strings"

"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"

"code.gitea.io/gitea/modules/urfs_client/urchin"

"code.gitea.io/gitea/modules/notification"
@@ -20,7 +22,6 @@ import (
"code.gitea.io/gitea/modules/setting"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
@@ -109,39 +110,11 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) {
}

if job.Type == models.TypeCloudBrainOne {
jobResult, err := cloudbrain.GetJob(job.JobID)
if err != nil {
ctx.NotFound(err)
log.Error("GetJob failed:", err)
return
}
result, err := models.ConvertToJobResultPayload(jobResult.Payload)
job, err = cloudbrainTask.SyncCloudBrainOneStatus(job)
if err != nil {
ctx.NotFound(err)
log.Error("ConvertToJobResultPayload failed:", err)
return
}
oldStatus := job.Status
job.Status = result.JobStatus.State
if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
taskRoles := result.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))

job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
job.ContainerID = taskRes.TaskStatuses[0].ContainerID
job.Status = taskRes.TaskStatuses[0].State
}

if result.JobStatus.State != string(models.JobWaiting) {
models.ParseAndSetDurationFromCloudBrainOne(result, job)
if oldStatus != job.Status {
notification.NotifyChangeCloudbrainStatus(job, oldStatus)
}
err = models.UpdateJob(job)
if err != nil {
log.Error("UpdateJob failed:", err)
}
}
} else if job.Type == models.TypeCloudBrainTwo {
err := modelarts.HandleTrainJobInfo(job)
if err != nil {
@@ -308,15 +281,6 @@ func TrainJobGetLog(ctx *context.APIContext) {
return
}

prefix := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, task.JobName, modelarts.LogPath, versionName), "/") + "/job"
_, err = storage.GetObsLogFileName(prefix)
var canLogDownload bool
if err != nil {
canLogDownload = false
} else {
canLogDownload = true
}

ctx.Data["log_file_name"] = resultLogFile.LogFileList[0]

ctx.JSON(http.StatusOK, map[string]interface{}{
@@ -326,11 +290,23 @@ func TrainJobGetLog(ctx *context.APIContext) {
"EndLine": result.EndLine,
"Content": result.Content,
"Lines": result.Lines,
"CanLogDownload": canLogDownload,
"CanLogDownload": canLogDownload(ctx.User, task),
"StartTime": task.StartTime,
})
}

func canLogDownload(user *models.User, task *models.Cloudbrain) bool {
if task == nil || !task.IsUserHasRight(user) {
return false
}
prefix := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, task.JobName, modelarts.LogPath, task.VersionName), "/") + "/job"
_, err := storage.GetObsLogFileName(prefix)
if err != nil {
return false
}
return true
}
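canLogDownload gates the log download on both task permission and the presence of the OBS log object. A sketch of the prefix it probes, with assumed setting values:

// assumed values: setting.TrainJobModelPath = "/model", modelarts.LogPath = "log/"
prefix := strings.TrimPrefix(path.Join("/model", "job-abc", "log/", "V0001"), "/") + "/job"
// prefix == "model/job-abc/log/V0001/job"; the log is downloadable only when
// storage.GetObsLogFileName(prefix) succeeds and task.IsUserHasRight(user) is true.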

func trainJobGetLogContent(jobID string, versionID int64, baseLine string, order string, lines int) (*models.GetTrainJobLogFileNamesResult, *models.GetTrainJobLogResult, error) {

resultLogFile, err := modelarts.GetTrainJobLogFileNames(jobID, strconv.FormatInt(versionID, 10))


+ 115
- 0
routers/api/v1/repo/modelmanage.go View File

@@ -0,0 +1,115 @@
package repo

import (
"net/http"

"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/storage"
routerRepo "code.gitea.io/gitea/routers/repo"
)

type FileInfo struct {
FileName string `json:"fileName"`
ModTime string `json:"modTime"`
IsDir bool `json:"isDir"`
Size int64 `json:"size"`
ParenDir string `json:"parenDir"`
UUID string `json:"uuid"`
}

func CreateNewModel(ctx *context.APIContext) {
log.Info("CreateNewModel by api.")
routerRepo.SaveModel(ctx.Context)
}

func ShowModelManageApi(ctx *context.APIContext) {
log.Info("ShowModelManageApi by api.")
routerRepo.ShowModelPageInfo(ctx.Context)
}

func DeleteModel(ctx *context.APIContext) {
log.Info("DeleteModel by api.")
routerRepo.DeleteModel(ctx.Context)
}

func DownloadModel(ctx *context.APIContext) {
log.Info("DownloadModel by api.")
routerRepo.DownloadMultiModelFile(ctx.Context)
}

func QueryModelById(ctx *context.APIContext) {
log.Info("QueryModelById by api.")
routerRepo.QueryModelById(ctx.Context)
}

func QueryModelListForPredict(ctx *context.APIContext) {
log.Info("QueryModelListForPredict by api.")
routerRepo.QueryModelListForPredict(ctx.Context)
}

func QueryTrainModelList(ctx *context.APIContext) {
result, err := routerRepo.QueryTrainModelFileById(ctx.Context)
if err != nil {
log.Info("query error." + err.Error())
}
re := convertFileFormat(result)
ctx.JSON(http.StatusOK, re)
}

func convertFileFormat(result []storage.FileInfo) []FileInfo {
re := make([]FileInfo, 0)
if result != nil {
for _, file := range result {
tmpFile := FileInfo{
FileName: file.FileName,
ModTime: file.ModTime,
IsDir: file.IsDir,
Size: file.Size,
ParenDir: file.ParenDir,
UUID: file.UUID,
}
re = append(re, tmpFile)
}
}
return re
}

func QueryModelFileForPredict(ctx *context.APIContext) {
log.Info("QueryModelFileForPredict by api.")
id := ctx.Query("id")
result := routerRepo.QueryModelFileByID(id)
re := convertFileFormat(result)
ctx.JSON(http.StatusOK, re)
}

func CreateModelConvert(ctx *context.APIContext) {
log.Info("CreateModelConvert by api.")
routerRepo.SaveModelConvert(ctx.Context)
}

func ShowModelConvertPage(ctx *context.APIContext) {
log.Info("ShowModelConvertPage by api.")
modelResult, count, err := routerRepo.GetModelConvertPageData(ctx.Context)
if err == nil {
mapInterface := make(map[string]interface{})
mapInterface["data"] = modelResult
mapInterface["count"] = count
ctx.JSON(http.StatusOK, mapInterface)
} else {
mapInterface := make(map[string]interface{})
mapInterface["data"] = nil
mapInterface["count"] = 0
ctx.JSON(http.StatusOK, mapInterface)
}

}

func QueryModelConvertById(ctx *context.APIContext) {
modelResult, err := routerRepo.GetModelConvertById(ctx.Context)
if err == nil {
ctx.JSON(http.StatusOK, modelResult)
} else {
ctx.JSON(http.StatusOK, nil)
}
}

+ 36
- 0
routers/api/v1/repo/spec.go View File

@@ -0,0 +1,36 @@
package repo

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/cloudbrain/resource"
)

func GetResourceSpec(ctx *context.APIContext) {
jobType := ctx.Query("jobType")
computeResource := ctx.Query("compute")
cluster := ctx.Query("cluster")
aiCenterCode := ctx.Query("center")
if jobType == "" || computeResource == "" || cluster == "" {
log.Info("GetResourceSpec api.param error")
ctx.JSON(200, response.OuterBizError(response.PARAM_ERROR))
return
}
specs, err := resource.FindAvailableSpecs4Show(ctx.User.ID, models.FindSpecsOptions{
JobType: models.JobType(jobType),
ComputeResource: computeResource,
Cluster: cluster,
AiCenterCode: aiCenterCode,
})
if err != nil {
log.Error("GetResourceSpec api error. %v", err)
ctx.JSON(200, response.OuterServerError(err.Error()))
return
}

specMap := make(map[string]interface{}, 0)
specMap["specs"] = specs
ctx.JSON(200, response.OuterSuccessWithData(specMap))
}

+ 3
- 1
routers/private/internal.go View File

@@ -6,9 +6,10 @@
package private

import (
"code.gitea.io/gitea/routers/admin"
"strings"

"code.gitea.io/gitea/routers/admin"

"code.gitea.io/gitea/routers/repo"

"code.gitea.io/gitea/modules/log"
@@ -52,6 +53,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/tool/org_stat", OrgStatisticManually)
m.Post("/tool/update_repo_visit/:date", UpdateRepoVisit)
m.Post("/task/history_handle/duration", repo.HandleTaskWithNoDuration)
m.Post("/task/history_handle/aicenter", repo.HandleTaskWithAiCenter)
m.Post("/resources/specification/handle_historical_task", admin.RefreshHistorySpec)

}, CheckInternalToken)


+ 44
- 23
routers/repo/ai_model_convert.go View File

@@ -74,27 +74,27 @@ func SaveModelConvert(ctx *context.Context) {
log.Info("save model convert start.")
if !ctx.Repo.CanWrite(models.UnitTypeModelManage) {
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.no_operate_right"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.no_operate_right"),
})
return
}
name := ctx.Query("name")
desc := ctx.Query("desc")
modelId := ctx.Query("modelId")
modelPath := ctx.Query("ModelFile")
SrcEngine := ctx.QueryInt("SrcEngine")
modelPath := ctx.Query("modelFile")
SrcEngine := ctx.QueryInt("srcEngine")
InputShape := ctx.Query("inputshape")
InputDataFormat := ctx.Query("inputdataformat")
DestFormat := ctx.QueryInt("DestFormat")
NetOutputFormat := ctx.QueryInt("NetOutputFormat")
DestFormat := ctx.QueryInt("destFormat")
NetOutputFormat := ctx.QueryInt("netOutputFormat")

task, err := models.QueryModelById(modelId)
if err != nil {
log.Error("no such model!", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.model_not_exist"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.model_not_exist"),
})
return
}
@@ -105,8 +105,8 @@ func SaveModelConvert(ctx *context.Context) {
if convert.Name == name {
log.Info("convert.Name=" + name + " convert.id=" + convert.ID)
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.create_error1"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.create_error1"),
})
return
}
@@ -119,8 +119,8 @@ func SaveModelConvert(ctx *context.Context) {
if isRunningTask(convert.Status) {
log.Info("convert.Status=" + convert.Status + " convert.id=" + convert.ID)
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.create_error2"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.create_error2"),
})
return
}
@@ -150,7 +150,8 @@ func SaveModelConvert(ctx *context.Context) {
go goCreateTask(modelConvert, ctx, task)

ctx.JSON(200, map[string]string{
"result_code": "0",
"id": id,
"code": "0",
})
}

@@ -604,11 +605,11 @@ func StopModelConvert(ctx *context.Context) {
}

func ShowModelConvertInfo(ctx *context.Context) {
ctx.Data["ID"] = ctx.Query("ID")
ctx.Data["ID"] = ctx.Query("id")
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)

job, err := models.QueryModelConvertById(ctx.Query("ID"))
job, err := models.QueryModelConvertById(ctx.Query("id"))
if err == nil {
if job.TrainJobDuration == "" {
job.TrainJobDuration = "00:00:00"
@@ -707,6 +708,31 @@ func ShowModelConvertPageInfo(ctx *context.Context) {
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
return
}
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}
pageSize := ctx.QueryInt("pageSize")
if pageSize <= 0 {
pageSize = setting.UI.IssuePagingNum
}
modelResult, count, err := GetModelConvertPageData(ctx)
if err == nil {
pager := context.NewPagination(int(count), page, pageSize, 5)
ctx.Data["Page"] = pager
ctx.Data["Tasks"] = modelResult
ctx.Data["MODEL_CONVERT_COUNT"] = count
} else {
ctx.ServerError("Query data error.", err)
}
}

func GetModelConvertById(ctx *context.Context) (*models.AiModelConvert, error) {
id := ctx.Query("id")
return models.QueryModelConvertById(id)
}

func GetModelConvertPageData(ctx *context.Context) ([]*models.AiModelConvert, int64, error) {
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
@@ -725,10 +751,8 @@ func ShowModelConvertPageInfo(ctx *context.Context) {
})
if err != nil {
log.Info("query db error." + err.Error())
ctx.ServerError("Cloudbrain", err)
return
return nil, 0, err
}
ctx.Data["MODEL_CONVERT_COUNT"] = count
userIds := make([]int64, len(modelResult))
for i, model := range modelResult {
model.IsCanOper = isOper(ctx, model.UserId)
@@ -743,10 +767,7 @@ func ShowModelConvertPageInfo(ctx *context.Context) {
model.UserRelAvatarLink = value.RelAvatarLink()
}
}
pager := context.NewPagination(int(count), page, pageSize, 5)
ctx.Data["Page"] = pager
ctx.Data["Tasks"] = modelResult

return modelResult, count, nil
}

func ModelConvertDownloadModel(ctx *context.Context) {
@@ -757,7 +778,7 @@ func ModelConvertDownloadModel(ctx *context.Context) {
ctx.ServerError("Not found task.", err)
return
}
AllDownload := ctx.QueryBool("AllDownload")
AllDownload := ctx.QueryBool("allDownload")
if AllDownload {
if job.IsGpuTrainTask() {
path := setting.CBCodePathPrefix + job.ID + "/model/"


+ 402
- 80
routers/repo/ai_model_manage.go View File

@@ -22,25 +22,33 @@ import (
)

const (
Model_prefix = "aimodels/"
tplModelManageIndex = "repo/modelmanage/index"
tplModelManageDownload = "repo/modelmanage/download"
tplModelInfo = "repo/modelmanage/showinfo"
MODEL_LATEST = 1
MODEL_NOT_LATEST = 0
MODEL_MAX_SIZE = 1024 * 1024 * 1024
STATUS_COPY_MODEL = 1
STATUS_FINISHED = 0
STATUS_ERROR = 2
Attachment_model = "model"
Model_prefix = "aimodels/"
tplModelManageIndex = "repo/modelmanage/index"
tplModelManageDownload = "repo/modelmanage/download"
tplModelInfo = "repo/modelmanage/showinfo"
tplCreateLocalModelInfo = "repo/modelmanage/create_local_1"
tplCreateLocalForUploadModelInfo = "repo/modelmanage/create_local_2"
tplCreateOnlineModelInfo = "repo/modelmanage/create_online"

MODEL_LATEST = 1
MODEL_NOT_LATEST = 0
MODEL_MAX_SIZE = 1024 * 1024 * 1024
STATUS_COPY_MODEL = 1
STATUS_FINISHED = 0
STATUS_ERROR = 2

MODEL_LOCAL_TYPE = 1
MODEL_ONLINE_TYPE = 0
)

func saveModelByParameters(jobId string, versionName string, name string, version string, label string, description string, engine int, ctx *context.Context) error {
func saveModelByParameters(jobId string, versionName string, name string, version string, label string, description string, engine int, ctx *context.Context) (string, error) {
aiTask, err := models.GetCloudbrainByJobIDAndVersionName(jobId, versionName)
if err != nil {
aiTask, err = models.GetRepoCloudBrainByJobID(ctx.Repo.Repository.ID, jobId)
if err != nil {
log.Info("query task error." + err.Error())
return err
return "", err
} else {
log.Info("query gpu train task.")
}
@@ -56,7 +64,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
if len(aimodels) > 0 {
for _, model := range aimodels {
if model.Version == version {
return errors.New(ctx.Tr("repo.model.manage.create_error"))
return "", errors.New(ctx.Tr("repo.model.manage.create_error"))
}
if model.New == MODEL_LATEST {
lastNewModelId = model.ID
@@ -70,13 +78,12 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
cloudType = models.TypeCloudBrainTwo
} else if aiTask.ComputeResource == models.GPUResource {
cloudType = models.TypeCloudBrainOne
spec, err := resource.GetCloudbrainSpec(aiTask.ID)
if err == nil {
flaverName := "GPU: " + fmt.Sprint(spec.AccCardsNum) + "*" + spec.AccCardType + ",CPU: " + fmt.Sprint(spec.CpuCores) + "," + ctx.Tr("cloudbrain.memory") + ": " + fmt.Sprint(spec.MemGiB) + "GB," + ctx.Tr("cloudbrain.shared_memory") + ": " + fmt.Sprint(spec.ShareMemGiB) + "GB"
aiTask.FlavorName = flaverName
}
}

spec, err := resource.GetCloudbrainSpec(aiTask.ID)
if err == nil {
specJson, _ := json.Marshal(spec)
aiTask.FlavorName = string(specJson)
}
accuracy := make(map[string]string)
accuracy["F1"] = ""
accuracy["Recall"] = ""
@@ -111,7 +118,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio

err = models.SaveModelToDb(model)
if err != nil {
return err
return "", err
}
if len(lastNewModelId) > 0 {
//update status and version count
@@ -134,7 +141,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio

log.Info("save model end.")
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, name, models.ActionCreateNewModelTask)
return nil
return id, nil
}

func asyncToCopyModel(aiTask *models.Cloudbrain, id string, modelSelectedFile string) {
@@ -173,7 +180,7 @@ func SaveNewNameModel(ctx *context.Context) {
ctx.Error(403, ctx.Tr("repo.model_noright"))
return
}
name := ctx.Query("Name")
name := ctx.Query("name")
if name == "" {
ctx.Error(500, fmt.Sprintf("name or version is null."))
return
@@ -189,44 +196,181 @@ func SaveNewNameModel(ctx *context.Context) {
log.Info("save model end.")
}

func SaveLocalModel(ctx *context.Context) {
if !ctx.Repo.CanWrite(models.UnitTypeModelManage) {
ctx.Error(403, ctx.Tr("repo.model_noright"))
return
}
re := map[string]string{
"code": "-1",
}
log.Info("save SaveLocalModel start.")
uuid := uuid.NewV4()
id := uuid.String()
name := ctx.Query("name")
version := ctx.Query("version")
if version == "" {
version = "0.0.1"
}
label := ctx.Query("label")
description := ctx.Query("description")
engine := ctx.QueryInt("engine")
taskType := ctx.QueryInt("type")
modelActualPath := ""
if taskType == models.TypeCloudBrainOne {
destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(id) + "/"
modelActualPath = setting.Attachment.Minio.Bucket + "/" + destKeyNamePrefix
} else if taskType == models.TypeCloudBrainTwo {
destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(id) + "/"
modelActualPath = setting.Bucket + "/" + destKeyNamePrefix
} else {
re["msg"] = "type is error."
ctx.JSON(200, re)
return
}
var lastNewModelId string
repoId := ctx.Repo.Repository.ID
aimodels := models.QueryModelByName(name, repoId)
if len(aimodels) > 0 {
for _, model := range aimodels {
if model.Version == version {
re["msg"] = ctx.Tr("repo.model.manage.create_error")
ctx.JSON(200, re)
return
}
if model.New == MODEL_LATEST {
lastNewModelId = model.ID
}
}
}
model := &models.AiModelManage{
ID: id,
Version: version,
ModelType: MODEL_LOCAL_TYPE,
VersionCount: len(aimodels) + 1,
Label: label,
Name: name,
Description: description,
New: MODEL_LATEST,
Type: taskType,
Path: modelActualPath,
Size: 0,
AttachmentId: "",
RepoId: repoId,
UserId: ctx.User.ID,
Engine: int64(engine),
TrainTaskInfo: "",
Accuracy: "",
Status: STATUS_FINISHED,
}

err := models.SaveModelToDb(model)
if err != nil {
re["msg"] = err.Error()
ctx.JSON(200, re)
return
}
if len(lastNewModelId) > 0 {
//update status and version count
models.ModifyModelNewProperty(lastNewModelId, MODEL_NOT_LATEST, 0)
}
var units []models.RepoUnit
var deleteUnitTypes []models.UnitType
units = append(units, models.RepoUnit{
RepoID: ctx.Repo.Repository.ID,
Type: models.UnitTypeModelManage,
Config: &models.ModelManageConfig{
EnableModelManage: true,
},
})
deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeModelManage)

models.UpdateRepositoryUnits(ctx.Repo.Repository, units, deleteUnitTypes)

log.Info("save model end.")
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, name, models.ActionCreateNewModelTask)
re["code"] = "0"
re["id"] = id
ctx.JSON(200, re)
}

func getSize(files []storage.FileInfo) int64 {
var size int64
for _, file := range files {
size += file.Size
}
return size
}

func UpdateModelSize(modeluuid string) {
model, err := models.QueryModelById(modeluuid)
if err == nil {
if model.Type == models.TypeCloudBrainOne {
if strings.HasPrefix(model.Path, setting.Attachment.Minio.Bucket+"/"+Model_prefix) {
files, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, model.Path[len(setting.Attachment.Minio.Bucket)+1:])
if err != nil {
log.Info("Failed to query model size from minio. id=" + modeluuid)
}
size := getSize(files)
models.ModifyModelSize(modeluuid, size)
}
} else if model.Type == models.TypeCloudBrainTwo {
if strings.HasPrefix(model.Path, setting.Bucket+"/"+Model_prefix) {
files, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, model.Path[len(setting.Bucket)+1:])
if err != nil {
log.Info("Failed to query model size from obs. id=" + modeluuid)
}
size := getSize(files)
models.ModifyModelSize(modeluuid, size)
}
}
} else {
log.Info("not found model,uuid=" + modeluuid)
}
}

func SaveModel(ctx *context.Context) {
if !ctx.Repo.CanWrite(models.UnitTypeModelManage) {
ctx.Error(403, ctx.Tr("repo.model_noright"))
return
}
log.Info("save model start.")
JobId := ctx.Query("JobId")
VersionName := ctx.Query("VersionName")
name := ctx.Query("Name")
version := ctx.Query("Version")
label := ctx.Query("Label")
description := ctx.Query("Description")
engine := ctx.QueryInt("Engine")
JobId := ctx.Query("jobId")
VersionName := ctx.Query("versionName")
name := ctx.Query("name")
version := ctx.Query("version")
label := ctx.Query("label")
description := ctx.Query("description")
engine := ctx.QueryInt("engine")
modelSelectedFile := ctx.Query("modelSelectedFile")
log.Info("engine=" + fmt.Sprint(engine) + " modelSelectedFile=" + modelSelectedFile)

re := map[string]string{
"code": "-1",
}
if JobId == "" || VersionName == "" {
ctx.Error(500, fmt.Sprintf("JobId or VersionName is null."))
re["msg"] = "JobId or VersionName is null."
ctx.JSON(200, re)
return
}
if modelSelectedFile == "" {
ctx.Error(500, fmt.Sprintf("Not selected model file."))
re["msg"] = "Not selected model file."
ctx.JSON(200, re)
return
}

if name == "" || version == "" {
ctx.Error(500, fmt.Sprintf("name or version is null."))
re["msg"] = "name or version is null."
ctx.JSON(200, re)
return
}

err := saveModelByParameters(JobId, VersionName, name, version, label, description, engine, ctx)

id, err := saveModelByParameters(JobId, VersionName, name, version, label, description, engine, ctx)
if err != nil {
log.Info("save model error." + err.Error())
ctx.Error(500, fmt.Sprintf("save model error. %v", err))
return
re["msg"] = err.Error()
} else {
re["code"] = "0"
re["id"] = id
}
ctx.Status(200)
ctx.JSON(200, re)
log.Info("save model end.")
}

@@ -288,16 +432,74 @@ func downloadModelFromCloudBrainOne(modelUUID string, jobName string, parentDir
return "", 0, nil
}
}
func DeleteModelFile(ctx *context.Context) {
log.Info("delete model start.")
id := ctx.Query("id")
fileName := ctx.Query("fileName")
model, err := models.QueryModelById(id)
if err == nil {
if model.ModelType == MODEL_LOCAL_TYPE {
if model.Type == models.TypeCloudBrainOne {
bucketName := setting.Attachment.Minio.Bucket
objectName := model.Path[len(bucketName)+1:] + fileName
log.Info("delete bucket=" + bucketName + " path=" + objectName)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
totalSize := storage.MinioGetFilesSize(bucketName, []string{objectName})
err := storage.Attachments.DeleteDir(objectName)
if err != nil {
log.Info("Failed to delete model. id=" + id)
re := map[string]string{
"code": "-1",
}
re["msg"] = err.Error()
ctx.JSON(200, re)
return
} else {
log.Info("delete minio file size is:" + fmt.Sprint(totalSize))
models.ModifyModelSize(id, model.Size-totalSize)
}
}
} else if model.Type == models.TypeCloudBrainTwo {
bucketName := setting.Bucket
objectName := model.Path[len(setting.Bucket)+1:] + fileName
log.Info("delete bucket=" + setting.Bucket + " path=" + objectName)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
totalSize := storage.ObsGetFilesSize(bucketName, []string{objectName})
err := storage.ObsRemoveObject(bucketName, objectName)
if err != nil {
log.Info("Failed to delete model. id=" + id)
re := map[string]string{
"code": "-1",
}
re["msg"] = err.Error()
ctx.JSON(200, re)
return
} else {
log.Info("delete obs file size is:" + fmt.Sprint(totalSize))
models.ModifyModelSize(id, model.Size-totalSize)
}
}
}
}
}
ctx.JSON(200, map[string]string{
"code": "0",
})
}

func DeleteModel(ctx *context.Context) {
log.Info("delete model start.")
id := ctx.Query("ID")
id := ctx.Query("id")
err := deleteModelByID(ctx, id)
if err != nil {
ctx.JSON(500, err.Error())
re := map[string]string{
"code": "-1",
}
re["msg"] = err.Error()
ctx.JSON(200, re)
} else {
ctx.JSON(200, map[string]string{
"result_code": "0",
"code": "0",
})
}
}
@@ -309,14 +511,28 @@ func deleteModelByID(ctx *context.Context, id string) error {
return errors.New(ctx.Tr("repo.model_noright"))
}
if err == nil {
log.Info("bucket=" + setting.Bucket + " path=" + model.Path)
if strings.HasPrefix(model.Path, setting.Bucket+"/"+Model_prefix) {
err := storage.ObsRemoveObject(setting.Bucket, model.Path[len(setting.Bucket)+1:])
if err != nil {
log.Info("Failed to delete model. id=" + id)
return err

if model.Type == models.TypeCloudBrainOne {
bucketName := setting.Attachment.Minio.Bucket
log.Info("bucket=" + bucketName + " path=" + model.Path)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
err := storage.Attachments.DeleteDir(model.Path[len(bucketName)+1:])
if err != nil {
log.Info("Failed to delete model. id=" + id)
return err
}
}
} else if model.Type == models.TypeCloudBrainTwo {
log.Info("bucket=" + setting.Bucket + " path=" + model.Path)
if strings.HasPrefix(model.Path, setting.Bucket+"/"+Model_prefix) {
err := storage.ObsRemoveObject(setting.Bucket, model.Path[len(setting.Bucket)+1:])
if err != nil {
log.Info("Failed to delete model. id=" + id)
return err
}
}
}

err = models.DeleteModelById(id)
if err == nil { //find a model to change new
aimodels := models.QueryModelByName(model.Name, model.RepoId)
@@ -354,7 +570,7 @@ func QueryModelByParameters(repoId int64, page int) ([]*models.AiModelManage, in

func DownloadMultiModelFile(ctx *context.Context) {
log.Info("DownloadMultiModelFile start.")
id := ctx.Query("ID")
id := ctx.Query("id")
log.Info("id=" + id)
task, err := models.QueryModelById(id)
if err != nil {
@@ -487,7 +703,10 @@ func downloadFromCloudBrainTwo(path string, task *models.AiModelManage, ctx *con

func QueryTrainJobVersionList(ctx *context.Context) {
log.Info("query train job version list. start.")
JobID := ctx.Query("JobID")
JobID := ctx.Query("jobId")
if JobID == "" {
JobID = ctx.Query("JobId")
}

VersionListTasks, count, err := models.QueryModelTrainJobVersionList(JobID)

@@ -515,20 +734,33 @@ func QueryTrainJobList(ctx *context.Context) {

}

func QueryTrainModelList(ctx *context.Context) {
log.Info("query train job list. start.")
jobName := ctx.Query("jobName")
taskType := ctx.QueryInt("type")
VersionName := ctx.Query("VersionName")
func QueryTrainModelFileById(ctx *context.Context) ([]storage.FileInfo, error) {
JobID := ctx.Query("jobId")
VersionListTasks, count, err := models.QueryModelTrainJobVersionList(JobID)
if err == nil {
if count == 1 {
task := VersionListTasks[0]
jobName := task.JobName
taskType := task.Type
VersionName := task.VersionName
modelDbResult, err := getModelFromObjectSave(jobName, taskType, VersionName)
return modelDbResult, err
}
}
log.Info("get TypeCloudBrainTwo TrainJobListModel failed:", err)
return nil, errors.New("Not found task.")
}

func getModelFromObjectSave(jobName string, taskType int, VersionName string) ([]storage.FileInfo, error) {
if taskType == models.TypeCloudBrainTwo {
objectkey := path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, VersionName) + "/"
modelDbResult, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, objectkey)
log.Info("bucket=" + setting.Bucket + " objectkey=" + objectkey)
if err != nil {
log.Info("get TypeCloudBrainTwo TrainJobListModel failed:", err)
return nil, err
} else {
ctx.JSON(200, modelDbResult)
return
return modelDbResult, nil
}
} else if taskType == models.TypeCloudBrainOne {
modelSrcPrefix := setting.CBCodePathPrefix + jobName + "/model/"
@@ -536,12 +768,30 @@ func QueryTrainModelList(ctx *context.Context) {
modelDbResult, err := storage.GetAllObjectByBucketAndPrefixMinio(bucketName, modelSrcPrefix)
if err != nil {
log.Info("get TypeCloudBrainOne TrainJobListModel failed:", err)
return nil, err
} else {
ctx.JSON(200, modelDbResult)
return
return modelDbResult, nil
}
}
ctx.JSON(200, "")
return nil, errors.New("Not support.")
}

func QueryTrainModelList(ctx *context.Context) {
log.Info("query train job list. start.")
jobName := ctx.Query("jobName")
taskType := ctx.QueryInt("type")
VersionName := ctx.Query("versionName")
if VersionName == "" {
VersionName = ctx.Query("VersionName")
}
modelDbResult, err := getModelFromObjectSave(jobName, taskType, VersionName)
if err != nil {
log.Info("get TypeCloudBrainTwo TrainJobListModel failed:", err)
ctx.JSON(200, "")
} else {
ctx.JSON(200, modelDbResult)
return
}
}

func DownloadSingleModelFile(ctx *context.Context) {
@@ -612,7 +862,7 @@ func DownloadSingleModelFile(ctx *context.Context) {
}

func ShowModelInfo(ctx *context.Context) {
ctx.Data["ID"] = ctx.Query("ID")
ctx.Data["ID"] = ctx.Query("id")
ctx.Data["name"] = ctx.Query("name")
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)
@@ -620,6 +870,19 @@ func ShowModelInfo(ctx *context.Context) {
ctx.HTML(200, tplModelInfo)
}

func QueryModelById(ctx *context.Context) {
id := ctx.Query("id")
model, err := models.QueryModelById(id)
if err == nil {
model.IsCanOper = isOper(ctx, model.UserId)
model.IsCanDelete = isCanDelete(ctx, model.UserId)
removeIpInfo(model)
ctx.JSON(http.StatusOK, model)
} else {
ctx.JSON(http.StatusNotFound, nil)
}
}

func ShowSingleModel(ctx *context.Context) {
name := ctx.Query("name")

@@ -828,30 +1091,59 @@ func ModifyModel(id string, description string) error {

func ModifyModelInfo(ctx *context.Context) {
log.Info("modify model start.")
id := ctx.Query("ID")
description := ctx.Query("Description")

id := ctx.Query("id")
re := map[string]string{
"code": "-1",
}
task, err := models.QueryModelById(id)
if err != nil {
re["msg"] = err.Error()
log.Error("no such model!", err.Error())
ctx.ServerError("no such model:", err)
ctx.JSON(200, re)
return
}
if !isOper(ctx, task.UserId) {
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
//ctx.ServerError("no right.", errors.New(ctx.Tr("repo.model_noright")))
re["msg"] = "No right to operation."
ctx.JSON(200, re)
return
}
if task.ModelType == MODEL_LOCAL_TYPE {
name := ctx.Query("name")
label := ctx.Query("label")
description := ctx.Query("description")
engine := ctx.QueryInt("engine")
aimodels := models.QueryModelByName(name, task.RepoId)
if aimodels != nil && len(aimodels) > 0 {
if len(aimodels) == 1 {
if aimodels[0].ID != task.ID {
re["msg"] = ctx.Tr("repo.model.manage.create_error")
ctx.JSON(200, re)
return
}
} else {
re["msg"] = ctx.Tr("repo.model.manage.create_error")
ctx.JSON(200, re)
return
}
}
err = models.ModifyLocalModel(id, name, label, description, engine)

err = ModifyModel(id, description)
} else {
label := ctx.Query("label")
description := ctx.Query("description")
engine := task.Engine
name := task.Name
err = models.ModifyLocalModel(id, name, label, description, int(engine))
}

if err != nil {
log.Info("modify error," + err.Error())
ctx.ServerError("error.", err)
re["msg"] = err.Error()
ctx.JSON(200, re)
return
} else {
ctx.JSON(200, "success")
re["code"] = "0"
ctx.JSON(200, re)
}

}

func QueryModelListForPredict(ctx *context.Context) {
@@ -894,28 +1186,36 @@ func QueryModelListForPredict(ctx *context.Context) {
}

func QueryModelFileForPredict(ctx *context.Context) {
id := ctx.Query("ID")
id := ctx.Query("id")
if id == "" {
id = ctx.Query("ID")
}
ctx.JSON(http.StatusOK, QueryModelFileByID(id))
}

func QueryModelFileByID(id string) []storage.FileInfo {
model, err := models.QueryModelById(id)
if err == nil {
if model.Type == models.TypeCloudBrainTwo {
prefix := model.Path[len(setting.Bucket)+1:]
fileinfos, _ := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, prefix)
ctx.JSON(http.StatusOK, fileinfos)
return fileinfos
} else if model.Type == models.TypeCloudBrainOne {
prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:]
fileinfos, _ := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, prefix)
ctx.JSON(http.StatusOK, fileinfos)
return fileinfos
}
} else {
log.Error("no such model!", err.Error())
ctx.ServerError("no such model:", err)
return
}
return nil
}

func QueryOneLevelModelFile(ctx *context.Context) {
id := ctx.Query("ID")
id := ctx.Query("id")
if id == "" {
id = ctx.Query("ID")
}
parentDir := ctx.Query("parentDir")
model, err := models.QueryModelById(id)
if err != nil {
@@ -941,3 +1241,25 @@ func QueryOneLevelModelFile(ctx *context.Context) {
ctx.JSON(http.StatusOK, fileinfos)
}
}

func CreateLocalModel(ctx *context.Context) {
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)

ctx.HTML(200, tplCreateLocalModelInfo)
}

func CreateLocalModelForUpload(ctx *context.Context) {
ctx.Data["uuid"] = ctx.Query("uuid")
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)
ctx.Data["max_model_size"] = setting.MaxModelSize * MODEL_MAX_SIZE
ctx.HTML(200, tplCreateLocalForUploadModelInfo)
}

func CreateOnlineModel(ctx *context.Context) {
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)

ctx.HTML(200, tplCreateOnlineModelInfo)
}

+ 2
- 2
routers/repo/aisafety.go View File

@@ -804,7 +804,7 @@ func createForNPU(ctx *context.Context, jobName string) error {
JobType: string(models.JobTypeModelSafety),
}

err = modelarts.GenerateInferenceJob(ctx, req)
_, err = modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
return err
@@ -901,7 +901,7 @@ func createForGPU(ctx *context.Context, jobName string) error {
LabelName: evaluationIndex,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
return err
}


+ 18
- 17
routers/repo/attachment.go View File

@@ -11,6 +11,7 @@ import (
"fmt"
"mime/multipart"
"net/http"
"path"
"strconv"
"strings"

@@ -311,7 +312,8 @@ func GetAttachment(ctx *context.Context) {
url = setting.PROXYURL + "/obs_proxy_download?uuid=" + attach.UUID + "&file_name=" + attach.Name
log.Info("return url=" + url)
} else {
url, err = storage.ObsGetPreSignedUrl(attach.UUID, attach.Name)
objectName := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(attach.UUID[0:1], attach.UUID[1:2], attach.UUID, attach.Name)), "/")
url, err = storage.ObsGetPreSignedUrl(objectName, attach.Name)
if err != nil {
ctx.ServerError("ObsGetPreSignedUrl", err)
return
@@ -415,7 +417,7 @@ func AddAttachment(ctx *context.Context) {
uuid := ctx.Query("uuid")
has := false
if typeCloudBrain == models.TypeCloudBrainOne {
has, err = storage.Attachments.HasObject(models.AttachmentRelativePath(uuid))
has, err = storage.Attachments.HasObject(setting.Attachment.Minio.BasePath + models.AttachmentRelativePath(uuid))
if err != nil {
ctx.ServerError("HasObject", err)
return
@@ -557,7 +559,7 @@ func GetSuccessChunks(ctx *context.Context) {

isExist := false
if typeCloudBrain == models.TypeCloudBrainOne {
isExist, err = storage.Attachments.HasObject(models.AttachmentRelativePath(fileChunk.UUID))
isExist, err = storage.Attachments.HasObject(setting.Attachment.Minio.BasePath + models.AttachmentRelativePath(fileChunk.UUID))
if err != nil {
ctx.ServerError("HasObject failed", err)
return
@@ -593,12 +595,12 @@ func GetSuccessChunks(ctx *context.Context) {
}

if typeCloudBrain == models.TypeCloudBrainOne {
chunks, err = storage.GetPartInfos(fileChunk.UUID, fileChunk.UploadID)
chunks, err = storage.GetPartInfos(strings.TrimPrefix(path.Join(setting.Attachment.Minio.BasePath, path.Join(fileChunk.UUID[0:1], fileChunk.UUID[1:2], fileChunk.UUID)), "/"), fileChunk.UploadID)
if err != nil {
log.Error("GetPartInfos failed:%v", err.Error())
}
} else {
chunks, err = storage.GetObsPartInfos(fileChunk.UUID, fileChunk.UploadID, fileName)
chunks, err = storage.GetObsPartInfos(strings.TrimPrefix(path.Join(setting.BasePath, path.Join(fileChunk.UUID[0:1], fileChunk.UUID[1:2], fileChunk.UUID, fileName)), "/"), fileChunk.UploadID)
if err != nil {
log.Error("GetObsPartInfos failed:%v", err.Error())
}
@@ -699,13 +701,13 @@ func NewMultipart(ctx *context.Context) {
uuid := gouuid.NewV4().String()
var uploadID string
if typeCloudBrain == models.TypeCloudBrainOne {
uploadID, err = storage.NewMultiPartUpload(uuid)
uploadID, err = storage.NewMultiPartUpload(strings.TrimPrefix(path.Join(setting.Attachment.Minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/"))
if err != nil {
ctx.ServerError("NewMultipart", err)
return
}
} else {
uploadID, err = storage.NewObsMultiPartUpload(uuid, fileName)
uploadID, err = storage.NewObsMultiPartUpload(strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/"))
if err != nil {
ctx.ServerError("NewObsMultiPartUpload", err)
return
@@ -749,8 +751,8 @@ func PutOBSProxyUpload(ctx *context.Context) {
ctx.Error(500, fmt.Sprintf("FormFile: %v", RequestBody))
return
}
err := storage.ObsMultiPartUpload(uuid, uploadID, partNumber, fileName, RequestBody.ReadCloser())
objectName := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
err := storage.ObsMultiPartUpload(objectName, uploadID, partNumber, fileName, RequestBody.ReadCloser())
if err != nil {
log.Info("upload error.")
}
@@ -759,8 +761,8 @@ func PutOBSProxyUpload(ctx *context.Context) {
func GetOBSProxyDownload(ctx *context.Context) {
uuid := ctx.Query("uuid")
fileName := ctx.Query("file_name")
body, err := storage.ObsDownload(uuid, fileName)
objectName := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
body, err := storage.ObsDownloadAFile(setting.Bucket, objectName)
if err != nil {
log.Info("upload error.")
} else {
@@ -805,7 +807,7 @@ func GetMultipartUploadUrl(ctx *context.Context) {
return
}

url, err = storage.GenMultiPartSignedUrl(uuid, uploadID, partNumber, size)
url, err = storage.GenMultiPartSignedUrl(strings.TrimPrefix(path.Join(setting.Attachment.Minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/"), uploadID, partNumber, size)
if err != nil {
ctx.Error(500, fmt.Sprintf("GenMultiPartSignedUrl failed: %v", err))
return
@@ -815,7 +817,7 @@ func GetMultipartUploadUrl(ctx *context.Context) {
url = setting.PROXYURL + "/obs_proxy_multipart?uuid=" + uuid + "&uploadId=" + uploadID + "&partNumber=" + fmt.Sprint(partNumber) + "&file_name=" + fileName
log.Info("return url=" + url)
} else {
url, err = storage.ObsGenMultiPartSignedUrl(uuid, uploadID, partNumber, fileName)
url, err = storage.ObsGenMultiPartSignedUrl(strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/"), uploadID, partNumber)
if err != nil {
ctx.Error(500, fmt.Sprintf("ObsGenMultiPartSignedUrl failed: %v", err))
return
@@ -823,7 +825,6 @@ func GetMultipartUploadUrl(ctx *context.Context) {
log.Info("url=" + url)
}
}

ctx.JSON(200, map[string]string{
"url": url,
})
@@ -855,13 +856,13 @@ func CompleteMultipart(ctx *context.Context) {
}

if typeCloudBrain == models.TypeCloudBrainOne {
_, err = storage.CompleteMultiPartUpload(uuid, uploadID, fileChunk.TotalChunks)
_, err = storage.CompleteMultiPartUpload(strings.TrimPrefix(path.Join(setting.Attachment.Minio.BasePath, path.Join(fileChunk.UUID[0:1], fileChunk.UUID[1:2], fileChunk.UUID)), "/"), uploadID, fileChunk.TotalChunks)
if err != nil {
ctx.Error(500, fmt.Sprintf("CompleteMultiPartUpload failed: %v", err))
return
}
} else {
err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName, fileChunk.TotalChunks)
err = storage.CompleteObsMultiPartUpload(strings.TrimPrefix(path.Join(setting.BasePath, path.Join(fileChunk.UUID[0:1], fileChunk.UUID[1:2], fileChunk.UUID, fileName)), "/"), uploadID, fileChunk.TotalChunks)
if err != nil {
ctx.Error(500, fmt.Sprintf("CompleteObsMultiPartUpload failed: %v", err))
return
@@ -1013,7 +1014,7 @@ func queryDatasets(ctx *context.Context, attachs []*models.AttachmentUsername) {
}

for _, attch := range attachs {
has, err := storage.Attachments.HasObject(models.AttachmentRelativePath(attch.UUID))
has, err := storage.Attachments.HasObject(setting.Attachment.Minio.BasePath + models.AttachmentRelativePath(attch.UUID))
if err != nil || !has {
continue
}


+ 323
- 0
routers/repo/attachment_model.go View File

@@ -0,0 +1,323 @@
package repo

import (
"fmt"
"path"
"strconv"
"strings"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/minio_ext"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/upload"
gouuid "github.com/satori/go.uuid"
)

func GetModelChunks(ctx *context.Context) {
fileMD5 := ctx.Query("md5")
typeCloudBrain := ctx.QueryInt("type")
fileName := ctx.Query("file_name")
scene := ctx.Query("scene")
modeluuid := ctx.Query("modeluuid")
log.Info("scene=" + scene + " typeCloudBrain=" + fmt.Sprint(typeCloudBrain))
var chunks string

err := checkTypeCloudBrain(typeCloudBrain)
if err != nil {
ctx.ServerError("checkTypeCloudBrain failed", err)
return
}

fileChunk, err := models.GetModelFileChunkByMD5AndUser(fileMD5, ctx.User.ID, typeCloudBrain, modeluuid)
if err != nil {
if models.IsErrFileChunkNotExist(err) {
ctx.JSON(200, map[string]string{
"uuid": "",
"uploaded": "0",
"uploadID": "",
"chunks": "",
})
} else {
ctx.ServerError("GetFileChunkByMD5", err)
}
return
}

isExist := false
if typeCloudBrain == models.TypeCloudBrainOne {
isExist, err = storage.Attachments.HasObject(fileChunk.ObjectName)
if isExist {
log.Info("The file is exist in minio. has uploaded.path=" + fileChunk.ObjectName)
} else {
log.Info("The file is not exist in minio..")
}
if err != nil {
ctx.ServerError("HasObject failed", err)
return
}
} else {
isExist, err = storage.ObsHasObject(fileChunk.ObjectName)
if isExist {
log.Info("The file is exist in obs. has uploaded. path=" + fileChunk.ObjectName)
} else {
log.Info("The file is not exist in obs.")
}
if err != nil {
ctx.ServerError("ObsHasObject failed", err)
return
}
}

if isExist {
if fileChunk.IsUploaded == models.FileNotUploaded {
log.Info("the file has been uploaded but not recorded")
fileChunk.IsUploaded = models.FileUploaded
if err = models.UpdateModelFileChunk(fileChunk); err != nil {
log.Error("UpdateFileChunk failed:", err.Error())
}
}
modelname := ""
model, err := models.QueryModelById(modeluuid)
if err == nil && model != nil {
modelname = model.Name
}
ctx.JSON(200, map[string]string{
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
"uploadID": fileChunk.UploadID,
"chunks": string(chunks),
"attachID": "0",
"modeluuid": modeluuid,
"fileName": fileName,
"modelName": modelname,
})
} else {
if fileChunk.IsUploaded == models.FileUploaded {
log.Info("the file has been recorded but not uploaded")
fileChunk.IsUploaded = models.FileNotUploaded
if err = models.UpdateModelFileChunk(fileChunk); err != nil {
log.Error("UpdateFileChunk failed:", err.Error())
}
}

if typeCloudBrain == models.TypeCloudBrainOne {
chunks, err = storage.GetPartInfos(fileChunk.ObjectName, fileChunk.UploadID)
if err != nil {
log.Error("GetPartInfos failed:%v", err.Error())
}
} else {
chunks, err = storage.GetObsPartInfos(fileChunk.ObjectName, fileChunk.UploadID)
if err != nil {
log.Error("GetObsPartInfos failed:%v", err.Error())
}
}
if err != nil {
models.DeleteModelFileChunk(fileChunk)
ctx.JSON(200, map[string]string{
"uuid": "",
"uploaded": "0",
"uploadID": "",
"chunks": "",
})
} else {
ctx.JSON(200, map[string]string{
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
"uploadID": fileChunk.UploadID,
"chunks": string(chunks),
"attachID": "0",
"datasetID": "0",
"fileName": "",
"datasetName": "",
})
}
}
}

func getObjectName(filename string, modeluuid string) string {
return strings.TrimPrefix(path.Join(Model_prefix, path.Join(modeluuid[0:1], modeluuid[1:2], modeluuid, filename)), "/")
}
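getObjectName shards local-model files under Model_prefix by the first two characters of the model uuid, mirroring the attachment layout. An illustrative value (the uuid is made up):

key := getObjectName("model.onnx", "1f3a9cdeexample")
// key == "aimodels/1/f/1f3a9cdeexample/model.onnx"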

func NewModelMultipart(ctx *context.Context) {
if !setting.Attachment.Enabled {
ctx.Error(404, "attachment is not enabled")
return
}
fileName := ctx.Query("file_name")
modeluuid := ctx.Query("modeluuid")

err := upload.VerifyFileType(ctx.Query("fileType"), strings.Split(setting.Attachment.AllowedTypes, ","))
if err != nil {
ctx.Error(400, err.Error())
return
}

typeCloudBrain := ctx.QueryInt("type")
err = checkTypeCloudBrain(typeCloudBrain)
if err != nil {
ctx.ServerError("checkTypeCloudBrain failed", err)
return
}

if setting.Attachment.StoreType == storage.MinioStorageType {
totalChunkCounts := ctx.QueryInt("totalChunkCounts")
if totalChunkCounts > minio_ext.MaxPartsCount {
ctx.Error(400, fmt.Sprintf("chunk counts(%d) is too much", totalChunkCounts))
return
}

fileSize := ctx.QueryInt64("size")
if fileSize > minio_ext.MaxMultipartPutObjectSize {
ctx.Error(400, fmt.Sprintf("file size(%d) is too big", fileSize))
return
}

uuid := gouuid.NewV4().String()
var uploadID string
var objectName string
if typeCloudBrain == models.TypeCloudBrainOne {
objectName = strings.TrimPrefix(path.Join(Model_prefix, path.Join(modeluuid[0:1], modeluuid[1:2], modeluuid, fileName)), "/")
uploadID, err = storage.NewMultiPartUpload(objectName)
if err != nil {
ctx.ServerError("NewMultipart", err)
return
}
} else {

objectName = strings.TrimPrefix(path.Join(Model_prefix, path.Join(modeluuid[0:1], modeluuid[1:2], modeluuid, fileName)), "/")
uploadID, err = storage.NewObsMultiPartUpload(objectName)
if err != nil {
ctx.ServerError("NewObsMultiPartUpload", err)
return
}
}

_, err = models.InsertModelFileChunk(&models.ModelFileChunk{
UUID: uuid,
UserID: ctx.User.ID,
UploadID: uploadID,
Md5: ctx.Query("md5"),
Size: fileSize,
ObjectName: objectName,
ModelUUID: modeluuid,
TotalChunks: totalChunkCounts,
Type: typeCloudBrain,
})

if err != nil {
ctx.Error(500, fmt.Sprintf("InsertFileChunk: %v", err))
return
}

ctx.JSON(200, map[string]string{
"uuid": uuid,
"uploadID": uploadID,
})
} else {
ctx.Error(404, "storage type is not enabled")
return
}
}

func GetModelMultipartUploadUrl(ctx *context.Context) {
uuid := ctx.Query("uuid")
uploadID := ctx.Query("uploadID")
partNumber := ctx.QueryInt("chunkNumber")
size := ctx.QueryInt64("size")
typeCloudBrain := ctx.QueryInt("type")
err := checkTypeCloudBrain(typeCloudBrain)
if err != nil {
ctx.ServerError("checkTypeCloudBrain failed", err)
return
}
fileChunk, err := models.GetModelFileChunkByUUID(uuid)
if err != nil {
if models.IsErrFileChunkNotExist(err) {
ctx.Error(404)
} else {
ctx.ServerError("GetFileChunkByUUID", err)
}
return
}
url := ""
if typeCloudBrain == models.TypeCloudBrainOne {
if size > minio_ext.MinPartSize {
ctx.Error(400, fmt.Sprintf("chunk size(%d) is too big", size))
return
}
url, err = storage.GenMultiPartSignedUrl(fileChunk.ObjectName, uploadID, partNumber, size)
if err != nil {
ctx.Error(500, fmt.Sprintf("GenMultiPartSignedUrl failed: %v", err))
return
}
} else {
url, err = storage.ObsGenMultiPartSignedUrl(fileChunk.ObjectName, uploadID, partNumber)
if err != nil {
ctx.Error(500, fmt.Sprintf("ObsGenMultiPartSignedUrl failed: %v", err))
return
}
log.Info("url=" + url)

}

ctx.JSON(200, map[string]string{
"url": url,
})
}

func CompleteModelMultipart(ctx *context.Context) {
uuid := ctx.Query("uuid")
uploadID := ctx.Query("uploadID")
typeCloudBrain := ctx.QueryInt("type")
modeluuid := ctx.Query("modeluuid")
log.Warn("uuid:" + uuid)
log.Warn("modeluuid:" + modeluuid)
log.Warn("typeCloudBrain:" + strconv.Itoa(typeCloudBrain))

err := checkTypeCloudBrain(typeCloudBrain)
if err != nil {
ctx.ServerError("checkTypeCloudBrain failed", err)
return
}
fileChunk, err := models.GetModelFileChunkByUUID(uuid)
if err != nil {
if models.IsErrFileChunkNotExist(err) {
ctx.Error(404)
} else {
ctx.ServerError("GetFileChunkByUUID", err)
}
return
}

if typeCloudBrain == models.TypeCloudBrainOne {
_, err = storage.CompleteMultiPartUpload(fileChunk.ObjectName, uploadID, fileChunk.TotalChunks)
if err != nil {
ctx.Error(500, fmt.Sprintf("CompleteMultiPartUpload failed: %v", err))
return
}
} else {
err = storage.CompleteObsMultiPartUpload(fileChunk.ObjectName, uploadID, fileChunk.TotalChunks)
if err != nil {
ctx.Error(500, fmt.Sprintf("CompleteObsMultiPartUpload failed: %v", err))
return
}
}

fileChunk.IsUploaded = models.FileUploaded

err = models.UpdateModelFileChunk(fileChunk)
if err != nil {
ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err))
return
}
// update the stored model size
UpdateModelSize(modeluuid)

ctx.JSON(200, map[string]string{
"result_code": "0",
})

}
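Taken together, the handlers in this file implement chunked upload of a local model file. A flow sketch, illustrative only and grounded in the handlers above:

// 1. GetModelChunks             — look up an existing ModelFileChunk by md5/user/model and
//                                  report which parts are already in minio/obs.
// 2. NewModelMultipart          — start the multipart upload (minio or obs), record a
//                                  ModelFileChunk with its ObjectName, return {uuid, uploadID}.
// 3. GetModelMultipartUploadUrl — hand out a signed URL per chunkNumber for the client to PUT.
// 4. CompleteModelMultipart     — complete the multipart upload, mark the chunk uploaded,
//                                  then UpdateModelSize(modeluuid) refreshes the stored size.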

+ 26
- 47
routers/repo/cloudbrain.go View File

@@ -2,7 +2,6 @@ package repo

import (
"bufio"
"code.gitea.io/gitea/modules/urfs_client/urchin"
"encoding/json"
"errors"
"fmt"
@@ -16,6 +15,8 @@ import (
"time"
"unicode/utf8"

"code.gitea.io/gitea/modules/urfs_client/urchin"

"code.gitea.io/gitea/modules/dataset"

"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"
@@ -398,7 +399,7 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {

}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tpl, &form)
@@ -584,7 +585,7 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tpl, &form)
@@ -1845,59 +1846,37 @@ func SyncCloudbrainStatus() {
continue
}
if task.Type == models.TypeCloudBrainOne {
result, err := cloudbrain.GetJob(task.JobID)

task, err = cloudbrainTask.SyncCloudBrainOneStatus(task)
if err != nil {
log.Error("GetJob(%s) failed:%v", task.JobName, err)
log.Error("Sync cloud brain one (%s) failed:%v", task.JobName, err)
continue
}

if result != nil {
jobRes, _ := models.ConvertToJobResultPayload(result.Payload)
taskRoles := jobRes.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
oldStatus := task.Status
task.Status = taskRes.TaskStatuses[0].State
if task.Status != string(models.JobWaiting) {
models.ParseAndSetDurationFromCloudBrainOne(jobRes, task)
if task.Status != string(models.JobWaiting) {
if task.Duration >= setting.MaxDuration && task.JobType == string(models.JobTypeDebug) {
log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName)
err = cloudbrain.StopJob(task.JobID)
if err != nil {
log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
oldStatus := task.Status
task.Status = string(models.JobStopped)
if task.EndTime == 0 {
task.EndTime = timeutil.TimeStampNow()
}
task.ComputeAndSetDuration()
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
}

var maxDuration int64
if task.JobType == string(models.JobTypeBenchmark) {
maxDuration = setting.BenchmarkMaxDuration
} else if task.JobType == string(models.JobTypeSnn4imagenet) || task.JobType == string(models.JobTypeBrainScore) {
maxDuration = setting.ModelBenchmarkMaxDuration
} else {
maxDuration = setting.MaxDuration
}

if task.Duration >= maxDuration && task.JobType != string(models.JobTypeTrain) {
log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName)
err = cloudbrain.StopJob(task.JobID)
if err != nil {
log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
task.Status = string(models.JobStopped)
if task.EndTime == 0 {
task.EndTime = timeutil.TimeStampNow()
}
task.ComputeAndSetDuration()
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
}

}
} else if task.Type == models.TypeCloudBrainTwo {
if task.JobType == string(models.JobTypeDebug) {
@@ -2509,7 +2488,7 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tplCloudBrainBenchmarkNew, &form)
@@ -2663,7 +2642,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tpl, &form)


+2 -318  routers/repo/dataset.go

@@ -47,8 +47,8 @@ func newFilterPrivateAttachments(ctx *context.Context, list []*models.Attachment
permission := false
if !permission && ctx.User != nil {
isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
isInRepoTeam,_:=repo.IsInRepoTeam(ctx.User.ID)
if isCollaborator ||isInRepoTeam {
isInRepoTeam, _ := repo.IsInRepoTeam(ctx.User.ID)
if isCollaborator || isInRepoTeam {
log.Info("Collaborator user may visit the attach.")
permission = true
}
@@ -349,96 +349,6 @@ func DatasetAction(ctx *context.Context) {

}

func CurrentRepoDataset(ctx *context.Context) {
page := ctx.QueryInt("page")
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")

repo := ctx.Repo.Repository
var datasetIDs []int64
dataset, err := models.GetDatasetByRepo(repo)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err)))
return
}
datasetIDs = append(datasetIDs, dataset.ID)
datasets, count, err := models.Attachments(&models.AttachmentsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.DatasetPagingNum,
},
Keyword: keyword,
NeedDatasetIDs: true,
DatasetIDs: datasetIDs,
Type: cloudbrainType,
NeedIsPrivate: false,
JustNeedZipFile: true,
NeedRepoInfo: true,
})
if err != nil {
ctx.ServerError("datasets", err)
return
}

data, err := json.Marshal(datasets)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})
}

func MyDatasets(ctx *context.Context) {
page := ctx.QueryInt("page")
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")

uploaderID := ctx.User.ID
datasets, count, err := models.Attachments(&models.AttachmentsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.DatasetPagingNum,
},
Keyword: keyword,
NeedDatasetIDs: false,
UploaderID: uploaderID,
Type: cloudbrainType,
NeedIsPrivate: false,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
})
if err != nil {
ctx.ServerError("datasets", err)
return
}

data, err := json.Marshal(datasets)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})
}

func datasetMultiple(ctx *context.Context, opts *models.SearchDatasetOptions) {
page := ctx.QueryInt("page")
keyword := strings.Trim(ctx.Query("q"), " ")
@@ -593,180 +503,6 @@ func ReferenceDatasetData(ctx *context.Context) {

}

func PublicDataset(ctx *context.Context) {
page := ctx.QueryInt("page")
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")

datasets, count, err := models.Attachments(&models.AttachmentsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.DatasetPagingNum,
},
Keyword: keyword,
NeedDatasetIDs: false,
NeedIsPrivate: true,
IsPrivate: false,
Type: cloudbrainType,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
})
if err != nil {
ctx.ServerError("datasets", err)
return
}

data, err := json.Marshal(datasets)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})
}

func MyFavoriteDataset(ctx *context.Context) {
UserId := ctx.User.ID
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")
var NotColDatasetIDs []int64
var IsColDatasetIDs []int64
datasetStars, err := models.GetDatasetStarByUser(ctx.User)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetStarByUser failed", err)))
log.Error("GetDatasetStarByUser failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
//If the dataset has been deleted, it will not be counted
for _, datasetStar := range datasetStars {
IsExist, repo, dataset, err := IsDatasetStarExist(datasetStar)
if err != nil {
log.Error("IsDatasetStarExist error:", err.Error())
}
if IsExist {
DatasetIsCollaborator := DatasetIsCollaborator(ctx, dataset)
if repo.OwnerID == ctx.User.ID || DatasetIsCollaborator {
IsColDatasetIDs = append(IsColDatasetIDs, datasetStar.DatasetID)
} else {
NotColDatasetIDs = append(NotColDatasetIDs, datasetStar.DatasetID)
}
}
}

NotColDatasets, NotColcount, err := models.Attachments(&models.AttachmentsOptions{
Keyword: keyword,
NeedDatasetIDs: true,
DatasetIDs: NotColDatasetIDs,
NeedIsPrivate: true,
IsPrivate: false,
Type: cloudbrainType,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
UserId: UserId,
})
if err != nil {
ctx.ServerError("datasets", err)
return
}
//If is collaborator, there is no need to determine whether the dataset is private or public
IsColDatasets, IsColcount, err := models.Attachments(&models.AttachmentsOptions{
Keyword: keyword,
NeedDatasetIDs: true,
DatasetIDs: IsColDatasetIDs,
NeedIsPrivate: false,
Type: cloudbrainType,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
UserId: UserId,
})
if err != nil {
ctx.ServerError("datasets", err)
return
}
for _, NotColDataset := range NotColDatasets {
IsColDatasets = append(IsColDatasets, NotColDataset)
}
datasets := IsColDatasets
count := NotColcount + IsColcount
sort.Slice(datasets, func(i, j int) bool {
return datasets[i].Attachment.CreatedUnix > datasets[j].Attachment.CreatedUnix
})

page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}
pagesize := ctx.QueryInt("pagesize")
if pagesize <= 0 {
pagesize = 5
}
pageDatasetsInfo := getPageDatasets(datasets, page, pagesize)
if pageDatasetsInfo == nil {
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": "[]",
"count": strconv.FormatInt(count, 10),
})
return
}
data, err := json.Marshal(pageDatasetsInfo)
log.Info("data:", data)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})

}
func getPageDatasets(AttachmentInfos []*models.AttachmentInfo, page int, pagesize int) []*models.AttachmentInfo {
begin := (page - 1) * pagesize
end := (page) * pagesize

if begin > len(AttachmentInfos)-1 {
return nil
}
if end > len(AttachmentInfos)-1 {
return AttachmentInfos[begin:]
} else {
return AttachmentInfos[begin:end]
}

}
func getTotalPage(total int64, pageSize int) int {

another := 0
if int(total)%pageSize != 0 {
another = 1
}
return int(total)/pageSize + another

}

func GetDatasetStatus(ctx *context.Context) {

var (
@@ -791,55 +527,3 @@ func GetDatasetStatus(ctx *context.Context) {
"AttachmentStatus": fmt.Sprint(attachment.DecompressState),
})
}
func DatasetIsCollaborator(ctx *context.Context, dataset *models.Dataset) bool {
repo, err := models.GetRepositoryByID(dataset.RepoID)
if err != nil {
log.Error("query repo error:", err.Error())
} else {
repo.GetOwner()
if ctx.User != nil {
if repo.Owner.IsOrganization() {
org := repo.Owner
org.Teams, err = org.GetUserTeams(ctx.User.ID)
if err != nil {
log.Error("GetUserTeams error:", err.Error())
return false
}
if org.IsUserPartOfOrg(ctx.User.ID) {
for _, t := range org.Teams {
if t.IsMember(ctx.User.ID) && t.HasRepository(repo.ID) {
return true
}
}
isOwner, _ := models.IsOrganizationOwner(repo.OwnerID, ctx.User.ID)
if isOwner {
return isOwner
}
return false
}
}

isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
if isCollaborator {
return true
}
}
}

return false
}
func IsDatasetStarExist(datasetStar *models.DatasetStar) (bool, *models.Repository, *models.Dataset, error) {
dataset, err := models.GetDatasetByID(datasetStar.DatasetID)
if err != nil {
log.Error("query dataset error:", err.Error())
return false, nil, nil, err
} else {
repo, err := models.GetRepositoryByID(dataset.RepoID)
if err != nil {
log.Error("GetRepositoryByID error:", err.Error())
return false, nil, nil, err
}
return true, repo, dataset, nil
}

}

+73 -62  routers/repo/grampus.go

@@ -1,7 +1,6 @@
package repo

import (
"code.gitea.io/gitea/modules/urfs_client/urchin"
"encoding/json"
"errors"
"fmt"
@@ -13,6 +12,9 @@ import (
"strings"
"time"

"code.gitea.io/gitea/modules/urfs_client/urchin"
"code.gitea.io/gitea/routers/response"

"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"

"code.gitea.io/gitea/modules/dataset"
@@ -474,7 +476,7 @@ func grampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain

}

err = grampus.GenerateTrainJob(ctx, req)
_, err = grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error(), ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
@@ -509,28 +511,6 @@ func GrampusTrainJobVersionCreate(ctx *context.Context, form auth.CreateGrampusT

}

func checkSpecialPool(ctx *context.Context, resourceType string) string {
grampus.InitSpecialPool()
if grampus.SpecialPools != nil {
for _, pool := range grampus.SpecialPools.Pools {

if pool.IsExclusive && pool.Type == resourceType {

org, _ := models.GetOrgByName(pool.Org)
if org != nil {
isOrgMember, _ := models.IsOrganizationMember(org.ID, ctx.User.ID)
if !isOrgMember {
return ctx.Tr("repo.grampus.no_operate_right")
}
}
}

}

}
return ""
}

func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrainJobForm) {
ctx.Data["IsCreate"] = true
grampusTrainJobNpuCreate(ctx, form)
@@ -733,7 +713,7 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
req.PreTrainModelPath = preTrainModelPath
}

err = grampus.GenerateTrainJob(ctx, req)
_, err = grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
@@ -885,10 +865,10 @@ func GrampusTrainJobShow(ctx *context.Context) {
}
}
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob failed:" + err.Error())
}
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob failed:" + err.Error())
}
}
}
@@ -960,15 +940,14 @@ func GrampusGetLog(ctx *context.Context) {
content, err := grampus.GetTrainJobLog(job.JobID)
if err != nil {
log.Error("GetTrainJobLog failed: %v", err, ctx.Data["MsgID"])
ctx.ServerError(err.Error(), err)
ctx.JSON(http.StatusOK, map[string]interface{}{
"JobName": job.JobName,
"Content": "",
"CanLogDownload": false,
})
return
}
var canLogDownload bool
if err != nil {
canLogDownload = false
} else {
canLogDownload = true
}
canLogDownload := err == nil && job.IsUserHasRight(ctx.User)
ctx.JSON(http.StatusOK, map[string]interface{}{
"JobName": job.JobName,
"Content": content,
@@ -978,6 +957,28 @@ func GrampusGetLog(ctx *context.Context) {
return
}

func GrampusMetrics(ctx *context.Context) {
jobID := ctx.Params(":jobid")
job, err := models.GetCloudbrainByJobID(jobID)
if err != nil {
log.Error("GetCloudbrainByJobID failed: %v", err, ctx.Data["MsgID"])
ctx.ServerError(err.Error(), err)
return
}

result, err := grampus.GetGrampusMetrics(job.JobID)
if err != nil {
log.Error("GetTrainJobLog failed: %v", err, ctx.Data["MsgID"])
}
ctx.JSON(http.StatusOK, map[string]interface{}{
"JobID": jobID,
"Interval": result.Interval,
"MetricsInfo": result.MetricsInfo,
})

return
}

func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName, pretrainModelPath, pretrainModelFileName, modelRemoteObsUrl string) (string, error) {
var command string

@@ -1003,7 +1004,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo
if processorType == grampus.ProcessorTypeNPU {
//no need to process
} else if processorType == grampus.ProcessorTypeGPU {
unZipDatasetCommand := generateDatasetUnzipCommand(datasetName)
unZipDatasetCommand := cloudbrainTask.GenerateDatasetUnzipCommand(datasetName)
commandUnzip := "cd " + workDir + "code;unzip -q master.zip;rm -f master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand
command += commandUnzip
}
@@ -1077,31 +1078,6 @@ func processPretrainModelParameter(pretrainModelPath string, pretrainModelFileNa
return commandDownloadTemp
}

func generateDatasetUnzipCommand(datasetName string) string {
var unZipDatasetCommand string

datasetNameArray := strings.Split(datasetName, ";")
if len(datasetNameArray) == 1 { //single dataset
unZipDatasetCommand = "unzip -q '" + datasetName + "';"
if strings.HasSuffix(datasetNameArray[0], ".tar.gz") {
unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';"
}
unZipDatasetCommand += "rm -f '" + datasetName + "';"

} else { //multiple datasets
for _, datasetNameTemp := range datasetNameArray {
if strings.HasSuffix(datasetNameTemp, ".tar.gz") {
unZipDatasetCommand = unZipDatasetCommand + "tar -zxvf '" + datasetNameTemp + "';"
} else {
unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';"
}
unZipDatasetCommand += "rm -f '" + datasetNameTemp + "';"
}

}
return unZipDatasetCommand
}

func downloadZipCode(ctx *context.Context, codePath, branchName string) error {
archiveType := git.ZIP
archivePath := codePath
@@ -1149,3 +1125,38 @@ func downloadZipCode(ctx *context.Context, codePath, branchName string) error {

return nil
}
func HandleTaskWithAiCenter(ctx *context.Context) {
log.Info("HandleTaskWithAiCenter start")
updateCounts := 0
cloudBrains, err := models.GetC2NetWithAiCenterWrongJob()
if err != nil {
log.Error("GetC2NetWithAiCenterWrongJob failed:" + err.Error())
return
}
if len(cloudBrains) == 0 {
log.Info("HandleC2NetWithAiCenterWrongJob:no task need handle")
return
}
cloudBrainCounts := len(cloudBrains)
for _, task := range cloudBrains {
result, err := grampus.GetJob(task.JobID)
if err != nil {
log.Error("GetJob failed:" + err.Error())
continue
}
if result != nil {
if len(result.JobInfo.Tasks[0].CenterID) == 1 && len(result.JobInfo.Tasks[0].CenterName) == 1 {
task.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob failed:" + err.Error())
}
updateCounts++
}
}
r := make(map[string]interface{}, 0)
r["cloudBrainCounts"] = cloudBrainCounts
r["updateCounts"] = updateCounts
ctx.JSON(http.StatusOK, response.SuccessWithData(r))
}

+2 -2  routers/repo/modelarts.go

@@ -1230,7 +1230,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
return
}

err = modelarts.GenerateTrainJob(ctx, req)
_, err = modelarts.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
trainJobNewDataPrepare(ctx)
@@ -2205,7 +2205,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
req.UserCommand = userCommand
req.UserImageUrl = userImageUrl

err = modelarts.GenerateInferenceJob(ctx, req)
_, err = modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
inferenceJobErrorNewDataPrepare(ctx, form)


+30 -0  routers/response/api_response.go

@@ -0,0 +1,30 @@
package response

type AiforgeOuterResponse struct {
Code int `json:"code"`
Msg string `json:"msg"`
Data interface{} `json:"data"`
}

func OuterSuccess() *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS}
}

func OuterError(code int, msg string) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: code, Msg: msg}
}

func OuterServerError(msg string) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: msg}
}

func OuterBizError(err *BizError) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: err.Code, Msg: err.Err}
}

func OuterSuccessWithData(data interface{}) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS, Data: data}
}
func OuterErrorWithData(code int, msg string, data interface{}) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: code, Msg: msg, Data: data}
}
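A small standalone sketch of the JSON shape these helpers produce; the struct is re-declared locally so the example runs on its own, and the success code/message and the payload values are assumptions for illustration.

package main

import (
	"encoding/json"
	"fmt"
)

// AiforgeOuterResponse is re-declared here only so the sketch runs standalone.
type AiforgeOuterResponse struct {
	Code int         `json:"code"`
	Msg  string      `json:"msg"`
	Data interface{} `json:"data"`
}

func main() {
	resp := AiforgeOuterResponse{
		Code: 0,         // assumed value of RESPONSE_CODE_SUCCESS
		Msg:  "success", // assumed value of RESPONSE_MSG_SUCCESS
		Data: map[string]int{"cloudBrainCounts": 3, "updateCounts": 2}, // made-up payload
	}
	out, _ := json.Marshal(resp)
	fmt.Println(string(out))
	// {"code":0,"msg":"success","data":{"cloudBrainCounts":3,"updateCounts":2}}
}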

+5 -1  routers/response/response.go

@@ -24,10 +24,14 @@ func ServerError(msg string) *AiforgeResponse {
return &AiforgeResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: msg}
}

func ResponseError(err *BizError) *AiforgeResponse {
func ResponseBizError(err *BizError) *AiforgeResponse {
return &AiforgeResponse{Code: err.Code, Msg: err.Err}
}

func ResponseError(err error) *AiforgeResponse {
return &AiforgeResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: err.Error()}
}

func SuccessWithData(data interface{}) *AiforgeResponse {
return &AiforgeResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS, Data: data}
}
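For reference, the split after the rename is: ResponseBizError wraps the predefined *BizError values from response_list.go, while the new ResponseError wraps an ordinary Go error using the default error code. A self-contained sketch, with the types and the default code re-declared here as stand-ins.

package main

import (
	"errors"
	"fmt"
)

// Local stand-ins so the sketch runs on its own; the real values live in the response package.
type BizError struct {
	Code int
	Err  string
}

type AiforgeResponse struct {
	Code int
	Msg  string
	Data interface{}
}

const responseCodeErrorDefault = -1 // stand-in for RESPONSE_CODE_ERROR_DEFAULT

func ResponseBizError(err *BizError) *AiforgeResponse {
	return &AiforgeResponse{Code: err.Code, Msg: err.Err}
}

func ResponseError(err error) *AiforgeResponse {
	return &AiforgeResponse{Code: responseCodeErrorDefault, Msg: err.Error()}
}

func main() {
	paramError := &BizError{Code: 9001, Err: "param error"}   // mirrors PARAM_ERROR in response_list.go
	fmt.Printf("%+v\n", ResponseBizError(paramError))          // predefined business error
	fmt.Printf("%+v\n", ResponseError(errors.New("db down"))) // ordinary Go error
}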


+2 -2  routers/response/response_list.go

@@ -1,6 +1,7 @@
package response

//repo response
var PARAM_ERROR = &BizError{Code: 9001, Err: "param error"}

var RESOURCE_QUEUE_NOT_AVAILABLE = &BizError{Code: 1001, Err: "resource queue not available"}
var SPECIFICATION_NOT_EXIST = &BizError{Code: 1002, Err: "specification not exist"}
var SPECIFICATION_NOT_AVAILABLE = &BizError{Code: 1003, Err: "specification not available"}
@@ -11,4 +12,3 @@ var BADGES_STILL_HAS_USERS = &BizError{Code: 1005, Err: "Please delete users of
//common response
var SYSTEM_ERROR = &BizError{Code: 9009, Err: "System error.Please try again later"}
var INSUFFICIENT_PERMISSION = &BizError{Code: 9003, Err: "insufficient permissions"}
var PARAM_ERROR = &BizError{Code: 9001, Err: "param error permissions"}

+20 -4  routers/routes/routes.go

@@ -645,6 +645,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/specification", func() {
m.Get("", admin.GetSpecificationPage)
m.Get("/list", admin.GetResourceSpecificationList)
m.Get("/list/all", admin.GetAllResourceSpecificationList)
m.Get("/scenes/:id", admin.GetResourceSpecificationScenes)
m.Post("/grampus/sync", admin.SyncGrampusSpecs)
m.Post("/add", binding.Bind(models.ResourceSpecificationReq{}), admin.AddResourceSpecification)
@@ -728,6 +729,13 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/complete_multipart", repo.CompleteMultipart)
})

m.Group("/attachments/model", func() {
m.Get("/get_chunks", repo.GetModelChunks)
m.Get("/new_multipart", repo.NewModelMultipart)
m.Get("/get_multipart_url", repo.GetModelMultipartUploadUrl)
m.Post("/complete_multipart", repo.CompleteModelMultipart)
})

m.Group("/attachments", func() {
m.Get("/public/query", repo.QueryAllPublicDataset)
m.Get("/private/:username", repo.QueryPrivateDataset)
@@ -1127,10 +1135,6 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/edit/:id", reqRepoDatasetWriter, repo.EditDataset)
m.Post("/reference_datasets", reqRepoDatasetWriterJson, bindIgnErr(auth.ReferenceDatasetForm{}), repo.ReferenceDatasetPost)
m.Post("/edit", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost)
m.Get("/current_repo", repo.CurrentRepoDataset)
m.Get("/my_datasets", repo.MyDatasets)
m.Get("/public_datasets", repo.PublicDataset)
m.Get("/my_favorite", repo.MyFavoriteDataset)

m.Get("/current_repo_m", repo.CurrentRepoDatasetMultiple)
m.Get("/my_datasets_m", repo.MyDatasetsMultiple)
@@ -1232,6 +1236,12 @@ func RegisterRoutes(m *macaron.Macaron) {
})
}, context.RepoRef())
m.Group("/modelmanage", func() {
m.Get("/create_local_model_1", repo.CreateLocalModel)
m.Get("/create_local_model_2", repo.CreateLocalModelForUpload)
m.Get("/create_online_model", repo.CreateOnlineModel)
m.Post("/create_local_model", repo.SaveLocalModel)
m.Delete("/delete_model_file", repo.DeleteModelFile)

m.Post("/create_model", repo.SaveModel)
m.Post("/create_model_convert", reqWechatBind, reqRepoModelManageWriter, repo.SaveModelConvert)
m.Post("/create_new_model", repo.SaveNewNameModel)
@@ -1491,6 +1501,12 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/record/list", point.GetPointRecordList)
}, reqSignIn)

m.Group("/resources", func() {
m.Group("/queue", func() {
m.Get("/centers", admin.GetResourceAiCenters)
})
})

if setting.API.EnableSwagger {
m.Get("/swagger.v1.json", templates.JSONRenderer(), routers.SwaggerV1Json)
}


+2 -2  routers/user/notification.go

@@ -132,11 +132,11 @@ func getNotifications(c *context.Context) {
}

c.Data["Title"] = c.Tr("notifications")
//c.Data["Keyword"] = keyword
c.Data["Type"] = keyword
c.Data["Status"] = status
c.Data["Notifications"] = notifications

pager.SetDefaultParams(c)
pager.AddParam(c, "q", "Type")
c.Data["Page"] = pager
}



+12 -12  services/cloudbrain/cloudbrainTask/count.go

@@ -14,28 +14,28 @@ type StatusInfo struct {
ComputeResource string
}

var cloudbrainOneNotFinalStatuses = []string{string(models.JobWaiting), string(models.JobRunning)}
var cloudbrainTwoNotFinalStatuses = []string{string(models.ModelArtsTrainJobInit), string(models.ModelArtsTrainJobImageCreating), string(models.ModelArtsTrainJobSubmitTrying), string(models.ModelArtsTrainJobWaiting), string(models.ModelArtsTrainJobRunning), string(models.ModelArtsTrainJobScaling), string(models.ModelArtsTrainJobCheckInit), string(models.ModelArtsTrainJobCheckRunning), string(models.ModelArtsTrainJobCheckRunningCompleted)}
var grampusTwoNotFinalStatuses = []string{models.GrampusStatusWaiting, models.GrampusStatusRunning}
var CloudbrainOneNotFinalStatuses = []string{string(models.JobWaiting), string(models.JobRunning)}
var CloudbrainTwoNotFinalStatuses = []string{string(models.ModelArtsTrainJobInit), string(models.ModelArtsTrainJobImageCreating), string(models.ModelArtsTrainJobSubmitTrying), string(models.ModelArtsTrainJobWaiting), string(models.ModelArtsTrainJobRunning), string(models.ModelArtsTrainJobScaling), string(models.ModelArtsTrainJobCheckInit), string(models.ModelArtsTrainJobCheckRunning), string(models.ModelArtsTrainJobCheckRunningCompleted)}
var GrampusNotFinalStatuses = []string{models.GrampusStatusWaiting, models.GrampusStatusRunning}
var StatusInfoDict = map[string]StatusInfo{string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeDebug},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeInference) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeInference},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeBenchmark) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeBenchmark, models.JobTypeBrainScore, models.JobTypeSnn4imagenet},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): {
CloudBrainTypes: []int{models.TypeCloudBrainTwo, models.TypeCDCenter},
@@ -45,22 +45,22 @@ var StatusInfoDict = map[string]StatusInfo{string(models.JobTypeDebug) + "-" + s
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): {
CloudBrainTypes: []int{models.TypeCloudBrainTwo},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: cloudbrainTwoNotFinalStatuses,
NotFinalStatuses: CloudbrainTwoNotFinalStatuses,
ComputeResource: models.NPUResource,
}, string(models.JobTypeInference) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): {
CloudBrainTypes: []int{models.TypeCloudBrainTwo},
JobType: []models.JobType{models.JobTypeInference},
NotFinalStatuses: cloudbrainTwoNotFinalStatuses,
NotFinalStatuses: CloudbrainTwoNotFinalStatuses,
ComputeResource: models.NPUResource,
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.GPUResource: {
CloudBrainTypes: []int{models.TypeC2Net},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: grampusTwoNotFinalStatuses,
NotFinalStatuses: GrampusNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.NPUResource: {
CloudBrainTypes: []int{models.TypeC2Net},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: grampusTwoNotFinalStatuses,
NotFinalStatuses: GrampusNotFinalStatuses,
ComputeResource: models.NPUResource,
}}

@@ -71,7 +71,7 @@ func GetNotFinalStatusTaskCount(uid int64, cloudbrainType int, jobType string, c
}

key := jobNewType + "-" + strconv.Itoa(cloudbrainType)
if len(computeResource) > 0 {
if len(computeResource) > 0 && cloudbrainType == models.TypeC2Net {
key = key + "-" + computeResource[0]
}
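The exported status slices above feed StatusInfoDict, whose lookup key is the job type plus the cloudbrain type, with the compute resource appended only for C2Net (Grampus) tasks. A standalone sketch of that key construction; the job-type string and the numeric cluster constants are made up for illustration.

package main

import (
	"fmt"
	"strconv"
)

// buildKey mirrors the lookup-key logic above.
func buildKey(jobType string, cloudbrainType, typeC2Net int, computeResource ...string) string {
	key := jobType + "-" + strconv.Itoa(cloudbrainType)
	// the compute-resource suffix is only appended for C2Net (Grampus) tasks
	if len(computeResource) > 0 && cloudbrainType == typeC2Net {
		key = key + "-" + computeResource[0]
	}
	return key
}

func main() {
	const typeCloudBrainOne, typeC2Net = 0, 4 // hypothetical constant values
	fmt.Println(buildKey("TRAIN", typeCloudBrainOne, typeC2Net, "GPU")) // TRAIN-0
	fmt.Println(buildKey("TRAIN", typeC2Net, typeC2Net, "NPU"))         // TRAIN-4-NPU
}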



+631 -0  services/cloudbrain/cloudbrainTask/inference.go

@@ -0,0 +1,631 @@
package cloudbrainTask

import (
"bufio"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path"
"strconv"
"strings"
"unicode/utf8"

"code.gitea.io/gitea/modules/modelarts"

"code.gitea.io/gitea/modules/git"

api "code.gitea.io/gitea/modules/structs"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/cloudbrain/resource"
"code.gitea.io/gitea/services/reward/point/account"
)

const CLONE_FILE_PREFIX = "file:///"

func CloudBrainInferenceJobCreate(ctx *context.Context, option api.CreateTrainJobOption) {

displayJobName := option.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
image := strings.TrimSpace(option.Image)
uuid := option.Attachment
jobType := string(models.JobTypeInference)
codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
branchName := option.BranchName
bootFile := strings.TrimSpace(option.BootFile)
labelName := option.LabelName
repo := ctx.Repo.Repository

lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), jobType, displayJobName))
defer lock.UnLock()
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err")))
return
}

ckptUrl := setting.Attachment.Minio.RealPath + option.PreTrainModelUrl + option.CkptName
log.Info("ckpt url:" + ckptUrl)
command, err := getInferenceJobCommand(option)
if err != nil {
log.Error("getTrainJobCommand failed: %v", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}

tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName)
if err == nil {
if len(tasks) != 0 {
log.Error("the job name did already exist", ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("the job name did already exist"))
return
}
} else {
if !models.IsErrJobNotExist(err) {
log.Error("system error, %v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
}
}

if !jobNamePattern.MatchString(displayJobName) {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_jobname_err")))
return
}

bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err, ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_bootfile_err")))
return
}

count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainOne, jobType)
if err != nil {
log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
} else {
if count >= 1 {
log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain.morethanonejob")))
return
}
}

if branchName == "" {
branchName = cloudbrain.DefaultBranchName
}
errStr := loadCodeAndMakeModelPath(repo, codePath, branchName, jobName, cloudbrain.ResultPath)
if errStr != "" {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr(errStr)))
return
}

commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName)

datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid)
if err != nil {
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.error.dataset_select")))
return
}
spec, err := resource.GetAndCheckSpec(ctx.User.ID, option.SpecId, models.FindSpecsOptions{
JobType: models.JobTypeInference,
ComputeResource: models.GPU,
Cluster: models.OpenICluster,
AiCenterCode: models.AICenterOfCloudBrainOne})
if err != nil || spec == nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Resource specification is not available"))
return
}
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance")))
return
}
req := cloudbrain.GenerateCloudBrainTaskReq{
Ctx: ctx,
DisplayJobName: displayJobName,
JobName: jobName,
Image: image,
Command: command,
Uuids: uuid,
DatasetNames: datasetNames,
DatasetInfos: datasetInfos,
CodePath: storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"),
ModelPath: setting.Attachment.Minio.RealPath + option.PreTrainModelUrl,
BenchmarkPath: storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"),
Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"),
BrainScorePath: storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"),
JobType: jobType,
Description: option.Description,
BranchName: branchName,
BootFile: option.BootFile,
Params: option.Params,
CommitID: commitID,
ResultPath: storage.GetMinioPath(jobName, cloudbrain.ResultPath+"/"),
ModelName: option.ModelName,
ModelVersion: option.ModelVersion,
CkptName: option.CkptName,
TrainUrl: option.PreTrainModelUrl,
LabelName: labelName,
Spec: spec,
}

jobId, err := cloudbrain.GenerateTask(req)
if err != nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
ctx.JSON(http.StatusOK, models.BaseMessageApi{Code: 0, Message: jobId})
}

func ModelArtsInferenceJobCreate(ctx *context.Context, option api.CreateTrainJobOption) {
ctx.Data["PageIsTrainJob"] = true
VersionOutputPath := modelarts.GetOutputPathByCount(modelarts.TotalVersionCount)
displayJobName := option.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
uuid := option.Attachment
description := option.Description
workServerNumber := option.WorkServerNumber
engineID, _ := strconv.Atoi(option.ImageID)
bootFile := strings.TrimSpace(option.BootFile)
params := option.Params
repo := ctx.Repo.Repository
codeLocalPath := setting.JobPath + jobName + modelarts.CodePath
codeObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.CodePath
resultObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.ResultPath + VersionOutputPath + "/"
logObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.LogPath + VersionOutputPath + "/"
//dataPath := "/" + setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/"
branchName := option.BranchName
EngineName := option.Image
LabelName := option.LabelName
isLatestVersion := modelarts.IsLatestVersion
VersionCount := modelarts.VersionCountOne
trainUrl := option.PreTrainModelUrl
modelName := option.ModelName
modelVersion := option.ModelVersion
ckptName := option.CkptName
ckptUrl := "/" + option.PreTrainModelUrl + option.CkptName

errStr := checkInferenceJobMultiNode(ctx.User.ID, option.WorkServerNumber)
if errStr != "" {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr(errStr)))
return
}

lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeInference), displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err")))
return
}
defer lock.UnLock()

count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainTwo, string(models.JobTypeInference))
if err != nil {
log.Error("GetCloudbrainInferenceJobCountByUserID failed:%v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
} else {
if count >= 1 {
log.Error("the user already has running or waiting inference task", ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("you have already a running or waiting inference task, can not create more"))
return
}
}

if err := paramCheckCreateInferenceJob(option); err != nil {
log.Error("paramCheckCreateInferenceJob failed:(%v)", err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}

bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_bootfile_err")))
return
}

//Determine whether the task name of the task in the project is duplicated
tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeInference), displayJobName)
if err == nil {
if len(tasks) != 0 {
log.Error("the job name did already exist", ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("the job name did already exist"))
return
}
} else {
if !models.IsErrJobNotExist(err) {
log.Error("system error, %v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
}
}

spec, err := resource.GetAndCheckSpec(ctx.User.ID, option.SpecId, models.FindSpecsOptions{
JobType: models.JobTypeInference,
ComputeResource: models.NPU,
Cluster: models.OpenICluster,
AiCenterCode: models.AICenterOfCloudBrainTwo})
if err != nil || spec == nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Resource specification not available"))
return
}
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d ", ctx.User.ID, spec.ID)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance")))
return
}

//todo: del the codeLocalPath
_, err = ioutil.ReadDir(codeLocalPath)
if err == nil {
os.RemoveAll(codeLocalPath)
}

gitRepo, _ := git.OpenRepository(repo.RepoPath())
commitID, _ := gitRepo.GetBranchCommitID(branchName)

if err := downloadCode(repo, codeLocalPath, branchName); err != nil {
log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

//todo: upload code (send to file_server todo this work?)
if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.ResultPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_result: %s (%v)", repo.FullName(), err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Failed to obsMkdir_result"))
return
}

if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.LogPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_log: %s (%v)", repo.FullName(), err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Failed to obsMkdir_log"))
return
}

if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil {
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

var parameters models.Parameters
param := make([]models.Parameter, 0)
param = append(param, models.Parameter{
Label: modelarts.ResultUrl,
Value: "s3:/" + resultObsPath,
}, models.Parameter{
Label: modelarts.CkptUrl,
Value: "s3:/" + ckptUrl,
})

datasUrlList, dataUrl, datasetNames, isMultiDataset, err := getDatasUrlListByUUIDS(uuid)
if err != nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
dataPath := dataUrl
jsondatas, err := json.Marshal(datasUrlList)
if err != nil {
log.Error("Failed to Marshal: %v", err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("json error:"+err.Error()))
return
}
if isMultiDataset {
param = append(param, models.Parameter{
Label: modelarts.MultiDataUrl,
Value: string(jsondatas),
})
}

existDeviceTarget := false
if len(params) != 0 {
err := json.Unmarshal([]byte(params), &parameters)
if err != nil {
log.Error("Failed to Unmarshal params: %s (%v)", params, err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("运行参数错误"))
return
}

for _, parameter := range parameters.Parameter {
if parameter.Label == modelarts.DeviceTarget {
existDeviceTarget = true
}
if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl {
param = append(param, models.Parameter{
Label: parameter.Label,
Value: parameter.Value,
})
}
}
}
if !existDeviceTarget {
param = append(param, models.Parameter{
Label: modelarts.DeviceTarget,
Value: modelarts.Ascend,
})
}

req := &modelarts.GenerateInferenceJobReq{
JobName: jobName,
DisplayJobName: displayJobName,
DataUrl: dataPath,
Description: description,
CodeObsPath: codeObsPath,
BootFileUrl: codeObsPath + bootFile,
BootFile: bootFile,
TrainUrl: trainUrl,
WorkServerNumber: workServerNumber,
EngineID: int64(engineID),
LogUrl: logObsPath,
PoolID: getPoolId(),
Uuid: uuid,
Parameters: param, //modelarts train parameters
CommitID: commitID,
BranchName: branchName,
Params: option.Params,
EngineName: EngineName,
LabelName: LabelName,
IsLatestVersion: isLatestVersion,
VersionCount: VersionCount,
TotalVersionCount: modelarts.TotalVersionCount,
ModelName: modelName,
ModelVersion: modelVersion,
CkptName: ckptName,
ResultUrl: resultObsPath,
Spec: spec,
DatasetName: datasetNames,
JobType: string(models.JobTypeInference),
}

jobId, err := modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
ctx.JSON(http.StatusOK, models.BaseMessageApi{Code: 0, Message: jobId})
}

func getDatasUrlListByUUIDS(uuidStr string) ([]models.Datasurl, string, string, bool, error) {
var isMultiDataset bool
var dataUrl string
var datasetNames string
var datasUrlList []models.Datasurl
uuids := strings.Split(uuidStr, ";")
if len(uuids) > setting.MaxDatasetNum {
log.Error("the dataset count(%d) exceed the limit", len(uuids))
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset count exceed the limit")
}

datasetInfos := make(map[string]models.DatasetInfo)
attachs, err := models.GetAttachmentsByUUIDs(uuids)
if err != nil || len(attachs) != len(uuids) {
log.Error("GetAttachmentsByUUIDs failed: %v", err)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("GetAttachmentsByUUIDs failed")
}

for i, tmpUuid := range uuids {
var attach *models.Attachment
for _, tmpAttach := range attachs {
if tmpAttach.UUID == tmpUuid {
attach = tmpAttach
break
}
}
if attach == nil {
log.Error("GetAttachmentsByUUIDs failed: %v", err)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("GetAttachmentsByUUIDs failed")
}
fileName := strings.TrimSuffix(strings.TrimSuffix(strings.TrimSuffix(attach.Name, ".zip"), ".tar.gz"), ".tgz")
for _, datasetInfo := range datasetInfos {
if fileName == datasetInfo.Name {
log.Error("the dataset name is same: %v", attach.Name)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset name is same")
}
}
if len(attachs) <= 1 {
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/"
isMultiDataset = false
} else {
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attachs[0].UUID[0:1], attachs[0].UUID[1:2]) + "/" + attachs[0].UUID + attachs[0].UUID + "/"
datasetUrl := "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/"
datasUrlList = append(datasUrlList, models.Datasurl{
DatasetUrl: datasetUrl,
DatasetName: fileName,
})
isMultiDataset = true
}

if i == 0 {
datasetNames = attach.Name
} else {
datasetNames += ";" + attach.Name
}
}

return datasUrlList, dataUrl, datasetNames, isMultiDataset, nil
}
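The data URLs built here follow the attachment layout used by the handlers above: the first two characters of the UUID become directories and the UUID is then repeated as the final path segment. A standalone sketch with made-up bucket, base path and UUID values; only the layout comes from the code above.

package main

import (
	"fmt"
	"path"
)

func main() {
	// made-up values for illustration
	bucket, basePath, uuid := "opendata", "attachment/", "a1b2c3d4"
	dataURL := "/" + bucket + "/" + basePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/"
	fmt.Println(dataURL) // /opendata/attachment/a/1/a1b2c3d4a1b2c3d4/
}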
func checkInferenceJobMultiNode(userId int64, serverNum int) string {
if serverNum == 1 {
return ""
}

return "repo.modelarts.no_node_right"

}

func paramCheckCreateInferenceJob(option api.CreateTrainJobOption) error {
if !strings.HasSuffix(strings.TrimSpace(option.BootFile), ".py") {
log.Error("the boot file(%s) must be a python file", strings.TrimSpace(option.BootFile))
return errors.New("启动文件必须是python文件")
}

if option.ModelName == "" {
log.Error("the ModelName(%d) must not be nil", option.ModelName)
return errors.New("模型名称不能为空")
}
if option.ModelVersion == "" {
log.Error("the ModelVersion(%d) must not be nil", option.ModelVersion)
return errors.New("模型版本不能为空")
}
if option.CkptName == "" {
log.Error("the CkptName(%d) must not be nil", option.CkptName)
return errors.New("权重文件不能为空")
}
if option.BranchName == "" {
log.Error("the Branch(%d) must not be nil", option.BranchName)
return errors.New("分支名不能为空")
}

if utf8.RuneCountInString(option.Description) > 255 {
log.Error("the Description length(%d) must not more than 255", option.Description)
return errors.New("描述字符不能超过255个字符")
}

return nil
}

func loadCodeAndMakeModelPath(repo *models.Repository, codePath string, branchName string, jobName string, resultPath string) string {
err := downloadCode(repo, codePath, branchName)
if err != nil {
return "cloudbrain.load_code_failed"
}

err = uploadCodeToMinio(codePath+"/", jobName, cloudbrain.CodeMountPath+"/")
if err != nil {
return "cloudbrain.load_code_failed"
}

modelPath := setting.JobPath + jobName + resultPath + "/"
err = mkModelPath(modelPath)
if err != nil {
return "cloudbrain.load_code_failed"
}
err = uploadCodeToMinio(modelPath, jobName, resultPath+"/")
if err != nil {
return "cloudbrain.load_code_failed"
}

return ""
}

func downloadCode(repo *models.Repository, codePath, branchName string) error {
//add "file:///" prefix to make the depth valid
if err := git.Clone(CLONE_FILE_PREFIX+repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName, Depth: 1}); err != nil {
log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err)
return err
}

configFile, err := os.OpenFile(codePath+"/.git/config", os.O_RDWR, 0666)
if err != nil {
log.Error("open file(%s) failed:%v", codePath+"/,git/config", err)
return err
}

defer configFile.Close()

pos := int64(0)
reader := bufio.NewReader(configFile)
for {
line, err := reader.ReadString('\n')
if err != nil {
if err == io.EOF {
log.Error("not find the remote-url")
return nil
} else {
log.Error("read error: %v", err)
return err
}
}

if strings.Contains(line, "url") && strings.Contains(line, ".git") {
originUrl := "\turl = " + repo.CloneLink().HTTPS + "\n"
if len(line) > len(originUrl) {
originUrl += strings.Repeat(" ", len(line)-len(originUrl))
}
bytes := []byte(originUrl)
_, err := configFile.WriteAt(bytes, pos)
if err != nil {
log.Error("WriteAt failed:%v", err)
return err
}
break
}

pos += int64(len(line))
}

return nil
}

func getInferenceJobCommand(option api.CreateTrainJobOption) (string, error) {
var command string
bootFile := strings.TrimSpace(option.BootFile)
params := option.Params

if !strings.HasSuffix(bootFile, ".py") {
log.Error("bootFile(%s) format error", bootFile)
return command, errors.New("bootFile format error")
}

var parameters models.Parameters
var param string
if len(params) != 0 {
err := json.Unmarshal([]byte(params), &parameters)
if err != nil {
log.Error("Failed to Unmarshal params: %s (%v)", params, err)
return command, err
}

for _, parameter := range parameters.Parameter {
param += " --" + parameter.Label + "=" + parameter.Value
}
}

param += " --modelname" + "=" + option.CkptName

command += "python /code/" + bootFile + param + " > " + cloudbrain.ResultPath + "/" + option.DisplayJobName + "-" + cloudbrain.LogFile

return command, nil
}
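getInferenceJobCommand turns the user's parameter JSON into CLI flags and redirects stdout into the result path. A standalone sketch of the command it would produce for a hypothetical boot file and parameter set; the JSON field names are assumed to mirror models.Parameters.

package main

import (
	"encoding/json"
	"fmt"
)

// Local stand-ins for models.Parameters; the json tags are assumptions.
type parameter struct {
	Label string `json:"label"`
	Value string `json:"value"`
}

type parameters struct {
	Parameter []parameter `json:"parameter"`
}

func main() {
	raw := `{"parameter":[{"label":"epochs","value":"10"}]}` // hypothetical user params
	var p parameters
	if err := json.Unmarshal([]byte(raw), &p); err != nil {
		panic(err)
	}
	param := ""
	for _, item := range p.Parameter {
		param += " --" + item.Label + "=" + item.Value
	}
	param += " --modelname=model.ckpt" // the ckpt name is always appended

	command := "python /code/inference.py" + param + " > /result/demo-log.txt"
	fmt.Println(command)
	// python /code/inference.py --epochs=10 --modelname=model.ckpt > /result/demo-log.txt
}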

+83 -0  services/cloudbrain/cloudbrainTask/sync_status.go

@@ -0,0 +1,83 @@
package cloudbrainTask

import (
"net/http"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/notification"
"code.gitea.io/gitea/modules/setting"
)

var noteBookOKMap = make(map[int64]int, 20)

//a debug task's notebook is considered ready to browse only after its url has responded successfully successfulCount times in a row.
const successfulCount = 3

func SyncCloudBrainOneStatus(task *models.Cloudbrain) (*models.Cloudbrain, error) {
jobResult, err := cloudbrain.GetJob(task.JobID)
if err != nil {

log.Error("GetJob failed:", err)

return task, err
}
result, err := models.ConvertToJobResultPayload(jobResult.Payload)
if err != nil {
log.Error("ConvertToJobResultPayload failed:", err)
return task, err
}
oldStatus := task.Status

if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
taskRoles := result.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))

task.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
task.ContainerID = taskRes.TaskStatuses[0].ContainerID
}

if (result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobRunning)) ||
task.Status == string(models.JobRunning) || (result.JobStatus.State == string(models.JobRunning) && isNoteBookReady(task)) {

models.ParseAndSetDurationFromCloudBrainOne(result, task)
task.Status = result.JobStatus.State
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob failed:", err)
return task, err
}
}
return task, nil

}

func isNoteBookReady(task *models.Cloudbrain) bool {
if task.JobType != string(models.JobTypeDebug) {
return true
}
noteBookUrl := setting.DebugServerHost + "jpylab_" + task.JobID + "_" + task.SubTaskName
r := httplib.Get(noteBookUrl)
res, err := r.Response()
if err != nil {
return false
}
if res.StatusCode == http.StatusOK {
count := noteBookOKMap[task.ID]
if count < successfulCount-1 {
noteBookOKMap[task.ID] = count + 1
return false
} else {
delete(noteBookOKMap, task.ID)
return true
}

}
return false

}

+1210 -0  services/cloudbrain/cloudbrainTask/train.go  (file diff suppressed because it is too large)


+1 -1  services/cloudbrain/resource/resource_queue.go

@@ -16,7 +16,7 @@ func AddResourceQueue(req models.ResourceQueueReq) error {
}

func UpdateResourceQueue(queueId int64, req models.ResourceQueueReq) error {
if _, err := models.UpdateResourceQueueById(queueId, models.ResourceQueue{
if _, err := models.UpdateResourceCardsTotalNum(queueId, models.ResourceQueue{
CardsTotalNum: req.CardsTotalNum,
Remark: req.Remark,
}); err != nil {


+62 -7  services/cloudbrain/resource/resource_specification.go

@@ -1,20 +1,23 @@
package resource

import (
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"time"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/convert"
"code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/admin/operate_log"
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"time"
)

func AddResourceSpecification(doerId int64, req models.ResourceSpecificationReq) error {
@@ -127,10 +130,49 @@ func GetResourceSpecificationList(opts models.SearchResourceSpecificationOptions
if err != nil {
return nil, err
}

return models.NewResourceSpecAndQueueListRes(n, r), nil
}

//GetAllDistinctResourceSpecification returns the specification and queue list de-duplicated by SourceSpecId
//the returned totalSize is always 0 here
func GetAllDistinctResourceSpecification(opts models.SearchResourceSpecificationOptions) (*models.ResourceSpecAndQueueListRes, error) {
opts.Page = 0
opts.PageSize = 1000
opts.OrderBy = models.SearchSpecOrder4Standard
_, r, err := models.SearchResourceSpecification(opts)
if err != nil {
return nil, err
}
nr := distinctResourceSpecAndQueue(r)
return models.NewResourceSpecAndQueueListRes(0, nr), nil
}

func distinctResourceSpecAndQueue(r []models.ResourceSpecAndQueue) []models.ResourceSpecAndQueue {
specs := make([]models.ResourceSpecAndQueue, 0, len(r))
sourceSpecIdMap := make(map[string]models.ResourceSpecAndQueue, 0)
for i := 0; i < len(r); i++ {
spec := r[i]
if spec.SourceSpecId == "" {
specs = append(specs, spec)
continue
}
if _, has := sourceSpecIdMap[spec.SourceSpecId]; has {
//prefer to use on-shelf spec
if sourceSpecIdMap[spec.SourceSpecId].Status != spec.Status && spec.Status == models.SpecOnShelf {
for k, v := range specs {
if v.ResourceSpecification.ID == sourceSpecIdMap[spec.SourceSpecId].ResourceSpecification.ID {
specs[k] = spec
}
}
}
continue
}
specs = append(specs, spec)
sourceSpecIdMap[spec.SourceSpecId] = spec
}
return specs
}

func GetResourceSpecificationScenes(specId int64) ([]models.ResourceSceneBriefRes, error) {
r, err := models.GetSpecScenes(specId)
if err != nil {
@@ -197,6 +239,7 @@ func AddSpecOperateLog(doerId int64, operateType string, newValue, oldValue *mod
}

func FindAvailableSpecs(userId int64, opts models.FindSpecsOptions) ([]*models.Specification, error) {
opts.SpecStatus = models.SpecOnShelf
r, err := models.FindSpecs(opts)
if err != nil {
log.Error("FindAvailableSpecs error.%v", err)
@@ -210,6 +253,18 @@ func FindAvailableSpecs(userId int64, opts models.FindSpecsOptions) ([]*models.S
return specs, err
}

func FindAvailableSpecs4Show(userId int64, opts models.FindSpecsOptions) ([]*api.SpecificationShow, error) {
specs, err := FindAvailableSpecs(userId, opts)
if err != nil {
return nil, err
}
result := make([]*api.SpecificationShow, len(specs))
for i, v := range specs {
result[i] = convert.ToSpecification(v)
}
return result, nil
}

func filterExclusiveSpecs(r []*models.Specification, userId int64) []*models.Specification {
specs := make([]*models.Specification, 0, len(r))
specMap := make(map[int64]string, 0)


+21 -2  templates/base/footer_content.tmpl

@@ -24,11 +24,30 @@
<div class="text">{{.LangName}}</div>
<div class="menu">
{{range .AllLangs}}
<a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="{{if eq $.Lang .Lang}}#{{else}}{{$.Link}}?lang={{.Lang}}{{end}}">{{.Name}}</a>
<!-- <a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="{{if eq $.Lang .Lang}}#{{else}}{{$.Link}}?lang={{.Lang}}{{end}}">{{.Name}}</a> -->
<a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="javascript:;" olang="{{$.Lang}}" lang="{{.Lang}}" >{{.Name}}</a>
{{end}}
</div>
</div>

<script>
;(function() {
document.addEventListener('DOMContentLoaded', function() {
$('.ui.language .menu .item').on('click', function() {
var lang = $(this).attr('lang');
var oLang = $(this).attr('olang');
if (oLang === lang) return;
var origin = window.location.origin;
var pathname = window.location.pathname;
var search = window.location.search;
var hash = window.location.hash;
var oHref = window.location.href;
var urlSearchParams = new URLSearchParams(search);
urlSearchParams.set('lang', lang);
window.location.href = origin + pathname + '?' + urlSearchParams.toString() + hash;
});
});
})();
</script>
<a href="https://git.openi.org.cn/zeizei/OpenI_Learning" class="item" target="_blank"><i class="compass icon" ></i> {{.i18n.Tr "custom.Platform_Tutorial"}}</a>
{{if .EnableSwagger}}<a href="/api/swagger" class="item"><i class="plug icon"></i> API</a>{{end}}
{{if .IsSigned}}


+21 -1  templates/base/footer_content_fluid.tmpl

@@ -22,10 +22,30 @@
<div class="text">{{.LangName}}</div>
<div class="menu">
{{range .AllLangs}}
<a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="{{if eq $.Lang .Lang}}#{{else}}{{$.Link}}?lang={{.Lang}}{{end}}">{{.Name}}</a>
<!--<a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="{{if eq $.Lang .Lang}}#{{else}}{{$.Link}}?lang={{.Lang}}{{end}}">{{.Name}}</a>-->
<a lang="{{.Lang}}" class="item {{if eq $.Lang .Lang}}active selected{{end}}" href="javascript:;" olang="{{$.Lang}}" lang="{{.Lang}}" >{{.Name}}</a>
{{end}}
</div>
</div>
<script>
;(function() {
document.addEventListener('DOMContentLoaded', function() {
$('.ui.language .menu .item').on('click', function() {
var lang = $(this).attr('lang');
var oLang = $(this).attr('olang');
if (oLang === lang) return;
var origin = window.location.origin;
var pathname = window.location.pathname;
var search = window.location.search;
var hash = window.location.hash;
var oHref = window.location.href;
var urlSearchParams = new URLSearchParams(search);
urlSearchParams.set('lang', lang);
window.location.href = origin + pathname + '?' + urlSearchParams.toString() + hash;
});
});
})();
</script>
<a href="https://git.openi.org.cn/zeizei/OpenI_Learning" class="item" target="_blank"><i class="compass icon"></i> {{.i18n.Tr "custom.Platform_Tutorial"}} </a>
{{if .EnableSwagger}}<a href="/api/swagger" class="item"><i class="plug icon" ></i> API</a>{{end}}
{{if .IsSigned}}


+ 2
- 2
templates/base/head_navbar.tmpl

@@ -38,7 +38,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui simple dropdown item" id='dropdown_explore'>
@@ -78,7 +78,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>


+ 2
- 2
templates/base/head_navbar_fluid.tmpl

@@ -38,7 +38,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_explore'>
@@ -77,7 +77,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_PageHome'>


+ 2
- 2
templates/base/head_navbar_home.tmpl

@@ -30,7 +30,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_explore'>
@@ -70,7 +70,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_PageHome'>


+ 2
- 2
templates/base/head_navbar_pro.tmpl

@@ -40,7 +40,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_explore'>
@@ -80,7 +80,7 @@
{{.i18n.Tr "repo.model_manager"}}
<i class="dropdown icon"></i>
<div class="menu">
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a>
<a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a>
</div>
</div>
<div class="ui dropdown item" id='dropdown_PageHome'>


+ 1
- 1
templates/custom/max_log.tmpl

@@ -29,7 +29,7 @@
<div class="ui message message-max{{.VersionName}}" style="display: none;">
<div id="header"></div>
</div>
<div class="log-scroll-max" id="log-max{{.VersionName}}" data-version="{{.VersionName}}" style="overflow: auto;max-height: 100%;">
<div class="log-scroll-max" id="log-max{{.VersionName}}" data-version="{{.VersionName}}" style="overflow: auto;max-height: 100%;height: 100%">
<div class="ui inverted active dimmer">
<div class="ui loader"></div>
</div>


+ 2
- 4
templates/repo/cloudbrain/inference/new.tmpl

@@ -331,9 +331,7 @@
$('#model_name_version').empty()
let html = ''
nameMap[value].forEach(element => {
let {TrainTaskInfo} = element
TrainTaskInfo = JSON.parse(TrainTaskInfo)
html += `<div class="item" data-label="${element.Label}" data-id="${element.ID}" data-value="${element.Path}">${element.Version}</div>`
html += `<div class="item" data-label="${element.label}" data-id="${element.id}" data-value="${element.path}">${element.version}</div>`
});
$('#model_name_version').append(html)
$("#select_model_version").removeClass("loading")
@@ -387,7 +385,7 @@
}
function loadCheckpointList(value){
return new Promise((resolve,reject)=>{
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{ID:value}, (data) => {
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{id:value}, (data) => {
resolve(data)
})
})

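The inference form above now consumes the lower-camel-case fields returned by the model-version endpoint (id, version, label, path) and queries model files with id instead of ID. A short sketch of the dropdown items built from such a response; the sample objects are invented and only the key names matter:

// Sketch of the version dropdown built from the renamed lower-camel-case fields.
var versions = [
  { id: 101, version: '0.0.1', label: 'baseline', path: 'models/v1/' },
  { id: 102, version: '0.0.2', label: '',         path: 'models/v2/' }
];
var html = versions.map(function (element) {
  return '<div class="item" data-label="' + element.label +
         '" data-id="' + element.id +
         '" data-value="' + element.path + '">' + element.version + '</div>';
}).join('');
// html now holds one .item div per version, ready for $('#model_name_version').append(html)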

+ 30
- 101
templates/repo/cloudbrain/trainjob/show.tmpl

@@ -284,10 +284,7 @@
<div class="content-pad">
<div class="ui pointing secondary menu" style="border-bottom: 1px solid rgba(34,36,38,.15);">
<a class="active item"
data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>
<a class="item" data-tab="second{{$k}}"
onclick="javascript:parseInfo()">{{$.i18n.Tr "repo.cloudbrain.runinfo"}}</a>
data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>

<a class="item log_bottom" data-tab="third{{$k}}"
data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a>
@@ -504,25 +501,6 @@

</div>
</div>

<div class="ui tab" data-tab="second{{$k}}">
<div>
<div class="ui message message{{.VersionName}}" style="display: none;">
<div id="header"></div>
</div>
<div class="ui attached log" id="log_state{{.VersionName}}"
style="height: 390px !important; overflow: auto;">
<input type="hidden" id="json_value" value="{{$.result.JobStatus.AppExitDiagnostics}}">
<input type="hidden" id="ExitDiagnostics" value="{{$.ExitDiagnostics}}">
<span id="info_display" class="info_text">

</span>
</div>

</div>

</div>

<div class="ui tab" data-tab="third{{$k}}">
<div class="file-info">
<a id="{{.VersionName}}-log-down"
@@ -633,24 +611,24 @@

<div class="required inline field">
<label>{{.i18n.Tr "repo.modelarts.train_job"}}</label>
<input type="hidden" class="width83" id="JobId" name="JobId" readonly required>
<input type="hidden" id="VersionName" name="VersionName" value="V0001">
<input type="hidden" class="width83" id="jobId" name="jobId" readonly required>
<input type="hidden" id="versionName" name="versionName" value="V0001">
<input style="width: 45%;" id="JobName" readonly required>
</div>

<div class="required inline field" id="modelname">
<label>{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<input style="width: 45%;" id="name" name="Name" required maxlength="25"
<input style="width: 45%;" id="name" name="name" required maxlength="25"
onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
<div class="required inline field" id="verionname">
<label>{{.i18n.Tr "repo.modelconvert.modelversion"}}</label>
<input style="width: 45%;" id="version" name="Version" value="" readonly required maxlength="255">
<input style="width: 45%;" id="version" name="version" value="" readonly required maxlength="255">
</div>
<div class="unite min_title inline field required">
<label>{{.i18n.Tr "repo.model.manage.engine"}}</label>
<div class="ui dropdown selection search width70" id="choice_Engine">
<input type="hidden" id="Engine" name="Engine" required>
<input type="hidden" id="engine" name="engine" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.engine"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-Engine">
@@ -677,12 +655,12 @@
</div>
<div class="inline field">
<label>{{.i18n.Tr "repo.model.manage.modellabel"}}</label>
<input style="width: 83%;margin-left: 7px;" id="label" name="Label" maxlength="255"
<input style="width: 83%;margin-left: 7px;" id="label" name="label" maxlength="255"
placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
<div class="inline field">
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}}</label>
<textarea style="width: 83%;margin-left: 7px;" id="Description" name="Description" rows="3"
<textarea style="width: 83%;margin-left: 7px;" id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -712,7 +690,15 @@
<script src="{{StaticUrlPrefix}}/js/specsuse.js?v={{MD5 AppVer}}" type="text/javascript"></script>

<script>
var setting = {
var userName;
var repoPath;
$(document).ready(function(){
var url = window.location.href;
var urlArr = url.split('/')
userName = urlArr.slice(-5)[0]
repoPath = urlArr.slice(-4)[0]
});
var setting = {
check: {
enable: true,
chkboxType: {"Y":"ps", "N":"ps"}
@@ -850,23 +836,19 @@
.modal({
centered: false,
onShow: function () {
$('input[name="Version"]').addClass('model_disabled')
// $('input[name="JobId"]').text(obj.JobName)
$('input[name="version"]').addClass('model_disabled')
$('#JobName').val(obj.DisplayJobName).addClass('model_disabled')
$('input[name="JobId"]').val(obj.JobID)
$('input[name="VersionName"]').val("V0001")
$('input[name="jobId"]').val(obj.JobID)
$('input[name="versionName"]').val("V0001")
$('#choice_Engine .default.text').text("PyTorch");
$('#choice_Engine input[name="Engine"]').val(0)
$('#choice_Engine input[name="engine"]').val(0)
$('#choice_Engine .default.text').css({ "color": "rgb(0, 0, 0,0.87)" })
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
createModelName();
loadSelectedModelFile(obj);
},
onHide: function () {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
$('.ui.dimmer').css({ "background-color": "" })
$('.ui.error.message').text()
$('.ui.error.message').css('display', 'none')
@@ -887,8 +869,14 @@
type: 'POST',
data: data,
success: function (res) {
$('input[name="Engine_name"]').val("");
$('input[name="Engine"]').val("");
$('input[name="engine_name"]').val("");
$('input[name="engine"]').val("");
$('input[name="jobId"]').val("");
$('input[name="label"]').val("");
$('input[name="description"]').val("");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
location.href = `/${userName}/${repoPath}/modelmanage/show_model`
$('.ui.modal.second').modal('hide')
},
@@ -920,66 +908,7 @@
$('.secondary.menu .item').tab();
});

let userName
let repoPath
let jobID
let downlaodFlag = {{ $.canDownload }}
let taskID = {{ $.task.ID }}
let realJobName = {{ $.task.JobName }}
$(document).ready(function () {
let url = window.location.href;
let urlArr = url.split('/')
userName = urlArr.slice(-5)[0]
repoPath = urlArr.slice(-4)[0]
jobID = urlArr.slice(-1)[0]
})
function stopBubbling(e) {
e = window.event || e;
if (e.stopPropagation) {
e.stopPropagation(); // stop the event from bubbling up
} else {
e.cancelBubble = true; // IE fallback
}
}

function loadLog(version_name) {
document.getElementById("mask").style.display = "block"
let startLine = $('input[name=end_line]').val();
if(startLine==""){
startLine=0;
}
let endLine = $('input[name=end_line]').val();
if(endLine==""){
endLine = 50;
}
$.get(`/${userName}/${repoPath}/cloudbrain/train-job/${jobID}/get_log?endLine=${endLine}&startLine=${startLine}`, (data) => {
$('input[name=end_line]').val(data.EndLine)
$('input[name=start_line]').val(data.StartLine)
$(`#log_file${version_name}`).text(data.Content)
document.getElementById("mask").style.display = "none"
}).fail(function (err) {
console.log(err);
document.getElementById("mask").style.display = "none"
});
}

function refreshStatus(version_name) {
$.get(`/api/v1/repos/${userName}/${repoPath}/cloudbrain/${taskID}?version_name=${versionname}`, (data) => {
// header status and duration
//$(`#${version_name}-duration-span`).text(data.JobDuration)
$(`#${version_name}-status-span span`).text(data.JobStatus)
$(`#${version_name}-status-span i`).attr("class", data.JobStatus)
// detail status and duration
//$('#'+version_name+'-duration').text(data.JobDuration)
$('#' + version_name + '-status').text(data.JobStatus)
loadLog(version_name)


}).fail(function (err) {
console.log(err);
});
stopBubbling(arguments.callee.caller.arguments[0])
}

function parseInfo() {
let jsonValue = document.getElementById("json_value").value;

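The save-model dialog on this page now posts lower-case form fields and, on success, clears them and jumps to the model list; the owner and repository names are recovered from the page URL. A sketch of that URL parsing, with a made-up href for the ".../cloudbrain/train-job/<id>" detail page:

// Sketch of how the page derives the owner and repository from its own URL.
var href = 'https://example.org/opendata/mnist-demo/cloudbrain/train-job/42';
var urlArr = href.split('/');
var userName = urlArr.slice(-5)[0];   // "opendata"
var repoPath = urlArr.slice(-4)[0];   // "mnist-demo"
// After a successful save the script navigates to:
// '/' + userName + '/' + repoPath + '/modelmanage/show_model'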

+ 32
- 25
templates/repo/grampus/trainjob/show.tmpl

@@ -238,11 +238,8 @@
<span>
<div style="float: right;">
{{$.CsrfTokenHtml}}
</div>
<div class="ac-display-inblock title_text acc-margin-bottom">

<span class="cti-mgRight-sm">{{TimeSinceUnix1 .CreatedUnix}}</span>
<span class="cti-mgRight-sm">
{{$.i18n.Tr "repo.modelarts.current_version"}}:{{.VersionName}}</span>
@@ -260,7 +257,6 @@
<span class="refresh-status" data-tooltip="刷新" style="cursor: pointer;" data-inverted="" data-version="{{.VersionName}}">
<i class="redo icon redo-color"></i>
</span>

</div>
<div style="float: right;">
{{if and ($.canDownload) (ne .Status "WAITING") ($.Permission.CanWrite $.UnitTypeModelManage) }}
@@ -269,7 +265,6 @@
{{else}}
<a class="ti-action-menu-item disabled" id="{{.VersionName}}-create-model">{{$.i18n.Tr "repo.modelarts.create_model"}}</a>
{{end}}

</div>
</span>
</span>
@@ -282,6 +277,9 @@

<a class="active item" data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>
<a class="item log_bottom" data-tab="second{{$k}}" data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a>
{{ if eq $.Spec.ComputeResource "NPU"}}
<a class="item metric_chart" data-tab="four{{$k}}" data-version="{{.VersionName}}" data-path="{{$.RepoRelPath}}/grampus/train-job/{{.JobID}}/metrics">{{$.i18n.Tr "cloudbrain.resource_use"}}</a>
{{end}}
<a class="item load-model-file" data-tab="third{{$k}}" data-download-flag="{{$.canDownload}}" data-path="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/model_list" data-version="{{.VersionName}}" data-parents="" data-filename="" data-init="init" >{{$.i18n.Tr "repo.model_download"}}</a>
</div>
<div class="ui tab active" data-tab="first{{$k}}">
@@ -564,6 +562,14 @@

</div>

</div>
<div class="ui tab" data-tab="four{{$k}}" style="position: relative;">
<i class="ri-refresh-line metric_chart"
style="position: absolute;right: 25%;color:#3291f8;z-index:99;cursor: pointer;"
data-version="{{.VersionName}}"></i>
<div id="metric-{{.VersionName}}" style="height: 260px;width: 870px;">
</div>
</div>
<div class="ui tab" data-tab="third{{$k}}">
<input type="hidden" name="model{{.VersionName}}" value="-1">
@@ -624,24 +630,24 @@

<div class="required inline field">
<label>{{.i18n.Tr "repo.modelarts.train_job"}}</label>
<input type="hidden" class="width83" id="JobId" name="JobId" readonly required>
<input type="hidden" id="VersionName" name="VersionName" value="V0001">
<input type="hidden" class="width83" id="jobId" name="jobId" readonly required>
<input type="hidden" id="versionName" name="versionName" value="V0001">
<input style="width: 45%;" id="JobName" readonly required>
</div>

<div class="required inline field" id="modelname">
<label>{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<input style="width: 45%;" id="name" name="Name" required maxlength="25"
<input style="width: 45%;" id="name" name="name" required maxlength="25"
onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
<div class="required inline field" id="verionname">
<label>{{.i18n.Tr "repo.modelconvert.modelversion"}}</label>
<input style="width: 45%;" id="version" name="Version" value="" readonly required maxlength="255">
<input style="width: 45%;" id="version" name="version" value="" readonly required maxlength="255">
</div>
<div class="unite min_title inline field required">
<label>{{.i18n.Tr "repo.model.manage.engine"}}</label>
<div class="ui dropdown selection search width70" id="choice_Engine">
<input type="hidden" id="Engine" name="Engine" required>
<input type="hidden" id="engine" name="engine" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.engine"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-Engine">
@@ -669,12 +675,12 @@
</div>
<div class="inline field">
<label>{{.i18n.Tr "repo.model.manage.modellabel"}}</label>
<input style="width: 83%;margin-left: 7px;" id="label" name="Label" maxlength="255"
<input style="width: 83%;margin-left: 7px;" id="label" name="label" maxlength="255"
placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
<div class="inline field">
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}}</label>
<textarea style="width: 83%;margin-left: 7px;" id="Description" name="Description" rows="3"
<textarea style="width: 83%;margin-left: 7px;" id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -762,7 +768,6 @@
function showMenu() {
var cityObj = $("#modelSelectedFile");
var cityOffset = $("#modelSelectedFile").offset();
//$("#menuContent").css({left:cityOffset.left + "px", top:cityOffset.top + cityObj.outerHeight() + "px"}).slideDown("fast");
$("#menuContent").slideDown("fast");
$("body").bind("mousedown", onBodyDown);
}
@@ -861,11 +866,10 @@
.modal({
centered: false,
onShow: function () {
$('input[name="Version"]').addClass('model_disabled')
// $('input[name="JobId"]').text(obj.JobName)
$('input[name="version"]').addClass('model_disabled')
$('#JobName').val(obj.DisplayJobName).addClass('model_disabled')
$('input[name="JobId"]').val(obj.JobID)
$('input[name="VersionName"]').val("V0001")
$('input[name="jobId"]').val(obj.JobID)
$('input[name="versionName"]').val("V0001")
if(obj.ComputeResource=="NPU"){
if (obj.EngineName != null && obj.EngineName != "") {
@@ -873,16 +877,16 @@
srcEngine = srcEngine.trim().toLowerCase();
if (srcEngine == 'tensorflow') {
$('#choice_Engine .default.text').text("TensorFlow");
$('#choice_Engine input[name="Engine"]').val(1)
$('#choice_Engine input[name="engine"]').val(1)
}
if (srcEngine == 'mindspore') {
$('#choice_Engine .default.text').text("MindSpore");
$('#choice_Engine input[name="Engine"]').val(2)
$('#choice_Engine input[name="engine"]').val(2)
}
}
}else{
$('#choice_Engine .default.text').text("PyTorch");
$('#choice_Engine input[name="Engine"]').val(0)
$('#choice_Engine input[name="engine"]').val(0)
}
$('#choice_Engine .default.text').css({ "color": "rgb(0, 0, 0,0.87)" })
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
@@ -890,9 +894,6 @@
loadSelectedModelFile(obj);
},
onHide: function () {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
$('.ui.dimmer').css({ "background-color": "" })
$('.ui.error.message').text()
$('.ui.error.message').css('display', 'none')
@@ -914,8 +915,14 @@
type: 'POST',
data: data,
success: function (res) {
$('input[name="Engine_name"]').val("");
$('input[name="Engine"]').val("");
$('input[name="engine_name"]').val("");
$('input[name="engine"]').val("");
$('input[name="jobId"]').val("");
$('input[name="label"]').val("");
$('input[name="description"]').val("");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
location.href = `/${userName}/${repoPath}/modelmanage/show_model`
$('.ui.modal.second').modal('hide')
},

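The Grampus training page above gains a resource-usage tab that is only rendered for NPU tasks, and it pre-selects the model engine from the job's engine name. A simplified sketch of that pre-selection; engineValueFor is a hypothetical helper, the sample engine name is invented, and the PyTorch fallback for unrecognized NPU engines is an assumption:

// Simplified sketch of the engine pre-selection shown above.
function engineValueFor(engineName, computeResource) {
  if (computeResource === 'NPU' && engineName) {
    var srcEngine = engineName.split('-')[0].trim().toLowerCase();
    if (srcEngine === 'tensorflow') return { text: 'TensorFlow', value: 1 };
    if (srcEngine === 'mindspore')  return { text: 'MindSpore',  value: 2 };
  }
  return { text: 'PyTorch', value: 0 };
}
engineValueFor('MindSpore-1.8-aarch64', 'NPU'); // { text: 'MindSpore', value: 2 }
engineValueFor(null, 'GPU');                    // { text: 'PyTorch', value: 0 }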

+ 2
- 4
templates/repo/modelarts/inferencejob/new.tmpl

@@ -362,9 +362,7 @@
$('#model_name_version').empty()
let html = ''
nameMap[value].forEach(element => {
let {TrainTaskInfo} = element
TrainTaskInfo = JSON.parse(TrainTaskInfo)
html += `<div class="item" data-label="${element.Label}" data-id="${element.ID}" data-value="${element.Path}">${element.Version}</div>`
html += `<div class="item" data-label="${element.label}" data-id="${element.id}" data-value="${element.path}">${element.version}</div>`
});
$('#model_name_version').append(html)
$("#select_model_version").removeClass("loading")
@@ -418,7 +416,7 @@
}
function loadCheckpointList(value){
return new Promise((resolve,reject)=>{
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{ID:value}, (data) => {
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{id:value}, (data) => {
resolve(data)
})
})


+ 25
- 24
templates/repo/modelarts/trainjob/show.tmpl

@@ -321,7 +321,7 @@
data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>
<a class="item log_bottom" data-tab="second{{$k}}"
data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a>
<a class="item metric_chart" data-tab="four{{$k}}" data-version="{{.VersionName}}">{{$.i18n.Tr "cloudbrain.resource_use"}}</a>
<a class="item metric_chart" data-tab="four{{$k}}" data-version="{{.VersionName}}" data-path="{{$.RepoRelPath}}/modelarts/train-job/{{.JobID}}/metric_statistics?version_name={{.VersionName}}&statistic_type=each&metrics=">{{$.i18n.Tr "cloudbrain.resource_use"}}</a>
<a class="item load-model-file" data-tab="third{{$k}}" data-download-flag="{{$.canDownload}}" data-path="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/model_list" data-version="{{.VersionName}}" data-parents="" data-filename="" data-init="init" >{{$.i18n.Tr "repo.model_download"}}</a>
</div>
<div class="ui tab active" data-tab="first{{$k}}">
@@ -662,29 +662,29 @@
<div class="two inline fields ">
<div class="required ten wide field">
<label>{{.i18n.Tr "repo.modelarts.train_job"}}</label>&nbsp;
<input type="hidden" class="width83" id="JobId" name="JobId" readonly required>
<input type="hidden" class="width83" id="jobId" name="jobId" readonly required>
<input class="width83" id="JobName" readonly required>

</div>
<div class="required six widde field">
<label>{{.i18n.Tr "repo.model.manage.version"}}</label>
<input class="width70" id="VersionName" name="VersionName" readonly required>
<input class="width70" id="versionName" name="versionName" readonly required>
</div>
</div>

<div class="required inline field" id="modelname">
<label>{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<input style="width: 45%;" id="name" name="Name" required maxlength="25"
<input style="width: 45%;" id="name" name="name" required maxlength="25"
onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
<div class="required inline field" id="verionname">
<label>{{.i18n.Tr "repo.modelconvert.modelversion"}}</label>
<input style="width: 45%;" id="version" name="Version" value="" readonly required maxlength="255">
<input style="width: 45%;" id="version" name="version" value="" readonly required maxlength="255">
</div>
<div class="unite min_title inline field required">
<label>{{.i18n.Tr "repo.model.manage.engine"}}</label>
<input type="hidden" id="Engine" name="Engine" required>
<input style="width: 45%;" id="Engine_name" name="Engine_name" readonly required maxlength="255">
<input type="hidden" id="engine" name="engine" required>
<input style="width: 45%;" id="engine_name" name="engine_name" readonly required maxlength="255">
</div>
<div class="unite min_title inline fields required">
<div class="field required">
@@ -699,12 +699,12 @@
</div>
<div class="inline field">
<label>{{.i18n.Tr "repo.model.manage.modellabel"}}</label>
<input style="width: 83%;margin-left: 7px;" id="label" name="Label" maxlength="255"
<input style="width: 83%;margin-left: 7px;" id="label" name="label" maxlength="255"
placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
<div class="inline field">
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}}</label>
<textarea style="width: 83%;margin-left: 7px;" id="Description" name="Description" rows="3"
<textarea style="width: 83%;margin-left: 7px;" id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -726,13 +726,12 @@

</div>
</div>
</div>
{{template "base/footer" .}}

<script type="text/javascript" src="/self/ztree/js/jquery.ztree.core.js"></script>
<script type="text/javascript" src="/self/ztree/js/jquery.ztree.excheck.js"></script>
<script>
<script>
var setting = {
check: {
enable: true,
@@ -899,27 +898,23 @@
.modal({
centered: false,
onShow: function () {
$('input[name="Version"]').addClass('model_disabled')
// $('input[name="JobId"]').text(obj.JobName)
$('input[name="version"]').addClass('model_disabled')
$('#JobName').val(obj.DisplayJobName).addClass('model_disabled')
$('input[name="JobId"]').val(obj.JobID)
$('input[name="VersionName"]').val(obj.VersionName).addClass('model_disabled')
$('input[name="jobId"]').val(obj.JobID)
$('input[name="versionName"]').val(obj.VersionName).addClass('model_disabled')
if(obj.EngineID ==122 || obj.EngineID ==35 || obj.EngineID ==-1 || obj.EngineID ==37){
$('input[name="Engine_name"]').val("MindSpore").addClass('model_disabled');
$('input[name="Engine"]').val(2);
$('input[name="engine_name"]').val("MindSpore").addClass('model_disabled');
$('input[name="engine"]').val(2);
}
if(obj.EngineID ==121 || obj.EngineID ==38){
$('input[name="Engine_name"]').val("TensorFlow").addClass('model_disabled');
$('input[name="Engine"]').val(1);
$('input[name="engine_name"]').val("TensorFlow").addClass('model_disabled');
$('input[name="engine"]').val(1);
}
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
createModelName();
loadSelectedModelFile(obj);
},
onHide: function () {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
$('.ui.dimmer').css({ "background-color": "" })
$('.ui.error.message').text()
$('.ui.error.message').css('display', 'none')
@@ -940,8 +935,14 @@
type: 'POST',
data: data,
success: function (res) {
$('input[name="Engine_name"]').val("");
$('input[name="Engine"]').val("");
$('input[name="engine_name"]').val("");
$('input[name="engine"]').val("");
$('input[name="jobId"]').val("");
$('input[name="label"]').val("");
$('input[name="description"]').val("");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
location.href = `/${userName}/${repoPath}/modelmanage/show_model`
$('.ui.modal.second').modal('hide')
},

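The ModelArts training page pre-fills the engine from the numeric EngineID: ids 122, 35, -1 and 37 map to MindSpore, and 121 and 38 map to TensorFlow, written into the lower-case engine and engine_name inputs. An illustration of that mapping; mapEngine is a hypothetical helper, the id lists are copied from the template above:

// Illustration of the EngineID mapping used when the save-model dialog opens.
function mapEngine(engineID) {
  if ([122, 35, -1, 37].indexOf(engineID) !== -1) return { engine_name: 'MindSpore',  engine: 2 };
  if ([121, 38].indexOf(engineID) !== -1)         return { engine_name: 'TensorFlow', engine: 1 };
  return null; // any other id leaves the engine fields untouched
}
mapEngine(122); // { engine_name: 'MindSpore', engine: 2 }
mapEngine(38);  // { engine_name: 'TensorFlow', engine: 1 }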

+ 39
- 38
templates/repo/modelmanage/convertIndex.tmpl

@@ -93,7 +93,7 @@
<div class="ui grid stackable item">
<div class="row">
<div class="three wide column padding0">
<a class="title" href="{{$.RepoLink}}/modelmanage/show_model_convert_info?ID={{.ID}}" title="{{.Name}}" style="font-size: 14px;">
<a class="title" href="{{$.RepoLink}}/modelmanage/show_model_convert_info?id={{.ID}}" title="{{.Name}}" style="font-size: 14px;">
<span class="fitted" style="width: 90%;vertical-align: middle;">{{.Name}}</span>
</a>
</div>
@@ -141,7 +141,7 @@
</form>

{{if .IsCanOper}}
<a id="ai-download-{{.ID}}" href="{{$.Repository.HTMLURL}}/modelmanage/download_model_convert/{{.ID}}?AllDownload=true&a=1" class='ui basic {{if eq .Status "SUCCEEDED" "COMPLETED"}}blue {{else}}disabled {{end}}button' style="border-radius: .28571429rem;">
<a id="ai-download-{{.ID}}" href="{{$.Repository.HTMLURL}}/modelmanage/download_model_convert/{{.ID}}?allDownload=true&a=1" class='ui basic {{if eq .Status "SUCCEEDED" "COMPLETED"}}blue {{else}}disabled {{end}}button' style="border-radius: .28571429rem;">
{{$.i18n.Tr "repo.modelconvert.download"}}
</a>
{{else}}
@@ -233,7 +233,7 @@
</div>
<div class="ui dropdown selection search eight wide field" id="choice_version">
<input type="hidden" id="ModelVersion" name="ModelVersion" required>
<input type="hidden" id="modelVersion" name="modelVersion" required>
<div class="default text">{{$.i18n.Tr "repo.modelconvert.selectversion"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="model-version">
@@ -246,7 +246,7 @@
<label for="choice_file">{{$.i18n.Tr "repo.model.manage.modelfile"}}</label>
</div>
<div class="ui dropdown selection search eight wide field" id="choice_file">
<input type="hidden" id="ModelFile" name="ModelFile" required>
<input type="hidden" id="modelFile" name="modelFile" required>
<div class="default text">{{$.i18n.Tr "repo.modelconvert.selectmodelfile"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="model-file">
@@ -260,10 +260,10 @@
</div>
<div class="unite min_title inline fields required">
<div class="three wide field right aligned">
<label for="SrcEngine">{{$.i18n.Tr "repo.modelconvert.srcengine"}}</label>
<label for="srcEngine">{{$.i18n.Tr "repo.modelconvert.srcengine"}}</label>
</div>
<select id="SrcEngine" class="ui search dropdown eight wide field" placeholder="" style='color:#000000;' name="SrcEngine" onchange="javascript:srcEngineChanged()">
<select id="srcEngine" class="ui search dropdown eight wide field" placeholder="" style='color:#000000;' name="srcEngine" onchange="javascript:srcEngineChanged()">
</select>
</div>
@@ -289,30 +289,30 @@
<div class="unite min_title inline fields required">
<div class="three wide field right aligned">
<label for="DestFormat">{{$.i18n.Tr "repo.modelconvert.outputformat"}}</label>
<label for="destFormat">{{$.i18n.Tr "repo.modelconvert.outputformat"}}</label>
</div>
<select id="DestFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="DestFormat">
<select id="destFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="destFormat">
</select>
</div>
<div class="unite min_title inline fields">
<div class="three wide field right aligned">
<label for="NetOutputFormat">{{$.i18n.Tr "repo.modelconvert.netoutputdata"}}&nbsp;&nbsp;</label>
<label for="netOutputFormat">{{$.i18n.Tr "repo.modelconvert.netoutputdata"}}&nbsp;&nbsp;</label>
</div>
<select id="NetOutputFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="NetOutputFormat">
<select id="netOutputFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="netOutputFormat">

</select>
</div>
<div class="unite min_title inline fields">
<div class="three wide field right aligned">
<label for="Description">{{$.i18n.Tr "repo.modelconvert.taskdesc"}}&nbsp;&nbsp;</label>
<label for="description">{{$.i18n.Tr "repo.modelconvert.taskdesc"}}&nbsp;&nbsp;</label>
</div>
<div class="twelve wide field">
<textarea id="Description" name="Description" rows="1" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}' onchange="this.value=this.value.substring(0, 255)" onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 256)"></textarea>
<textarea id="description" name="description" rows="1" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}' onchange="this.value=this.value.substring(0, 255)" onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 256)"></textarea>
</div>
</div>
<div class="unite min_title inline field">
@@ -364,9 +364,9 @@
$("#task_name").removeClass("error")
}

data['desc']= $('#Description').val()
data['modelId'] = $('#ModelVersion').val()
data['SrcEngine'] = $('#SrcEngine').val();
data['desc']= $('#description').val()
data['modelId'] = $('#modelVersion').val()
data['srcEngine'] = $('#srcEngine').val();
data['inputshape']= $('#inputshape').val();

if(inputshapeNotValid(data['inputshape'])){
@@ -379,10 +379,10 @@
}

data['inputdataformat']= $('#inputdataformat').val();
data['DestFormat'] = $('#DestFormat').val();
data['NetOutputFormat']= $('#NetOutputFormat').val();
data['ModelFile'] = $('#ModelFile').val();
if(data['ModelFile']==""){
data['destFormat'] = $('#destFormat').val();
data['netOutputFormat']= $('#netOutputFormat').val();
data['modelFile'] = $('#modelFile').val();
if(data['modelFile']==""){
$('.ui.error.message').text("{{.i18n.Tr "repo.modelconvert.modelfileempty"}}")
$('.ui.error.message').css('display','block')
$("#ModelFile_Div").addClass("error")
@@ -392,11 +392,11 @@
}
$.post(`${repolink}/modelmanage/create_model_convert`,data,(result) => {
console.log("result=" + result);
if(result.result_code ==0){
if(result.code ==0){
$('.ui.modal.second').modal('hide');
window.location.reload();
}else{
$('.ui.error.message').text(result.message)
$('.ui.error.message').text(result.msg)
$('.ui.error.message').css('display','block')
}
})
@@ -456,7 +456,7 @@
$('#choice_version').dropdown({
onChange:function(value){
console.log("model version:" + value);
$('#choice_version input[name="ModelVersion"]').val(value)
$('#choice_version input[name="modelVersion"]').val(value)
loadModelFile(value);
}
})
@@ -464,26 +464,26 @@
$('#choice_file').dropdown({
onChange:function(value){
console.log("model file:" + value);
$('#choice_file input[name="ModelFile"]').val(value)
$('#choice_file input[name="modelFile"]').val(value)
}
})

})

function srcEngineChanged(){
var ele = window.document.getElementById("SrcEngine");
var ele = window.document.getElementById("srcEngine");
var index=ele.selectedIndex;
var options=ele.options;
var option = options[index];
console.log("SrcEngine value=" + option);
console.log("srcEngine value=" + option);
let destFormatHtml = "<option name=\"ONNX\" value=\"0\">ONNX</option>";
let netOutputFormatHtml = "<option name=\"FP32\" value=\"0\">FP32</option>";
if(option==null || option =="undefined" || option.value == 0){
destFormatHtml += "<option name=\"TensorRT\" value=\"1\">TensorRT</option>"
netOutputFormatHtml += "<option name=\"FP16\" value=\"1\">FP16</option>";
}
$('#DestFormat').html(destFormatHtml);
$('#NetOutputFormat').html(netOutputFormatHtml);
$('#destFormat').html(destFormatHtml);
$('#netOutputFormat').html(netOutputFormatHtml);
}
function loadModelList(){
@@ -509,7 +509,7 @@
if(modelId ==null || modelId ==""){
console.log("modelId is null");
}else{
$.get(`${repolink}/modelmanage/query_modelfile_for_predict?ID=${modelId}`, (data) => {
$.get(`${repolink}/modelmanage/query_modelfile_for_predict?id=${modelId}`, (data) => {
const n_length = data.length
let file_html=''
let firstFileName =''
@@ -526,7 +526,7 @@
}
$("#model-file").append(file_html)
$('#choice_file .default.text').text(firstFileName)
$('#choice_file input[name="ModelFile"]').val(firstFileName)
$('#choice_file input[name="modelFile"]').val(firstFileName)
})

}
@@ -550,19 +550,19 @@
n_length = versionList.length
let train_html=''
for (let i=0;i<n_length;i++){
train_html += `<div class="item" data-value="${versionList[i].ID}">${versionList[i].Version}</div>`
train_html += `<div class="item" data-value="${versionList[i].id}">${versionList[i].version}</div>`
train_html += '</div>'
}
$("#model-version").append(train_html)
$('#choice_version .default.text').text(versionList[0].Version)
$('#choice_version input[name="ModelVersion"]').val(versionList[0].ID)
loadModelFile(versionList[0].ID);
$('#choice_version .default.text').text(versionList[0].version)
$('#choice_version input[name="modelVersion"]').val(versionList[0].id)
loadModelFile(versionList[0].id);
}
setEngineValue(value);
}
function setEngineValue(value){
$('#SrcEngine').dropdown('clear');
$('#srcEngine').dropdown('clear');
console.log("setEngineValue value=" + value);
let html = ""
html +="<option name=\"PyTorch\" " + getSelected(0,value) + " value=\"0\">PyTorch</option>";
@@ -570,7 +570,8 @@
html +="<option name=\"MindSpore\" " + getSelected(2,value) + " value=\"2\">MindSpore</option>";
html +="<option name=\"PaddlePaddle\" " + getSelected(4,value) + " value=\"4\">PaddlePaddle</option>";
html +="<option name=\"MXNet\" " + getSelected(6,value) + " value=\"6\">MXNet</option>";
$('#SrcEngine').html(html);

$('#srcEngine').html(html);
srcEngineChanged();
}
function getSelected(engineOption, modelName){
@@ -580,13 +581,13 @@
let nameMap = modelData.nameMap
let versionList = nameMap[modelName]
if(versionList != null && versionList.length >0){
if(versionList[0].Engine == engineOption){
if(versionList[0].engine == engineOption){
return "selected=\"selected\"";
}else{
if((versionList[0].Engine==122 || versionList[0].Engine==37) && engineOption==2){
if((versionList[0].engine==122 || versionList[0].engine==37) && engineOption==2){
return "selected=\"selected\"";
}
if((versionList[0].Engine==121 || versionList[0].Engine==38) && engineOption==1){
if((versionList[0].engine==121 || versionList[0].engine==38) && engineOption==1){
return "selected=\"selected\"";
}
}

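The model-convert dialog now serializes lower-camel-case keys and checks the unified code/msg response shape. A condensed sketch of the request, assuming the template's jQuery and repolink variables; the literal values are invented and only the key names and the response checks matter:

// Condensed sketch of the create-convert request after the key renames.
var data = {
  desc: $('#description').val(),
  modelId: $('#modelVersion').val(),
  srcEngine: $('#srcEngine').val(),
  inputshape: '1,3,224,224',
  inputdataformat: $('#inputdataformat').val(),
  destFormat: $('#destFormat').val(),
  netOutputFormat: $('#netOutputFormat').val(),
  modelFile: $('#modelFile').val()
};
$.post(`${repolink}/modelmanage/create_model_convert`, data, function (result) {
  if (result.code == 0) {          // success flag in the unified response
    window.location.reload();
  } else {
    $('.ui.error.message').text(result.msg).css('display', 'block');  // error text in the unified response
  }
});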

+ 10
- 0
templates/repo/modelmanage/create_local_1.tmpl

@@ -0,0 +1,10 @@
{{template "base/head" .}}
<link rel="stylesheet" href="{{StaticUrlPrefix}}/css/vp-modelmanage-local-create-1.css?v={{MD5 AppVer}}" />
<div class="repository release dataset-list view">
{{template "repo/header" .}}
<div class="ui container">
<div id="__vue-root"></div>
</div>
</div>
<script src="{{StaticUrlPrefix}}/js/vp-modelmanage-local-create-1.js?v={{MD5 AppVer}}"></script>
{{template "base/footer" .}}

+ 11
- 0
templates/repo/modelmanage/create_local_2.tmpl

@@ -0,0 +1,11 @@
{{template "base/head" .}}
<link rel="stylesheet" href="{{StaticUrlPrefix}}/css/vp-modelmanage-local-create-2.css?v={{MD5 AppVer}}" />
<div class="repository release dataset-list view">
{{template "repo/header" .}}
<script>var MAX_MODEL_SIZE = {{ .max_model_size }};</script>
<div class="ui container">
<div id="__vue-root"></div>
</div>
</div>
<script src="{{StaticUrlPrefix}}/js/vp-modelmanage-local-create-2.js?v={{MD5 AppVer}}"></script>
{{template "base/footer" .}}

+ 581
- 0
templates/repo/modelmanage/create_online.tmpl

@@ -0,0 +1,581 @@
{{template "base/head" .}}
<link rel="stylesheet" href="/self/ztree/css/zTreeStyle/zTreeStyle.css" type="text/css">
<style>
#newmodel .header {
height: 45px;
border: 1px solid #d4d4d5;
border-radius: 5px 5px 0 0;
font-size: 14px;
background: #f0f0f0;
display: flex;
align-items: center;
}
#newmodel .content {
margin-top: -1px;
border: 1px solid #d4d4d5;
border-top: none;
}
.inline.fields .right.aligned label{
width: 100% !important;
text-align: right;
}
.inline .ui.dropdown .text {
color: rgba(0, 0, 0, .87) !important;
max-width: 360px;
}
.newtext{
margin-left: 12px !important
}
.menuContent{
position: absolute;
background: #ffffff;
left: 0;
right: 26px;
top: 36px;
z-index:999;
border: 1px solid #96c8da;
border-top: 0;
border-bottom-right-radius: 4px;
border-bottom-left-radius: 4px;
box-shadow: 0 2px 3px 0 rgb(34 36 38 / 15%);
}
</style>
<div id="mask">
<div id="loadingPage">
<div class="rect1"></div>
<div class="rect2"></div>
<div class="rect3"></div>
<div class="rect4"></div>
<div class="rect5"></div>
</div>
</div>
{{$repository := .Repository.ID}}
<div class="repository release dataset-list view">
{{template "repo/header" .}}
<div class="ui container">
<div id="newmodel">
<div class="ui second">
<div class="header" style="padding: 1rem;background-color: rgba(240, 240, 240, 100);">
<h4 id="model_header">{{.i18n.Tr "repo.model.manage.import_online_model"}}</h4>
</div>
<div class="content content-padding">
<form id="formId" class="ui form dirty">
<input class="ays-ignore" type="hidden" name="initModel" value="{{$.MODEL_COUNT}}">
<div class="ui error message"></div>
<input class="ays-ignore" type="hidden" name="_csrf" value="">
<div class="inline fields">
<div class="required two wide field right aligned">
<label for="jobId">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</label>
</div>
<div class="required thirteen wide inline field">
<div class="ui dropdown selection search loading" id="choice_model">
<input class="ays-ignore" type="hidden" id="jobId" name="jobId" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-name">
</div>
</div>
<label for="versionName">{{.i18n.Tr "repo.model.manage.version"}}</label>
<span>&nbsp;</span>
<div class="ui dropdown selection search" id="choice_version">
<input class="ays-ignore" type="hidden" id="versionName" name="versionName" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.version"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-version">

</div>
</div>
</div>
</div>
<div class="required inline fields" id="modelname">
<div class="two wide field right aligned">
<label for="name">{{.i18n.Tr "repo.model.manage.model_name"}}</label>
</div>
<div class="eight wide field">
<input class="ays-ignore" id="name" name="name" required maxlength="25" onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
</div>
<div class="required inline fields" id="verionName" style="display:none;">
<div class="two wide field right aligned">
<label for="version">{{.i18n.Tr "repo.model.manage.version"}}</label>
</div>
<div class="eight wide field">
<input class="ays-ignore" id="version" name="version" value="" readonly required maxlength="255">
</div>
</div>
<div class="unite min_title inline fields required">
<div class="two wide field right aligned">
<label for="Engine">{{.i18n.Tr "repo.model.manage.engine"}}</label>
</div>
<div class="ui ten wide field dropdown selection" id="choice_Engine">
<input class="ays-ignore" type="hidden" id="engine" name="engine" required>
<div class="default text newtext">{{.i18n.Tr "repo.model.manage.select.engine"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-Engine">

</div>
</div>
</div>
<div class="unite min_title inline fields required">
<div class="two wide field right aligned">
<label for="modelSelectedFile">{{.i18n.Tr "repo.model.manage.modelfile"}}</label>
</div>
<div class="thirteen wide field" style="position:relative">
<input class="ays-ignore" id="modelSelectedFile" type="text" readonly required onclick="showMenu();" name="modelSelectedFile" >
<div id="menuContent" class="menuContent" style="display:none;">
<ul id="treeDemo" class="ztree"></ul>
</div>
</div>
</div>
<div class="inline fields">
<div class="two wide field right aligned">
<label for="Label">{{.i18n.Tr "repo.model.manage.modellabel"}} &nbsp</label>
</div>
<div class="thirteen wide field">
<input class="ays-ignore" id="label" name="label" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
</div>
<div class="inline fields">
<div class="two wide field right aligned">
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}} &nbsp</label>
</div>
<div class="thirteen wide field">
<textarea id="description" class="ays-ignore" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
onkeyup="this.value=this.value.substring(0, 256)"></textarea>
</div>
</div>
</form>
<div class="inline field" style="margin-left:140px;margin-top:28px;">
<button id="submitId" type="button" class="ui create_train_job green button" onclick="submitSaveModel()"
style="">
{{.i18n.Tr "repo.model.manage.sava_model"}}
</button>
<button style="margin-left:0px;" class="ui button cancel" onclick="backToModelListPage()">{{.i18n.Tr "repo.cloudbrain.cancel"}}</button>
</div>
</div>
</div>
</div>
</div>
</div>
{{template "base/footer" .}}
<script type="text/javascript" src="/self/ztree/js/jquery.ztree.core.js"></script>
<script type="text/javascript" src="/self/ztree/js/jquery.ztree.excheck.js"></script>
<script>
;(function() {
var setting = {
check: {
enable: true,
chkboxType: {"Y":"ps", "N":"ps"}
},
view: {
dblClickExpand: false
},
callback: {
beforeClick: beforeClick,
onCheck: onCheck
}
};

function beforeClick(treeId, treeNode) {
var zTree = $.fn.zTree.getZTreeObj("treeDemo");
zTree.checkNode(treeNode, !treeNode.checked, null, true);
return false;
}
function onCheck(e, treeId, treeNode) {
var zTree = $.fn.zTree.getZTreeObj("treeDemo"),
nodes = zTree.getCheckedNodes(true),
v = "";
for (var i=0, l=nodes.length; i<l; i++) {
if(nodes[i].isParent){
continue;
}
var pathNodes = nodes[i].getPath();
var path ="";
for(var j=0;j<pathNodes.length;j++){
if(j ==0){
path += pathNodes[j].name;
}else{
path += "/" + pathNodes[j].name;
}
}
v += path + ";";
}
if (v.length > 0 ) v = v.substring(0, v.length-1);
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", v);
}
function showMenu() {
var cityObj = $("#modelSelectedFile");
var cityOffset = $("#modelSelectedFile").offset();
// $("#menuContent").css({left:cityOffset.left + "px", top:cityOffset.top + cityObj.outerHeight() + "px"}).slideDown("fast");
$("#menuContent").slideDown("fast");

$("body").bind("mousedown", onBodyDown);
}
window.showMenu = showMenu;

function hideMenu() {
$("#menuContent").fadeOut("fast");
$("body").unbind("mousedown", onBodyDown);
}
function onBodyDown(event) {
if (!(event.target.id == "menuBtn" || event.target.id == "modelSelectedFile" || event.target.id == "menuContent" || $(event.target).parents("#menuContent").length>0)) {
hideMenu();
}
}

$(document).ready(function(){
//$.fn.zTree.init($("#treeDemo"), setting, zNodes);
});
let repolink = {{.RepoLink }}
let repoId = {{ $repository }}
const { _AppSubUrl, _StaticUrlPrefix, csrf } = window.config;
$('input[name="_csrf"]').val(csrf)
let modelData;
function createModelName() {
let repoName = location.pathname.split('/')[2]
let modelName = repoName + '_model_' + Math.random().toString(36).substr(2, 4)
$('#name').val(modelName)
$('#version').val("0.0.1")
}
let dirKey="isOnlyDir--:&";
/*
function showcreate(obj) {
$('.ui.modal.second')
.modal({
centered: false,
onShow: function () {
$('input[name="version"]').addClass('model_disabled')
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
$("#job-name").empty()
createModelName()
loadTrainList()
},
onHide: function () {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
$('#choice_model').dropdown('clear')
$('#choice_version').dropdown('clear')
$('#choice_Engine').dropdown('clear')
$('.ui.dimmer').css({ "background-color": "" })
$('.ui.error.message').text()
$('.ui.error.message').css('display', 'none')

}
})
.modal('show')
}
*/
$('input[name="version"]').addClass('model_disabled')
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
$("#job-name").empty()
createModelName()
loadTrainList()

$(function () {
$('#choice_model').dropdown({
onChange: function (value) {
$("#choice_version").addClass("loading")
$('#choice_version').dropdown('clear')
$("#job-version").empty()
loadTrainVersion(value)
}
})

$('#choice_version').dropdown({
onChange: function (value) {
console.log("model version:" + value);
if (modelData != null) {
for (var i = 0; i < modelData.length; i++) {
if (modelData[i].VersionName == value) {
setEngine(modelData[i]);
loadModelFile(modelData[i]);
break;
}
}
}
}
})
});

function versionAdd(version) {
let versionArray = version.split('.')
if (versionArray[2] == '9') {
if (versionArray[1] == '9') {
versionArray[0] = String(Number(versionArray[0]) + 1)
versionArray[1] = '0'
} else {
versionArray[1] = String(Number(versionArray[1]) + 1)
}
versionArray[2] = '0'
} else {
versionArray[2] = String(Number(versionArray[2]) + 1)
}
return versionArray.join('.')
}

function loadTrainList() {
$.get(`${repolink}/modelmanage/query_train_job?repoId=${repoId}`, (data) => {

const n_length = data.length
if(n_length > 0){
let train_html = ''
for (let i = 0; i < n_length; i++) {
train_html += `<div class="item" data-value="${data[i].JobID}">${data[i].DisplayJobName}</div>`
train_html += '</div>'
}
$("#job-name").append(train_html)
$("#choice_model").removeClass("loading")
$('#choice_model .default.text').text(data[0].DisplayJobName)
$('#choice_model input[name="jobId"]').val(data[0].JobID)
loadTrainVersion()
}else{
$("#choice_model").removeClass("loading")
}
})
}

function loadTrainVersion(value) {
let tmp = $('#choice_model input[name="jobId"]').val();
let jobId = !value ? $('#choice_model input[name="jobId"]').val() : value
$.get(`${repolink}/modelmanage/query_train_job_version?jobId=${jobId}`, (data) => {
const n_length = data.length
let train_html = '';
modelData = data;
for (let i = 0; i < n_length; i++) {
var VersionName = data[i].VersionName || 'V0001';
train_html += `<div class="item" data-value="${VersionName}">${VersionName}</div>`
train_html += '</div>'
}
if (data.length) {
$("#job-version").append(train_html)
$("#choice_version").removeClass("loading")
var versionName = data[0].VersionName;
if (versionName == null || versionName == "") {
versionName = "V0001";
}
$('#choice_version .default.text').text(versionName)
$('#choice_version input[name="versionName"]').val(versionName)
setEngine(data[0])
loadModelFile(data[0])
}

})
}

function loadModelFile(trainJob){
console.log("trainJob=", trainJob);
$('#choice_file').dropdown('clear')
$("#model-file").empty()
if(trainJob ==null || trainJob ==""){
console.log("trainJob is null");
}else{
let type = trainJob.Type;
if(type == 2){
if(trainJob.ComputeResource=="NPU"){
type=1;
}else{
type=0;
}
}
$.get(`${repolink}/modelmanage/query_train_model?jobName=${trainJob.JobName}&type=${type}&versionName=${trainJob.VersionName}`, (data) => {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
const n_length = data.length
let file_html=''
let firstFileName =''
var zNodes=[];
var nodesMap={};
for (let i=0;i<n_length;i++){
var parentNodeMap = nodesMap;
var fileSplits = data[i].FileName.split("/");
for(let j=0;j < fileSplits.length;j++){
if(fileSplits[j] == ""){
break;
}
if(parentNodeMap[fileSplits[j]] == null){
parentNodeMap[fileSplits[j]] = {};
}
parentNodeMap = parentNodeMap[fileSplits[j]];
}
}
for (let i=0;i<n_length;i++){
var parentNodeMap = nodesMap;
var fileSplits = data[i].FileName.split("/");
for(let j=0;j < fileSplits.length;j++){
if(fileSplits[j] == ""){
if(data[i].FileName[data[i].FileName.length -1] =="/"){
if(Object.keys(parentNodeMap).length ==0){
parentNodeMap[dirKey]="true";
}
}
break;
}
parentNodeMap = parentNodeMap[fileSplits[j]];
}
}
convertToNode(zNodes,nodesMap);
$.fn.zTree.init($("#treeDemo"), setting, zNodes);
})
}
}

function convertToNode(nodeList,nodesMap){
var keyList = Object.keys(nodesMap);
keyList.sort(function(a,b){
return a.localeCompare(b); // keys are path segments (strings), so compare them lexically
});
var isFirst = true;
for(var i=0; i<keyList.length;i++){
var node = {};
node["name"] = keyList[i];
nodeList.push(node);
if(nodesMap[keyList[i]] != null && Object.keys(nodesMap[keyList[i]]).length >0){
if(nodesMap[keyList[i]][dirKey] != null){
node["open"] = false;
node["isParent"] = true;
}else{
node["children"]=[];
if(isFirst){
node["open"] = true;
isFirst= false;
}
convertToNode(node["children"],nodesMap[keyList[i]]);
}
}
}
}

function setEngine(trainJob) {
console.log("trainJob=", trainJob);
$('#choice_Engine').dropdown('clear')
$("#job-Engine").empty()
if (trainJob.EngineName != null && trainJob.EngineName != "") {
srcEngine = trainJob.EngineName.split('-')[0]
srcEngine = srcEngine.trim().toLowerCase();
let selectedText = "PyTorch";
let selectedValue = 0;
let itemHtml = "<option class=\"item\" data-value=\"0\">PyTorch</option>";
if (srcEngine == 'tensorflow') {
selectedText = "TensorFlow";
selectedValue = 1;
itemHtml += "<option class=\"active item\" data-value=\"1\">TensorFlow</option>";
} else {
itemHtml += "<option class=\"item\" data-value=\"1\">TensorFlow</option>";
}
if (srcEngine == 'mindspore') {
selectedText = "MindSpore";
selectedValue = 2;
itemHtml += "<option class=\"active item\" data-value=\"2\">MindSpore</option>";
} else {
itemHtml += "<option class=\"item\" data-value=\"2\">MindSpore</option>";
}
itemHtml += "<option class=\"item\" data-value=\"4\">PaddlePaddle</option>"
itemHtml += "<option class=\"item\" data-value=\"5\">OneFlow</option>"
itemHtml += "<option class=\"item\" data-value=\"6\">MXNet</option>"
itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"

$('#choice_Engine .default.text').text(selectedText)
$('#choice_Engine input[name="engine"]').val(selectedValue)
$("#job-Engine").append(itemHtml);
$("#choice_Engine").removeClass('disabled');
} else {
let itemHtml = "<option class=\"active item\" data-value=\"0\">PyTorch</option>";
itemHtml += "<option class=\"item\" data-value=\"1\">TensorFlow</option>"
itemHtml += "<option class=\"item\" data-value=\"2\">MindSpore</option>"
itemHtml += "<option class=\"item\" data-value=\"4\">PaddlePaddle</option>"
itemHtml += "<option class=\"item\" data-value=\"5\">OneFlow</option>"
itemHtml += "<option class=\"item\" data-value=\"6\">MXNet</option>"
itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"
$('#choice_Engine .default.text').text("PyTorch");
$('#choice_Engine input[name="engine"]').val(0)
$("#job-Engine").append(itemHtml);
$("#choice_Engine").removeClass('disabled');
}
}

function check() {
let jobid = document.getElementById("jobId").value;
let versionname = document.getElementById("versionName").value;
let name = document.getElementById("name").value;
let version = document.getElementById("version").value;
let modelSelectedFile = document.getElementById("modelSelectedFile").value;
if (name == "") {
$("#modelname").closest('.required').addClass("error");
return false;
} else {
$("#modelname").closest('.required').removeClass("error");
}
if (version == "") {
$("#verionName").closest('.required').addClass("error");
return false;
} else {
$("#verionName").closest('.required').removeClass("error");
}
if (jobid == "") {
$("#jobId").closest('.required').addClass("error");
return false;
} else {
$("#jobId").closest('.required').removeClass("error");
}
if (modelSelectedFile == "") {
$("#modelSelectedFile").closest('.required').addClass("error");
return false;
} else {
$("#modelSelectedFile").closest('.required').removeClass("error");
}
if (versionname == "") {
$("#versionName").closest('.required').addClass("error");
return false;
} else {
$("#versionName").closest('.required').removeClass("error");
}
return true;
}

function submitSaveModel() {
let flag = check();
if (!flag) return false;
$(".ui.error.message").hide();
let cName = $("input[name='name']").val();
let version = $("input[name='version']").val();
let data = $("#formId").serialize();
const initModel = $("input[name='initModel']").val();
let url_href = location.href.split("create_online_model")[0] + 'create_new_model';
$("#mask").css({ display: "block", "z-index": "9999" });
$.ajax({
url: url_href,
type: "POST",
data: data,
success: function (res) {
backToModelListPage();
},
error: function (xhr) {
// 隐藏 loading
// 只有请求不正常(状态码不为200)才会执行
$(".ui.error.message").text(xhr.responseText);
$(".ui.error.message").show();
},
complete: function (xhr) {
$("#mask").css({ display: "none", "z-index": "1" });
},
});
}

function backToModelListPage() {
let url_href = location.href.split("create_online_model")[0] + 'show_model';
window.location.href = url_href;
}
window.submitSaveModel = submitSaveModel;
window.backToModelListPage = backToModelListPage;
})();
</script>

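In the script above, versionAdd bumps the three-part model version created by createModelName ("0.0.1"), rolling each component over at 9. A self-contained sketch of the same bump logic; bumpVersion is illustrative only, since versionAdd itself lives inside the template's IIFE:

// Self-contained sketch of the version bump implemented by versionAdd above.
function bumpVersion(version) {
  var parts = version.split('.').map(Number);
  parts[2] += 1;
  if (parts[2] > 9) { parts[2] = 0; parts[1] += 1; }   // patch rolls over into minor
  if (parts[1] > 9) { parts[1] = 0; parts[0] += 1; }   // minor rolls over into major
  return parts.join('.');
}
bumpVersion('0.0.1'); // "0.0.2"
bumpVersion('0.0.9'); // "0.1.0"
bumpVersion('0.9.9'); // "1.0.0"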
+ 53
- 28
templates/repo/modelmanage/index.tmpl

@@ -25,6 +25,23 @@
border-bottom-left-radius: 4px;
box-shadow: 0 2px 3px 0 rgb(34 36 38 / 15%);
}
.m-blue-btn {
background-color: rgb(22, 132, 252) !important;
}
.m-blue-btn:hover {
background-color: #66b1ff !important;
color: #fff;
}

.m-blue-btn:focus {
background-color: #66b1ff !important;
color: #fff;
}

.m-blue-btn:active {
background-color: #3a8ee6 !important;
color: #fff;
}
</style>
<link rel="stylesheet" href="/self/ztree/css/zTreeStyle/zTreeStyle.css" type="text/css">

@@ -57,8 +74,10 @@
</div>
<div class="column right aligned">
<!-- -->
<a class="ui button {{if .Permission.CanWrite $.UnitTypeModelManage}} blue m-blue-btn {{else}} disabled {{end}}"
href="{{.RepoLink}}/modelmanage/create_local_model_1">{{$.i18n.Tr "repo.model.manage.import_local_model"}}</a>
<a class="ui button {{if .Permission.CanWrite $.UnitTypeModelManage}} green {{else}} disabled {{end}}"
onclick="showcreate(this)">{{$.i18n.Tr "repo.model.manage.import_new_model"}}</a>
href="{{.RepoLink}}/modelmanage/create_online_model">{{$.i18n.Tr "repo.model.manage.import_online_model"}}</a>
</div>
</div>
{{if eq $.MODEL_COUNT 0}}
@@ -66,6 +85,7 @@
<div class="ui icon header bgtask-header-pic"></div>
<div class="bgtask-content-header">{{$.i18n.Tr "repo.model.manage.notcreatemodel"}}</div>
<div class="bgtask-content">
<!--
{{if $.RepoIsEmpty}}
<div class="bgtask-content-txt">{{$.i18n.Tr "repo.model.manage.init1"}}<a href="{{.RepoLink}}">{{$.i18n.Tr "repo.model.manage.init2"}}</a></div>
{{end}}
@@ -73,6 +93,8 @@
<div class="bgtask-content-txt">{{$.i18n.Tr "repo.model.manage.createtrainjob_tip"}}<a
href="{{.RepoLink}}/modelarts/train-job">&nbsp;{{$.i18n.Tr "repo.model.manage.createtrainjob"}}</a></div>
{{end}}
-->
<div class="bgtask-content-txt">{{$.i18n.Tr "repo.model.manage.createmodel_tip"}}<a href="{{.RepoLink}}/modelarts/train-job">&nbsp;{{$.i18n.Tr "repo.model.manage.createtrainjob"}}</a></div>
<div class="bgtask-content-txt">{{$.i18n.Tr "repo.platform_instructions1"}}<a href="https://git.openi.org.cn/zeizei/OpenI_Learning">&nbsp;{{$.i18n.Tr "repo.platform_instructions2"}}&nbsp;</a>{{$.i18n.Tr "repo.platform_instructions3"}}</div>

</div>
@@ -138,20 +160,20 @@
<input type="hidden" name="_csrf" value="">
<div class="inline fields">
<div class="required two wide field right aligned">
<label for="JobId">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</label>
<label for="jobId">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</label>
</div>
<div class="required thirteen wide inline field">
<div class="ui dropdown selection search loading" id="choice_model">
<input type="hidden" id="JobId" name="JobId" required>
<input type="hidden" id="jobId" name="jobId" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-name">
</div>
</div>
<label for="VersionName">{{.i18n.Tr "repo.model.manage.version"}}</label>
<label for="versionName">{{.i18n.Tr "repo.model.manage.version"}}</label>
<span>&nbsp;</span>
<div class="ui dropdown selection search" id="choice_version">
<input type="hidden" id="VersionName" name="VersionName" required>
<input type="hidden" id="versionName" name="versionName" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.version"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-version">
@@ -162,18 +184,18 @@
</div>
<div class="required inline fields" id="modelname">
<div class="two wide field right aligned">
<label for="Name">{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<label for="name">{{.i18n.Tr "repo.model.manage.model_name"}}</label>
</div>
<div class="eight wide field">
<input id="name" name="Name" required maxlength="25" onkeyup="this.value=this.value.replace(/[, ]/g,'')">
<input id="name" name="name" required maxlength="25" onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
</div>
<div class="required inline fields" id="verionname">
<div class="required inline fields" id="verionName">
<div class="two wide field right aligned">
<label for="Version">{{.i18n.Tr "repo.model.manage.version"}}</label>
<label for="version">{{.i18n.Tr "repo.model.manage.version"}}</label>
</div>
<div class="eight wide field">
<input id="version" name="Version" value="" readonly required maxlength="255">
<input id="version" name="version" value="" readonly required maxlength="255">
</div>
</div>

@@ -182,7 +204,7 @@
<label for="Engine">{{.i18n.Tr "repo.model.manage.engine"}}</label>
</div>
<div class="ui ten wide field dropdown selection search" id="choice_Engine">
<input type="hidden" id="Engine" name="Engine" required>
<input type="hidden" id="engine" name="engine" required>
<div class="default text newtext">{{.i18n.Tr "repo.model.manage.select.engine"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-Engine">
@@ -209,7 +231,7 @@
<label for="Label">{{.i18n.Tr "repo.model.manage.modellabel"}} &nbsp</label>
</div>
<div class="thirteen wide field">
<input id="label" name="Label" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
<input id="label" name="label" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
</div>
<div class="inline fields">
@@ -217,7 +239,7 @@
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}} &nbsp</label>
</div>
<div class="thirteen wide field">
<textarea id="Description" name="Description" rows="3"
<textarea id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -331,7 +353,7 @@
centered: false,
onShow: function () {
$('#model_header').text({{.i18n.Tr "repo.model.manage.import_new_model"}})
$('input[name="Version"]').addClass('model_disabled')
$('input[name="version"]').addClass('model_disabled')
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
$("#job-name").empty()
createModelName()
@@ -368,7 +390,7 @@
console.log("model version:" + value);
if (modelData != null) {
for (var i = 0; i < modelData.length; i++) {
if (modelData[i].VersionName == value) {
if (modelData[i].versionName == value) {
setEngine(modelData[i])
loadModelFile(modelData[i])
break;
@@ -406,7 +428,7 @@
$("#job-name").append(train_html)
$("#choice_model").removeClass("loading")
$('#choice_model .default.text').text(data[0].DisplayJobName)
$('#choice_model input[name="JobId"]').val(data[0].JobID)
$('#choice_model input[name="jobId"]').val(data[0].JobID)
loadTrainVersion()
}else{
$("#choice_model").removeClass("loading")
@@ -414,13 +436,15 @@
})
}
function loadTrainVersion(value) {
let JobID = !value ? $('#choice_model input[name="JobId"]').val() : value
$.get(`${repolink}/modelmanage/query_train_job_version?JobID=${JobID}`, (data) => {
let tmp = $('#choice_model input[name="jobId"]').val();
let jobId = !value ? $('#choice_model input[name="jobId"]').val() : value
$.get(`${repolink}/modelmanage/query_train_job_version?jobId=${jobId}`, (data) => {
const n_length = data.length
let train_html = '';
modelData = data;
for (let i = 0; i < n_length; i++) {
train_html += `<div class="item" data-value="${data[i].VersionName}">${data[i].VersionName}</div>`
var VersionName = data[i].VersionName || 'V0001';
train_html += `<div class="item" data-value="${VersionName}">${VersionName}</div>`
train_html += '</div>'
}
if (data.length) {
@@ -431,7 +455,7 @@
versionName = "V0001";
}
$('#choice_version .default.text').text(versionName)
$('#choice_version input[name="VersionName"]').val(versionName)
$('#choice_version input[name="versionName"]').val(versionName)
setEngine(data[0])
loadModelFile(data[0])
}
@@ -453,7 +477,9 @@
type=0;
}
}
$.get(`${repolink}/modelmanage/query_train_model?jobName=${trainJob.JobName}&type=${type}&VersionName=${trainJob.VersionName}`, (data) => {
$.get(`${repolink}/modelmanage/query_train_model?jobName=${trainJob.JobName}&type=${type}&versionName=${trainJob.VersionName}`, (data) => {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
const n_length = data.length
let file_html=''
let firstFileName =''
@@ -518,12 +544,12 @@
}
}
}
function setEngine(modelVersion) {
console.log("modelVersion=" + modelVersion);
function setEngine(trainJob) {
console.log("trainJob=" + trainJob);
$('#choice_Engine').dropdown('clear')
$("#job-Engine").empty()
if (modelVersion.EngineName != null && modelVersion.EngineName != "") {
srcEngine = modelVersion.EngineName.split('-')[0]
if (trainJob.EngineName != null && trainJob.EngineName != "") {
srcEngine = trainJob.EngineName.split('-')[0]
srcEngine = srcEngine.trim().toLowerCase();
let selectedText = "PyTorch";
let selectedValue = 0;
@@ -548,7 +574,7 @@
itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"

$('#choice_Engine .default.text').text(selectedText)
$('#choice_Engine input[name="Engine"]').val(selectedValue)
$('#choice_Engine input[name="engine"]').val(selectedValue)
$("#job-Engine").append(itemHtml);
$("#choice_Engine").removeClass('disabled');
} else {
@@ -560,10 +586,9 @@
itemHtml += "<option class=\"item\" data-value=\"6\">MXNet</option>"
itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"
$('#choice_Engine .default.text').text("PyTorch");
$('#choice_Engine input[name="Engine"]').val(0)
$('#choice_Engine input[name="engine"]').val(0)
$("#job-Engine").append(itemHtml);
$("#choice_Engine").removeClass('disabled');
}
}
</script>

+ 5
- 530
templates/repo/modelmanage/showinfo.tmpl

@@ -1,535 +1,10 @@
{{template "base/head" .}}
<div class="repository">
<link rel="stylesheet" href="{{StaticUrlPrefix}}/css/vp-modelmanage-common-detail.css?v={{MD5 AppVer}}" />
<div class="repository release dataset-list view">
{{template "repo/header" .}}
<style>
.model_header_text{
font-size: 14px;
color: #101010;
font-weight: bold;
}
.ti_form{
text-align: left;
max-width: 100%;
vertical-align: middle;
}
.ti-text-form-label {
padding-bottom: 20px;
padding-right: 20px;
color: #8a8e99;
font-size: 14px;
white-space: nowrap !important;
width: 80px;
line-height: 30px;
}
.ti-text-form-content {
line-height: 30px;
padding-bottom: 20px;
width: 100%;
}
.change-version{
min-width: auto !important;
border: 1px solid rgba(187, 187, 187, 100) !important;
border-radius: .38571429rem !important;
margin-left: 1.5em;
}
.title-word-elipsis{
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
width: 30%;
}
.word-elipsis{
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
padding-right: 80px;
}
.half-table{
width: 50%;
float: left;
}
.text-width80 {
width: 100px;
line-height: 30px;
}
.tableStyle{
width:100%;
table-layout: fixed;
}
.iword-elipsis{
display: inline-block;
width: 80%;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
</style>
<div class="ui container">
<h4 class="ui header" id="vertical-segment">
<!-- <a href="javascript:window.history.back();"><i class="arrow left icon"></i>Back</a> -->
<div class="ui breadcrumb">
<a class="section" href="{{$.RepoLink}}/modelmanage/show_model">
{{$.i18n.Tr "repo.model.manage.model_manage"}}
</a>
<div class="divider"> / </div>
<div class="active section">{{.name}}</div>
</div>
<select class="ui dropdown tiny change-version" id="dropdown" onchange="changeInfo(this.value)">
</select>
</h4>
<div id="showInfo" style="border:1px solid #e2e2e2;padding: 20px 60px;margin-top:24px">
<div class="ui pointing secondary menu" style="border-bottom: 1px solid rgba(34,36,38,.15);">
<a class="active item" data-tab="first">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>
<a class="item" data-tab="second">{{$.i18n.Tr "repo.model_download"}}</a>
</div>
<div class="ui tab active" data-tab="first">
<div class="half-table">
<span class="model_header_text">{{$.i18n.Tr "repo.model.manage.baseinfo"}}</span>
<table class="tableStyle" style="margin-top:20px;">
<tbody>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.model_name"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="ModelName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.version"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Version" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.migrate_items_labels"}}</td>
<td class="ti-text-form-content">
<div id="Label" style="overflow: hidden;width: 95%;">
</div>
</td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.model_size"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Size" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.createtime"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="CreateTime" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.description"}}</td>
<td class="ti-text-form-content" >
<div id="edit-td" style="display:flex">
<span id="Description" title="" class="iword-elipsis"></span>
<i id="edit-pencil" data-id="" data-desc="" class="pencil alternate icon" style="cursor:pointer;vertical-align: top;" id="editor" onclick="editorFn(this)"></i>
</div>
</td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job"}}</td>
<td class="ti-text-form-content word-elipsis">
<a id="DisplayJobNameHref" class="title" style="font-size: 14px;" target="_blank">
<span id="DisplayJobName" class="fitted" style="width: 90%;vertical-align: middle;"></span>
</a>
</td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.code_version"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="CodeBranch" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.start_file"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="BootFile" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.train_dataset"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="DatasetName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.run_parameter"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Parameters" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.AI_Engine"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="EngineName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.standard"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="FlavorName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.compute_node"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="WorkServerNumber" title=""></span></td>
</tr>
</tbody>
</table>
</div>
<div class="half-table">
<span class="model_header_text">{{$.i18n.Tr "repo.model.manage.model_accuracy"}}</span>
<table class="tableStyle" style="margin-top:20px;">
<tbody>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.Accuracy"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Accuracy" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">F1</td>
<td class="ti-text-form-content word-elipsis"><span id="F1" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.Precision"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Precision" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.Recall"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Recall" title=""></span></td>
</tr>
</tbody>
</table>
</div>
<div style="clear: both;"></div>
</div>
<div class="ui tab" data-tab="second">
<input type="hidden" name="model" value="-1">
<input type="hidden" name="modelback" value="-1">
<div class='ui breadcrumb model_file_bread' id='file_breadcrumb'>
<div class="active section"></div>
<div class="divider"> / </div>
</div>
<div id="dir_list">
</div>
</div>
</div>
</div>
<div id="__vue-root"></div>
</div>
</div>
<script src="{{StaticUrlPrefix}}/js/vp-modelmanage-common-detail.js?v={{MD5 AppVer}}"></script>
{{template "base/footer" .}}
<script>
let url = location.href.split('show_model')[0]
let trainJobUrl =url.split('modelmanage')[0]
let ID = location.search.split('?name=').pop()
$(document).ready(function(){
$('.secondary.menu .item').tab();
});
$(document).ready(loadInfo);
function changeInfo(version){
$.get(`${url}show_model_info_api?name=${ID}`,(data)=>{
let versionData = data.filter((item)=>{
return item.Version === version
})
let returnArray = []
returnArray = transObj(versionData)
let [initObj,initModelAcc,id] = returnArray
editorCancel('','')
renderInfo(initObj,initModelAcc,id)
loadModelFile(versionData[0].ID,versionData[0].Version,'','','init')
})
}
function loadInfo(){
$.get(`${url}show_model_info_api?name=${ID}`,(data)=>{
let html = ''
for (let i=0;i<data.length;i++){
if(!data[i].IsCanOper){
$("#edit-pencil").css("display","none")
}
html += `<option value="${data[i].Version}">${data[i].Version}</option>`
}
$('#dropdown').append(html)
let returnArray = []
returnArray = transObj(data)
let [initObj,initModelAcc,id] = returnArray
renderInfo(initObj,initModelAcc,id)
loadModelFile(data[0].ID,data[0].Version,'','','init')
})
}
function getEngineName(model){
if(model.Engine == 0){
return "PyTorch";
}else if(model.Engine == 1 || model.Engine == 121 || model.Engine == 38){
return "TensorFlow";
}else if(model.Engine == 2 || model.Engine == 122 || model.Engine == 35 || model.Engine == 37){
return "MindSpore";
}else if(model.Engine == 3){
return "Other";
}else if(model.Engine == 4){
return "PaddlePaddle";
}else if(model.Engine == 5){
return "OneFlow";
}else if(model.Engine == 6){
return "MXNet";
}
else{
return "Other"
}
}
function transObj(data){
let {ID,Name,Version,Label,Size,Description,CreatedUnix,Accuracy,CodeBranch,CodeCommitID,TrainTaskInfo} = data[0]
let modelAcc = JSON.parse(Accuracy)
TrainTaskInfo = JSON.parse(TrainTaskInfo)
// Parameters = JSON.parse(Parameters)
let {Parameters} = TrainTaskInfo
let EngineName = getEngineName(data[0])
Parameters = JSON.parse(Parameters)
Parameters = Parameters.parameter.length === 0 ? '--':Parameters.parameter
let size = tranSize(Size)
let time = transTime(CreatedUnix)
let initObj = {
ModelName:Name || '--',
Version:Version,
Label:Label || '--',
Size:size,
CreateTime:time,
Description:Description || '--',
CodeBranch:CodeBranch || '--',
CodeCommitID:CodeCommitID || '--',
BootFile:TrainTaskInfo.BootFile || '--',
DatasetName:TrainTaskInfo.DatasetName || '--',
Parameters:TrainTaskInfo.Parameters || '--',
FlavorName:TrainTaskInfo.FlavorName || '--',
WorkServerNumber:TrainTaskInfo.WorkServerNumber || '1',
Parameters:Parameters,
EngineName:EngineName,
DisplayJobName:TrainTaskInfo.DisplayJobName || '--',
TrainJobVersionName:TrainTaskInfo.VersionName || '',
CloudBrainJobID:TrainTaskInfo.JobID|| '',
CloudBrainType:TrainTaskInfo.Type,
}
let initModelAcc = {
Accuracy: modelAcc.Accuracy || '--',
F1: modelAcc.F1 || '--',
Precision:modelAcc.Precision || '--',
Recall: modelAcc.Recall || '--'
}
return [initObj,initModelAcc,ID]
}
function transTime(time){
let date = new Date(time * 1000);// 10-digit (second) timestamps need *1000; 13-digit (millisecond) timestamps do not
let Y = date.getFullYear() + '-';
let M = (date.getMonth()+1 < 10 ? '0'+(date.getMonth()+1):date.getMonth()+1) + '-';
let D = (date.getDate()< 10 ? '0'+date.getDate():date.getDate())+ ' ';
let h = (date.getHours() < 10 ? '0'+date.getHours():date.getHours())+ ':';
let m = (date.getMinutes() < 10 ? '0'+date.getMinutes():date.getMinutes()) + ':';
let s = date.getSeconds() < 10 ? '0'+date.getSeconds():date.getSeconds();
return Y+M+D+h+m+s;
}
function tranSize(value){
if(null==value||value==''){
return "0 Bytes";
}
var unitArr = new Array("Bytes","KB","MB","GB","TB","PB","EB","ZB","YB");
var index=0;
var srcsize = parseFloat(value);
index=Math.floor(Math.log(srcsize)/Math.log(1024));
var size =srcsize/Math.pow(1024,index);
size=size.toFixed(2);// number of decimal places to keep
return size+unitArr[index];
}
function editorFn(context){
let id= context.dataset.id
let text = context.dataset.desc
let textValue = text.replace(/enter;/g,'\r\n')
$('#edit-td').replaceWith(`<div id='edit-div' style='width:80%;display: inline-block;'><textarea id='textarea-value' value='' rows='3' maxlength='255' style='width:80%;white-space: nowrap;' id='edit-text'>${textValue}</textarea><i class='check icon' style='color: #50d4ab;' onclick='editorSure("${text}","${id}")'></i><i class='times icon' style='color: #f66f6a;' onclick='editorCancel("${text}","${id}")'></i></div>`);
}
function editorCancel(text,id){
let objkey = text.replace(/enter;/g,'\r\n')
$('#edit-div').replaceWith(`<div id="edit-td" style="display:flex;"><span id="Description" title="${objkey}" class="iword-elipsis">${objkey}</span><i id="edit-pencil" data-id="${id}" data-desc="${text}" class="pencil alternate icon" style="cursor:pointer;vertical-align: top;" id="editor" onclick="editorFn(this)"></div>`)
}
function editorSure(text,id){
let description=$('#textarea-value').val()
let sourcetext = $('#textarea-value').val().replace(/\n/g,'enter;')
let data = {
ID:id,
Description:description
}
$.ajax({
url:`${url}modify_model`,
type:'PUT',
data:data
}).done((res)=>{
$('#edit-div').replaceWith(`<div id="edit-td" style="display:flex;"><span id="Description" title="${description}" class="iword-elipsis">${description}</span><i id="edit-pencil" data-id="${id}" data-desc="${sourcetext}" class="pencil alternate icon" style="cursor:pointer;vertical-align: top;" id="editor" onclick="editorFn(this)"></div>`)
})
}
function renderInfo(obj,accObj,id){
for(let key in obj){
if(key==="Description"){
let descriptionText=obj[key].replace(/\r\n|\n/g,'enter;')
$(`#${key}`).text(obj[key])
$(`#${key}`).attr("title",obj[key])
$('#edit-pencil').attr("data-id",id)
$('#edit-pencil').attr("data-desc",descriptionText)
}
else if(key==="Label"){
$('#Label').empty()
if(obj[key]==='--'){
$('#Label').text(obj[key])
}else{
let labelArray = obj[key].trim().replace(/ +/g,' ').split(' ')
let html=''
for(let i=0;i<labelArray.length;i++){
html += `<a class="ui label" title="${labelArray[i]}">${labelArray[i]}</a>`
}
$('#Label').append(html)
}
}
else if(key==="CodeCommitID"){
let codeCommit = obj[key].slice(0,10)
let html = `<a style="margin-left:1rem" class="ui label" title="${codeCommit}">${codeCommit}</a>`
$('#CodeBranch').append(html)

}
else if(key==="DisplayJobName"){
let type=obj["CloudBrainType"]
let href=""
if(type==1){
href=trainJobUrl + "modelarts/train-job/" + obj["CloudBrainJobID"]
}else if(type==0){
href=trainJobUrl + "cloudbrain/train-job/" + obj["CloudBrainJobID"]
}else if(type==2){
href=trainJobUrl + "grampus/train-job/" + obj["CloudBrainJobID"]
}
$(`#DisplayJobNameHref`).attr("href",href)
$(`#DisplayJobNameHref`).attr("title",obj[key])
$(`#${key}`).text(obj[key])

let versionName = obj["TrainJobVersionName"]
if(versionName!=""){
let html = `<span style="margin-left:1rem" class="ui label">${versionName}</span>`
$('#DisplayJobName').append(html)
}
}
else if(key==="Parameters"){
if(obj[key]==='--'){
$(`#${key}`).text(obj[key])
}else{
const parameterArray = obj[key].map(element => {
let labelValue = `${element.label}=${element.value}`
return labelValue
});
const parameter = parameterArray.join('; ')
$(`#${key}`).text(parameter)
$(`#${key}`).attr("title",parameter)
}
}
else{
$(`#${key}`).text(obj[key])
$(`#${key}`).attr("title",obj[key])
}
}
for(let key in accObj){
$(`#${key}`).text(accObj[key])
$(`#${key}`).attr("title",accObj[key])
}
}

function loadModelFile(ID,version_name,parents,filename,init){
$.get(`${url}query_onelevel_modelfile?ID=${ID}&parentDir=${parents}`, (data) => {
$('#dir_list').empty()
renderDir(data,ID,version_name)
if(init==="init"){
$('input[name=model]').val("")
$('input[name=modelback]').val(version_name)
$('#file_breadcrumb').empty()
let htmlBread = ""
htmlBread += `<div class='active section'>${version_name}</div>`
htmlBread += "<div class='divider'> / </div>"
$('#file_breadcrumb').append(htmlBread)
}else{
renderBrend(ID,version_name,parents,filename,init)
}
})
}
function renderSize(value){
if(null==value||value==''){
return "0 Bytes";
}
var unitArr = new Array("Bytes","KB","MB","GB","TB","PB","EB","ZB","YB");
var index=0;
var srcsize = parseFloat(value);
index=Math.floor(Math.log(srcsize)/Math.log(1024));
var size =srcsize/Math.pow(1024,index);
size=size.toFixed(2);// number of decimal places to keep
return size+unitArr[index];
}

function renderBrend(ID,version_name,parents,filename,init){
if(init=="folder"){
let htmlBrend = ""
let sectionName=$('#file_breadcrumb .active.section').text()
let parents1 = $('input[name=model]').val()
let filename1 = $('input[name=modelback]').val()
if(parents1===""){
$('#file_breadcrumb .active.section').replaceWith(`<a class='section' onclick="loadModelFile('${ID}','${version_name}','${parents1}','','init')">${sectionName}</a>`)
}else{
$('#file_breadcrumb .active.section').replaceWith(`<a class='section' onclick="loadModelFile('${ID}','${version_name}','${parents1}','${filename1}')">${sectionName}</a>`)
}
htmlBrend += `<div class='active section'>${filename}</div>`
htmlBrend += "<div class='divider'> / </div>"
$('#file_breadcrumb').append(htmlBrend)
$('input[name=model]').val(parents)
$('input[name=modelback]').val(filename)
}else{
$('input[name=model]').val(parents)
$('input[name=modelback]').val(filename)
let selectEle = $('#file_breadcrumb a.section').filter(
(index, item) => {
return item.text == filename;
}
);
selectEle.nextAll().remove();
selectEle.after("<div class='divider'> / </div>");
selectEle.replaceWith(`<div class='active section'>${filename}</div>`);
}
}
function renderDir(data,ID,version_name){
let html=""
html += "<div class='ui grid' style='margin:0;'>"
html += "<div class='row' style='padding: 0;'>"
html += "<div class='ui sixteen wide column' style='padding:1rem;'>"
html += "<div class='dir list'>"
html += "<table id='repo-files-table' class='ui single line table pad20'>"
html += '<tbody>'
for(let i=0;i<data.length;i++){
let dirs_size = renderSize(data[i].Size)
html += "<tr>"
html += "<td class='name six wid'>"
html += "<span class='truncate'>"
html += "<span class='octicon octicon-file-directory'>"
html += "</span>"
if(data[i].IsDir){
html += `<a onclick="loadModelFile('${ID}','${version_name}','${data[i].ParenDir}','${data[i].FileName}','folder')">`
html += "<span class='fitted'><i class='folder icon' width='16' height='16' aria-hidden='true'></i>" + data[i].FileName + "</span>"
}else{
html += `<a href="${url}${ID}/downloadsingle?parentDir=${data[i].ParenDir}&fileName=${data[i].FileName}">`
html += "<span class='fitted'><i class='file icon' width='16' height='16' aria-hidden='true'></i>" + data[i].FileName + "</span>"
}
html += '</a>'
html += "</span>"
html += "</td>"
html += "<td class='message seven wide'>"
if(data[i].IsDir){
html += "<span class='truncate has-emoji'></span>"
}else{
html += "<span class='truncate has-emoji'>"+ `${dirs_size}` + "</span>"
}
html += "</td>"

html += "<td class='text right age three wide'>"
html += "<span class='truncate has-emoji'>" + data[i].ModTime + "</span>"
html += "</td>"
html += "</tr>"
}
html += "</tbody>"
html += "</table>"
html += "</div>"
html += "</div>"
html += "</div>"
html += "</div>"
$('#dir_list').append(html)
}
</script>

+ 3
- 1
templates/repo/modelsafety/show.tmpl

@@ -861,8 +861,10 @@
$('td.ti-text-form-content.spec div').text(specStr);
SPEC && $('td.ti-text-form-content.resorce_type div').text(getListValueWithKey(ACC_CARD_TYPE, SPEC.AccCardType));
}
var oLogHref = $('#-log-down').attr('href');
var repoPath = {{$.RepoRelPath}};
var oLogHref = `/api/v1/repos/${repoPath}/cloudbrain`;
$('#-log-down').attr('href', oLogHref + `/${res.ID}/download_log_file`);
$('.full-log-dialog').attr('data-href', oLogHref + `/${res.ID}/download_log_file`);
if (res.ResultJson) {
try {
resultData = JSON.parse(res.ResultJson);


+ 2
- 1
templates/repo/view_file.tmpl

@@ -1,4 +1,5 @@
<div class="{{TabSizeClass .Editorconfig .FileName}} non-diff-file-content">

<div class="{{TabSizeClass .Editorconfig .FileName}} non-diff-file-content gallery">
<h4 class="file-header ui top attached header">
<div class="file-header-left">
{{if .ReadmeInList}}


+ 1
- 1
templates/user/dashboard/navbar.tmpl

@@ -12,7 +12,7 @@
{{.i18n.Tr "home.switch_dashboard_context"}}
</div>
<div class="scrolling menu items">
<a class="{{if eq .ContextUser.ID .SignedUser.ID}}active selected{{end}} item" href="{{AppSubUrl}}/{{if .PageIsIssues}}issues{{else if .PageIsPulls}}pulls{{else if .PageIsMilestonesDashboard}}milestones{{end}}">
<a class="{{if eq .ContextUser.ID .SignedUser.ID}}active selected{{end}} item" href="{{AppSubUrl}}/{{if .PageIsIssues}}issues{{else if .PageIsPulls}}pulls{{else if .PageIsMilestonesDashboard}}milestones{{else}}dashboard{{end}}">
<img class="ui avatar image" src="{{.SignedUser.RelAvatarLink}}" width="28" height="28">
{{.SignedUser.Name}}
</a>


+ 124
- 91
web_src/js/components/Model.vue

@@ -13,117 +13,118 @@
:header-cell-style="tableHeaderStyle"
>
<el-table-column
prop="Name"
prop="name"
:label="i18n.model_name"
align="left"
min-width="17%"
min-width="20%"
>
<template slot-scope="scope">
<div class="expand-icon" v-if="scope.row.hasChildren === false">
<i class="el-icon-arrow-right"></i>
</div>
<!-- <i class="el-icon-time"></i> -->
<span v-if="!scope.row.Children" :class="scope.row.modelType == '1' ? 'm-local' : 'm-online'">{{ scope.row.modelType == '1' ? i18n.local : i18n.online }}</span>
<a
class="text-over"
:href="showinfoHref + scope.row.Name"
:title="scope.row.Name"
>{{ scope.row.Name }}</a
:href="showinfoHref + encodeURIComponent(scope.row.name)"
:title="scope.row.name"
>{{ scope.row.name }}</a
>
</template>
</el-table-column>
<el-table-column
prop="Status"
prop="status"
:label="i18n.model_status"
align="center"
min-width="6.5%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.Status_title">
<i style="vertical-align: middle" :class="scope.row.Status"></i
<span class="text-over" :title="scope.row.status_title">
<i style="vertical-align: middle" :class="scope.row.status"></i
></span>
</template>
</el-table-column>
<el-table-column
prop="Version"
prop="version"
:label="i18n.model_version"
align="center"
min-width="6%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.Version">{{
scope.row.Version
<span class="text-over" :title="scope.row.version">{{
scope.row.version
}}</span>
</template>
</el-table-column>
<el-table-column
prop="VersionCount"
prop="versionCount"
:label="i18n.model_version_num"
align="center"
min-width="7%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.VersionCount">{{
scope.row.VersionCount
<span class="text-over" :title="scope.row.versionCount">{{
scope.row.versionCount
}}</span>
</template>
</el-table-column>

<el-table-column
prop="Size"
prop="size"
:label="i18n.model_size"
align="center"
min-width="10%"
>
<template slot-scope="scope">
<span class="text-over">{{ renderSize(scope.row.Size) }}</span>
<span class="text-over">{{ renderSize(scope.row.size) }}</span>
</template>
</el-table-column>
<el-table-column
prop="EngineName"
prop="engineName"
:label="i18n.model_egine"
align="center"
min-width="8%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.EngineName">{{
scope.row.EngineName
<span class="text-over" :title="scope.row.engineName">{{
scope.row.engineName
}}</span>
</template>
</el-table-column>
<el-table-column
prop="ComputeResource"
prop="computeResource"
:label="i18n.model_compute_resource"
align="center"
min-width="8%"
>
<template slot-scope="scope">
<span class="text-over">{{ scope.row.ComputeResource }}</span>
<span class="text-over">{{ scope.row.computeResource }}</span>
</template>
</el-table-column>
<el-table-column
prop="CreatedUnix"
prop="createdUnix"
:label="i18n.model_create_time"
align="center"
min-width="13.75%"
>
<template slot-scope="scope">
{{ transTime(scope.row.CreatedUnix) }}
{{ transTime(scope.row.createdUnix) }}
</template>
</el-table-column>
<el-table-column
prop="UserName"
prop="userName"
:label="i18n.model_creator"
align="center"
min-width="6.75%"
>
<template slot-scope="scope">
<a
:href="!scope.row.UserName ? '#' : '/' + scope.row.UserName"
:title="scope.row.UserName || defaultAvatarName"
:href="!scope.row.userName ? '#' : '/' + scope.row.userName"
:title="scope.row.userName || defaultAvatarName"
>
<img
class="ui avatar image"
:src="scope.row.UserRelAvatarLink || defaultAvatar"
:src="scope.row.userRelAvatarLink || defaultAvatar"
/>
</a>
</template>
@@ -131,37 +132,41 @@

<el-table-column
:label="i18n.model_operation"
min-width="17%"
min-width="15%"
align="center"
>
<template slot-scope="scope">
<div class="space-around">
<a
<div class="space-around" >
<!--<a
:style="{
visibility: !scope.row.Children ? 'visible' : 'hidden',
}"
:class="{ disabled: !scope.row.IsCanOper }"
:class="{ disabled: !scope.row.isCanOper }"
@click="
showcreateVue(
scope.row.Name,
scope.row.Version,
scope.row.Label
scope.row.name,
scope.row.version,
scope.row.label
)
"
>{{ i18n.model_create_new_ver }}</a
>
<a
:href="loadhref + scope.row.ID"
:class="{ disabled: !scope.row.IsCanOper }"
>{{ i18n.model_download }}</a
>
<a
:class="{ disabled: !scope.row.IsCanDelete }"
>-->
<a class="op-btn"
v-show="scope.row.modelType == 1"
:href="url + 'create_local_model_1?type=1&name=' + encodeURIComponent(scope.row.name) + '&id=' + scope.row.id"
:class="{ disabled: !scope.row.isCanOper }"
>{{ i18n.modify }}</a>
<a class="op-btn" v-show="scope.row.modelType != 1" style="color:transparent;cursor:default;" >{{ i18n.modify }}</a>
<a class="op-btn"
:href="loadhref + scope.row.id"
:class="{ disabled: !scope.row.isCanOper }"
>{{ i18n.model_download }}</a>
<a class="op-btn"
:class="{ disabled: !scope.row.isCanDelete }"
@click="
deleteModel(scope.row.ID, scope.row.cName, scope.row.rowKey)
deleteModel(scope.row.id, scope.row.cName, scope.row.rowKey)
"
>{{ i18n.model_delete }}</a
>
>{{ i18n.model_delete }}</a>
</div>
</template>
</el-table-column>
@@ -219,17 +224,18 @@ export default {
},
})
.then((res) => {
let TrainTaskInfo;
let trainTaskInfo;
let tableData;
tableData = res.data;
for (let i = 0; i < tableData.length; i++) {
TrainTaskInfo = JSON.parse(tableData[i].TrainTaskInfo);
tableData[i].EngineName = this.getEngineName(tableData[i]);
tableData[i].ComputeResource = TrainTaskInfo.ComputeResource;
tableData[i].cName = tableData[i].Name;
tableData[i].rowKey = tableData[i].ID + Math.random();
tableData[i].Name = "";
tableData[i].VersionCount = "";
trainTaskInfo = JSON.parse(tableData[i].trainTaskInfo || '{}');
tableData[i].engineName = this.getEngineName(tableData[i]);
// tableData[i].computeResource = trainTaskInfo.ComputeResource;
tableData[i].computeResource = tableData[i].type == '0' ? 'CPU/GPU' : 'NPU';
tableData[i].cName = tableData[i].name;
tableData[i].rowKey = tableData[i].id + Math.random();
tableData[i].name = "";
tableData[i].versionCount = "";
tableData[i].Children = true;
}
resolve(tableData || []);
@@ -258,10 +264,10 @@ export default {
centered: false,
onShow: function () {
$("#model_header").text(title);
$('input[name="Name"]').addClass("model_disabled");
$('input[name="Name"]').attr("readonly", "readonly");
$('input[name="name"]').addClass("model_disabled");
$('input[name="name"]').attr("readonly", "readonly");
$('input[name="modelSelectedFile"]').attr("readonly", "readonly");
$('input[name="Version"]').addClass("model_disabled");
$('input[name="version"]').addClass("model_disabled");
$(".ui.dimmer").css({
"background-color": "rgb(136, 136, 136,0.7)",
});
@@ -274,8 +280,8 @@ export default {
},
onHide: function () {
document.getElementById("formId").reset();
$('input[name="Name"]').removeClass("model_disabled");
$('input[name="Name"]').removeAttr("readonly");
$('input[name="name"]').removeClass("model_disabled");
$('input[name="name"]').removeAttr("readonly");
$('input[name="modelSelectedFile"]').removeAttr("readonly");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
@@ -290,8 +296,8 @@ export default {
.modal("show");
},
check() {
let jobid = document.getElementById("JobId").value;
let versionname = document.getElementById("VersionName").value;
let jobid = document.getElementById("jobId").value;
let versionname = document.getElementById("versionName").value;
let name = document.getElementById("name").value;
let version = document.getElementById("version").value;
let modelSelectedFile =
@@ -333,8 +339,8 @@ export default {
let context = this;
let flag = this.check();
if (flag) {
let cName = $("input[name='Name']").val();
let version = $("input[name='Version']").val();
let cName = $("input[name='name']").val();
let version = $("input[name='version']").val();
let data = $("#formId").serialize();
const initModel = $("input[name='initModel']").val();
let url_href =
@@ -387,7 +393,7 @@ export default {
let childrenIndex = store.states.lazyTreeNodeMap[
parentRow.rowKey
].findIndex((child) => child.rowKey == row.rowKey);
parentRow.VersionCount = parentRow.VersionCount - 1;
parentRow.versionCount = parentRow.versionCount - 1;
const parent = store.states.lazyTreeNodeMap[parentRow.rowKey];
if (parent.length === 1) {
this.getModelList();
@@ -398,7 +404,7 @@ export default {
}
},
deleteModel(id, name, rowKey) {
let row = { cName: name, ID: id, rowKey: rowKey };
let row = { cName: name, id: id, rowKey: rowKey };
let _this = this;
let flag = 1;
$(".ui.basic.modal.first")
@@ -410,7 +416,7 @@ export default {
_this.$axios
.delete(_this.url + "delete_model", {
params: {
ID: id,
id: id,
},
})
.then((res) => {
@@ -442,21 +448,21 @@ export default {
.modal("show");
},
getEngineName(model) {
if (model.Engine == 0) {
if (model.engine == 0) {
return "PyTorch";
} else if (model.Engine == 1 || model.Engine == 121) {
} else if (model.engine == 1 || model.engine == 121) {
return "TensorFlow";
} else if (
model.Engine == 2 ||
model.Engine == 122 ||
model.Engine == 35
model.engine == 2 ||
model.engine == 122 ||
model.engine == 35
) {
return "MindSpore";
} else if (model.Engine == 4) {
} else if (model.engine == 4) {
return "PaddlePaddle";
} else if (model.Engine == 5) {
} else if (model.engine == 5) {
return "OneFlow";
} else if (model.Engine == 6) {
} else if (model.engine == 6) {
return "MXNet";
} else {
return "Other";
@@ -474,40 +480,40 @@ export default {
try {
this.loadNodeMap.clear();
this.$axios
.get(location.href + "_api", {
.get(this.url + "show_model_api", {
params: this.params,
})
.then((res) => {
$(".ui.grid").removeAttr("style");
$("#loadContainer").removeClass("loader");
let TrainTaskInfo;
let trainTaskInfo;
this.tableData = res.data.data;
for (let i = 0; i < this.tableData.length; i++) {
TrainTaskInfo = JSON.parse(this.tableData[i].TrainTaskInfo);
this.tableData[i].cName = this.tableData[i].Name;
this.tableData[i].rowKey = this.tableData[i].ID + Math.random();
this.tableData[i].EngineName = this.getEngineName(
trainTaskInfo = JSON.parse(this.tableData[i].trainTaskInfo || '{}');
this.tableData[i].cName = this.tableData[i].name;
this.tableData[i].rowKey = this.tableData[i].id + Math.random();
this.tableData[i].engineName = this.getEngineName(
this.tableData[i]
);
this.tableData[i].ComputeResource = TrainTaskInfo.ComputeResource;
this.tableData[i].hasChildren =
res.data.data[i].VersionCount === 1 ? false : true;
if (this.tableData[i].Status !== 1) {
// this.tableData[i].computeResource = trainTaskInfo.ComputeResource;
this.tableData[i].computeResource = this.tableData[i].type == '0' ? 'CPU/GPU' : 'NPU';
this.tableData[i].hasChildren = res.data.data[i].versionCount === 1 ? false : true;
if (this.tableData[i].status !== 1) {
countStatus++;
}

switch (this.tableData[i].Status) {
switch (this.tableData[i].status) {
case 1:
this.tableData[i].Status = "WAITING";
this.tableData[i].Status_title = this.i18n.model_wait;
this.tableData[i].status = "WAITING";
this.tableData[i].status_title = this.i18n.model_wait;
break;
case 2:
this.tableData[i].Status = "FAILED";
this.tableData[i].Status_title = this.tableData[i].StatusDesc;
this.tableData[i].status = "FAILED";
this.tableData[i].status_title = this.tableData[i].statusDesc;
break;
default:
this.tableData[i].Status = "SUCCEEDED";
this.tableData[i].Status_title = this.i18n.model_success;
this.tableData[i].status = "SUCCEEDED";
this.tableData[i].status_title = this.i18n.model_success;
break;
}
}
@@ -531,7 +537,7 @@ export default {
},
computed: {
loadhref() {
return this.url + "downloadall?ID=";
return this.url + "downloadall?id=";
},
showinfoHref() {
return this.url + "show_model_info?name=";
@@ -615,6 +621,24 @@ export default {
white-space: nowrap;
}

.m-local {
background-color: rgb(22, 132, 252);
color: white;
padding: 2px 3px;
border-radius: 4px;
font-size: 12px;
margin-right: 2px;
}

.m-online {
background-color: rgb(91, 185, 115);
color: white;
padding: 2px 3px;
border-radius: 4px;
font-size: 12px;
margin-right: 2px;
}

.el-icon-arrow-right {
font-family: element-icons !important;
speak: none;
@@ -677,6 +701,15 @@ export default {
justify-content: space-around;
}

.op-btn-c {
text-align: right;
padding-right: 20px;
}

.op-btn {
margin: 0 0 0 5px;
}

.disabled {
cursor: default;
pointer-events: none;


+ 1
- 1
web_src/js/components/basic/editDialog.vue

@@ -1,6 +1,6 @@
<template>

<el-dialog :close-on-click-modal="!deleteLoading" v-dlg-drag :title="dialogTitle" :visible.sync="deleteDialog">
<el-dialog :close-on-click-modal="!deleteLoading" :title="dialogTitle" :visible.sync="deleteDialog">
<div class="message-box__content">


+ 6
- 6
web_src/js/components/images/Images.vue

@@ -120,13 +120,13 @@
</template>
</el-table-column>
<el-table-column
prop="createdUnix"
prop="updatedUnix"
label="创建时间"
align="center"
min-width="14%"
>
<template slot-scope="scope">
{{ scope.row.createdUnix | transformTimestamp }}
{{ scope.row.updatedUnix | transformTimestamp }}
</template>
</el-table-column>
<el-table-column align="center" min-width="21%" label="操作">
@@ -369,13 +369,13 @@
</template>
</el-table-column>
<el-table-column
prop="createdUnix"
prop="updatedUnix"
label="创建时间"
align="center"
min-width="14%"
>
<template slot-scope="scope">
{{ scope.row.createdUnix | transformTimestamp }}
{{ scope.row.updatedUnix | transformTimestamp }}
</template>
</el-table-column>
<el-table-column align="center" min-width="21%" label="操作">
@@ -595,13 +595,13 @@
</template>
</el-table-column>
<el-table-column
prop="createdUnix"
prop="updatedUnix"
label="创建时间"
align="center"
min-width="14%"
>
<template slot-scope="scope">
{{ scope.row.createdUnix | transformTimestamp }}
{{ scope.row.updatedUnix | transformTimestamp }}
</template>
</el-table-column>
<el-table-column align="center" min-width="21%" label="操作">


+ 2
- 2
web_src/js/components/images/adminImages.vue

@@ -75,9 +75,9 @@
</a>
</template>
</el-table-column>
<el-table-column prop="createdUnix" label="创建时间" align="center" min-width="13%">
<el-table-column prop="updatedUnix" label="创建时间" align="center" min-width="13%">
<template slot-scope="scope">
{{scope.row.createdUnix | transformTimestamp}}
{{scope.row.updatedUnix | transformTimestamp}}
</template>
</el-table-column>
<el-table-column align="center" min-width="23%" label="操作">


+ 4
- 4
web_src/js/features/cloudbrainShow.js

@@ -799,9 +799,9 @@ export default async function initCloudrainSow() {
if (value) {
let html = "";
nameMap[value].forEach((element) => {
let { TrainTaskInfo } = element;
TrainTaskInfo = JSON.parse(TrainTaskInfo);
html += `<div class="item" data-label="${element.Label}" data-id="${element.ID}" data-value="${element.Path}">${element.Version}</div>`;
//let { trainTaskInfo } = element;
//trainTaskInfo = JSON.parse(trainTaskInfo);
html += `<div class="item" data-label="${element.label}" data-id="${element.id}" data-value="${element.path}">${element.version}</div>`;
});
$("#model_name_version").append(html);
const initVersionText = $(
@@ -937,7 +937,7 @@ export default async function initCloudrainSow() {
return new Promise((resolve, reject) => {
$.get(
`${RepoLink}/modelmanage/query_modelfile_for_predict`,
{ ID: value },
{ id: value },
(data) => {
resolve(data);
}


+ 1
- 1
web_src/js/features/cloudrbanin.js

@@ -398,7 +398,7 @@ export default async function initCloudrain() {
$(`#${jobName}`).popup("toggle");
} else {
let versionData = data.filter((item) => {
return item.Version === versionName;
return item.version === versionName;
});
if (versionData.length == 0) {
$(`#${jobName}`).popup("toggle");


+ 6
- 0
web_src/js/features/i18nVue.js

@@ -105,6 +105,9 @@ export const i18nVue = {
file_sync_fail:"文件同步失败",
no_file_to_download:"没有文件可以下载",
task_not_finished:"任务还未结束,稍后再来看看",
local:"本地",
online:"线上",
modify:"修改",
},
US: {
computer_vision: "computer vision",
@@ -216,5 +219,8 @@ export const i18nVue = {
file_sync_fail:"File synchronization failed",
no_file_to_download:"No files can be downloaded",
task_not_finished:"Task not finished yet, please wait",
local:"Local",
online:"Online",
modify:"Modify",
},
};

+ 41
- 10
web_src/js/index.js

@@ -6,8 +6,10 @@ import "./publicpath.js";
import "./polyfills.js";
import "./features/letteravatar.js";
import Vue from "vue";
import ElementUI from "element-ui";
import "element-ui/lib/theme-chalk/index.css";
import localeEn from 'element-ui/lib/locale/lang/en';
import localeZh from 'element-ui/lib/locale/lang/zh-CN';
import ElementUI from "element-ui";
import axios from "axios";
import qs from "qs";
import Cookies from "js-cookie";
@@ -53,16 +55,22 @@ import { Message } from "element-ui";

import { i18nVue } from "./features/i18nVue.js";
import './features/ad.js';
import { Fancybox } from "./vendor/fancybox.esm.js";


Vue.use(ElementUI);
Vue.prototype.$axios = axios;
Vue.prototype.$Cookies = Cookies;
Vue.prototype.qs = qs;
Vue.prototype.$message = Message;
Vue.prototype.$locale = i18nVue;
window.i18n = i18nVue[document.querySelector('html').getAttribute('lang') == 'zh-CN' ? 'CN' : 'US'];
const lang = document.querySelector('html').getAttribute('lang');
window.i18n = i18nVue[lang == 'zh-CN' ? 'CN' : 'US'];
const { AppSubUrl, StaticUrlPrefix, csrf } = window.config;

Vue.use(ElementUI, {
locale: lang === 'zh-CN' ? localeZh : localeEn,
});

Object.defineProperty(Vue.prototype, "$echarts", {
value: echarts,
});
@@ -5069,12 +5077,7 @@ function initcreateRepo() {
initcreateRepo();

function initChartsNpu() {
const url = window.location.href;
const urlArr = url.split("/");
let userName = urlArr.slice(-5)[0];
let repoPath = urlArr.slice(-4)[0];
let jobID = urlArr.slice(-1)[0];

const repoPath = $('.metric_chart').data('path')
let options = {
legend: {
data: [],
@@ -5125,7 +5128,7 @@ function initChartsNpu() {
document.getElementById(`metric-${versionName}`)
);
$.get(
`${window.config.AppSubUrl}/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/metric_statistics?version_name=${versionName}&statistic_type=each&metrics=`,
`${window.config.AppSubUrl}/api/v1/repos/${repoPath}`,
(res) => {
let filterDta = res.MetricsInfo.filter((item) => {
return ![
@@ -5177,3 +5180,31 @@ function initChartsNpu() {
}

initChartsNpu();

Fancybox.bind('.gallery img', {
// Do not create a gallery
groupAttr: null,

// Do not hide page scrollbars
hideScrollbar: false,

// Disable drag-to-close gesture
dragToClose: false,

// Hide close button
closeButton: false,

// Disable toolbar
Toolbar: false,

// Disable zoom animation; close on click and wheel events
Image: {
zoom: false,
click: "close",
wheel: "close",
},

// Custom animations
showClass: "fancybox-zoomIn",
hideClass: "fancybox-zoomOut",
});

+ 2
- 0
web_src/js/vendor/fancybox.esm.js
File diff suppressed because it is too large


+ 1
- 0
web_src/less/index.less

@@ -1,5 +1,6 @@
@import "~highlight.js/styles/github.css";
@import "./vendor/gitGraph.css";
@import "./vendor/fancyapp.less";
// @import "~/remixicon/fonts/remixicon.css";
@import "_svg";
@import "_tribute";


+ 791
- 0
web_src/less/vendor/fancyapp.less

@@ -0,0 +1,791 @@
.carousel {
position: relative;
box-sizing: border-box;
}
.carousel *,
.carousel *:before,
.carousel *:after {
box-sizing: inherit;
}
.carousel.is-draggable {
cursor: move;
cursor: grab;
}
.carousel.is-dragging {
cursor: move;
cursor: grabbing;
}
.carousel__viewport {
position: relative;
overflow: hidden;
max-width: 100%;
max-height: 100%;
}
.carousel__track {
display: flex;
}
.carousel__slide {
flex: 0 0 auto;
width: var(--carousel-slide-width, 60%);
max-width: 100%;
padding: 1rem;
position: relative;
overflow-x: hidden;
overflow-y: auto;
overscroll-behavior: contain;
}
.has-dots {
margin-bottom: calc(0.5rem + 22px);
}
.carousel__dots {
margin: 0 auto;
padding: 0;
position: absolute;
top: calc(100% + 0.5rem);
left: 0;
right: 0;
display: flex;
justify-content: center;
list-style: none;
user-select: none;
}
.carousel__dots .carousel__dot {
margin: 0;
padding: 0;
display: block;
position: relative;
width: 22px;
height: 22px;
cursor: pointer;
}
.carousel__dots .carousel__dot:after {
content: "";
width: 8px;
height: 8px;
border-radius: 50%;
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
background-color: currentColor;
opacity: 0.25;
transition: opacity 0.15s ease-in-out;
}
.carousel__dots .carousel__dot.is-selected:after {
opacity: 1;
}
.carousel__button {
width: var(--carousel-button-width, 48px);
height: var(--carousel-button-height, 48px);
padding: 0;
border: 0;
display: flex;
justify-content: center;
align-items: center;
pointer-events: all;
cursor: pointer;
color: var(--carousel-button-color, currentColor);
background: var(--carousel-button-bg, transparent);
border-radius: var(--carousel-button-border-radius, 50%);
box-shadow: var(--carousel-button-shadow, none);
transition: opacity 0.15s ease;
}
.carousel__button.is-prev,
.carousel__button.is-next {
position: absolute;
top: 50%;
transform: translateY(-50%);
}
.carousel__button.is-prev {
left: 10px;
}
.carousel__button.is-next {
right: 10px;
}
.carousel__button[disabled] {
cursor: default;
opacity: 0.3;
}
.carousel__button svg {
width: var(--carousel-button-svg-width, 50%);
height: var(--carousel-button-svg-height, 50%);
fill: none;
stroke: currentColor;
stroke-width: var(--carousel-button-svg-stroke-width, 1.5);
stroke-linejoin: bevel;
stroke-linecap: round;
filter: var(--carousel-button-svg-filter, none);
pointer-events: none;
}
html.with-fancybox {
scroll-behavior: auto;
}
body.compensate-for-scrollbar {
overflow: hidden !important;
touch-action: none;
}
.fancybox__container {
position: fixed;
top: 0;
left: 0;
bottom: 0;
right: 0;
direction: ltr;
margin: 0;
padding: env(safe-area-inset-top, 0px) env(safe-area-inset-right, 0px)
env(safe-area-inset-bottom, 0px) env(safe-area-inset-left, 0px);
box-sizing: border-box;
display: flex;
flex-direction: column;
color: var(--fancybox-color, #fff);
-webkit-tap-highlight-color: rgba(0, 0, 0, 0);
overflow: hidden;
z-index: 1050;
outline: none;
transform-origin: top left;
--carousel-button-width: 48px;
--carousel-button-height: 48px;
--carousel-button-svg-width: 24px;
--carousel-button-svg-height: 24px;
--carousel-button-svg-stroke-width: 2.5;
--carousel-button-svg-filter: drop-shadow(1px 1px 1px rgba(0, 0, 0, 0.4));
}
.fancybox__container *,
.fancybox__container *::before,
.fancybox__container *::after {
box-sizing: inherit;
}
.fancybox__container :focus {
outline: none;
}
body:not(.is-using-mouse) .fancybox__container :focus {
box-shadow: 0 0 0 1px #fff,
0 0 0 2px var(--fancybox-accent-color, rgba(1, 210, 232, 0.94));
}
@media all and (min-width: 1024px) {
.fancybox__container {
--carousel-button-width: 48px;
--carousel-button-height: 48px;
--carousel-button-svg-width: 27px;
--carousel-button-svg-height: 27px;
}
}
.fancybox__backdrop {
position: absolute;
top: 0;
right: 0;
bottom: 0;
left: 0;
z-index: -1;
background: var(--fancybox-bg, rgba(24, 24, 27, 0.92));
}
.fancybox__carousel {
position: relative;
flex: 1 1 auto;
min-height: 0;
height: 100%;
z-index: 10;
}
.fancybox__carousel.has-dots {
margin-bottom: calc(0.5rem + 22px);
}
.fancybox__viewport {
position: relative;
width: 100%;
height: 100%;
overflow: visible;
cursor: default;
}
.fancybox__track {
display: flex;
height: 100%;
}
.fancybox__slide {
flex: 0 0 auto;
width: 100%;
max-width: 100%;
margin: 0;
padding: 48px 8px 8px 8px;
position: relative;
overscroll-behavior: contain;
display: flex;
flex-direction: column;
outline: 0;
overflow: auto;
--carousel-button-width: 36px;
--carousel-button-height: 36px;
--carousel-button-svg-width: 22px;
--carousel-button-svg-height: 22px;
}
.fancybox__slide::before,
.fancybox__slide::after {
content: "";
flex: 0 0 0;
margin: auto;
}
@media all and (min-width: 1024px) {
.fancybox__slide {
padding: 64px 100px;
}
}
.fancybox__content {
margin: 0 env(safe-area-inset-right, 0px) 0 env(safe-area-inset-left, 0px);
padding: 36px;
color: var(--fancybox-content-color, #374151);
background: var(--fancybox-content-bg, #fff);
position: relative;
align-self: center;
display: flex;
flex-direction: column;
z-index: 20;
}
.fancybox__content :focus:not(.carousel__button.is-close) {
outline: thin dotted;
box-shadow: none;
}
.fancybox__caption {
align-self: center;
max-width: 100%;
margin: 0;
padding: 1rem 0 0 0;
line-height: 1.375;
color: var(--fancybox-color, currentColor);
visibility: visible;
cursor: auto;
flex-shrink: 0;
overflow-wrap: anywhere;
}
.is-loading .fancybox__caption {
visibility: hidden;
}
.fancybox__container > .carousel__dots {
top: 100%;
color: var(--fancybox-color, #fff);
}
.fancybox__nav .carousel__button {
z-index: 40;
}
.fancybox__nav .carousel__button.is-next {
right: 8px;
}
@media all and (min-width: 1024px) {
.fancybox__nav .carousel__button.is-next {
right: 40px;
}
}
.fancybox__nav .carousel__button.is-prev {
left: 8px;
}
@media all and (min-width: 1024px) {
.fancybox__nav .carousel__button.is-prev {
left: 40px;
}
}
.carousel__button.is-close {
position: absolute;
top: 8px;
right: 8px;
top: calc(env(safe-area-inset-top, 0px) + 8px);
right: calc(env(safe-area-inset-right, 0px) + 8px);
z-index: 40;
}
@media all and (min-width: 1024px) {
.carousel__button.is-close {
right: 40px;
}
}
.fancybox__content > .carousel__button.is-close {
position: absolute;
top: -40px;
right: 0;
color: var(--fancybox-color, #fff);
}
.fancybox__no-click,
.fancybox__no-click button {
pointer-events: none;
}
.fancybox__spinner {
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
width: 50px;
height: 50px;
color: var(--fancybox-color, currentColor);
}
.fancybox__slide .fancybox__spinner {
cursor: pointer;
z-index: 1053;
}
.fancybox__spinner svg {
animation: fancybox-rotate 2s linear infinite;
transform-origin: center center;
position: absolute;
top: 0;
right: 0;
bottom: 0;
left: 0;
margin: auto;
width: 100%;
height: 100%;
}
.fancybox__spinner svg circle {
fill: none;
stroke-width: 2.75;
stroke-miterlimit: 10;
stroke-dasharray: 1, 200;
stroke-dashoffset: 0;
animation: fancybox-dash 1.5s ease-in-out infinite;
stroke-linecap: round;
stroke: currentColor;
}
@keyframes fancybox-rotate {
100% {
transform: rotate(360deg);
}
}
@keyframes fancybox-dash {
0% {
stroke-dasharray: 1, 200;
stroke-dashoffset: 0;
}
50% {
stroke-dasharray: 89, 200;
stroke-dashoffset: -35px;
}
100% {
stroke-dasharray: 89, 200;
stroke-dashoffset: -124px;
}
}
.fancybox__backdrop,
.fancybox__caption,
.fancybox__nav,
.carousel__dots,
.carousel__button.is-close {
opacity: var(--fancybox-opacity, 1);
}
.fancybox__container.is-animated[aria-hidden="false"] .fancybox__backdrop,
.fancybox__container.is-animated[aria-hidden="false"] .fancybox__caption,
.fancybox__container.is-animated[aria-hidden="false"] .fancybox__nav,
.fancybox__container.is-animated[aria-hidden="false"] .carousel__dots,
.fancybox__container.is-animated[aria-hidden="false"]
.carousel__button.is-close {
animation: 0.15s ease backwards fancybox-fadeIn;
}
.fancybox__container.is-animated.is-closing .fancybox__backdrop,
.fancybox__container.is-animated.is-closing .fancybox__caption,
.fancybox__container.is-animated.is-closing .fancybox__nav,
.fancybox__container.is-animated.is-closing .carousel__dots,
.fancybox__container.is-animated.is-closing .carousel__button.is-close {
animation: 0.15s ease both fancybox-fadeOut;
}
.fancybox-fadeIn {
animation: 0.15s ease both fancybox-fadeIn;
}
.fancybox-fadeOut {
animation: 0.1s ease both fancybox-fadeOut;
}
.fancybox-zoomInUp {
animation: 0.2s ease both fancybox-zoomInUp;
}
.fancybox-zoomOutDown {
animation: 0.15s ease both fancybox-zoomOutDown;
}
.fancybox-throwOutUp {
animation: 0.15s ease both fancybox-throwOutUp;
}
.fancybox-throwOutDown {
animation: 0.15s ease both fancybox-throwOutDown;
}
@keyframes fancybox-fadeIn {
from {
opacity: 0;
}
to {
opacity: 1;
}
}
@keyframes fancybox-fadeOut {
to {
opacity: 0;
}
}
@keyframes fancybox-zoomInUp {
from {
transform: scale(0.97) translate3d(0, 16px, 0);
opacity: 0;
}
to {
transform: scale(1) translate3d(0, 0, 0);
opacity: 1;
}
}
@keyframes fancybox-zoomOutDown {
to {
transform: scale(0.97) translate3d(0, 16px, 0);
opacity: 0;
}
}
@keyframes fancybox-throwOutUp {
to {
transform: translate3d(0, -30%, 0);
opacity: 0;
}
}
@keyframes fancybox-throwOutDown {
to {
transform: translate3d(0, 30%, 0);
opacity: 0;
}
}
.fancybox__carousel .carousel__slide {
scrollbar-width: thin;
scrollbar-color: #ccc rgba(255, 255, 255, 0.1);
}
.fancybox__carousel .carousel__slide::-webkit-scrollbar {
width: 8px;
height: 8px;
}
.fancybox__carousel .carousel__slide::-webkit-scrollbar-track {
background-color: rgba(255, 255, 255, 0.1);
}
.fancybox__carousel .carousel__slide::-webkit-scrollbar-thumb {
background-color: #ccc;
border-radius: 2px;
box-shadow: inset 0 0 4px rgba(0, 0, 0, 0.2);
}
.fancybox__carousel.is-draggable .fancybox__slide,
.fancybox__carousel.is-draggable .fancybox__slide .fancybox__content {
cursor: move;
cursor: grab;
}
.fancybox__carousel.is-dragging .fancybox__slide,
.fancybox__carousel.is-dragging .fancybox__slide .fancybox__content {
cursor: move;
cursor: grabbing;
}
.fancybox__carousel .fancybox__slide .fancybox__content {
cursor: auto;
}
.fancybox__carousel .fancybox__slide.can-zoom_in .fancybox__content {
cursor: zoom-in;
}
.fancybox__carousel .fancybox__slide.can-zoom_out .fancybox__content {
cursor: zoom-out;
}
.fancybox__carousel .fancybox__slide.is-draggable .fancybox__content {
cursor: move;
cursor: grab;
}
.fancybox__carousel .fancybox__slide.is-dragging .fancybox__content {
cursor: move;
cursor: grabbing;
}
.fancybox__image {
transform-origin: 0 0;
user-select: none;
transition: none;
}
.has-image .fancybox__content {
padding: 0;
background: rgba(0, 0, 0, 0);
min-height: 1px;
}
.is-closing .has-image .fancybox__content {
overflow: visible;
}
.has-image[data-image-fit="contain"] {
overflow: visible;
touch-action: none;
}
.has-image[data-image-fit="contain"] .fancybox__content {
flex-direction: row;
flex-wrap: wrap;
}
.has-image[data-image-fit="contain"] .fancybox__image {
max-width: 100%;
max-height: 100%;
object-fit: contain;
}
.has-image[data-image-fit="contain-w"] {
overflow-x: hidden;
overflow-y: auto;
}
.has-image[data-image-fit="contain-w"] .fancybox__content {
min-height: auto;
}
.has-image[data-image-fit="contain-w"] .fancybox__image {
max-width: 100%;
height: auto;
}
.has-image[data-image-fit="cover"] {
overflow: visible;
touch-action: none;
}
.has-image[data-image-fit="cover"] .fancybox__content {
width: 100%;
height: 100%;
}
.has-image[data-image-fit="cover"] .fancybox__image {
width: 100%;
height: 100%;
object-fit: cover;
}
.fancybox__carousel .fancybox__slide.has-iframe .fancybox__content,
.fancybox__carousel .fancybox__slide.has-map .fancybox__content,
.fancybox__carousel .fancybox__slide.has-pdf .fancybox__content,
.fancybox__carousel .fancybox__slide.has-video .fancybox__content,
.fancybox__carousel .fancybox__slide.has-html5video .fancybox__content {
max-width: 100%;
flex-shrink: 1;
min-height: 1px;
overflow: visible;
}
.fancybox__carousel .fancybox__slide.has-iframe .fancybox__content,
.fancybox__carousel .fancybox__slide.has-map .fancybox__content,
.fancybox__carousel .fancybox__slide.has-pdf .fancybox__content {
width: 100%;
height: 80%;
}
.fancybox__carousel .fancybox__slide.has-video .fancybox__content,
.fancybox__carousel .fancybox__slide.has-html5video .fancybox__content {
width: 960px;
height: 540px;
max-width: 100%;
max-height: 100%;
}
.fancybox__carousel .fancybox__slide.has-map .fancybox__content,
.fancybox__carousel .fancybox__slide.has-pdf .fancybox__content,
.fancybox__carousel .fancybox__slide.has-video .fancybox__content,
.fancybox__carousel .fancybox__slide.has-html5video .fancybox__content {
padding: 0;
background: rgba(24, 24, 27, 0.9);
color: #fff;
}
.fancybox__carousel .fancybox__slide.has-map .fancybox__content {
background: #e5e3df;
}
.fancybox__html5video,
.fancybox__iframe {
border: 0;
display: block;
height: 100%;
width: 100%;
background: rgba(0, 0, 0, 0);
}
.fancybox-placeholder {
position: absolute;
width: 1px;
height: 1px;
padding: 0;
margin: -1px;
overflow: hidden;
clip: rect(0, 0, 0, 0);
white-space: nowrap;
border-width: 0;
}
.fancybox__thumbs {
flex: 0 0 auto;
position: relative;
padding: 0px 3px;
opacity: var(--fancybox-opacity, 1);
}
.fancybox__container.is-animated[aria-hidden="false"] .fancybox__thumbs {
animation: 0.15s ease-in backwards fancybox-fadeIn;
}
.fancybox__container.is-animated.is-closing .fancybox__thumbs {
opacity: 0;
}
.fancybox__thumbs .carousel__slide {
flex: 0 0 auto;
width: var(--fancybox-thumbs-width, 96px);
margin: 0;
padding: 8px 3px;
box-sizing: content-box;
display: flex;
align-items: center;
justify-content: center;
overflow: visible;
cursor: pointer;
}
.fancybox__thumbs .carousel__slide .fancybox__thumb::after {
content: "";
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
border-width: 5px;
border-style: solid;
border-color: var(--fancybox-accent-color, rgba(34, 213, 233, 0.96));
opacity: 0;
transition: opacity 0.15s ease;
border-radius: var(--fancybox-thumbs-border-radius, 4px);
}
.fancybox__thumbs .carousel__slide.is-nav-selected .fancybox__thumb::after {
opacity: 0.92;
}
.fancybox__thumbs .carousel__slide > * {
pointer-events: none;
user-select: none;
}
.fancybox__thumb {
position: relative;
width: 100%;
padding-top: calc(100% / (var(--fancybox-thumbs-ratio, 1.5)));
background-size: cover;
background-position: center center;
background-color: rgba(255, 255, 255, 0.1);
background-repeat: no-repeat;
border-radius: var(--fancybox-thumbs-border-radius, 4px);
}
.fancybox__toolbar {
position: absolute;
top: 0;
right: 0;
left: 0;
z-index: 20;
background: linear-gradient(
to top,
hsla(0deg, 0%, 0%, 0) 0%,
hsla(0deg, 0%, 0%, 0.006) 8.1%,
hsla(0deg, 0%, 0%, 0.021) 15.5%,
hsla(0deg, 0%, 0%, 0.046) 22.5%,
hsla(0deg, 0%, 0%, 0.077) 29%,
hsla(0deg, 0%, 0%, 0.114) 35.3%,
hsla(0deg, 0%, 0%, 0.155) 41.2%,
hsla(0deg, 0%, 0%, 0.198) 47.1%,
hsla(0deg, 0%, 0%, 0.242) 52.9%,
hsla(0deg, 0%, 0%, 0.285) 58.8%,
hsla(0deg, 0%, 0%, 0.326) 64.7%,
hsla(0deg, 0%, 0%, 0.363) 71%,
hsla(0deg, 0%, 0%, 0.394) 77.5%,
hsla(0deg, 0%, 0%, 0.419) 84.5%,
hsla(0deg, 0%, 0%, 0.434) 91.9%,
hsla(0deg, 0%, 0%, 0.44) 100%
);
padding: 0;
touch-action: none;
display: flex;
justify-content: space-between;
--carousel-button-svg-width: 20px;
--carousel-button-svg-height: 20px;
opacity: var(--fancybox-opacity, 1);
text-shadow: var(
--fancybox-toolbar-text-shadow,
1px 1px 1px rgba(0, 0, 0, 0.4)
);
}
@media all and (min-width: 1024px) {
.fancybox__toolbar {
padding: 8px;
}
}
.fancybox__container.is-animated[aria-hidden="false"] .fancybox__toolbar {
animation: 0.15s ease-in backwards fancybox-fadeIn;
}
.fancybox__container.is-animated.is-closing .fancybox__toolbar {
opacity: 0;
}
.fancybox__toolbar__items {
display: flex;
}
.fancybox__toolbar__items--left {
margin-right: auto;
}
.fancybox__toolbar__items--center {
position: absolute;
left: 50%;
transform: translateX(-50%);
}
.fancybox__toolbar__items--right {
margin-left: auto;
}
@media (max-width: 640px) {
.fancybox__toolbar__items--center:not(:last-child) {
display: none;
}
}
.fancybox__counter {
min-width: 72px;
padding: 0 10px;
line-height: var(--carousel-button-height, 48px);
text-align: center;
font-size: 17px;
font-variant-numeric: tabular-nums;
-webkit-font-smoothing: subpixel-antialiased;
}
.fancybox__progress {
background: var(--fancybox-accent-color, rgba(34, 213, 233, 0.96));
height: 3px;
left: 0;
position: absolute;
right: 0;
top: 0;
transform: scaleX(0);
transform-origin: 0;
transition-property: transform;
transition-timing-function: linear;
z-index: 30;
user-select: none;
}
.fancybox__container:fullscreen::backdrop {
opacity: 0;
}
.fancybox__button--fullscreen g:nth-child(2) {
display: none;
}
.fancybox__container:fullscreen .fancybox__button--fullscreen g:nth-child(1) {
display: none;
}
.fancybox__container:fullscreen .fancybox__button--fullscreen g:nth-child(2) {
display: block;
}
.fancybox__button--slideshow g:nth-child(2) {
display: none;
}
.fancybox__container.has-slideshow .fancybox__button--slideshow g:nth-child(1) {
display: none;
}
.fancybox__container.has-slideshow .fancybox__button--slideshow g:nth-child(2) {
display: block;
}

.gallery img {
cursor: zoom-in;
}

.fancybox__container {
--fancybox-bg: rgba(193, 201, 210, 0.7);
}

.fancybox-zoomOut {
animation: 0.2s ease-in-out fancybox-zoomOut both;
}

.fancybox-zoomIn {
animation: 0.25s ease-in-out fancybox-zoomIn both;
}

@keyframes fancybox-zoomIn {
from {
opacity: 0;
transform: scale3d(0.8, 0.8, 0.8);
}

to {
opacity: 1;
}
}

@keyframes fancybox-zoomOut {
from {
opacity: 1;
}

to {
opacity: 0;
transform: scale3d(0.8, 0.8, 0.8);
}
}

+ 106
- 0
web_src/vuepages/apis/modules/modelmanage.js

@@ -0,0 +1,106 @@
import service from "../service";
import Qs from 'qs';

// Save a local model
export const saveLocalModel = (data) => {
return service({
url: `${data.repo}/modelmanage/create_local_model`,
method: 'post',
headers: { 'Content-type': 'application/x-www-form-urlencoded' },
params: {},
data: Qs.stringify(data),
});
};

// Modify a model
// data: { id, type, name, version, engine, label, description }
export const modifyModel = (data) => {
return service({
url: `${data.repo}/modelmanage/modify_model`,
method: 'put',
headers: { 'Content-type': 'application/x-www-form-urlencoded' },
params: {},
data: Qs.stringify(data),
});
};

// Get model information by name
export const getModelInfoByName = (params) => {
return service({
url: `${params.repo}/modelmanage/show_model_info_api`,
method: 'get',
params,
data: {},
});
};

// Get the file list of a model (one directory level)
// params: { repo, ID, parentDir }
export const getModelFiles = (params) => {
return service({
url: `${params.repo}/modelmanage/query_onelevel_modelfile`,
method: 'get',
params,
data: {},
});
};

// Delete a model file
// params: { repo, id, fileName }
export const deleteModelFile = (params) => {
return service({
url: `${params.repo}/modelmanage/delete_model_file`,
method: 'delete',
params,
data: {},
});
};

/* File upload */
// Upload step 1: get the chunk info of a file
// params: { md5, type (0: CPU/GPU, 1: NPU), file_name, scene: 'model', modeluuid }
// return: uploadID, uuid, uploaded, chunks, attachID, modeluuid, modelName, fileName
export const getChunks = (params) => {
return service({
url: `/attachments/model/get_chunks`,
method: 'get',
params,
data: {},
});
};

// Upload step 2: start a multipart upload for a new file
// params: { totalChunkCounts, md5, size, fileType, type, file_name, scene=model, modeluuid=xxxx }
// return: uploadID, uuid
export const getNewMultipart = (params) => {
return service({
url: `/attachments/model/new_multipart`,
method: 'get',
params,
data: {},
});
};

// Upload step 3: get the upload URL for one chunk
// params: { uuid, uploadID, size, chunkNumber, type, file_name, scene=model }
// return: url
export const getMultipartUrl = (params) => {
return service({
url: `/attachments/model/get_multipart_url`,
method: 'get',
params,
data: {},
});
};

// Upload step 4: complete the multipart upload
// data: { uuid, uploadID, size, type, file_name, dataset_id, description, scene=model, modeluuid=xxxx }
export const setCompleteMultipart = (data) => {
return service({
url: `/attachments/model/complete_multipart`,
method: 'post',
headers: { 'Content-type': 'application/x-www-form-urlencoded' },
params: {},
data: Qs.stringify(data),
});
};
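Below is a hedged sketch (not part of the committed file) of how these four endpoints are expected to chain together for a single model file; computeFileMd5, the 64 MB chunk size, and the response unwrapping are illustrative assumptions, and the real payload shape may differ.

// Usage sketch only: wire the four upload endpoints above together for one file.
async function uploadModelFileSketch(file, modeluuid, type) {
  const md5 = await computeFileMd5(file); // assumed helper (e.g. an MD5 digest of the file)
  const chunkSize = 64 * 1024 * 1024;     // assumed chunk size
  const totalChunkCounts = Math.ceil(file.size / chunkSize);

  // Step 1: ask whether this md5 is already known (supports resumable uploads).
  let res = await getChunks({ md5, type, file_name: file.name, scene: 'model', modeluuid });
  let { uuid, uploadID } = res.data;

  // Step 2: unknown file, start a new multipart upload.
  if (!uploadID) {
    res = await getNewMultipart({
      totalChunkCounts, md5, size: file.size, fileType: file.type, type,
      file_name: file.name, scene: 'model', modeluuid,
    });
    ({ uuid, uploadID } = res.data);
  }

  // Step 3: upload every chunk to the URL returned by the server.
  for (let chunkNumber = 1; chunkNumber <= totalChunkCounts; chunkNumber++) {
    const blob = file.slice((chunkNumber - 1) * chunkSize, chunkNumber * chunkSize);
    res = await getMultipartUrl({
      uuid, uploadID, size: blob.size, chunkNumber, type, file_name: file.name, scene: 'model',
    });
    await fetch(res.data.url, { method: 'PUT', body: blob }); // raw PUT of the chunk body
  }

  // Step 4: mark the multipart upload as complete.
  return setCompleteMultipart({
    uuid, uploadID, size: file.size, type, file_name: file.name, scene: 'model', modeluuid,
  });
}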

+ 13
- 0
web_src/vuepages/apis/modules/resources.js

@@ -110,6 +110,19 @@ export const getResSpecificationList = (params) => {
});
}

// Query the resource specification list (all)
// cluster: owning cluster, OpenI (OpenI cluster) or C2Net (intelligent computing network)
// queue: owning queue id
// status: 1 pending review, 2 on shelf, 3 off shelf
export const getResSpecificationListAll = (params) => {
return service({
url: '/admin/resources/specification/list/all',
method: 'get',
params,
data: {},
});
}

// Sync the resource pools (queues) of the intelligent computing network
export const syncResSpecification = () => {
return service({


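A brief usage sketch for the new endpoint; the parameter values below are placeholders chosen to match the comment above.

// List all on-shelf specifications of the C2Net cluster (placeholder queue id).
getResSpecificationListAll({ cluster: 'C2Net', queue: 1, status: 2 }).then((res) => {
  console.log(res.data);
});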
+ 3
- 0
web_src/vuepages/const/index.js

@@ -14,3 +14,6 @@ export const AI_CENTER = [{ k: 'OpenIOne', v: i18n.t('resourcesManagement.OpenIO
export const COMPUTER_RESOURCES = [{ k: 'GPU', v: 'GPU' }, { k: 'NPU', v: 'NPU' }, { k: 'MLU', v: 'MLU' }];
export const ACC_CARD_TYPE = [{ k: 'T4', v: 'T4' }, { k: 'A100', v: 'A100' }, { k: 'V100', v: 'V100' }, { k: 'ASCEND910', v: 'Ascend 910' }, { k: 'MLU270', v: 'MLU270' }, { k: 'RTX3080', v: 'RTX3080' }];
export const SPECIFICATION_STATUS = [{ k: '1', v: i18n.t('resourcesManagement.willOnShelf') }, { k: '2', v: i18n.t('resourcesManagement.onShelf') }, { k: '3', v: i18n.t('resourcesManagement.offShelf') }];

// Model
export const MODEL_ENGINES = [{ k: '0', v: 'PyTorch' }, { k: '1', v: 'TensorFlow' }, { k: '2', v: 'MindSpore' }, { k: '4', v: 'PaddlePaddle' }, { k: '5', v: 'OneFlow' }, { k: '6', v: 'MXNet' }, { k: '3', v: 'Other' }];
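For reference, a small sketch of how this list is consumed: the engine id stored with a model ('0'..'6') is looked up to get its display name, the same mapping modelmanage-common-detail.vue performs via getListValueWithKey.

// Look up the display name for a stored engine id, e.g. '2' -> 'MindSpore'.
const engineName = (MODEL_ENGINES.find((item) => item.k === '2') || {}).v;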

+ 62
- 1
web_src/vuepages/langs/config/en-US.js

@@ -187,12 +187,73 @@ const en = {
onlyFace:'Only face',
onlyLicensePlate:'Only license plate',
dragThePictureHere:'Drag the picture here',
or:'or',
or:' or ',
clickUpload:'Click upload',
dataDesensitizationModelExperience:'Data desensitization model experience',
dataDesensitizationModelDesc:'Use AI technology to desensitize the face and license plate number in the picture. For more information about this model, please visit the project',
limitFilesUpload:'Only jpg/jpeg/png files can be uploaded',
limitSizeUpload:'The size of the uploaded file cannot exceed 20M!',
modelManage: {
modelManage: 'Model management',
modelName: 'Model name',
useCluster: 'Available clusters',
local: 'Local',
online: 'Online',
createModel: 'Create Model',
importLocalModel: 'Import Local Model',
importOnlineModel: 'Import Online Model',
modifyModelInfo: 'Modify model information',
addModelFiles: 'Add model files',
uploadModelFiles: 'Upload model files',
pleaseInputModelName: 'Please input model name',
version: 'Version',
modelEngine: 'Model engine',
modelLabel: 'Model label',
modelLabelInputTips: 'Input labels, multiple labels are separated by spaces',
modelDescr: 'Model description',
modelDescrInputTips: 'The description should not exceed 255 characters',
confirm: 'Confirm',
cancel: 'Cancel',
modelCreateFailed: 'Model creation failed',
modelModifyFailed: 'Model modification failed',
fileUpload: 'File upload',
upload: 'Upload',
uploadStatus: 'Upload status',
modelFileUploadDefaultTips: 'Click to add files or drag files here directly',
modelFileUploadErrTips: 'Up to 10 files can be uploaded at a time, and the total size of the model files must not exceed {size}GB',
modelFileNameTips: 'The file name should not exceed 128 characters',
fileIstoBig: 'File is too big',
removeFile: 'Remove file',
uploadSuccess: 'Upload succeeded',
uploadFailed: 'Upload failed',
calcFileMd5: 'Calculating file MD5...',
uploading: 'Uploading...',
fileHasAlreadyInTheModel: 'This file already exists in the model: ',
basicInfo: 'Basic information',
modelSize: 'Model size',
descr: 'Description',
createTime: 'Create Time',
label: 'Label',
trainTaskInfo: 'Training task information',
trainTask: 'Training task',
codeBranch: 'Code branch',
bootFile: 'Boot file',
trainDataset: 'Training dataset',
specInfo: 'Specifications',
workServerNumber: 'Number of compute nodes',
runParameters: 'Run parameters',
seeMore: 'View more',
collapseDetails: 'Collapse details',
modelFilesList: 'Model file list',
fileName: 'File name',
fileSize: 'File size',
updateTime: 'Update Time',
operate: 'Operation',
delete: 'Delete',
infoModificationFailed: 'Information modification failed',
deleteModelFileConfirmTips: 'Are you sure you want to delete the current model file?',
modelFileDeleteFailed: 'Failed to delete the model file',
},
}

export default en;

+ 61
- 0
web_src/vuepages/langs/config/zh-CN.js

@@ -193,6 +193,67 @@ const zh = {
dataDesensitizationModelDesc:'利用人工智能AI技术,把图片中的人脸、车牌号码进行脱敏处理。该模型更多信息请访问项目',
limitFilesUpload:'只能上传 jpg/jpeg/png 格式的文件',
limitSizeUpload:'上传文件大小不能超过 20M !',
modelManage: {
modelManage: '模型管理',
modelName: '模型名称',
useCluster: '可用集群',
local: '本地',
online: '线上',
createModel: '创建模型',
importLocalModel: '导入本地模型',
importOnlineModel: '导入线上模型',
modifyModelInfo: '修改模型信息',
addModelFiles: '增加模型文件',
uploadModelFiles: '上传模型文件',
pleaseInputModelName: '请输入模型名称',
version: '版本',
modelEngine: '模型框架',
modelLabel: '模型标签',
modelLabelInputTips: '输入标签,多个标签用空格区分',
modelDescr: '模型描述',
modelDescrInputTips: '描述字数不超过255个字符',
confirm: '确定',
cancel: '取消',
modelCreateFailed: '模型创建失败',
modelModifyFailed: '模型修改失败',
fileUpload: '文件上传',
upload: '上传',
uploadStatus: '上传状态',
modelFileUploadDefaultTips: '点击添加文件或直接拖拽文件到此处',
modelFileUploadErrTips: '单次最多上传10个文件,模型总文件大小不超过{size}G',
modelFileNameTips: '文件名长度不超过128个字符',
fileIstoBig: '文件太大',
removeFile: '移除文件',
uploadSuccess: '上传成功',
uploadFailed: '上传失败',
calcFileMd5: '计算文件MD5...',
uploading: '上传中...',
fileHasAlreadyInTheModel: '该文件已上传在模型:',
basicInfo: '基本信息',
modelSize: '模型大小',
descr: '描述',
createTime: '创建时间',
label: '标签',
trainTaskInfo: '训练相关信息',
trainTask: '训练任务',
codeBranch: '代码分支',
bootFile: '启动文件',
trainDataset: '训练数据集',
specInfo: '规格',
workServerNumber: '计算节点',
runParameters: '运行参数',
seeMore: '查看更多信息',
collapseDetails: '折叠详细信息',
modelFilesList: '模型文件列表',
fileName: '文件名称',
fileSize: '文件大小',
updateTime: '更新时间',
operate: '操作',
delete: '删除',
infoModificationFailed: '信息修改失败',
deleteModelFileConfirmTips: '请确认是否删除当前模型文件?',
modelFileDeleteFailed: '模型文件删除失败',
},
}

export default zh;

+ 3
- 5
web_src/vuepages/pages/model/tuomin/index.vue

@@ -33,11 +33,9 @@
drag
>
<div class="el-upload__text">
{{ $t("dragThePictureHere")
}}<span style="color: rgba(136, 136, 136, 0.87)">{{
$t("or")
}}</span
>{{ $t("clickUpload") }}
<span>
<span>{{ $t("dragThePictureHere") }}</span><span style="color: rgba(136, 136, 136, 0.87)">{{ $t("or") }}</span><span>{{ $t("clickUpload") }}</span>
</span>
</div>
</el-upload>



+ 706
- 0
web_src/vuepages/pages/modelmanage/common/modelmanage-common-detail.vue

@@ -0,0 +1,706 @@
<template>
<div>
<div class="ui header">
<div class="ui breadcrumb">
<a class="section" :href="`${repo}/modelmanage/show_model`">{{ $t('modelManage.modelManage') }}</a>
<div class="divider"> / </div>
<div class="active section">{{ this.state.name }}</div>
</div>
<div class="version">
<el-select v-model="curVersion" @change="changeVersion" placeholder="">
<el-option v-for="item in modelList" :value="item.version" :key="item.version" :label="item.version">
</el-option>
</el-select>
</div>
</div>
<div class="content">
<div class="detail-info">
<div class="title">{{ $t('modelManage.basicInfo') }}:</div>
<div class="area-c">
<div class="area">
<div class="row">
<div class="tit">{{ $t('modelManage.useCluster') }}:</div>
<div class="val">
<div class="txt-wrap" :title="state.typeStr">
{{ state.typeStr }}
</div>
</div>
</div>
<div class="row">
<div class="tit">{{ $t('modelManage.modelSize') }}:</div>
<div class="val">
<div class="txt-wrap" :title="state.modelSize">{{ state.modelSize }}</div>
</div>
</div>
<div class="row" :class="isEidtDescr ? 'edit-row' : ''">
<div class="tit">{{ $t('modelManage.descr') }}:</div>
<div class="val" :class="isEidtDescr ? 'edit-val' : ''">
<div v-if="!isEidtDescr" class="txt-wrap" :title="state.description"
style="max-width:100%;width:unset;padding-right:20px;">
<span>{{ state.description }}</span>
<i v-if="canOperate" style="position:absolute;right:0;top:3px;color:rgb(22, 132, 252);cursor:pointer;"
class="el-icon-edit" @click="editDescr = state._description; isEidtDescr = true;"></i>
</div>
<div class="txt-edit" v-if="isEidtDescr">
<el-input type="textarea" v-model="editDescr" :maxLength="255"
:placeholder="$t('modelManage.modelDescrInputTips')"></el-input>
<i style="position:absolute;right:-4px;bottom:20px;color:rgb(255, 37, 37);cursor:pointer;"
class="icon times" @click="isEidtDescr = false;"></i>
<i style="position:absolute;right:-5px;bottom:2px;color:rgb(39, 177, 72);cursor:pointer;"
@click="submitEidt('descr')" class="icon check"></i>
</div>
</div>
</div>
</div>
<div class="area">
<div class="row">
<div class="tit">{{ $t('modelManage.modelEngine') }}:</div>
<div class="val">
<div class="txt-wrap" :title="state.engineName">{{ state.engineName }}</div>
</div>
</div>
<div class="row">
<div class="tit">{{ $t('modelManage.createTime') }}:</div>
<div class="val">
<div class="txt-wrap" :title="state.createTime">{{ state.createTime }}</div>
</div>
</div>
<div class="row" :class="isEidtLabel ? 'edit-row' : ''">
<div class="tit">{{ $t('modelManage.label') }}:</div>
<div class="val" :class="isEidtLabel ? 'edit-val' : ''">
<div v-if="!isEidtLabel" class="txt-wrap" :title="state.label"
style="max-width:100%;width:unset;padding-right:20px;">
<span>{{ state.label }}</span>
<i v-if="canOperate" style="position:absolute;right:0;top:3px;color:rgb(22, 132, 252);cursor:pointer;"
class="el-icon-edit" @click="editLabel = state._label; isEidtLabel = true;"></i>
</div>
<div class="txt-edit" v-if="isEidtLabel">
<el-input v-model="editLabel" :maxLength="255" :placeholder="$t('modelManage.modelLabelInputTips')"
@input="labelInput"></el-input>
<i style="position:absolute;right:-5px;bottom:20px;color:rgb(255, 37, 37);cursor:pointer;"
class="icon times" @click="isEidtLabel = false;"></i>
<i style="position:absolute;right:-5px;bottom:2px;color:rgb(39, 177, 72);cursor:pointer;"
@click="submitEidt('label')" class="icon check"></i>
</div>
</div>
</div>
</div>
</div>
<div v-show="isExpanded" style="margin-top:8px;" class="title">{{ $t('modelManage.trainTaskInfo') }}:</div>
<div v-show="isExpanded" class="area-c">
<div class="area">
<div class="row">
<div class="tit">{{ $t('modelManage.trainTask') }}:</div>
<div class="val">
<div class="txt-wrap" v-html="state.displayJobName"></div>
</div>
</div>
<div class="row">
<div class="tit">{{ $t('modelManage.codeBranch') }}:</div>
<div class="val">
<div class="txt-wrap" v-html="state.branchName"></div>
</div>
</div>
<div class="row">
<div class="tit">{{ $t('modelManage.bootFile') }}:</div>
<div class="val">
<div class="txt-wrap" :title="state.bootFile">{{ state.bootFile }}</div>
</div>
</div>
<div class="row">
<div class="tit">{{ $t('modelManage.trainDataset') }}:</div>
<div class="val">
<div class="txt-wrap" :title="state.datasetName">{{ state.datasetName }}</div>
</div>
</div>
</div>
<div class="area">
<div class="row">
<div class="tit">{{ $t('modelManage.specInfo') }}:</div>
<div class="val">
<div class="txt-wrap" :title="state.specStr">{{ state.specStr }}</div>
</div>
</div>
<div class="row">
<div class="tit">{{ $t('modelManage.workServerNumber') }}:</div>
<div class="val">
<div class="txt-wrap" :title="state.workServerNumber">{{ state.workServerNumber }}</div>
</div>
</div>
<div class="row">
<div class="tit">{{ $t('modelManage.runParameters') }}:</div>
<div class="val">
<div class="txt-wrap" :title="state.parameters">{{ state.parameters }}</div>
</div>
</div>
</div>
</div>
</div>
<div class="expand-line">
<div class="line"></div>
<div class="expand-btn" @click="isExpanded = !isExpanded">
<i class="icon chevron circle down" :class="isExpanded ? 'up' : ''"></i>
<span>{{ isExpanded ? $t('modelManage.collapseDetails') : $t('modelManage.seeMore') }}</span>
</div>
<div class="line"></div>
</div>
<div class="files-info">
<div class="top">
<div style="width:100%;margin-right:20px;">
<div class="title">{{ $t('modelManage.modelFilesList') }}:</div>
<div class="title files-path-c" style="margin-top:8px;margin-bottom:4px">
<div class="file-path" v-for="(item, index) in filePath">
<span v-if="index == filePath.length - 1" class="path-name">{{ item.label }}</span>
<a v-if="index != filePath.length - 1" class="path-name canback" @click="goBackDir(item)">{{ item.label
}}</a>
<span style="color:rgba(0,0,0,.4);" class="divider"> / </span>
</div>
</div>
</div>
<div>
<el-button v-if="modelType == 1 && canOperate" type="primary" icon="el-icon-upload" @click="goUploadPage">
{{ $t('modelManage.uploadModelFiles') }}
</el-button>
</div>
</div>
<div class="table-container">
<el-table ref="tableRef" :data="filesList" row-key="sn" style="width: 100%" v-loading="loading" stripe>
<el-table-column column-key="FileName" prop="FileName" sortable
:sort-method="(a, b) => a.FileName.toLocaleLowerCase().localeCompare(b.FileName.toLocaleLowerCase())"
:label="$t('modelManage.fileName')" align="left" header-align="left">
<template slot-scope="scope">
<div class="tbl-file-name">
<a v-if="scope.row.IsDir" @click="goNextDir(scope.row)" href="javascript:;">
<div class="fitted" :title="scope.row.FileName">
<i class="icon folder" width="16" height="16" aria-hidden="true"></i>
<span>{{ scope.row.FileName }}</span>
</div>
</a>
<a v-else :class="!canOperate ? 'disabled-download' : ''"
:href="canOperate ? `${repo}/modelmanage/${state.id}/downloadsingle?parentDir=${filePath.length > 1 ? encodeURIComponent(filePath.map(item => item.path).join('/').slice(1) + '/') : ''}&fileName=${scope.row.FileName}` : 'javascript:;'">
<div class="fitted" :title="scope.row.FileName">
<i class="icon file" width="16" height="16" aria-hidden="true"></i>
<span>{{ scope.row.FileName }}</span>
</div>
</a>
</div>
</template>
</el-table-column>
<el-table-column column-key="SizeShow" prop="SizeShow" sortable :sort-method="(a, b) => a.Size - b.Size"
:label="$t('modelManage.fileSize')" align="center" header-align="center" width="200">
</el-table-column>
<el-table-column column-key="ModTime" prop="ModTime" sortable
:sort-method="(a, b) => a.ModTimeNum - b.ModTimeNum" :label="$t('modelManage.updateTime')" align="center"
header-align="center" width="200">
</el-table-column>
<el-table-column v-if="modelType == 1 && canDelete" column-key="operate" prop="operate"
:label="$t('modelManage.operate')" align="center" header-align="center" width="200">
<template slot-scope="scope">
<span v-if="!scope.row.IsDir" class="btn-del" @click="deleteFile(scope.row)">{{ $t('modelManage.delete')
}}</span>
</template>
</el-table-column>
</el-table>
</div>
</div>
</div>
</div>
</template>

<script>

import { getModelInfoByName, modifyModel, getModelFiles, deleteModelFile } from '~/apis/modules/modelmanage';
import { getUrlSearchParams, getListValueWithKey, transFileSize, renderSpecStr } from '~/utils';
import { MODEL_ENGINES } from '~/const';
import { formatDate } from 'element-ui/lib/utils/date-util';

const REPO_NAME = location.pathname.split('/')[2];
const MAX_LABEL_COUNT = 5;

export default {
data() {
return {
modelType: '0', // 1: local, 0: online
canOperate: false,
canDelete: false,
isExpanded: false,
loading: false,
repo: location.pathname.split('/').slice(0, 3).join('/'),
state: {
type: 0,
id: '',
name: '',
version: '0.0.1',
engine: '0',
label: '',
description: '',
},
editDescr: '',
isEidtDescr: false,
editLabel: '',
isEidtLabel: false,
engineList: MODEL_ENGINES,
curVersion: '',
modelList: [],
filesList: [],
filePath: [],
};
},
components: {},
methods: {
getDirFiles(dir) {
dir = dir.length ? dir.slice(1) : '';
getModelFiles({
repo: this.repo,
ID: this.state.id,
parentDir: dir,
}).then(res => {
const list = res.data || [];
list.forEach(item => {
item.SizeShow = item.IsDir ? '' : transFileSize(item.Size);
item.ModTimeNum = new Date(item.ModTime).getTime();
})
list.sort((a, b) => b.ModTimeNum - a.ModTimeNum);
this.filesList = list;
this.$refs['tableRef']?.clearSort();
}).catch(err => {
console.log(err);
});
},
goNextDir(item) {
this.filePath.push({
label: item.FileName,
path: item.FileName
});
const dir = this.filePath.map((item) => item.path).join('/');
this.getDirFiles(dir);
},
goBackDir(item) {
const index = this.filePath.findIndex(pth => item === pth);
this.filePath = this.filePath.slice(0, index + 1);
const dir = this.filePath.map((item) => item.path).join('/');
this.getDirFiles(dir);
},
changeVersion(version, noFileRefresh) {
const data = this.modelList.filter((model) => model.version == version)[0];
this.modelType = data.modelType;
this.canOperate = data.isCanOper;
this.canDelete = data.isCanDelete;
this.state.type = data.type;
this.state.typeStr = data.type == 0 ? 'CPU/GPU' : data.type == 1 ? 'NPU' : '';
this.state.id = data.id;
this.state.name = data.name;
this.state.version = data.version;
this.state.engine = data.engine.toString();
this.state.engineName = getListValueWithKey(MODEL_ENGINES, data.engine.toString());
this.state.modelSize = transFileSize(data.size);
this.state.label = data.label || '--';
this.state._label = data.label;
this.state.description = data.description || '--';
this.state._description = data.description;
this.state.createTime = formatDate(new Date(data.createdUnix * 1000), 'yyyy-MM-dd HH:mm:ss');

const trainTaskInfo = data.trainTaskInfo ? JSON.parse(data.trainTaskInfo) : '';
Object.assign(this.state, {
displayJobName: '--',
branchName: '--',
bootFile: '--',
datasetName: '--',
parameters: '--',
workServerNumber: '--',
specStr: '--',
});
if (trainTaskInfo) {
const parameters = trainTaskInfo.Parameters ? JSON.parse(trainTaskInfo.Parameters).parameter : [];
const parametersStr = parameters.map((item) => { return item.label + '=' + item.value }).join('; ');
const taskType = trainTaskInfo.Type;
let taskUrl = location.href.split('modelmanage')[0];
if (taskType == 0) {
taskUrl = taskUrl + 'cloudbrain/train-job/' + trainTaskInfo.JobID;
} else if (taskType == 1) {
taskUrl = taskUrl + 'modelarts/train-job/' + trainTaskInfo.JobID;
} else if (taskType == 2) {
taskUrl = taskUrl + 'grampus/train-job/' + trainTaskInfo.JobID;
}
const versionName = trainTaskInfo.VersionName;
const versionHtml = versionName ? `<span class="append-txt" title="${versionName}">${versionName}</span>` : '';
const codeCommitID = data.codeCommitID;
const codeCommitIDHtml = codeCommitID ? `<span class="append-txt" title="${codeCommitID}">${codeCommitID.slice(0, 10)}</span>` : '';
let specObj;
try {
specObj = trainTaskInfo.FlavorName ? JSON.parse(trainTaskInfo.FlavorName) : '';
} catch (e) {
specObj = trainTaskInfo.FlavorName;
}
const specStr = typeof specObj == 'object' ? renderSpecStr(specObj, false) : specObj;
Object.assign(this.state, {
displayJobName: `<a href="${taskUrl}" title="${trainTaskInfo.DisplayJobName}">${trainTaskInfo.DisplayJobName}</a>${versionHtml}`,
branchName: `<span>${trainTaskInfo.BranchName}</span>${codeCommitIDHtml}`,
bootFile: trainTaskInfo.BootFile,
datasetName: trainTaskInfo.DatasetName,
parameters: parametersStr || '--',
workServerNumber: trainTaskInfo.WorkServerNumber || '--',
specStr: specStr || '--',
});
}
this.curVersion = version;
if (!noFileRefresh) {
this.filePath = [{ label: version, path: '' }];
this.getDirFiles('')
}
},
goUploadPage() {
window.location.href = `${this.repo}/modelmanage/create_local_model_2?type=1&name=${encodeURIComponent(this.state.name)}&id=${this.state.id}`;
},
backToModelListPage() {
const list = window.location.href.split('/');
list.pop();
list.push('show_model');
window.location.href = list.join('/');
},
labelInput() {
const hasEndSpace = this.editLabel[this.editLabel.length - 1] == ' ';
const list = this.editLabel.trim().split(' ').filter(label => label != '');
this.editLabel = list.slice(0, MAX_LABEL_COUNT).join(' ') + (hasEndSpace && list.length < MAX_LABEL_COUNT ? ' ' : '');
},
submitEidt(type) {
const obj = {
repo: this.repo,
type: this.state.type,
id: this.state.id,
name: this.state.name,
version: this.state.version,
engine: this.state.engine,
label: type == 'label' ? this.editLabel : this.state.label,
description: type == 'descr' ? this.editDescr : this.state.description,
};
modifyModel(obj).then(res => {
res = res.data;
if (res && res.code == '0') {
if (type == 'label') {
this.state.label = this.editLabel;
this.state._label = this.editLabel;
this.isEidtLabel = false;
} else if (type == 'descr') {
this.state.description = this.editDescr;
this.state._description = this.editDescr;
this.isEidtDescr = false;
}
} else {
this.$message({
type: 'error',
message: this.$t('modelManage.infoModificationFailed'),
});
}
}).catch(err => {
console.log(err);
this.$message({
type: 'error',
message: this.$t('modelManage.infoModificationFailed'),
});
});
},
deleteFile(file) {
this.$confirm(this.$t('modelManage.deleteModelFileConfirmTips'), this.$t('tips'), {
confirmButtonText: this.$t('confirm1'),
cancelButtonText: this.$t('cancel'),
type: 'warning',
lockScroll: false,
}).then(() => {
this.loading = true;
deleteModelFile({
repo: this.repo,
id: this.state.id,
fileName: file.FileName,
}).then(res => {
res = res.data;
if (res.code == '0') {
setTimeout(() => {
this.loading = false;
this.updateModelInfo();
const dir = this.filePath.map((item) => item.path).join('/');
this.getDirFiles(dir);
}, 30);
} else {
this.loading = false;
this.$message({
type: 'error',
message: this.$t('modelManage.modelFileDeleteFailed'),
});
}
}).catch(err => {
console.log(err);
this.$message({
type: 'error',
message: this.$t('modelManage.modelFileDeleteFailed'),
});
});
}).catch(() => { });
},
updateModelInfo() {
getModelInfoByName({
repo: this.repo,
name: this.state.name,
}).then(res => {
const list = res.data || [];
this.modelList = list;
const noFileRefresh = true;
this.changeVersion(this.curVersion, noFileRefresh);
}).catch(err => {
console.log(err);
});
},
},
mounted() {
const urlParams = getUrlSearchParams();
if (urlParams.name) {
this.state.name = urlParams.name;
this.loading = true;
getModelInfoByName({
repo: this.repo,
name: urlParams.name,
}).then(res => {
this.loading = false;
const list = res.data || [];
this.modelList = list;
if (list && list.length) {
const data = list[0];
this.changeVersion(data.version);
}
}).catch(err => {
this.loading = false;
console.log(err);
this.backToModelListPage();
});
} else {
this.backToModelListPage();
}
},
beforeDestroy() {
},
};
</script>

<style scoped lang="less">
.header {
display: flex;
align-items: center;

.version {
margin-left: 16px;
width: 90px;
}
}

.content {
.title {
font-weight: 550;
font-size: 14px;
color: rgb(16, 16, 16);
margin-bottom: 10px;
}

.detail-info {
border: 1px solid rgb(232, 232, 232);
border-bottom: none;
padding: 22px;
padding-bottom: 1px;

.area-c {
display: flex;

.area {
flex: 1;

.row {
display: flex;
height: 32px;
margin-bottom: 4px;
align-items: center;

&.edit-row {
height: unset;
}

.tit {
width: 160px;
text-align: right;
color: rgb(136, 136, 136);
}

.val {
flex: 1;
color: rgb(16, 16, 16);
position: relative;
height: 20px;

&.edit-val {
height: unset;
}

.txt-wrap {
position: absolute;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
width: 100%;

/deep/.append-txt {
margin-left: 6px;
background-color: gainsboro;
padding: 2px;
border-radius: 2px;
font-size: 12px;
}
}
}

.txt-edit {
padding-right: 20px;
}
}
}
}
}

.expand-line {
display: flex;
align-items: center;
border: 1px solid rgb(232, 232, 232);
border-top: none;
border-bottom: none;
padding: 16px 0;

.line {
flex: 1;
height: 1px;
background-color: rgb(232, 232, 232);
margin: 0 22px;
}

.expand-btn {
color: rgba(22, 132, 252, 1);
cursor: pointer;

.icon {
margin-right: 2px;
font-size: 14px;
color: rgba(22, 132, 252, 0.8);
}
}
}


.files-info {
border: 1px solid rgb(232, 232, 232);
border-top: none;
border-bottom: none;

.top {
padding: 0 22px 8px 22px;
display: flex;
align-items: center;
justify-content: space-between;
}

.files-path-c {
margin-bottom: 4px;
height: 20px;

.file-path {
margin-right: 6px;
float: left;

.path-name {
&.canback {
color: #4183c4;
}
}
}
}

.table-container {
/deep/ .el-table__header {
th {
background: rgb(245, 245, 246);
color: rgb(16, 16, 16);
font-weight: 400;
font-size: 14px;
}
}

/deep/ .el-table__body {
td {
color: rgb(16, 16, 16);
font-weight: 400;
font-size: 14px;
}
}

.tbl-file-name {
height: 32px;
display: flex;
align-items: center;
overflow: hidden;
font-size: 16px;
font-weight: 500;
position: relative;

a {
max-width: 100%;

.fitted {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
max-width: 100%;
}
}

.disabled-download {
cursor: default;
pointer-events: none;
color: rgba(0, 0, 0, .6) !important;
opacity: .45 !important;
}
}

.btn-del {
color: #0366d6;
cursor: pointer;
}
}
}
}


.el-select-dropdown__item.selected {
color: rgba(0, 0, 0, .95);
}

/deep/ .el-select {
.is-focus {
.el-input__inner {
border-color: #85b7d9;
}
}
}

.el-select {
/deep/ .el-input__inner {
font-weight: 600;
}
}

/deep/ .el-input__inner {
&:focus {
border-color: #85b7d9;
}
}

/deep/ .el-textarea__inner {
&:focus {
border-color: #85b7d9;
}
}
</style>
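For reference, the label-limiting behavior of labelInput above as a standalone sketch (not part of the component): at most MAX_LABEL_COUNT (5) space-separated labels are kept, and a trailing space is preserved only while more labels may still be added.

// Standalone version of the labelInput clamping logic, for illustration only.
const MAX_LABEL_COUNT = 5;
function clampLabels(input) {
  const hasEndSpace = input[input.length - 1] === ' ';
  const list = input.trim().split(' ').filter((label) => label !== '');
  return list.slice(0, MAX_LABEL_COUNT).join(' ') + (hasEndSpace && list.length < MAX_LABEL_COUNT ? ' ' : '');
}

clampLabels('cv detection yolo '); // => 'cv detection yolo '
clampLabels('a b c d e f g');      // => 'a b c d e'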

Some files were not shown because too many files changed in this diff
