
Merge branch 'V20220926' into zouap_dev

tags/v1.22.9.2^2
chenshihai, 3 years ago
parent commit e81cfe53bd
100 changed files with 5520 additions and 516 deletions
  1. models/action.go (+101, -0)
  2. models/action_list.go (+165, -1)
  3. models/ai_model_manage.go (+24, -0)
  4. models/attachment.go (+8, -0)
  5. models/cloudbrain.go (+122, -21)
  6. models/cloudbrain_spec.go (+27, -0)
  7. models/cloudbrain_static.go (+1, -0)
  8. models/error.go (+24, -0)
  9. models/helper.go (+22, -0)
  10. models/limit_config.go (+184, -0)
  11. models/models.go (+8, -0)
  12. models/point_account.go (+142, -0)
  13. models/point_account_log.go (+21, -0)
  14. models/repo.go (+6, -0)
  15. models/repo_watch.go (+19, -2)
  16. models/reward_admin_log.go (+79, -0)
  17. models/reward_operate_record.go (+478, -0)
  18. models/reward_periodic_task.go (+115, -0)
  19. models/task_accomplish_log.go (+44, -0)
  20. models/task_config.go (+374, -0)
  21. models/user.go (+4, -0)
  22. models/wechat_bind.go (+4, -0)
  23. modules/auth/cloudbrain.go (+5, -0)
  24. modules/auth/grampus.go (+5, -0)
  25. modules/auth/modelarts.go (+6, -0)
  26. modules/auth/user_form.go (+1, -1)
  27. modules/auth/wechat/access_token.go (+11, -8)
  28. modules/auth/wechat/bind.go (+3, -3)
  29. modules/auth/wechat/client.go (+2, -0)
  30. modules/cloudbrain/cloudbrain.go (+14, -0)
  31. modules/cloudbrain/resty.go (+8, -4)
  32. modules/context/point.go (+21, -0)
  33. modules/cron/tasks_basic.go (+26, -0)
  34. modules/dataset/dataset.go (+25, -1)
  35. modules/eventsource/manager_run.go (+22, -0)
  36. modules/grampus/grampus.go (+56, -5)
  37. modules/grampus/resty.go (+6, -5)
  38. modules/modelarts/modelarts.go (+18, -2)
  39. modules/notification/action/action.go (+76, -0)
  40. modules/notification/base/notifier.go (+6, -0)
  41. modules/notification/base/null.go (+18, -0)
  42. modules/notification/notification.go (+38, -0)
  43. modules/notification/reward/point.go (+27, -0)
  44. modules/redis/redis_client/client.go (+83, -1)
  45. modules/redis/redis_key/account_redis_key.go (+17, -0)
  46. modules/redis/redis_key/cloudbrain_redis_key.go (+7, -0)
  47. modules/redis/redis_key/key_base.go (+2, -0)
  48. modules/redis/redis_key/limit_redis_key.go (+26, -0)
  49. modules/redis/redis_key/reward_redis_key.go (+21, -0)
  50. modules/redis/redis_key/serial_redis_key.go (+10, -0)
  51. modules/redis/redis_key/task_redis_key.go (+14, -0)
  52. modules/redis/redis_lock/lock.go (+16, -9)
  53. modules/setting/setting.go (+45, -24)
  54. modules/templates/helper.go (+1, -1)
  55. modules/util/uuid_util.go (+10, -0)
  56. options/locale/locale_en-US.ini (+23, -5)
  57. options/locale/locale_zh-CN.ini (+23, -3)
  58. package.json (+1, -1)
  59. public/home/home.js (+13, -2)
  60. routers/admin/dataset.go (+3, -0)
  61. routers/api/v1/api.go (+1, -0)
  62. routers/api/v1/repo/cloudbrain_dashboard.go (+2, -0)
  63. routers/authentication/wechat.go (+1, -0)
  64. routers/authentication/wechat_event.go (+1, -1)
  65. routers/image/image.go (+5, -0)
  66. routers/repo/ai_model_manage.go (+49, -24)
  67. routers/repo/cloudbrain.go (+153, -11)
  68. routers/repo/grampus.go (+311, -84)
  69. routers/repo/modelarts.go (+158, -254)
  70. routers/reward/point/account.go (+24, -0)
  71. routers/reward/point/limit.go (+45, -0)
  72. routers/reward/point/point.go (+170, -0)
  73. routers/routes/routes.go (+41, -10)
  74. routers/task/config.go (+68, -0)
  75. routers/task/task.go (+15, -0)
  76. routers/user/setting/profile.go (+2, -0)
  77. services/phone/phone.go (+1, -1)
  78. services/reward/admin_operate.go (+50, -0)
  79. services/reward/cloudbrain_deduct.go (+145, -0)
  80. services/reward/limiter/config.go (+100, -0)
  81. services/reward/limiter/limiter.go (+258, -0)
  82. services/reward/notify.go (+54, -0)
  83. services/reward/operator.go (+278, -0)
  84. services/reward/period_task.go (+131, -0)
  85. services/reward/point/account/point_account.go (+150, -0)
  86. services/reward/point/point_operate.go (+65, -0)
  87. services/reward/record.go (+47, -0)
  88. services/reward/serial.go (+28, -0)
  89. services/socketwrap/clientManager.go (+14, -12)
  90. services/task/period/handler.go (+50, -0)
  91. services/task/task.go (+145, -0)
  92. services/task/task_config.go (+228, -0)
  93. services/wechat/auto_reply.go (+0, -0)
  94. services/wechat/event_handle.go (+14, -7)
  95. templates/admin/cloudbrain/list.tmpl (+8, -0)
  96. templates/base/footer.tmpl (+11, -1)
  97. templates/base/footer_content.tmpl (+5, -5)
  98. templates/base/footer_content_fluid.tmpl (+5, -5)
  99. templates/base/head_navbar.tmpl (+4, -2)
  100. templates/base/head_navbar_fluid.tmpl (+2, -0)

models/action.go (+101, -0)

@@ -60,6 +60,12 @@ const (
ActionCreateGPUTrainTask //31
ActionCreateGrampusNPUTrainTask //32
ActionCreateGrampusGPUTrainTask //33
ActionBindWechat //34
ActionDatasetRecommended //35
ActionCreateImage //36
ActionImageRecommend //37
ActionChangeUserAvatar //38

)


// Action represents user operation type and other information to
@@ -81,6 +87,19 @@ type Action struct {
IsTransformed bool `xorm:"INDEX NOT NULL DEFAULT false"`
Content string `xorm:"TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
Cloudbrain *Cloudbrain `xorm:"-"`
}

type ActionShow struct {
OpType ActionType
TaskType TaskType
RepoLink string
ShortRepoFullDisplayName string
Content string
RefName string
IssueInfos []string
CommentLink string
Cloudbrain *CloudbrainShow4Action
}


// GetOpType gets the ActionType of this action.
@@ -218,6 +237,40 @@ func (a *Action) GetRepoLink() string {
return "/" + a.GetRepoPath()
}


func (a *Action) ToShow() *ActionShow {
actionShow := &ActionShow{}
actionShow.OpType = a.OpType
actionShow.TaskType = GetTaskTypeFromAction(a.OpType)
actionShow.Content = a.Content
actionShow.RefName = a.RefName

if strings.Contains(a.Content, "|") && a.IsIssueAction() {
actionShow.IssueInfos = a.GetIssueInfos()
}

if a.Repo != nil {
actionShow.RepoLink = a.GetRepoLink()
actionShow.ShortRepoFullDisplayName = a.ShortRepoFullDisplayName()
}
if a.Comment != nil {
actionShow.CommentLink = a.GetCommentLink()
}

if a.Cloudbrain != nil {
c := &CloudbrainShow4Action{
ID: a.Cloudbrain.ID,
JobID: a.Cloudbrain.JobID,
Type: a.Cloudbrain.Type,
JobType: a.Cloudbrain.JobType,
DisplayJobName: a.Cloudbrain.DisplayJobName,
ComputeResource: a.Cloudbrain.ComputeResource,
}
actionShow.Cloudbrain = c
}

return actionShow
}

// GetRepositoryFromMatch returns a *Repository from a username and repo strings
func GetRepositoryFromMatch(ownerName string, repoName string) (*Repository, error) {
var err error
@@ -315,6 +368,39 @@ func (a *Action) GetIssueContent() string {
return issue.Content
}


func (a *Action) IsCloudbrainAction() bool {
switch a.OpType {
case ActionCreateDebugGPUTask,
ActionCreateDebugNPUTask,
ActionCreateTrainTask,
ActionCreateInferenceTask,
ActionCreateBenchMarkTask,
ActionCreateGPUTrainTask,
ActionCreateGrampusNPUTrainTask,
ActionCreateGrampusGPUTrainTask:
return true
}
return false
}

func (a *Action) IsIssueAction() bool {
switch a.OpType {
case ActionCreateIssue,
ActionCloseIssue,
ActionClosePullRequest,
ActionReopenIssue,
ActionReopenPullRequest,
ActionCommentPull,
ActionCommentIssue,
ActionCreatePullRequest,
ActionApprovePullRequest,
ActionRejectPullRequest,
ActionMergePullRequest:
return true
}
return false
}

// GetFeedsOptions options for retrieving feeds
type GetFeedsOptions struct {
RequestedUser *User // the user we want activity for
@@ -404,3 +490,18 @@ func GetUnTransformedActions() ([]*Action, error) {
Find(&actions)
return actions, err
}

func GetActionByIds(ids []int64) ([]*Action, error) {
if len(ids) == 0 {
return nil, nil
}
actions := make([]*Action, 0)
err := x.In("id", ids).Find(&actions)
if err != nil {
return nil, err
}
if err := ActionList(actions).LoadAllAttributes(); err != nil {
return nil, fmt.Errorf("ActionList loadAttributes: %v", err)
}
return actions, nil
}
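A minimal caller-side sketch of how the new action helpers combine (illustrative only; the wrapper package and function name are hypothetical and not part of this commit):

package example // hypothetical caller outside the models package

import "code.gitea.io/gitea/models"

// actionShowsByIds loads the referenced actions (GetActionByIds attaches users,
// repositories, comments and cloudbrains via LoadAllAttributes) and converts
// each one into its presentation form.
func actionShowsByIds(ids []int64) ([]*models.ActionShow, error) {
    actions, err := models.GetActionByIds(ids)
    if err != nil {
        return nil, err
    }
    shows := make([]*models.ActionShow, 0, len(actions))
    for _, a := range actions {
        shows = append(shows, a.ToShow())
    }
    return shows, nil
}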

models/action_list.go (+165, -1)

@@ -4,7 +4,11 @@


package models


import "fmt"
import (
"fmt"
"strconv"
"xorm.io/builder"
)


// ActionList defines a list of actions
type ActionList []*Action
@@ -26,6 +30,9 @@ func (actions ActionList) loadUsers(e Engine) ([]*User, error) {


userIDs := actions.getUserIDs()
userMaps := make(map[int64]*User, len(userIDs))
if len(userIDs) == 0 {
return make([]*User, 0), nil
}
err := e.
In("id", userIDs).
Find(&userMaps)
@@ -61,6 +68,9 @@ func (actions ActionList) loadRepositories(e Engine) ([]*Repository, error) {


repoIDs := actions.getRepoIDs()
repoMaps := make(map[int64]*Repository, len(repoIDs))
if len(repoIDs) == 0 {
return make([]*Repository, 0), nil
}
err := e.
In("id", repoIDs).
Find(&repoMaps)
@@ -79,6 +89,133 @@ func (actions ActionList) LoadRepositories() ([]*Repository, error) {
return actions.loadRepositories(x)
}


func (actions ActionList) getCommentIDs() []int64 {
commentIDs := make(map[int64]struct{}, len(actions))
for _, action := range actions {
if action.CommentID == 0 {
continue
}
if _, ok := commentIDs[action.CommentID]; !ok {
commentIDs[action.CommentID] = struct{}{}
}
}
return keysInt64(commentIDs)
}

func (actions ActionList) loadComments(e Engine) ([]*Comment, error) {
if len(actions) == 0 {
return nil, nil
}

commentIDs := actions.getCommentIDs()

commentMaps := make(map[int64]*Comment, len(commentIDs))
if len(commentIDs) == 0 {
return make([]*Comment, 0), nil
}
err := e.
In("id", commentIDs).
Find(&commentMaps)
if err != nil {
return nil, fmt.Errorf("find comment: %v", err)
}

for _, action := range actions {
if action.CommentID > 0 {
action.Comment = commentMaps[action.CommentID]
}
}
return valuesComment(commentMaps), nil
}

// LoadComments loads actions' all comments
func (actions ActionList) LoadComments() ([]*Comment, error) {
return actions.loadComments(x)
}

func (actions ActionList) getCloudbrainIDs() []int64 {
cloudbrainIDs := make(map[int64]struct{}, 0)
for _, action := range actions {
if !action.IsCloudbrainAction() {
continue
}
cloudbrainId, _ := strconv.ParseInt(action.Content, 10, 64)
if _, ok := cloudbrainIDs[cloudbrainId]; !ok {
cloudbrainIDs[cloudbrainId] = struct{}{}
}
}
return keysInt64(cloudbrainIDs)
}

func (actions ActionList) getCloudbrainJobIDs() []string {
cloudbrainJobIDs := make(map[string]struct{}, 0)
for _, action := range actions {
if !action.IsCloudbrainAction() {
continue
}
if _, ok := cloudbrainJobIDs[action.Content]; !ok {
cloudbrainJobIDs[action.Content] = struct{}{}
}
}
return keysString(cloudbrainJobIDs)
}

func (actions ActionList) loadCloudbrains(e Engine) ([]*Cloudbrain, error) {
if len(actions) == 0 {
return nil, nil
}
cloudbrainIDs := actions.getCloudbrainIDs()
cloudbrainJobIDs := actions.getCloudbrainJobIDs()

cloudbrainMaps := make(map[int64]*Cloudbrain, len(cloudbrainIDs))
if len(cloudbrainIDs) == 0 {
return make([]*Cloudbrain, 0), nil
}
//When the different types of cloudbrain tasks publish an action, the content field stores different identifiers: some use the ID, others the jobId,
//so when looking up the cloudbrain object for an action, both fields are used as query conditions.
cond := builder.Or(builder.In("id", cloudbrainIDs)).Or(builder.In("job_id", cloudbrainJobIDs))
err := e.
Where(cond).Unscoped().
Find(&cloudbrainMaps)
if err != nil {
return nil, fmt.Errorf("find cloudbrain: %v", err)
}

cloudBrainJobIdMap := make(map[string]*Cloudbrain, len(cloudbrainIDs))
for _, v := range cloudbrainMaps {
cloudBrainJobIdMap[v.JobID] = v
}

for _, action := range actions {
if !action.IsCloudbrainAction() {
continue
}
cloudbrainId, _ := strconv.ParseInt(action.Content, 10, 64)
if cloudbrainId > 0 {
if c, ok := cloudbrainMaps[cloudbrainId]; ok {
if c.DisplayJobName == action.RefName || c.JobName == action.RefName {
action.Cloudbrain = c
continue
}

}
}
if c, ok := cloudBrainJobIdMap[action.Content]; ok {
if c.DisplayJobName == action.RefName || c.JobName == action.RefName {
action.Cloudbrain = c
continue
}

}
}
return valuesCloudbrain(cloudbrainMaps), nil
}

// LoadCloudbrains loads actions' all cloudbrains
func (actions ActionList) LoadCloudbrains() ([]*Cloudbrain, error) {
return actions.loadCloudbrains(x)
}

// loadAttributes loads all attributes
func (actions ActionList) loadAttributes(e Engine) (err error) {
if _, err = actions.loadUsers(e); err != nil {
@@ -96,3 +233,30 @@ func (actions ActionList) loadAttributes(e Engine) (err error) {
func (actions ActionList) LoadAttributes() error {
return actions.loadAttributes(x)
}

// LoadAllAttributes loads all attributes of the actions
// compare with LoadAttributes() ,LoadAllAttributes() loads Comment and Cloudbrain attribute
func (actions ActionList) LoadAllAttributes() error {
return actions.loadAllAttributes(x)
}

// loadAllAttributes
func (actions ActionList) loadAllAttributes(e Engine) (err error) {
if _, err = actions.loadUsers(e); err != nil {
return
}

if _, err = actions.loadRepositories(e); err != nil {
return
}

if _, err = actions.loadComments(e); err != nil {
return
}

if _, err = actions.loadCloudbrains(e); err != nil {
return
}

return nil
}

models/ai_model_manage.go (+24, -0)

@@ -25,6 +25,7 @@ type AiModelManage struct {
DownloadCount int `xorm:"NOT NULL DEFAULT 0"`
Engine int64 `xorm:"NOT NULL DEFAULT 0"`
Status int `xorm:"NOT NULL DEFAULT 0"`
StatusDesc string `xorm:"varchar(500)"`
Accuracy string `xorm:"varchar(1000)"`
AttachmentId string `xorm:"NULL"`
RepoId int64 `xorm:"INDEX NULL"`
@@ -286,6 +287,23 @@ func ModifyModelDescription(id string, description string) error {
return nil
}


func ModifyModelStatus(id string, modelSize int64, status int, modelPath string, statusDesc string) error {
var sess *xorm.Session
sess = x.ID(id)
defer sess.Close()
re, err := sess.Cols("size", "status", "path", "status_desc").Update(&AiModelManage{
Size: modelSize,
Status: status,
Path: modelPath,
StatusDesc: statusDesc,
})
if err != nil {
return err
}
log.Info("success to update ModelStatus from db.re=" + fmt.Sprint((re)))
return nil
}

func ModifyModelNewProperty(id string, new int, versioncount int) error {
var sess *xorm.Session
sess = x.ID(id)
@@ -356,6 +374,12 @@ func QueryModel(opts *AiModelQueryOptions) ([]*AiModelManage, int64, error) {
)
}


if (opts.Status) >= 0 {
cond = cond.And(
builder.Eq{"ai_model_manage.status": opts.Status},
)
}

count, err := sess.Where(cond).Count(new(AiModelManage))
if err != nil {
return nil, 0, fmt.Errorf("Count: %v", err)


models/attachment.go (+8, -0)

@@ -701,3 +701,11 @@ func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) {


return attachments, count, nil
}

func GetAllDatasetContributorByDatasetId(datasetId int64) ([]*User, error) {
r := make([]*User, 0)
if err := x.Select("distinct(public.user.*)").Table("attachment").Join("LEFT", "user", "public.user.ID = attachment.uploader_id").Where("attachment.dataset_id = ?", datasetId).Find(&r); err != nil {
return nil, err
}
return r, nil
}
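A short usage sketch for the new contributor query (hypothetical caller, not part of the commit); it only assumes the Email field that the User model already exposes:

package example // hypothetical caller sketch

import "code.gitea.io/gitea/models"

// contributorMails collects the e-mail addresses of everyone who uploaded an
// attachment to the given dataset, e.g. to notify them when it is recommended.
func contributorMails(datasetID int64) ([]string, error) {
    users, err := models.GetAllDatasetContributorByDatasetId(datasetID)
    if err != nil {
        return nil, err
    }
    mails := make([]string, 0, len(users))
    for _, u := range users {
        mails = append(mails, u.Email)
    }
    return mails, nil
}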

models/cloudbrain.go (+122, -21)

@@ -4,6 +4,7 @@ import (
"encoding/json" "encoding/json"
"errors" "errors"
"fmt" "fmt"
"path"
"strconv" "strconv"
"strings" "strings"
"time" "time"
@@ -101,7 +102,8 @@ const (
ModelArtsTrainJobCheckRunningCompleted ModelArtsJobStatus = "CHECK_RUNNING_COMPLETED" //审核作业已经完成 ModelArtsTrainJobCheckRunningCompleted ModelArtsJobStatus = "CHECK_RUNNING_COMPLETED" //审核作业已经完成
ModelArtsTrainJobCheckFailed ModelArtsJobStatus = "CHECK_FAILED" //审核作业失败 ModelArtsTrainJobCheckFailed ModelArtsJobStatus = "CHECK_FAILED" //审核作业失败


DURATION_STR_ZERO = "00:00:00"
CloudbrainKeyDuration = 24 * time.Hour


//grampus
GrampusStatusPending = "pending"
@@ -187,6 +189,7 @@ type Cloudbrain struct {
ModelName string //model name
ModelVersion string //model version
CkptName string //weight file name
PreTrainModelUrl string //pretrained model path
ResultUrl string //obs path of the inference results


User *User `xorm:"-"`
@@ -199,6 +202,51 @@
Spec *Specification `xorm:"-"`
}


type CloudbrainShow struct {
ID int64
JobID string
RepoFullName string
Type int
JobType string
DisplayJobName string
Duration string
ResourceSpec *Specification
ComputeResource string
AiCenter string
WorkServerNumber int
}

type CloudbrainShow4Action struct {
ID int64
JobID string
Type int
JobType string
DisplayJobName string
ComputeResource string
}

func (task *Cloudbrain) ToShow() *CloudbrainShow {
n := 1
if task.WorkServerNumber > 1 {
n = task.WorkServerNumber
}
c := &CloudbrainShow{
ID: task.ID,
JobID: task.JobID,
JobType: task.JobType,
Type: task.Type,
DisplayJobName: task.DisplayJobName,
Duration: task.TrainJobDuration,
ResourceSpec: task.Spec,
ComputeResource: task.ComputeResource,
WorkServerNumber: n,
}
if task.Repo != nil {
c.RepoFullName = task.Repo.FullName()
}
return c
}

func (task *Cloudbrain) ComputeAndSetDuration() {
var d int64
if task.StartTime == 0 {
@@ -239,7 +287,7 @@ func (task *Cloudbrain) IsRunning() bool {
}


func ConvertDurationToStr(duration int64) string {
if duration == 0 {
if duration <= 0 {
return DURATION_STR_ZERO
}
return util.AddZero(duration/3600) + ":" + util.AddZero(duration%3600/60) + ":" + util.AddZero(duration%60)
@@ -596,11 +644,23 @@ type ResourceSpecs struct {
}


type ResourceSpec struct {
Id int `json:"id"`
CpuNum int `json:"cpu"`
GpuNum int `json:"gpu"`
MemMiB int `json:"memMiB"`
ShareMemMiB int `json:"shareMemMiB"`
Id int `json:"id"`
CpuNum int `json:"cpu"`
GpuNum int `json:"gpu"`
MemMiB int `json:"memMiB"`
ShareMemMiB int `json:"shareMemMiB"`
UnitPrice int64 `json:"unitPrice"`
}

type FlavorInfos struct {
FlavorInfo []*FlavorInfo `json:"flavor_info"`
}

type FlavorInfo struct {
Id int `json:"id"`
Value string `json:"value"`
Desc string `json:"desc"`
UnitPrice int64 `json:"unitPrice"`
}


type SpecialPools struct {
@@ -1420,14 +1480,23 @@ type GrampusStopJobResponse struct {
}


type GrampusTasks struct {
Command string `json:"command"`
Name string `json:"name"`
ImageId string `json:"imageId"`
ResourceSpecId string `json:"resourceSpecId"`
ImageUrl string `json:"imageUrl"`
CenterID []string `json:"centerID"`
CenterName []string `json:"centerName"`
ReplicaNum int `json:"replicaNum"`
Command string `json:"command"`
Name string `json:"name"`
ImageId string `json:"imageId"`
ResourceSpecId string `json:"resourceSpecId"`
ImageUrl string `json:"imageUrl"`
CenterID []string `json:"centerID"`
CenterName []string `json:"centerName"`
ReplicaNum int `json:"replicaNum"`
Datasets []GrampusDataset `json:"datasets"`
Models []GrampusDataset `json:"models"`
}

type GrampusDataset struct {
Name string `json:"name"`
Bucket string `json:"bucket"`
EndPoint string `json:"endPoint"`
ObjectKey string `json:"objectKey"`
}


type CreateGrampusJobRequest struct {
@@ -2220,12 +2289,34 @@ func CloudbrainAllStatic(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, er
return cloudbrains, count, nil
}


func GetStartedCloudbrainTaskByUpdatedUnix(startTime, endTime time.Time) ([]Cloudbrain, error) {
r := make([]Cloudbrain, 0)
err := x.Where("updated_unix >= ? and updated_unix <= ? and start_time > 0", startTime.Unix(), endTime.Unix()).Unscoped().Find(&r)
if err != nil {
return nil, err
}
return r, nil
}

func GetCloudbrainByIds(ids []int64) ([]*Cloudbrain, error) {
if len(ids) == 0 {
return nil, nil
}
cloudbrains := make([]*Cloudbrain, 0)
err := x.In("id", ids).Unscoped().Find(&cloudbrains)
if err != nil {
return nil, err
}
return cloudbrains, nil
}

type DatasetInfo struct {
DataLocalPath string
Name string
FullName string
}


func GetDatasetInfo(uuidStr string) (map[string]DatasetInfo, string, error) {
func GetDatasetInfo(uuidStr string, grampusType ...string) (map[string]DatasetInfo, string, error) {
var datasetNames string
uuids := strings.Split(uuidStr, ";")
if len(uuids) > setting.MaxDatasetNum {
@@ -2258,16 +2349,26 @@ func GetDatasetInfo(uuidStr string) (map[string]DatasetInfo, string, error) {
return nil, datasetNames, errors.New("the dataset name is same")
}
}
var dataLocalPath string
if len(grampusType) > 0 {
if grampusType[0] == GPU {
dataLocalPath = setting.Attachment.Minio.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID
} else {
dataLocalPath = setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + "/"
}


dataLocalPath := setting.Attachment.Minio.RealPath +
setting.Attachment.Minio.Bucket + "/" +
setting.Attachment.Minio.BasePath +
AttachmentRelativePath(attach.UUID) +
attach.UUID
} else {
dataLocalPath = setting.Attachment.Minio.RealPath +
setting.Attachment.Minio.Bucket + "/" +
setting.Attachment.Minio.BasePath +
AttachmentRelativePath(attach.UUID) +
attach.UUID
}


datasetInfos[attach.UUID] = DatasetInfo{
DataLocalPath: dataLocalPath,
Name: fileName,
FullName: attach.Name,
}
if i == 0 {
datasetNames = attach.Name
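A minimal sketch of the new variadic form of GetDatasetInfo (hypothetical caller; it assumes only the exported GPU constant that the diff itself references):

package example // hypothetical caller sketch

import "code.gitea.io/gitea/models"

// listDatasetPaths resolves the local paths of the selected datasets for a
// Grampus GPU task; omitting the second argument keeps the original
// Minio-based path behaviour.
func listDatasetPaths(uuids string) ([]string, error) {
    infos, names, err := models.GetDatasetInfo(uuids, models.GPU)
    if err != nil {
        return nil, err
    }
    _ = names // semicolon-joined display names of the selected datasets
    paths := make([]string, 0, len(infos))
    for _, info := range infos {
        paths = append(paths, info.DataLocalPath)
    }
    return paths, nil
}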


models/cloudbrain_spec.go (+27, -0)

@@ -72,6 +72,8 @@ func NewCloudBrainSpec(cloudbrainId int64, s Specification) CloudbrainSpec {
}
}


var StatusChangeChan = make(chan *Cloudbrain, 50)

func InsertCloudbrainSpec(c CloudbrainSpec) (int64, error) {
return x.Insert(&c)
}
@@ -107,3 +109,28 @@ func CountNoSpecHistoricTask() (int64, error) {
}
return n, nil
}

// GetResourceSpecMapByCloudbrainIDs
func GetResourceSpecMapByCloudbrainIDs(ids []int64) (map[int64]*Specification, error) {
specs := make([]*CloudbrainSpec, 0)
if err := x.In("cloudbrain_id", ids).Find(&specs); err != nil {
return nil, err
}
r := make(map[int64]*Specification, len(ids))
for _, s := range specs {
r[s.CloudbrainID] = s.ConvertToSpecification()
}
return r, nil
}

func GetCloudbrainTaskUnitPrice(task Cloudbrain) (int, error) {
s, err := GetCloudbrainSpecByID(task.ID)
if err != nil {
return 0, err
}
var n = 1
if task.WorkServerNumber > 1 {
n = task.WorkServerNumber
}
return s.UnitPrice * n, nil
}
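A small sketch of how the new spec map might be used to fill the Cloudbrain.Spec field before pricing (hypothetical helper, not part of the commit):

package example // hypothetical caller sketch

import "code.gitea.io/gitea/models"

// attachSpecs fills the Spec field of each task from cloudbrain_spec, the same
// lookup GetResourceSpecMapByCloudbrainIDs performs for the reward record list.
func attachSpecs(tasks []*models.Cloudbrain) error {
    ids := make([]int64, 0, len(tasks))
    for _, t := range tasks {
        ids = append(ids, t.ID)
    }
    specMap, err := models.GetResourceSpecMapByCloudbrainIDs(ids)
    if err != nil {
        return err
    }
    for _, t := range tasks {
        t.Spec = specMap[t.ID]
    }
    return nil
}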

models/cloudbrain_static.go (+1, -0)

@@ -34,6 +34,7 @@ type TaskDetail struct {
CardDuration string `json:"CardDuration"`
AiCenter string `json:"AiCenter"`
FlavorName string `json:"FlavorName"`
Spec *Specification `json:"Spec"`
}


func GetTodayCreatorCount(beginTime time.Time, endTime time.Time) (int64, error) {


models/error.go (+24, -0)

@@ -2012,3 +2012,27 @@ func IsErrTagNotExist(err error) bool {
_, ok := err.(ErrTagNotExist)
return ok
}

type ErrRecordNotExist struct {
}

func IsErrRecordNotExist(err error) bool {
_, ok := err.(ErrRecordNotExist)
return ok
}

func (err ErrRecordNotExist) Error() string {
return fmt.Sprintf("record not exist in database")
}

type ErrInsufficientPointsBalance struct {
}

func IsErrInsufficientPointsBalance(err error) bool {
_, ok := err.(ErrInsufficientPointsBalance)
return ok
}

func (err ErrInsufficientPointsBalance) Error() string {
return fmt.Sprintf("Insufficient points balance")
}
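A hedged sketch of how callers might branch on the new error types (the handler shape is an assumption; only the two Is… checks come from the diff above):

package example // hypothetical error-handling sketch

import (
    "net/http"

    "code.gitea.io/gitea/models"
)

// writeDeductError maps the new model errors to HTTP responses.
func writeDeductError(w http.ResponseWriter, err error) {
    switch {
    case models.IsErrInsufficientPointsBalance(err):
        http.Error(w, "insufficient points balance", http.StatusForbidden)
    case models.IsErrRecordNotExist(err):
        http.Error(w, "record not exist", http.StatusNotFound)
    default:
        http.Error(w, err.Error(), http.StatusInternalServerError)
    }
}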

models/helper.go (+22, -0)

@@ -11,6 +11,13 @@ func keysInt64(m map[int64]struct{}) []int64 {
}
return keys
}
func keysString(m map[string]struct{}) []string {
var keys = make([]string, 0, len(m))
for k := range m {
keys = append(keys, k)
}
return keys
}


func valuesRepository(m map[int64]*Repository) []*Repository {
var values = make([]*Repository, 0, len(m))
@@ -27,3 +34,18 @@ func valuesUser(m map[int64]*User) []*User {
}
return values
}

func valuesComment(m map[int64]*Comment) []*Comment {
var values = make([]*Comment, 0, len(m))
for _, v := range m {
values = append(values, v)
}
return values
}
func valuesCloudbrain(m map[int64]*Cloudbrain) []*Cloudbrain {
var values = make([]*Cloudbrain, 0, len(m))
for _, v := range m {
values = append(values, v)
}
return values
}

models/limit_config.go (+184, -0)

@@ -0,0 +1,184 @@
package models

import (
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/builder"
)

type LimitType string

const (
LimitTypeTask LimitType = "TASK"
LimitTypeRewardPoint LimitType = "REWARD_POINT"
)

func (l LimitType) Name() string {
switch l {
case LimitTypeTask:
return "TASK"
case LimitTypeRewardPoint:
return "REWARD_POINT"
default:
return ""
}
}

type LimitScope string

const (
LimitScopeAllUsers LimitScope = "ALL_USERS"
LimitScopeSingleUser LimitScope = "SINGLE_USER"
)

func (l LimitScope) Name() string {
switch l {
case LimitScopeAllUsers:
return "ALL_USERS"
case LimitScopeSingleUser:
return "SINGLE_USER"
default:
return ""
}
}

type LimiterRejectPolicy string

const (
JustReject LimiterRejectPolicy = "JUST_REJECT"
PermittedOnce LimiterRejectPolicy = "PERMITTED_ONCE"
FillUp LimiterRejectPolicy = "FillUp"
)

type LimitConfig struct {
ID int64 `xorm:"pk autoincr"`
Title string
RefreshRate string `xorm:"NOT NULL"`
Scope string `xorm:"NOT NULL"`
LimitNum int64 `xorm:"NOT NULL"`
LimitCode string
LimitType string `xorm:"NOT NULL"`
RelatedId int64 `xorm:"INDEX"`
CreatorId int64 `xorm:"NOT NULL"`
CreatorName string
DeleterId int64
DeleterName string
CreatedUnix timeutil.TimeStamp `xorm:"created"`
DeletedAt timeutil.TimeStamp `xorm:"deleted"`
}

type LimitConfigQueryOpts struct {
RefreshRate string
Scope LimitScope
LimitCode string
LimitType LimitType
}

type LimitConfigVO struct {
ID int64
Title string
RefreshRate string
Scope string
LimitNum int64
LimitCode string
LimitType string
Creator string
CreatedUnix timeutil.TimeStamp
}

func (l *LimitConfig) ToLimitConfigVO() *LimitConfigVO {
return &LimitConfigVO{
ID: l.ID,
Title: l.Title,
RefreshRate: l.RefreshRate,
Scope: l.Scope,
LimitNum: l.LimitNum,
LimitCode: l.LimitCode,
LimitType: l.LimitType,
Creator: l.CreatorName,
CreatedUnix: l.CreatedUnix,
}
}

func GetLimitConfigByLimitType(limitType LimitType) ([]LimitConfig, error) {
r := make([]LimitConfig, 0)
err := x.Where(" limit_type = ?", limitType.Name()).Find(&r)
if err != nil {
return nil, err
} else if len(r) == 0 {
return nil, ErrRecordNotExist{}
}
return r, nil
}

func GetLimitersByRelatedIdWithDeleted(limitType LimitType) ([]LimitConfig, error) {
r := make([]LimitConfig, 0)
err := x.Unscoped().Where(" limit_type = ?", limitType.Name()).Find(&r)
if err != nil {
return nil, err
} else if len(r) == 0 {
return nil, ErrRecordNotExist{}
}
return r, nil
}

func AddLimitConfig(l *LimitConfig) error {
sess := x.NewSession()
defer sess.Close()

//delete old limit config
cond := builder.NewCond()
cond = cond.And(builder.Eq{"limit_type": l.LimitType})
cond = cond.And(builder.Eq{"scope": l.Scope})
if l.LimitCode == "" {
subCond := builder.NewCond()
subCond = subCond.Or(builder.IsNull{"limit_code"})
subCond = subCond.Or(builder.Eq{"limit_code": ""})
cond = cond.And(subCond)
} else {
cond = cond.And(builder.Eq{"limit_code": l.LimitCode})
}
_, err := sess.Where(cond).Delete(&LimitConfig{})
if err != nil {
sess.Rollback()
return err
}

//add new config
_, err = sess.Insert(l)
if err != nil {
sess.Rollback()
return err
}

sess.Commit()
return nil
}

func DeleteLimitConfig(config LimitConfig, deleterId int64, deleterName string) error {
sess := x.NewSession()
defer sess.Close()

_, err := x.ID(config.ID).Update(&LimitConfig{DeleterName: deleterName, DeleterId: deleterId})
if err != nil {
sess.Rollback()
return err
}
_, err = x.ID(config.ID).Delete(&LimitConfig{})
if err != nil {
sess.Rollback()
return err
}
sess.Commit()
return nil
}

func GetLimitConfigById(id int64) (*LimitConfig, error) {
r := &LimitConfig{}
isOk, err := x.ID(id).Get(r)
if err != nil {
return nil, err
} else if !isOk {
return nil, nil
}
return r, nil
}
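A minimal admin-side sketch of AddLimitConfig (hypothetical wrapper; the title string and cap value are illustrative), showing how a per-user daily point cap could be installed using the DAILY refresh rate defined in task_config.go:

package example // hypothetical admin-side sketch

import "code.gitea.io/gitea/models"

// setDailyPointLimit replaces any existing limiter of the same type/scope and
// installs a per-user daily cap on reward points.
func setDailyPointLimit(adminID int64, adminName string, limit int64) error {
    return models.AddLimitConfig(&models.LimitConfig{
        Title:       "daily reward point cap",
        RefreshRate: models.PeriodDaily,
        Scope:       models.LimitScopeSingleUser.Name(),
        LimitNum:    limit,
        LimitType:   models.LimitTypeRewardPoint.Name(),
        CreatorId:   adminID,
        CreatorName: adminName,
    })
}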

models/models.go (+8, -0)

@@ -144,6 +144,14 @@ func init() {
new(WechatBindLog),
new(OrgStatistic),
new(SearchRecord),
new(TaskConfig),
new(TaskAccomplishLog),
new(RewardOperateRecord),
new(LimitConfig),
new(RewardPeriodicTask),
new(PointAccountLog),
new(PointAccount),
new(RewardAdminLog),
new(AiModelConvert),
new(ResourceQueue),
new(ResourceSpecification),


models/point_account.go (+142, -0)

@@ -0,0 +1,142 @@
package models

import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
)

type PointAccountStatus int

// Possible PointAccountStatus types.
const (
PointAccountNormal int = iota + 1 // 1
PointAccountFreeze // 2
PointAccountDeleted // 3
)

type PointAccount struct {
ID int64 `xorm:"pk autoincr"`
AccountCode string `xorm:"INDEX NOT NULL"`
Balance int64 `xorm:"NOT NULL DEFAULT 0"`
TotalEarned int64 `xorm:"NOT NULL DEFAULT 0"`
TotalConsumed int64 `xorm:"NOT NULL DEFAULT 0"`
UserId int64 `xorm:"INDEX NOT NULL"`
Status int `xorm:"NOT NULL"`
Version int64 `xorm:"NOT NULL"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}

func (account *PointAccount) Increase(amount int64, sourceId string) error {
sess := x.NewSession()
defer sess.Close()
sql := "update point_account set balance = balance + ?,total_earned = total_earned + ? ,version = version + 1 where account_code = ? "
_, err := sess.Exec(sql, amount, amount, account.AccountCode)
if err != nil {
sess.Rollback()
return err
}
accountLog := &PointAccountLog{
AccountCode: account.AccountCode,
UserId: account.UserId,
Type: IncreaseAccountBalance,
SourceId: sourceId,
PointsAmount: amount,
BalanceBefore: account.Balance,
BalanceAfter: account.Balance + amount,
AccountVersion: account.Version,
}
_, err = sess.Insert(accountLog)
if err != nil {
sess.Rollback()
return err
}
sess.Commit()
return nil
}

func (account *PointAccount) Decrease(amount int64, sourceId string) error {
sess := x.NewSession()
defer sess.Close()
sql := "update point_account set balance = balance - ?,total_consumed = total_consumed + ? ,version = version + 1 where account_code = ? "
_, err := sess.Exec(sql, amount, amount, account.AccountCode)
if err != nil {
sess.Rollback()
return err
}
accountLog := &PointAccountLog{
AccountCode: account.AccountCode,
UserId: account.UserId,
Type: DecreaseAccountBalance,
SourceId: sourceId,
PointsAmount: amount,
BalanceBefore: account.Balance,
BalanceAfter: account.Balance - amount,
AccountVersion: account.Version,
}
_, err = sess.Insert(accountLog)
if err != nil {
sess.Rollback()
return err
}
sess.Commit()
return nil
}

func GetAccountByUserId(userId int64) (*PointAccount, error) {
p := &PointAccount{}
has, err := x.Where("user_id = ?", userId).Get(p)
if err != nil {
return nil, err
}
if !has {
return nil, ErrRecordNotExist{}
}
return p, nil
}

func InsertAccount(tl *PointAccount) (int64, error) {
return x.Insert(tl)
}

type SearchPointAccountOpts struct {
ListOptions
Keyword string
}

type SearchPointAccountResponse struct {
Records []*UserPointAccount
PageSize int
Page int
Total int64
}

type UserPointAccount struct {
UserId int64
UserName string
Email string
Balance int64
TotalEarned int64
TotalConsumed int64
}

func (UserPointAccount) TableName() string {
return "user"
}

func GetPointAccountMapByUserIds(userIds []int64) (map[int64]*PointAccount, error) {
if len(userIds) == 0 {
return make(map[int64]*PointAccount, 0), nil
}
accounts := make([]*PointAccount, 0)
err := x.In("user_id", userIds).Find(&accounts)
if err != nil {
log.Error("GetPointAccountMapByUserIds error.%v", err)
return nil, err
}
accountMap := make(map[int64]*PointAccount, 0)
for _, v := range accounts {
accountMap[v.UserId] = v
}
return accountMap, nil
}
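A minimal sketch of crediting an account (hypothetical caller; concurrency control around the Version field lives in the service layer and is not shown here):

package example // hypothetical sketch of crediting an account

import "code.gitea.io/gitea/models"

// creditPoints looks up a user's point account and credits it, writing a
// point_account_log row with the given reward serial number as source.
func creditPoints(userID, amount int64, serialNo string) error {
    account, err := models.GetAccountByUserId(userID)
    if err != nil {
        return err
    }
    return account.Increase(amount, serialNo)
}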

models/point_account_log.go (+21, -0)

@@ -0,0 +1,21 @@
package models

import "code.gitea.io/gitea/modules/timeutil"

const (
IncreaseAccountBalance = "increase"
DecreaseAccountBalance = "decrease"
)

type PointAccountLog struct {
ID int64 `xorm:"pk autoincr"`
AccountCode string `xorm:"INDEX NOT NULL"`
UserId int64 `xorm:"INDEX NOT NULL"`
Type string `xorm:"NOT NULL"`
SourceId string `xorm:"INDEX NOT NULL"`
PointsAmount int64 `xorm:"NOT NULL"`
BalanceBefore int64 `xorm:"NOT NULL"`
BalanceAfter int64 `xorm:"NOT NULL"`
AccountVersion int64 `xorm:"NOT NULL"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
}

models/repo.go (+6, -0)

@@ -237,6 +237,12 @@ type Repository struct {
LowerAlias string `xorm:"INDEX"`
}


type RepositoryShow struct {
Name string
RepoType RepoType
Alias string
}

// SanitizedOriginalURL returns a sanitized OriginalURL
func (repo *Repository) SanitizedOriginalURL() string {
if repo.OriginalURL == "" {


models/repo_watch.go (+19, -2)

@@ -25,6 +25,7 @@ const (
)


var ActionChan = make(chan *Action, 200)
var ActionChan4Task = make(chan Action, 200)


// Watch is connection request for receiving repository notification.
type Watch struct {
@@ -182,6 +183,7 @@ func notifyWatchers(e Engine, actions ...*Action) error {
var permCode []bool
var permIssue []bool
var permPR []bool
var permDataset []bool


for _, act := range actions {
repoChanged := repo == nil || repo.ID != act.RepoID
@@ -199,6 +201,14 @@ func notifyWatchers(e Engine, actions ...*Action) error {
if _, err = e.InsertOne(act); err != nil {
return fmt.Errorf("insert new actioner: %v", err)
}
// After InsertOne(act),the act has ID
// Send the act to task chan
ActionChan4Task <- *act

// If it has nothing to do with repo, return directly
if act.Repo == nil && act.RepoID == 0 {
return nil
}


if repoChanged {
act.loadRepo()
@@ -225,12 +235,14 @@ func notifyWatchers(e Engine, actions ...*Action) error {
permCode = make([]bool, len(watchers))
permIssue = make([]bool, len(watchers))
permPR = make([]bool, len(watchers))
permDataset = make([]bool, len(watchers))
for i, watcher := range watchers {
user, err := getUserByID(e, watcher.UserID)
if err != nil {
permCode[i] = false
permIssue[i] = false
permPR[i] = false
permDataset[i] = false
continue
}
perm, err := getUserRepoPermission(e, repo, user)
@@ -238,11 +250,13 @@ func notifyWatchers(e Engine, actions ...*Action) error {
permCode[i] = false
permIssue[i] = false
permPR[i] = false
permDataset[i] = false
continue
}
permCode[i] = perm.CanRead(UnitTypeCode)
permIssue[i] = perm.CanRead(UnitTypeIssues)
permPR[i] = perm.CanRead(UnitTypePullRequests)
permDataset[i] = perm.CanRead(UnitTypeDatasets)
}
}


@@ -267,6 +281,10 @@ func notifyWatchers(e Engine, actions ...*Action) error {
if !permPR[i] {
continue
}
case ActionDatasetRecommended:
if !permDataset[i] {
continue
}
}


if _, err = e.InsertOne(act); err != nil {
@@ -279,7 +297,6 @@ func notifyWatchers(e Engine, actions ...*Action) error {


// NotifyWatchers creates batch of actions for every watcher.
func NotifyWatchers(actions ...*Action) error {

error := notifyWatchers(x, actions...)
producer(actions...)
return error
@@ -287,7 +304,7 @@ func NotifyWatchers(actions ...*Action) error {


func producer(actions ...*Action) {
for _, action := range actions {
if !action.IsPrivate{
if !action.IsPrivate {
ActionChan <- action
}
}
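A hedged sketch of a consumer for the new ActionChan4Task channel (the real consumer lives in services/task; the handler callback here is illustrative):

package example // hypothetical consumer sketch

import "code.gitea.io/gitea/models"

// handleTaskActions drains ActionChan4Task; notifyWatchers sends a copy of
// every persisted action here so the task/point system can react to it.
func handleTaskActions(handle func(models.Action)) {
    for action := range models.ActionChan4Task {
        handle(action)
    }
}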


models/reward_admin_log.go (+79, -0)

@@ -0,0 +1,79 @@
package models

import (
"code.gitea.io/gitea/modules/timeutil"
)

const (
RewardAdminLogProcessing = 1
RewardAdminLogSuccess = 2
RewardAdminLogFailed = 3
)

type RewardAdminLog struct {
ID int64 `xorm:"pk autoincr"`
LogId string `xorm:"INDEX NOT NULL"`
Amount int64 `xorm:"NOT NULL"`
RewardType string
Remark string
Status int
TargetUserId int64 `xorm:"INDEX NOT NULL"`
CreatorId int64 `xorm:"NOT NULL"`
CreatorName string
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
}

func (r *RewardAdminLog) ToShow() *RewardAdminLogShow {
return &RewardAdminLogShow{
CreatorName: r.CreatorName,
}
}

type RewardAdminLogShow struct {
CreatorName string
}

type AdminLogAndUser struct {
AdminRewardAdminLog RewardAdminLog `xorm:"extends"`
User User `xorm:"extends"`
}

func getRewardAdminLog(ra *RewardAdminLog) (*RewardAdminLog, error) {
has, err := x.Get(ra)
if err != nil {
return nil, err
} else if !has {
return nil, ErrRecordNotExist{}
}
return ra, nil
}

func InsertRewardAdminLog(ra *RewardAdminLog) (int64, error) {
return x.Insert(ra)
}

func UpdateRewardAdminLogStatus(logId string, oldStatus, newStatus int) error {
_, err := x.Where("log_id = ? and status = ?", logId, oldStatus).Update(&RewardAdminLog{Status: newStatus})
if err != nil {
return err
}
return nil
}

func GetRewardAdminLogByLogIds(logIds []string) ([]*RewardAdminLog, error) {
if len(logIds) == 0 {
return nil, nil
}
adminLogs := make([]*AdminLogAndUser, 0)
err := x.Table("reward_admin_log").Join("LEFT", "user", "reward_admin_log.creator_id = public.user.id").In("reward_admin_log.log_id", logIds).Find(&adminLogs)
if err != nil {
return nil, err
}
r := make([]*RewardAdminLog, len(adminLogs))
for i, v := range adminLogs {
temp := &v.AdminRewardAdminLog
temp.CreatorName = v.User.Name
r[i] = temp
}
return r, nil
}

models/reward_operate_record.go (+478, -0)

@@ -0,0 +1,478 @@
package models

import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
"strconv"
"strings"
"xorm.io/builder"
)

type SourceType string

const (
SourceTypeAccomplishTask SourceType = "ACCOMPLISH_TASK"
SourceTypeAdminOperate SourceType = "ADMIN_OPERATE"
SourceTypeRunCloudbrainTask SourceType = "RUN_CLOUDBRAIN_TASK"
)

func (r SourceType) Name() string {
switch r {
case SourceTypeAccomplishTask:
return "ACCOMPLISH_TASK"
case SourceTypeAdminOperate:
return "ADMIN_OPERATE"
case SourceTypeRunCloudbrainTask:
return "RUN_CLOUDBRAIN_TASK"
default:
return ""
}
}

type RewardType string

const (
RewardTypePoint RewardType = "POINT"
)

func (r RewardType) Name() string {
switch r {
case RewardTypePoint:
return "POINT"
default:
return ""
}
}
func (r RewardType) Show() string {
switch r {
case RewardTypePoint:
return "积分"
default:
return ""
}
}
func GetRewardTypeInstance(s string) RewardType {
switch s {
case RewardTypePoint.Name():
return RewardTypePoint
default:
return ""
}
}

type RewardOperateType string

func (r RewardOperateType) Name() string {
switch r {
case OperateTypeIncrease:
return "INCREASE"
case OperateTypeDecrease:
return "DECREASE"
default:
return ""
}
}
func (r RewardOperateType) Show() string {
switch r {
case OperateTypeIncrease:
return "奖励"
case OperateTypeDecrease:
return "扣减"
default:
return ""
}
}

func GetRewardOperateTypeInstance(s string) RewardOperateType {
switch s {
case OperateTypeIncrease.Name():
return OperateTypeIncrease
case OperateTypeDecrease.Name():
return OperateTypeDecrease
default:
return ""
}
}

const (
OperateTypeIncrease RewardOperateType = "INCREASE"
OperateTypeDecrease RewardOperateType = "DECREASE"
OperateTypeNull RewardOperateType = "NIL"
)

const (
OperateStatusOperating = "OPERATING"
OperateStatusSucceeded = "SUCCEEDED"
OperateStatusFailed = "FAILED"
)

const Semicolon = ";"

type RewardOperateOrderBy string

const (
RewardOrderByIDDesc RewardOperateOrderBy = "reward_operate_record.id desc"
)

type RewardRecordList []*RewardOperateRecord
type RewardRecordShowList []*RewardOperateRecordShow

func (l RewardRecordShowList) loadAttribute(isAdmin bool) {
l.loadAction()
l.loadCloudbrain()
if isAdmin {
l.loadAdminLog()
}
}

func (l RewardRecordShowList) loadAction() error {
if len(l) == 0 {
return nil
}
actionIds := make([]int64, 0)
actionIdMap := make(map[int64]*RewardOperateRecordShow, 0)
for _, r := range l {
if r.SourceType != SourceTypeAccomplishTask.Name() {
continue
}
i, _ := strconv.ParseInt(r.SourceId, 10, 64)
actionIds = append(actionIds, i)
actionIdMap[i] = r
}
actions, err := GetActionByIds(actionIds)
if err != nil {
return err
}
for _, v := range actions {
actionIdMap[v.ID].Action = v.ToShow()
}
return nil
}

func (l RewardRecordShowList) loadCloudbrain() error {
if len(l) == 0 {
return nil
}
cloudbrainIds := make([]int64, 0)
cloudbrainMap := make(map[int64]*RewardOperateRecordShow, 0)
for _, r := range l {
if r.SourceType != SourceTypeRunCloudbrainTask.Name() {
continue
}
i, _ := strconv.ParseInt(r.SourceId, 10, 64)
cloudbrainIds = append(cloudbrainIds, i)
cloudbrainMap[i] = r
}
cloudbrains, err := GetCloudbrainByIds(cloudbrainIds)
if err != nil {
return err
}
var repoIds []int64
var taskIds []int64
for _, task := range cloudbrains {
repoIds = append(repoIds, task.RepoID)
taskIds = append(taskIds, task.ID)
}
repositoryMap, err := GetRepositoriesMapByIDs(repoIds)
specMap, err := GetResourceSpecMapByCloudbrainIDs(taskIds)
if err != nil {
return err
}
for _, v := range cloudbrains {
v.Repo = repositoryMap[v.RepoID]
v.Spec = specMap[v.ID]
cloudbrainMap[v.ID].Cloudbrain = v.ToShow()
}

return nil

}

func (l RewardRecordShowList) loadAdminLog() error {
if len(l) == 0 {
return nil
}
logIds := make([]string, 0)
logMap := make(map[string]*RewardOperateRecordShow, 0)
for _, r := range l {
if r.SourceType != SourceTypeAdminOperate.Name() {
continue
}
logIds = append(logIds, r.SourceId)
logMap[r.SourceId] = r
}
adminLogs, err := GetRewardAdminLogByLogIds(logIds)
if err != nil {
return err
}
for _, v := range adminLogs {
logMap[v.LogId].AdminLog = v.ToShow()
}

return nil

}

type RewardOperateRecord struct {
ID int64 `xorm:"pk autoincr"`
SerialNo string `xorm:"INDEX NOT NULL"`
UserId int64 `xorm:"INDEX NOT NULL"`
Amount int64 `xorm:"NOT NULL"`
LossAmount int64
Title string
RewardType string `xorm:"NOT NULL"`
SourceType string `xorm:"NOT NULL"`
SourceId string `xorm:"INDEX NOT NULL"`
SourceTemplateId string
RequestId string `xorm:"INDEX NOT NULL"`
OperateType string `xorm:"NOT NULL"`
Status string `xorm:"NOT NULL"`
Remark string
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
LastOperateUnix timeutil.TimeStamp `xorm:"INDEX"`
}

type AdminRewardOperateReq struct {
TargetUserId int64 `binding:"Required"`
OperateType RewardOperateType `binding:"Required"`
Amount int64 `binding:"Required;Range(1,100000)"`
Remark string
RewardType RewardType
}

type RewardOperateRecordShow struct {
SerialNo string
Status string
OperateType string
SourceId string
Amount int64
LossAmount int64
BalanceAfter int64
Remark string
SourceType string
UserName string
LastOperateDate timeutil.TimeStamp
UnitPrice int64
SuccessCount int
Action *ActionShow
Cloudbrain *CloudbrainShow
AdminLog *RewardAdminLogShow
}

func getPointOperateRecord(tl *RewardOperateRecord) (*RewardOperateRecord, error) {
has, err := x.Get(tl)
if err != nil {
return nil, err
} else if !has {
return nil, ErrRecordNotExist{}
}
return tl, nil
}

func GetPointOperateRecordBySourceTypeAndRequestId(sourceType, requestId, operateType string) (*RewardOperateRecord, error) {
t := &RewardOperateRecord{
SourceType: sourceType,
RequestId: requestId,
OperateType: operateType,
}
return getPointOperateRecord(t)
}

func GetPointOperateRecordBySerialNo(serialNo string) (*RewardOperateRecord, error) {
t := &RewardOperateRecord{
SerialNo: serialNo,
}
return getPointOperateRecord(t)
}

func InsertRewardOperateRecord(tl *RewardOperateRecord) (int64, error) {
return x.Insert(tl)
}

func UpdateRewardRecordToFinalStatus(sourceType, requestId, newStatus string) (int64, error) {
r := &RewardOperateRecord{
Status: newStatus,
LastOperateUnix: timeutil.TimeStampNow(),
}
return x.Cols("status", "last_operate_unix").Where("source_type=? and request_id=? and status=?", sourceType, requestId, OperateStatusOperating).Update(r)
}

func SumRewardAmountInTaskPeriod(rewardType string, sourceType string, userId int64, period *PeriodResult) (int64, error) {
var cond = builder.NewCond()
if period != nil {
cond = cond.And(builder.Gte{"created_unix": period.StartTime.Unix()})
cond = cond.And(builder.Lt{"created_unix": period.EndTime.Unix()})
}
if sourceType != "" {
cond = cond.And(builder.Eq{"source_type": sourceType})
}
cond = cond.And(builder.Eq{"reward_type": rewardType})
cond = cond.And(builder.Eq{"user_id": userId})
return x.Where(cond).SumInt(&RewardOperateRecord{}, "amount")
}

type RewardOperateContext struct {
SourceType SourceType
SourceId string
SourceTemplateId string
Title string
Remark string
Reward Reward
TargetUserId int64
RequestId string
OperateType RewardOperateType
RejectPolicy LimiterRejectPolicy
PermittedNegative bool
LossAmount int64
}

type Reward struct {
Amount int64
Type RewardType
}

type UserRewardOperationRedis struct {
UserId int64
Amount int64
RewardType RewardType
OperateType RewardOperateType
}

type UserRewardOperation struct {
UserId int64
Msg string
}

func AppendRemark(remark, appendStr string) string {
return strings.TrimPrefix(remark+Semicolon+appendStr, Semicolon)
}

type RewardRecordListOpts struct {
ListOptions
UserId int64
UserName string
OperateType RewardOperateType
RewardType RewardType
SourceType string
TaskType string
SerialNo string
OrderBy RewardOperateOrderBy
IsAdmin bool
Status string
}

func (opts *RewardRecordListOpts) toCond() builder.Cond {
if opts.Page <= 0 {
opts.Page = 1
}

if len(opts.OrderBy) == 0 {
opts.OrderBy = RewardOrderByIDDesc
}

cond := builder.NewCond()
if opts.UserId > 0 {
cond = cond.And(builder.Eq{"reward_operate_record.user_id": opts.UserId})
}
if opts.OperateType != OperateTypeNull {
cond = cond.And(builder.Eq{"reward_operate_record.operate_type": opts.OperateType.Name()})
}
if opts.SourceType != "" {
cond = cond.And(builder.Eq{"reward_operate_record.source_type": opts.SourceType})
}
if opts.TaskType != "" {
cond = cond.And(builder.Eq{"reward_operate_record.source_template_id": opts.TaskType})
}
if opts.SerialNo != "" {
cond = cond.And(builder.Like{"reward_operate_record.serial_no", opts.SerialNo})
}
if opts.Status != "" {
cond = cond.And(builder.Like{"reward_operate_record.status", opts.Status})
}

cond = cond.And(builder.Eq{"reward_operate_record.reward_type": opts.RewardType.Name()})
cond = cond.And(builder.Gt{"reward_operate_record.amount": 0})
return cond
}

type TestTT struct {
SerialNo string
UserId int64
Amount int64
UserName string
}

func GetRewardRecordShowList(opts *RewardRecordListOpts) (RewardRecordShowList, int64, error) {
cond := opts.toCond()
count, err := x.Where(cond).Count(&RewardOperateRecord{})
if err != nil {
return nil, 0, err
}
r := make([]*RewardOperateRecordShow, 0)
err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no",
"reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount",
"reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type",
"reward_operate_record.last_operate_unix as last_operate_date").
Where(cond).Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).OrderBy(string(opts.OrderBy)).Find(&r)

if err != nil {
return nil, 0, err
}
RewardRecordShowList(r).loadAttribute(false)
return r, count, nil
}

func GetAdminRewardRecordShowList(opts *RewardRecordListOpts) (RewardRecordShowList, int64, error) {
cond := opts.toCond()
count, err := x.Where(cond).Count(&RewardOperateRecord{})
if err != nil {
return nil, 0, err
}
r := make([]*RewardOperateRecordShow, 0)
switch opts.OperateType {
case OperateTypeIncrease:
err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no",
"reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount",
"reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type",
"reward_operate_record.last_operate_unix as last_operate_date", "public.user.name as user_name",
"point_account_log.balance_after").
Join("LEFT", "public.user", "reward_operate_record.user_id = public.user.id").
Join("LEFT", "point_account_log", " reward_operate_record.serial_no = point_account_log.source_id").
Where(cond).Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).OrderBy(string(opts.OrderBy)).Find(&r)
case OperateTypeDecrease:
err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no",
"reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount",
"reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type",
"reward_operate_record.last_operate_unix as last_operate_date", "public.user.name as user_name",
"reward_periodic_task.amount as unit_price", "reward_periodic_task.success_count").
Join("LEFT", "public.user", "reward_operate_record.user_id = public.user.id").
Join("LEFT", "reward_periodic_task", "reward_operate_record.serial_no = reward_periodic_task.operate_serial_no").
Where(cond).Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).OrderBy(string(opts.OrderBy)).Find(&r)
}

if err != nil {
return nil, 0, err
}
RewardRecordShowList(r).loadAttribute(true)
return r, count, nil
}

func IsWechatOpenIdRewarded(wechatOpenId string) bool {
actions := make([]Action, 0)
err := x.Where(" op_type = ? and content = ?", ActionBindWechat, wechatOpenId).Find(&actions)

if err != nil {
log.Error("IsWechatOpenIdRewarded find actions err.%v", err)
return true
}
if len(actions) == 0 {
return false
}
actionIds := make([]int64, len(actions))
for i, v := range actions {
actionIds[i] = v.ID
}
n, _ := x.Where(builder.Eq{"source_type": SourceTypeAccomplishTask}.And(builder.In("source_id", actionIds))).Count(&RewardOperateRecord{})
return n > 0
}
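A minimal sketch of querying a user's positive point records (hypothetical wrapper; the option values simply mirror the fields checked in toCond() above):

package example // hypothetical sketch of listing a user's point records

import "code.gitea.io/gitea/models"

// listUserPointRecords pages through a user's point rewards, newest first.
func listUserPointRecords(userID int64, page, pageSize int) (models.RewardRecordShowList, int64, error) {
    return models.GetRewardRecordShowList(&models.RewardRecordListOpts{
        ListOptions: models.ListOptions{Page: page, PageSize: pageSize},
        UserId:      userID,
        OperateType: models.OperateTypeIncrease,
        RewardType:  models.RewardTypePoint,
        OrderBy:     models.RewardOrderByIDDesc,
    })
}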

models/reward_periodic_task.go (+115, -0)

@@ -0,0 +1,115 @@
package models

import (
"code.gitea.io/gitea/modules/timeutil"
"time"
)

type PeriodicTaskStatus int

const (
PeriodicTaskStatusRunning = iota + 1 // 1
PeriodicTaskStatusFinished // 2
)

type PeriodType string

const (
PeriodType30MinutesFree1HourCost PeriodType = "30MF1HC"
)

func (r PeriodType) Name() string {
switch r {
case PeriodType30MinutesFree1HourCost:
return "30MF1HC"
default:
return ""
}
}

type RewardPeriodicTask struct {
ID int64 `xorm:"pk autoincr"`
OperateSerialNo string `xorm:"INDEX NOT NULL"`
DelaySeconds int64
IntervalSeconds int64
Amount int64 `xorm:"NOT NULL"`
NextExecuteTime timeutil.TimeStamp `xorm:"INDEX NOT NULL"`
SuccessCount int `xorm:"NOT NULL default 0"`
Status int `xorm:"NOT NULL"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
FinishedUnix timeutil.TimeStamp `xorm:"INDEX"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}

type StartPeriodicTaskOpts struct {
SourceType SourceType
SourceId string
Remark string
Title string
TargetUserId int64
RequestId string
OperateType RewardOperateType
Delay time.Duration
Interval time.Duration
UnitAmount int
RewardType RewardType
StartTime time.Time
}

func InsertPeriodicTask(tl *RewardPeriodicTask) (int64, error) {
return x.Insert(tl)
}

func GetRunningRewardTask(now time.Time) ([]RewardPeriodicTask, error) {
r := make([]RewardPeriodicTask, 0)
err := x.Where("next_execute_time <= ? and status = ?", now.Unix(), PeriodicTaskStatusRunning).Find(&r)
if err != nil {
return nil, err
}
return r, err
}

func IncrRewardTaskSuccessCount(t RewardPeriodicTask, count int64, nextTime timeutil.TimeStamp) error {
sess := x.NewSession()
defer sess.Close()
_, err := sess.Exec("update reward_periodic_task set success_count = success_count + ? , next_execute_time = ?, updated_unix = ? where id = ?", count, nextTime, timeutil.TimeStampNow(), t.ID)
if err != nil {
sess.Rollback()
return err
}
_, err = sess.Exec("update reward_operate_record set amount = amount + ? ,updated_unix = ? ,last_operate_unix = ? where serial_no = ?", t.Amount, timeutil.TimeStampNow(), timeutil.TimeStampNow(), t.OperateSerialNo)
if err != nil {
sess.Rollback()
return err
}
sess.Commit()
return nil
}

func GetPeriodicTaskBySourceIdAndType(sourceType SourceType, sourceId string, operateType RewardOperateType) (*RewardPeriodicTask, error) {
r := RewardPeriodicTask{}
_, err := x.SQL("select rpt.* from reward_periodic_task rpt "+
"inner join reward_operate_record ror on rpt.operate_serial_no = ror.serial_no"+
" where ror.source_type = ? and ror.source_id = ? and ror.operate_type = ? ", sourceType.Name(), sourceId, operateType.Name()).Get(&r)
if err != nil {
return nil, err
}
return &r, nil
}

func StopPeriodicTask(taskId int64, operateSerialNo string, stopTime time.Time) error {
sess := x.NewSession()
defer sess.Close()
_, err := sess.Where("id = ? and status = ?", taskId, PeriodicTaskStatusRunning).Update(&RewardPeriodicTask{Status: PeriodicTaskStatusFinished, FinishedUnix: timeutil.TimeStamp(stopTime.Unix())})
if err != nil {
sess.Rollback()
return err
}
_, err = sess.Where("serial_no = ? and status = ?", operateSerialNo, OperateStatusOperating).Update(&RewardOperateRecord{Status: OperateStatusSucceeded})
if err != nil {
sess.Rollback()
return err
}
sess.Commit()
return nil
}
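For orientation, a minimal sketch of how these helpers might be driven by the reward cron job. The real loop lives in services/reward and is not part of this diff; runRewardTasksOnce is an illustrative name.

// Sketch only: one pass of a periodic driver over the helpers above.
package reward

import (
	"time"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/timeutil"
)

func runRewardTasksOnce(now time.Time) {
	tasks, err := models.GetRunningRewardTask(now)
	if err != nil {
		log.Error("GetRunningRewardTask failed: %v", err)
		return
	}
	for _, t := range tasks {
		// Credit one interval and move the next execution time forward.
		next := timeutil.TimeStamp(int64(t.NextExecuteTime) + t.IntervalSeconds)
		if err := models.IncrRewardTaskSuccessCount(t, 1, next); err != nil {
			log.Error("IncrRewardTaskSuccessCount(%d) failed: %v", t.ID, err)
		}
		// When the paid resource stops, the caller would invoke
		// models.StopPeriodicTask(t.ID, t.OperateSerialNo, stopTime) instead.
	}
}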

+ 44
- 0
models/task_accomplish_log.go View File

@@ -0,0 +1,44 @@
package models

import (
"code.gitea.io/gitea/modules/timeutil"
"time"
)

type TaskAccomplishLog struct {
ID int64 `xorm:"pk autoincr"`
ConfigId int64 `xorm:"NOT NULL"`
TaskCode string `xorm:"NOT NULL"`
UserId int64 `xorm:"INDEX NOT NULL"`
ActionId int64
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
}

type PeriodResult struct {
StartTime time.Time
EndTime time.Time
LeftTime time.Duration
}

func getTaskAccomplishLog(tl *TaskAccomplishLog) (*TaskAccomplishLog, error) {
has, err := x.Get(tl)
if err != nil {
return nil, err
} else if !has {
return nil, ErrRecordNotExist{}
}
return tl, nil
}

func CountTaskAccomplishLogInTaskPeriod(taskCode string, userId int64, period *PeriodResult) (int64, error) {
if period == nil {
return x.Where("task_code = ? and user_id = ?", taskCode, userId).Count(&TaskAccomplishLog{})
} else {
return x.Where("task_code = ? and user_id = ? and created_unix >= ? and created_unix < ? ", taskCode, userId, period.StartTime.Unix(), period.EndTime.Unix()).Count(&TaskAccomplishLog{})
}

}

func InsertTaskAccomplishLog(tl *TaskAccomplishLog) (int64, error) {
return x.Insert(tl)
}
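A small usage sketch, assuming a DAILY limiter period computed on the caller side; in the real flow the period comes from the limiter config.

// Sketch: count how many times a user finished a task today.
package models

import "time"

func countTodayAccomplish(taskCode string, userId int64) (int64, error) {
	now := time.Now()
	start := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location())
	period := &PeriodResult{
		StartTime: start,
		EndTime:   start.Add(24 * time.Hour),
		LeftTime:  start.Add(24 * time.Hour).Sub(now),
	}
	return CountTaskAccomplishLogInTaskPeriod(taskCode, userId, period)
}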

+ 374
- 0
models/task_config.go View File

@@ -0,0 +1,374 @@
package models

import (
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/builder"
)

const (
PeriodNotCycle = "NOT_CYCLE"
PeriodDaily = "DAILY"
)

type TaskType string

const (
TaskCreatePublicRepo TaskType = "CreatePublicRepo"
TaskCreateIssue TaskType = "CreateIssue"
TaskCreatePullRequest TaskType = "CreatePullRequest"
TaskCommentIssue TaskType = "CommentIssue"
TaskUploadAttachment TaskType = "UploadAttachment"
TaskCreateNewModelTask TaskType = "CreateNewModelTask"
TaskBindWechat TaskType = "BindWechat"
TaskCreateCloudbrainTask TaskType = "CreateCloudbrainTask"
TaskDatasetRecommended TaskType = "DatasetRecommended"
TaskCreateImage TaskType = "CreateImage"
TaskImageRecommend TaskType = "ImageRecommend"
TaskChangeUserAvatar TaskType = "ChangeUserAvatar"
TaskPushCommits TaskType = "PushCommits"
)

func GetTaskTypeFromAction(a ActionType) TaskType {
switch a {
case ActionCreateDebugGPUTask,
ActionCreateDebugNPUTask,
ActionCreateTrainTask,
ActionCreateInferenceTask,
ActionCreateBenchMarkTask,
ActionCreateGPUTrainTask,
ActionCreateGrampusNPUTrainTask,
ActionCreateGrampusGPUTrainTask:
return TaskCreateCloudbrainTask
case ActionCreateRepo:
return TaskCreatePublicRepo
case ActionCreatePullRequest:
return TaskCreatePullRequest
case ActionCommentIssue:
return TaskCommentIssue
case ActionUploadAttachment:
return TaskUploadAttachment
case ActionCreateNewModelTask:
return TaskCreateNewModelTask
case ActionBindWechat:
return TaskBindWechat
case ActionDatasetRecommended:
return TaskDatasetRecommended
case ActionImageRecommend:
return TaskImageRecommend
case ActionCreateImage:
return TaskCreateImage
case ActionChangeUserAvatar:
return TaskChangeUserAvatar
case ActionCommitRepo,
ActionDeleteBranch,
ActionPushTag,
ActionDeleteTag:
return TaskPushCommits
case ActionCreateIssue:
return TaskCreateIssue
}
return ""
}

//TaskConfig: only adding and deleting records are allowed, editing is not.
//To change the config of a task code, delete the old record first and then add a new one.
type TaskConfig struct {
ID int64 `xorm:"pk autoincr"`
TaskCode string `xorm:"NOT NULL"`
Title string
AwardType string `xorm:"NOT NULL"`
AwardAmount int64 `xorm:"NOT NULL"`
CreatorId int64 `xorm:"NOT NULL"`
CreatorName string
CreatedUnix timeutil.TimeStamp `xorm:"created"`
DeletedAt timeutil.TimeStamp `xorm:"deleted"`
DeleterId int64
DeleterName string
}

type TaskConfigWithLimit struct {
ID int64
TaskCode string
Title string
AwardType string
AwardAmount int64
Creator string
IsDeleted bool
CreatedUnix timeutil.TimeStamp
DeleteAt timeutil.TimeStamp
Limiters []*LimitConfigVO
}

type TaskConfigWithLimitResponse struct {
Records []*TaskConfigWithSingleLimit
Total int64
PageSize int
Page int
}

type TaskConfigWithSingleLimit struct {
ID int64
TaskCode string
AwardType string
AwardAmount int64
Creator string
IsDeleted bool
CreatedUnix timeutil.TimeStamp
DeleteAt timeutil.TimeStamp
RefreshRate string
LimitNum int64
}

type TaskAndLimiterConfig struct {
TaskConfig TaskConfig `xorm:"extends"`
LimitConfig LimitConfig `xorm:"extends"`
}

type PointRule struct {
UserDailyLimit int64
TaskRules []TaskRule
}

type TaskRule struct {
TaskCode string
AwardType string
AwardAmount int64
RefreshRate string
LimitNum int64
}

func (TaskAndLimiterConfig) TableName() string {
return "task_config"
}

type BatchLimitConfigVO struct {
ConfigList []TaskConfigWithLimit
}

func getTaskConfig(t *TaskConfig) (*TaskConfig, error) {
has, err := x.Get(t)
if err != nil {
return nil, err
} else if !has {
return nil, ErrRecordNotExist{}
}
return t, nil
}

func GetTaskConfigByTaskCode(taskCode string) (*TaskConfig, error) {
t := &TaskConfig{
TaskCode: taskCode,
}
return getTaskConfig(t)
}

func GetTaskConfigByID(id int64) (*TaskConfig, error) {
t := &TaskConfig{
ID: id,
}
return getTaskConfig(t)
}

func GetTaskConfigList() ([]*TaskConfig, error) {
r := make([]*TaskConfig, 0)
err := x.Find(&r)
if err != nil {
return nil, err
}
if len(r) == 0 {
return nil, ErrRecordNotExist{}
}
return r, nil
}

type GetTaskConfigOpts struct {
ListOptions
Status int //1 normal 2 deleted
TaskType string
}

func GetTaskConfigPageWithDeleted(opt GetTaskConfigOpts) ([]*TaskAndLimiterConfig, int64, error) {
if opt.Page <= 0 {
opt.Page = 1
}
cond := builder.NewCond()
if opt.TaskType != "" {
cond = cond.And(builder.Eq{"task_code": opt.TaskType})
}

var count int64
var err error
if opt.Status == 1 {
subCond := builder.NewCond()
subCond = subCond.Or(builder.IsNull{"task_config.deleted_at"})
subCond = subCond.Or(builder.Eq{"task_config.deleted_at": 0})
cond = cond.And(subCond)
} else if opt.Status == 2 {
cond = cond.And(builder.Gt{"task_config.deleted_at": 0})
}
count, err = x.Unscoped().Where(cond).Count(&TaskConfig{})
if err != nil {
return nil, 0, err
}
r := make([]*TaskAndLimiterConfig, 0)
err = x.Join("LEFT", "limit_config", "task_config.id = limit_config.related_id").
Unscoped().Where(cond).Limit(opt.PageSize, (opt.Page-1)*opt.PageSize).
OrderBy("task_config.deleted_at desc,task_config.id desc").Find(&r)

	if err != nil {
		return nil, 0, err
	}
	if len(r) == 0 {
		return nil, 0, ErrRecordNotExist{}
	}
return r, count, nil
}

func EditTaskConfig(config TaskConfigWithLimit, doer *User) error {
sess := x.NewSession()
defer sess.Close()

//delete old task config
p := &TaskConfig{
ID: config.ID,
}
_, err := sess.Delete(p)
if err != nil {
sess.Rollback()
return err
}
//update deleter
p.DeleterId = doer.ID
p.DeleterName = doer.Name
sess.Where("id = ?", config.ID).Unscoped().Update(p)

//add new config
t := &TaskConfig{
TaskCode: config.TaskCode,
Title: config.Title,
AwardType: config.AwardType,
AwardAmount: config.AwardAmount,
CreatorId: doer.ID,
CreatorName: doer.Name,
}
_, err = sess.InsertOne(t)
if err != nil {
sess.Rollback()
return err
}

//delete old limiter config
lp := &LimitConfig{
RelatedId: config.ID,
}
_, err = sess.Delete(lp)
if err != nil {
sess.Rollback()
return err
}
lp.DeleterName = doer.Name
lp.DeleterId = doer.ID
//update deleter
sess.Where("related_id = ?", config.ID).Unscoped().Update(lp)

//add new limiter config
if config.Limiters != nil && len(config.Limiters) > 0 {
for _, v := range config.Limiters {
//add new config
l := &LimitConfig{
Title: v.Title,
RefreshRate: v.RefreshRate,
Scope: v.Scope,
LimitNum: v.LimitNum,
LimitCode: config.TaskCode,
LimitType: LimitTypeTask.Name(),
CreatorId: doer.ID,
CreatorName: doer.Name,
RelatedId: t.ID,
}
_, err = sess.Insert(l)
if err != nil {
sess.Rollback()
return err
}
}
}
sess.Commit()
return nil
}

func NewTaskConfig(config TaskConfigWithLimit, doer *User) error {
sess := x.NewSession()
defer sess.Close()

//add new config
t := &TaskConfig{
TaskCode: config.TaskCode,
Title: config.Title,
AwardType: config.AwardType,
AwardAmount: config.AwardAmount,
CreatorId: doer.ID,
CreatorName: doer.Name,
}
_, err := sess.InsertOne(t)
if err != nil {
sess.Rollback()
return err
}

//add new limiter config
if config.Limiters != nil && len(config.Limiters) > 0 {
for _, v := range config.Limiters {
//add new config
l := &LimitConfig{
RelatedId: t.ID,
Title: v.Title,
RefreshRate: v.RefreshRate,
Scope: v.Scope,
LimitNum: v.LimitNum,
LimitCode: config.TaskCode,
LimitType: LimitTypeTask.Name(),
CreatorId: doer.ID,
CreatorName: doer.Name,
}
_, err = sess.Insert(l)
if err != nil {
sess.Rollback()
return err
}
}
}
sess.Commit()
return nil
}

func DelTaskConfig(id int64, doer *User) error {
sess := x.NewSession()
defer sess.Close()

//delete old task config
p := &TaskConfig{
ID: id,
}
_, err := sess.Delete(p)
if err != nil {
sess.Rollback()
return err
}
//update deleter
p.DeleterId = doer.ID
p.DeleterName = doer.Name
sess.Where("id = ?", id).Unscoped().Update(p)
//delete old limiter config
lp := &LimitConfig{
RelatedId: id,
}
_, err = sess.Delete(lp)
if err != nil {
sess.Rollback()
return err
}
lp.DeleterName = doer.Name
lp.DeleterId = doer.ID
//update deleter
sess.Where("related_id = ?", id).Unscoped().Update(lp)
sess.Commit()
return nil
}
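A hedged example of creating a task rule through NewTaskConfig. The AwardType and Scope string values are assumptions; only the field names come from the code above.

// Sketch: register a task rule with one per-user daily limiter.
package models

func exampleNewTaskConfig(admin *User) error {
	cfg := TaskConfigWithLimit{
		TaskCode:    string(TaskCreateIssue),
		Title:       "Create an issue",
		AwardType:   "POINT", // assumption: the reward type name used by the point system
		AwardAmount: 1,
		Limiters: []*LimitConfigVO{
			{
				RefreshRate: PeriodDaily,
				Scope:       "USER", // assumption: per-user scope name
				LimitNum:    5,
			},
		},
	}
	return NewTaskConfig(cfg, admin)
}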

+ 4
- 0
models/user.go View File

@@ -188,6 +188,10 @@ type User struct {
	PhoneNumber string `xorm:"VARCHAR(255)"`
}

type UserShow struct {
	Name string
}

// SearchOrganizationsOptions options to filter organizations
type SearchOrganizationsOptions struct {
	ListOptions


+ 4
- 0
models/wechat_bind.go View File

@@ -96,3 +96,7 @@ func UnbindWechatOpenId(userId int64, oldWechatOpenID string) error {
	sess.Insert(logParam)
	return sess.Commit()
}

func CountWechatBindLog(wechatOpenId string, action WechatBindAction) (int64, error) {
return x.Where("wechat_open_id = ? and action = ?", wechatOpenId, action).Count(&WechatBindLog{})
}

+ 5
- 0
modules/auth/cloudbrain.go View File

@@ -23,6 +23,11 @@ type CreateCloudBrainForm struct {
	BootFile string `form:"boot_file"`
	Params string `form:"run_para_list"`
	BranchName string `form:"branch_name"`
	ModelName string `form:"model_name"`
	ModelVersion string `form:"model_version"`
	CkptName string `form:"ckpt_name"`
	LabelName string `form:"label_names"`
	PreTrainModelUrl string `form:"pre_train_model_url"`
	DatasetName string `form:"dataset_name"`
	SpecId int64 `form:"spec_id"`
}


+ 5
- 0
modules/auth/grampus.go View File

@@ -18,6 +18,11 @@ type CreateGrampusTrainJobForm struct {
	WorkServerNumber int `form:"work_server_number" binding:"Required"`
	Image string `form:"image"`
	DatasetName string `form:"dataset_name"`
	ModelName string `form:"model_name"`
	ModelVersion string `form:"model_version"`
	CkptName string `form:"ckpt_name"`
	LabelName string `form:"label_names"`
	PreTrainModelUrl string `form:"pre_train_model_url"`
	SpecId int64 `form:"spec_id"`
}




+ 6
- 0
modules/auth/modelarts.go View File

@@ -33,6 +33,7 @@ type CreateModelArtsTrainJobForm struct {
	DisplayJobName string `form:"display_job_name" binding:"Required"`
	JobName string `form:"job_name" binding:"Required"`
	Attachment string `form:"attachment" binding:"Required"`
	DatasetName string `form:"dataset_name"`
	BootFile string `form:"boot_file" binding:"Required"`
	WorkServerNumber int `form:"work_server_number" binding:"Required"`
	EngineID int `form:"engine_id" binding:"Required"`
@@ -48,6 +49,11 @@ type CreateModelArtsTrainJobForm struct {
	FlavorName string `form:"flaver_names" binding:"Required"`
	EngineName string `form:"engine_names" binding:"Required"`
	SpecId int64 `form:"spec_id" binding:"Required"`
	ModelName string `form:"model_name"`
	ModelVersion string `form:"model_version"`
	CkptName string `form:"ckpt_name"`
	LabelName string `form:"label_names"`
	PreTrainModelUrl string `form:"pre_train_model_url"`
}

type CreateModelArtsInferenceJobForm struct {


+ 1
- 1
modules/auth/user_form.go View File

@@ -372,7 +372,7 @@ func (f *U2FDeleteForm) Validate(ctx *macaron.Context, errs binding.Errors) bind


type PhoneNumberForm struct {
	PhoneNumber string `binding:"Required;MaxSize(20)"`
	Mode int `binding:"Required"`
	SlideID string `binding:"Required;MaxSize(100)"`
}




+ 11
- 8
modules/auth/wechat/access_token.go View File

@@ -1,20 +1,19 @@
package wechat

import (
	"time"

	"code.gitea.io/gitea/modules/redis/redis_client"
	"code.gitea.io/gitea/modules/redis/redis_key"
	"code.gitea.io/gitea/modules/redis/redis_lock"
	"time"
)

const EMPTY_REDIS_VAL = "Nil"

var accessTokenLock = redis_lock.NewDistributeLock(redis_key.AccessTokenLockKey())

func GetWechatAccessToken() string {
	token, _ := redis_client.Get(redis_key.WechatAccessTokenKey())
	if token != "" {
		if token == EMPTY_REDIS_VAL {
		if token == redis_key.EMPTY_REDIS_VAL {
			return ""
		}
		live, _ := redis_client.TTL(redis_key.WechatAccessTokenKey())
@@ -28,18 +27,22 @@ func GetWechatAccessToken() string {
}

func refreshAccessToken() {
	if ok := accessTokenLock.Lock(3 * time.Second); ok {
	if ok, _ := accessTokenLock.Lock(3 * time.Second); ok {
		defer accessTokenLock.UnLock()
		callAccessTokenAndUpdateCache()
	}
}

func refreshAndGetAccessToken() string {
	if ok := accessTokenLock.LockWithWait(3*time.Second, 3*time.Second); ok {
	isOk, err := accessTokenLock.LockWithWait(3*time.Second, 3*time.Second)
	if err != nil {
		return ""
	}
	if isOk {
		defer accessTokenLock.UnLock()
		token, _ := redis_client.Get(redis_key.WechatAccessTokenKey())
		if token != "" {
			if token == EMPTY_REDIS_VAL {
			if token == redis_key.EMPTY_REDIS_VAL {
				return ""
			}
			return token
@@ -59,7 +62,7 @@ func callAccessTokenAndUpdateCache() string {
	}

	if token == "" {
		redis_client.Setex(redis_key.WechatAccessTokenKey(), EMPTY_REDIS_VAL, 10*time.Second)
		redis_client.Setex(redis_key.WechatAccessTokenKey(), redis_key.EMPTY_REDIS_VAL, 10*time.Second)
		return ""
	}
	redis_client.Setex(redis_key.WechatAccessTokenKey(), token, time.Duration(r.Expires_in)*time.Second)


+ 3
- 3
modules/auth/wechat/bind.go View File

@@ -38,7 +38,7 @@ func (err WechatBindError) Error() string {
}

func BindWechat(userId int64, wechatOpenId string) error {
	if !IsWechatAccountAvailable(userId, wechatOpenId) {
	if !IsWechatAccountUsed(userId, wechatOpenId) {
		log.Error("bind wechat failed, because user use wrong wechat account to bind,userId=%d wechatOpenId=%s", userId, wechatOpenId)
		return NewWechatBindError(BIND_REPLY_WECHAT_ACCOUNT_USED)
	}
@@ -60,9 +60,9 @@ func IsUserAvailableForWechatBind(userId int64, wechatOpenId string) bool {
	return currentOpenId == "" || currentOpenId == wechatOpenId
}

//IsWechatAccountAvailable if wechat account used by another account,return false
//IsWechatAccountUsed if wechat account used by another account,return false
//if wechat account not used or used by the given user,return true
func IsWechatAccountAvailable(userId int64, wechatOpenId string) bool {
func IsWechatAccountUsed(userId int64, wechatOpenId string) bool {
	user := models.GetUserByWechatOpenId(wechatOpenId)
	if user != nil && user.WechatOpenId != "" && user.ID != userId {
		return false


+ 2
- 0
modules/auth/wechat/client.go View File

@@ -95,6 +95,7 @@ func getWechatRestyClient() *resty.Client {
func callAccessToken() *AccessTokenResponse {
	client := getWechatRestyClient()

	log.Info("start to get wechat access token")
	var result AccessTokenResponse
	_, err := client.R().
		SetQueryParam("grant_type", GRANT_TYPE).
@@ -106,6 +107,7 @@ func callAccessToken() *AccessTokenResponse {
		log.Error("get wechat access token failed,e=%v", err)
		return nil
	}
	log.Info("get wechat access token result=%v", result)
	return &result
}




+ 14
- 0
modules/cloudbrain/cloudbrain.go View File

@@ -24,6 +24,7 @@ const (
	CodeMountPath = "/code"
	DataSetMountPath = "/dataset"
	ModelMountPath = "/model"
	PretrainModelMountPath = "/pretrainmodel"
	LogFile = "log.txt"
	BenchMarkMountPath = "/benchmark"
	BenchMarkResourceID = 1
@@ -77,6 +78,8 @@ type GenerateCloudBrainTaskReq struct {
	ModelVersion string
	CkptName string
	LabelName string
	PreTrainModelPath string
	PreTrainModelUrl string
	Spec *models.Specification
}


@@ -276,6 +279,16 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
		},
	}


if req.PreTrainModelUrl != "" { //预训练
volumes = append(volumes, models.Volume{
HostPath: models.StHostPath{
Path: req.PreTrainModelPath,
MountPath: PretrainModelMountPath,
ReadOnly: true,
},
})
}

	if len(req.DatasetInfos) == 1 {
		volumes = append(volumes, models.Volume{
			HostPath: models.StHostPath{
@@ -359,6 +372,7 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
		CkptName: req.CkptName,
		ResultUrl: req.ResultPath,
		LabelName: req.LabelName,
		PreTrainModelUrl: req.PreTrainModelUrl,
		CreatedUnix: createTime,
		UpdatedUnix: createTime,
		CommitID: req.CommitID,


+ 8
- 4
modules/cloudbrain/resty.go View File

@@ -1,6 +1,7 @@
package cloudbrain

import (
	"code.gitea.io/gitea/modules/notification"
	"encoding/json"
	"errors"
	"fmt"
@@ -25,10 +26,10 @@ var (


const (
	JobHasBeenStopped = "S410"
	errInvalidToken = "S401"
	Public = "public"
	Custom = "custom"
	LogPageSize = 500
	errInvalidToken = "S401"
	LogPageTokenExpired = "5m"
	pageSize = 15
	QueuesDetailUrl = "/rest-server/api/v2/queuesdetail"
@@ -144,7 +145,6 @@ sendjob:
	if jobResult.Code != Success {
		return &jobResult, fmt.Errorf("jobResult err: %s", res.String())
	}
	return &jobResult, nil
}


@@ -235,7 +235,7 @@ func getQueryString(page int, size int, name string) string {
	return fmt.Sprintf("pageIndex=%d&pageSize=%d&name=%s", page, size, name)
}

func CommitImage(jobID string, params models.CommitImageParams) error {
func CommitImage(jobID string, params models.CommitImageParams, doer *models.User) error {
	imageTag := strings.TrimSpace(params.ImageTag)

	dbImage, err := models.GetImageByTag(imageTag)
@@ -340,11 +340,12 @@ sendjob:
	})
	if err == nil {
		go updateImageStatus(image, isSetCreatedUnix, createTime)
		notification.NotifyCreateImage(doer, image)
	}
	return err
}

func CommitAdminImage(params models.CommitImageParams) error {
func CommitAdminImage(params models.CommitImageParams, doer *models.User) error {
	imageTag := strings.TrimSpace(params.ImageTag)
	exist, err := models.IsImageExist(imageTag)


@@ -381,6 +382,9 @@ func CommitAdminImage(params models.CommitImageParams) error {
		}
		return nil
	})
	if err == nil {
		notification.NotifyCreateImage(doer, image)
	}
	return err
}




+ 21
- 0
modules/context/point.go View File

@@ -0,0 +1,21 @@
package context

import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/services/reward/point/account"
"gitea.com/macaron/macaron"
)

// PointAccount returns a macaron handler that loads the request user's point account
func PointAccount() macaron.Handler {
return func(ctx *Context) {
a, err := account.GetAccount(ctx.User.ID)
if err != nil {
ctx.ServerError("GetPointAccount", err)
return
}
ctx.Data["PointAccount"] = a
ctx.Data["CloudBrainPaySwitch"] = setting.CloudBrainPaySwitch
ctx.Next()
}
}
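A sketch of how this handler might be mounted on a route group; the route path and page handler name are illustrative, and the actual registration in routers/routes is not shown in this diff.

// Illustrative wiring only; getPointAccountPage is a made-up handler name.
m.Group("/reward/point", func() {
	m.Get("", getPointAccountPage)
}, context.PointAccount())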

+ 26
- 0
modules/cron/tasks_basic.go View File

@@ -5,6 +5,7 @@
package cron

import (
	"code.gitea.io/gitea/services/reward"
	"code.gitea.io/gitea/services/cloudbrain/resource"
	"code.gitea.io/gitea/modules/modelarts"
	"context"
@@ -209,6 +210,28 @@ func registerSyncCloudbrainStatus() {
	})
}


func registerRewardPeriodTask() {
RegisterTaskFatal("reward_period_task", &BaseConfig{
Enabled: true,
RunAtStart: true,
Schedule: "@every 1m",
}, func(ctx context.Context, _ *models.User, _ Config) error {
reward.StartRewardTask()
return nil
})
}

func registerCloudbrainPointDeductTask() {
RegisterTaskFatal("cloudbrain_point_deduct_task", &BaseConfig{
Enabled: true,
RunAtStart: true,
Schedule: "@every 1m",
}, func(ctx context.Context, _ *models.User, _ Config) error {
reward.StartCloudbrainPointDeductTask()
return nil
})
}

func registerSyncResourceSpecs() {
	RegisterTaskFatal("sync_grampus_specs", &BaseConfig{
		Enabled: true,
@@ -253,4 +276,7 @@ func initBasicTasks() {
	registerHandleOrgStatistic()
	registerSyncResourceSpecs()
	registerSyncModelArtsTempJobs()

	//registerRewardPeriodTask()
	registerCloudbrainPointDeductTask()
}

+ 25
- 1
modules/dataset/dataset.go View File

@@ -1,6 +1,10 @@
package dataset


import "code.gitea.io/gitea/models"
import (
"strings"

"code.gitea.io/gitea/models"
)


func GetResourceType(cloudbrainType int) string {
	if cloudbrainType == 0 {
@@ -33,3 +37,23 @@ func IsShowDataSetOfCurrentRepo(repoID int64) bool {
	return true

}

func GetFilterDeletedAttachments(uuids string) (string, string) {
attachments, err := models.GetAttachmentsByUUIDs(strings.Split(uuids, ";"))
if err != nil {
return "", ""
}
uuidR := ""
filenames := ""
for i, attachment := range attachments {
if i == 0 {
uuidR += attachment.UUID
filenames += attachment.Name
} else {
uuidR += ";" + attachment.UUID
filenames += ";" + attachment.Name
}
}
return uuidR, filenames

}
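Typical call-site shape for GetFilterDeletedAttachments when re-rendering a form whose stored attachments may have been deleted since the task was created (a sketch with assumed template keys):

// Fragment; the ctx.Data keys are illustrative. task.Uuid holds the original
// ";"-separated attachment UUIDs stored on the cloudbrain task.
uuids, names := dataset.GetFilterDeletedAttachments(task.Uuid)
ctx.Data["attachment"] = uuids
ctx.Data["dataset_name"] = names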

+ 22
- 0
modules/eventsource/manager_run.go View File

@@ -5,6 +5,7 @@
package eventsource

import (
	"code.gitea.io/gitea/services/reward"
	"context"
	"time"


@@ -24,8 +25,28 @@ func (m *Manager) Init() {
func (m *Manager) Run(ctx context.Context) {
	then := timeutil.TimeStampNow().Add(-2)
	timer := time.NewTicker(setting.UI.Notification.EventSourceUpdateTime)
rewardThen := then
rewardTimer := time.NewTicker(setting.UI.Notification.RewardNotifyUpdateTime)
loop:
	for {
select {
case <-rewardTimer.C:
log.Debug("rewardTimer run")
now := timeutil.TimeStampNow().Add(-2)
list := reward.GetRewardOperation(rewardThen, now)
if list != nil {
log.Debug("GetRewardOperation list=%v", list)
for _, l := range list {
m.SendMessage(l.UserId, &Event{
Name: "reward-operation",
Data: l.Msg,
})
}
}

rewardThen = now
}

		select {
		case <-ctx.Done():
			timer.Stop()
@@ -44,6 +65,7 @@ loop:
				})
			}
			then = now
		default:
		}
	}
	m.UnregisterAll()


+ 56
- 5
modules/grampus/grampus.go View File

@@ -22,9 +22,6 @@ const (
	GpuWorkDir = "/tmp/"
	NpuWorkDir = "/cache/"


CommandPrepareScript = ";mkdir -p output;mkdir -p code;mkdir -p dataset;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/script_for_grampus/archive/master.zip;" +
"echo \"finish loading script\";unzip -q master.zip;cd script_for_grampus;chmod 777 downloader_for_obs uploader_for_npu downloader_for_minio uploader_for_gpu;"

	CodeArchiveName = "master.zip"
)


@@ -34,6 +31,9 @@ var (
	ImageInfos *setting.StImageInfosModelArts

	SpecialPools *models.SpecialPools

CommandPrepareScript = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/%s/archive/master.zip;" +
"echo \"finish loading script\";unzip -q master.zip;cd %s;chmod 777 downloader_for_obs uploader_for_npu downloader_for_minio uploader_for_gpu;"
)

type GenerateTrainJobReq struct {
@@ -62,16 +62,60 @@ type GenerateTrainJobReq struct {
	TotalVersionCount int
	ComputeResource string
	ProcessType string
	DatasetName string
	DatasetNames string
	DatasetInfos map[string]models.DatasetInfo
	Params string
ModelName string
LabelName string
CkptName string
ModelVersion string
PreTrainModelPath string
PreTrainModelUrl string
	Spec *models.Specification
}


func getEndPoint() string {
index := strings.Index(setting.Endpoint, "//")
endpoint := setting.Endpoint[index+2:]
return endpoint
}

func getDatasetGrampus(datasetInfos map[string]models.DatasetInfo) []models.GrampusDataset {
var datasetGrampus []models.GrampusDataset
endPoint := getEndPoint()
for _, datasetInfo := range datasetInfos {
datasetGrampus = append(datasetGrampus, models.GrampusDataset{
Name: datasetInfo.FullName,
Bucket: setting.Bucket,
EndPoint: endPoint,
ObjectKey: datasetInfo.DataLocalPath + datasetInfo.FullName,
})

}
return datasetGrampus
}

func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error) {
	createTime := timeutil.TimeStampNow()

	centerID, centerName := getCentersParamter(ctx, req)


var datasetGrampus, modelGrampus []models.GrampusDataset
if ProcessorTypeNPU == req.ProcessType {
datasetGrampus = getDatasetGrampus(req.DatasetInfos)
if len(req.ModelName) != 0 {
modelGrampus = []models.GrampusDataset{
{
Name: req.ModelName,
Bucket: setting.Bucket,
EndPoint: getEndPoint(),
ObjectKey: req.PreTrainModelPath,
},
}
}
}

	jobResult, err := createJob(models.CreateGrampusJobRequest{
		Name: req.JobName,
		Tasks: []models.GrampusTasks{
@@ -84,6 +128,8 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
				CenterID: centerID,
				CenterName: centerName,
				ReplicaNum: 1,
				Datasets: datasetGrampus,
				Models: modelGrampus,
			},
		},
	})
@@ -103,7 +149,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
		JobType: string(models.JobTypeTrain),
		Type: models.TypeC2Net,
		Uuid: req.Uuid,
		DatasetName: req.DatasetName,
		DatasetName: req.DatasetNames,
		CommitID: req.CommitID,
		IsLatestVersion: req.IsLatestVersion,
		ComputeResource: req.ComputeResource,
@@ -121,6 +167,11 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
		CreatedUnix: createTime,
		UpdatedUnix: createTime,
		Spec: req.Spec,
ModelName: req.ModelName,
ModelVersion: req.ModelVersion,
LabelName: req.LabelName,
PreTrainModelUrl: req.PreTrainModelUrl,
CkptName: req.CkptName,
	})

	if err != nil {


+ 6
- 5
modules/grampus/resty.go View File

@@ -1,14 +1,15 @@
package grampus

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"crypto/tls" "crypto/tls"
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/go-resty/resty/v2"
"net/http" "net/http"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"github.com/go-resty/resty/v2"
)

var (
@@ -236,7 +237,7 @@ func GetTrainJobLog(jobID string) (string, error) {
return logContent, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) return logContent, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error())
} }
log.Error("GetTrainJobLog failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) log.Error("GetTrainJobLog failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg)
return logContent, fmt.Errorf("GetTrainJobLog failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg)
return logContent, fmt.Errorf("GetTrainJobLog failed(%d):%d(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg)
} }


logContent = res.String() logContent = res.String()


+ 18
- 2
modules/modelarts/modelarts.go View File

@@ -104,6 +104,11 @@ type GenerateTrainJobReq struct {
UserCommand string UserCommand string
DatasetName string DatasetName string
Spec *models.Specification Spec *models.Specification
ModelName string
LabelName string
CkptName string
ModelVersion string
PreTrainModelUrl string
} }


type GenerateInferenceJobReq struct { type GenerateInferenceJobReq struct {
@@ -148,8 +153,9 @@ type VersionInfo struct {


type Flavor struct { type Flavor struct {
Info []struct { Info []struct {
Code string `json:"code"`
Value string `json:"value"`
Code string `json:"code"`
Value string `json:"value"`
UnitPrice int64 `json:"unitPrice"`
} `json:"flavor"` } `json:"flavor"`
} }


@@ -439,6 +445,11 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
CreatedUnix: createTime, CreatedUnix: createTime,
UpdatedUnix: createTime, UpdatedUnix: createTime,
Spec: req.Spec, Spec: req.Spec,
ModelName: req.ModelName,
ModelVersion: req.ModelVersion,
LabelName: req.LabelName,
PreTrainModelUrl: req.PreTrainModelUrl,
CkptName: req.CkptName,
}) })


if createErr != nil { if createErr != nil {
@@ -588,6 +599,11 @@ func GenerateTrainJobVersion(ctx *context.Context, req *GenerateTrainJobReq, job
CreatedUnix: createTime, CreatedUnix: createTime,
UpdatedUnix: createTime, UpdatedUnix: createTime,
Spec: req.Spec, Spec: req.Spec,
ModelName: req.ModelName,
ModelVersion: req.ModelVersion,
LabelName: req.LabelName,
PreTrainModelUrl: req.PreTrainModelUrl,
CkptName: req.CkptName,
}) })
if createErr != nil { if createErr != nil {
log.Error("CreateCloudbrain(%s) failed:%v", req.JobName, createErr.Error()) log.Error("CreateCloudbrain(%s) failed:%v", req.JobName, createErr.Error())


+ 76
- 0
modules/notification/action/action.go View File

@@ -5,6 +5,7 @@
package action package action


import (
	"code.gitea.io/gitea/modules/auth"
	"encoding/json"
	"fmt"
	"path"
@@ -345,3 +346,78 @@ func (a *actionNotifier) NotifyOtherTask(doer *models.User, repo *models.Reposit
log.Error("notifyWatchers: %v", err) log.Error("notifyWatchers: %v", err)
} }
} }

func (t *actionNotifier) NotifyWechatBind(user *models.User, wechatOpenId string) {
act := &models.Action{
ActUserID: user.ID,
ActUser: user,
OpType: models.ActionBindWechat,
IsPrivate: true,
Content: wechatOpenId,
}
if err := models.NotifyWatchers(act); err != nil {
log.Error("notifyWatchers: %v", err)
}
}

func (t *actionNotifier) NotifyDatasetRecommend(optUser *models.User, dataset *models.Dataset, action string) {
switch action {
case "recommend":
act := &models.Action{
OpType: models.ActionDatasetRecommended,
ActUserID: dataset.UserID,
RepoID: dataset.RepoID,
IsPrivate: false,
Content: fmt.Sprintf("%d|%s", dataset.ID, dataset.Title),
}

if err := models.NotifyWatchers(act); err != nil {
log.Error("notifyWatchers: %v", err)
}
}
}

func (t *actionNotifier) NotifyCreateImage(doer *models.User, image models.Image) {
act := &models.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: models.ActionCreateImage,
IsPrivate: image.IsPrivate,
Content: fmt.Sprintf("%d|%s", image.ID, image.Tag),
}
if err := models.NotifyWatchers(act); err != nil {
log.Error("notifyWatchers: %v", err)
}
}

func (t *actionNotifier) NotifyImageRecommend(optUser *models.User, image *models.Image, action string) {
u, err := models.GetUserByID(image.UID)
if err != nil {
return
}
switch action {
case "recommend":
act := &models.Action{
ActUserID: u.ID,
ActUser: u,
OpType: models.ActionImageRecommend,
IsPrivate: false,
Content: fmt.Sprintf("%d|%s", image.ID, image.Tag),
}
if err := models.NotifyWatchers(act); err != nil {
log.Error("notifyWatchers: %v", err)
}
}
}

func (t *actionNotifier) NotifyChangeUserAvatar(user *models.User, form auth.AvatarForm) {
act := &models.Action{
ActUserID: user.ID,
ActUser: user,
OpType: models.ActionChangeUserAvatar,
IsPrivate: true,
}
if err := models.NotifyWatchers(act); err != nil {
log.Error("notifyWatchers: %v", err)
}
}

+ 6
- 0
modules/notification/base/notifier.go View File

@@ -6,6 +6,7 @@ package base


import (
	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/auth"
	"code.gitea.io/gitea/modules/repository"
)


@@ -56,6 +57,11 @@ type Notifier interface {
	NotifySyncDeleteRef(doer *models.User, repo *models.Repository, refType, refFullName string)

	NotifyOtherTask(doer *models.User, repo *models.Repository, id string, name string, optype models.ActionType)
NotifyWechatBind(user *models.User, wechatOpenId string)
NotifyDatasetRecommend(optUser *models.User, dataset *models.Dataset, action string)
NotifyCreateImage(doer *models.User, image models.Image)
NotifyImageRecommend(optUser *models.User, image *models.Image, action string)
NotifyChangeUserAvatar(user *models.User, form auth.AvatarForm)


	NotifyChangeCloudbrainStatus(cloudbrain *models.Cloudbrain, oldStatus string)
}

+ 18
- 0
modules/notification/base/null.go View File

@@ -6,6 +6,7 @@ package base


import (
	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/auth"
	"code.gitea.io/gitea/modules/repository"
)


@@ -159,6 +160,23 @@ func (*NullNotifier) NotifyOtherTask(doer *models.User, repo *models.Repository,


}


func (*NullNotifier) NotifyWechatBind(user *models.User, wechatOpenId string) {

}

func (*NullNotifier) NotifyDatasetRecommend(optUser *models.User, dataset *models.Dataset, action string) {
}

func (*NullNotifier) NotifyCreateImage(doer *models.User, image models.Image) {
}

func (*NullNotifier) NotifyImageRecommend(optUser *models.User, image *models.Image, action string) {
}

func (*NullNotifier) NotifyChangeUserAvatar(user *models.User, form auth.AvatarForm) {

}

func (*NullNotifier) NotifyChangeCloudbrainStatus(cloudbrain *models.Cloudbrain, oldStatus string) {

}

+ 38
- 0
modules/notification/notification.go View File

@@ -6,10 +6,12 @@ package notification


import (
	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/auth"
	"code.gitea.io/gitea/modules/notification/action"
	"code.gitea.io/gitea/modules/notification/base"
	"code.gitea.io/gitea/modules/notification/indexer"
	"code.gitea.io/gitea/modules/notification/mail"
	"code.gitea.io/gitea/modules/notification/reward"
	"code.gitea.io/gitea/modules/notification/ui"
	"code.gitea.io/gitea/modules/notification/webhook"
	wechatNotifier "code.gitea.io/gitea/modules/notification/wechat"
@@ -37,6 +39,7 @@ func NewContext() {
	RegisterNotifier(webhook.NewNotifier())
	RegisterNotifier(action.NewNotifier())
	RegisterNotifier(wechatNotifier.NewNotifier())
	RegisterNotifier(reward.NewNotifier())
}

// NotifyUploadAttachment notifies attachment upload message to notifiers
@@ -272,6 +275,41 @@ func NotifySyncDeleteRef(pusher *models.User, repo *models.Repository, refType,
	}
}


// NotifyWechatBind notifies wechat bind
func NotifyWechatBind(user *models.User, wechatOpenId string) {
for _, notifier := range notifiers {
notifier.NotifyWechatBind(user, wechatOpenId)
}
}

// NotifyDatasetRecommend
func NotifyDatasetRecommend(optUser *models.User, dataset *models.Dataset, action string) {
for _, notifier := range notifiers {
notifier.NotifyDatasetRecommend(optUser, dataset, action)
}
}

// NotifyCreateImage notifies image creation to notifiers
func NotifyCreateImage(doer *models.User, image models.Image) {
for _, notifier := range notifiers {
notifier.NotifyCreateImage(doer, image)
}
}

// NotifyImageRecommend notifies image recommendation to notifiers
func NotifyImageRecommend(optUser *models.User, image *models.Image, action string) {
for _, notifier := range notifiers {
notifier.NotifyImageRecommend(optUser, image, action)
}
}

// NotifyChangeUserAvatar notifies user avatar change to notifiers
func NotifyChangeUserAvatar(user *models.User, form auth.AvatarForm) {
for _, notifier := range notifiers {
notifier.NotifyChangeUserAvatar(user, form)
}
}

// NotifyChangeCloudbrainStatus
func NotifyChangeCloudbrainStatus(cloudbrain *models.Cloudbrain, oldStatus string) {
	for _, notifier := range notifiers {


+ 27
- 0
modules/notification/reward/point.go View File

@@ -0,0 +1,27 @@
package reward

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/notification/base"
)

type pointNotifier struct {
base.NullNotifier
}

var (
_ base.Notifier = &pointNotifier{}
)

// NewNotifier creates a new pointNotifier notifier
func NewNotifier() base.Notifier {
return &pointNotifier{}
}

func (*pointNotifier) NotifyChangeCloudbrainStatus(cloudbrain *models.Cloudbrain, oldStatus string) {
log.Info("pointNotifier NotifyChangeCloudbrainStatus cloudbrain.id=%d cloudbrain.status=%s oldStatus=%s", cloudbrain.ID, cloudbrain.Status, oldStatus)
if cloudbrain.IsRunning() || cloudbrain.IsTerminal() {
models.StatusChangeChan <- cloudbrain
}
}

+ 83
- 1
modules/redis/redis_client/client.go View File

@@ -76,7 +76,7 @@ func HEXISTS(conn redis.Conn, key string, subKey string) (bool, error) {


}

func Expire(conn redis.Conn, key string, seconds int) error {
func EXPIRE(conn redis.Conn, key string, seconds int) error {
	_, err := conn.Do("EXPIRE", key, seconds)
	return err


@@ -145,3 +145,85 @@ func TTL(key string) (int, error) {
	return n, nil

}

func IncrBy(key string, n int64) (int64, error) {
redisClient := labelmsg.Get()
defer redisClient.Close()

reply, err := redisClient.Do("INCRBY", key, n)
if err != nil {
return 0, err
}
i, err := strconv.ParseInt(fmt.Sprint(reply), 10, 64)
	return i, err

}

func Expire(key string, expireTime time.Duration) error {
redisClient := labelmsg.Get()
defer redisClient.Close()

_, err := redisClient.Do("EXPIRE", key, int64(expireTime.Seconds()))
if err != nil {
return err
}
return nil

}

//GetInt64 gets the redis value by Get(key) and parses it to int64
//return {isExist(bool)} {value(int64)} {error(error)}
func GetInt64(key string) (bool, int64, error) {
str, err := Get(key)
if err != nil {
return false, 0, err
}
if str == "" {
return false, 0, nil
}

i, err := strconv.ParseInt(str, 10, 64)
if err != nil {
return false, 0, err
}
return true, i, nil

}

func ZAdd(key, value string, score float64) error {
redisClient := labelmsg.Get()
defer redisClient.Close()

_, err := redisClient.Do("ZADD", key, score, value)
if err != nil {
return err
}
return nil
}

func ZRangeByScore(key string, min, max float64) ([]string, error) {
redisClient := labelmsg.Get()
defer redisClient.Close()

reply, err := redisClient.Do("ZRANGEBYSCORE", key, min, max)
if err != nil {
return nil, err
}
if reply == nil {
return nil, err
}
s, _ := redis.Strings(reply, nil)
return s, nil
}

func ZRemRangeByScore(key string, min, max float64) error {
redisClient := labelmsg.Get()
defer redisClient.Close()

_, err := redisClient.Do("ZREMRANGEBYSCORE", key, min, max)
if err != nil {
return err
}
return nil
}
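A short sketch of the intended usage pattern for these ZSET helpers as a time-indexed queue (scores are unix timestamps); the key name is illustrative.

// Sketch: enqueue, read back a recent window, and prune old entries.
package redis_client

import "time"

func queueExample() ([]string, error) {
	key := "reward:operate:example:notification" // illustrative key
	now := time.Now().Unix()

	// enqueue a message scored by its creation time
	if err := ZAdd(key, "msg-1", float64(now)); err != nil {
		return nil, err
	}
	// read everything queued in the last two seconds
	msgs, err := ZRangeByScore(key, float64(now-2), float64(now))
	if err != nil {
		return nil, err
	}
	// drop entries older than one hour
	return msgs, ZRemRangeByScore(key, 0, float64(now-3600))
}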

+ 17
- 0
modules/redis/redis_key/account_redis_key.go View File

@@ -0,0 +1,17 @@
package redis_key

import "fmt"

const ACCOUNT_REDIS_PREFIX = "account"

func PointAccountOperateLock(userId int64) string {
return KeyJoin(ACCOUNT_REDIS_PREFIX, fmt.Sprint(userId), "point", "operate", "lock")
}

func PointAccountInfo(userId int64) string {
return KeyJoin(ACCOUNT_REDIS_PREFIX, fmt.Sprint(userId), "info")
}

func PointAccountInitLock(userId int64) string {
return KeyJoin(ACCOUNT_REDIS_PREFIX, fmt.Sprint(userId), "init", "lock")
}

+ 7
- 0
modules/redis/redis_key/cloudbrain_redis_key.go View File

@@ -0,0 +1,7 @@
package redis_key

const CLOUDBRAIN_PREFIX = "cloudbrain"

func CloudbrainBindingJobNameKey(repoId string, jobType string, jobName string) string {
return KeyJoin(CLOUDBRAIN_PREFIX, repoId, jobType, jobName, "redis_key")
}

+ 2
- 0
modules/redis/redis_key/key_base.go View File

@@ -4,6 +4,8 @@ import "strings"


const KEY_SEPARATE = ":"

const EMPTY_REDIS_VAL = "Nil"

func KeyJoin(keys ...string) string {
	var build strings.Builder
	for _, v := range keys {


+ 26
- 0
modules/redis/redis_key/limit_redis_key.go View File

@@ -0,0 +1,26 @@
package redis_key

import (
"code.gitea.io/gitea/models"
"fmt"
)

const LIMIT_REDIS_PREFIX = "limit"

func LimitCount(userId int64, limitCode string, limitType string, scope string, period *models.PeriodResult) string {
if scope == models.LimitScopeAllUsers.Name() {
if period == nil {
return KeyJoin(LIMIT_REDIS_PREFIX, limitCode, limitType, "count")
}
return KeyJoin(LIMIT_REDIS_PREFIX, limitCode, limitType, fmt.Sprint(period.StartTime.Unix()), fmt.Sprint(period.EndTime.Unix()), "count")
}
if period == nil {
return KeyJoin(LIMIT_REDIS_PREFIX, "uid", fmt.Sprint(userId), limitCode, limitType, "count")
}
return KeyJoin(LIMIT_REDIS_PREFIX, "uid", fmt.Sprint(userId), limitCode, limitType, fmt.Sprint(period.StartTime.Unix()), fmt.Sprint(period.EndTime.Unix()), "count")

}

func LimitConfig(limitType string) string {
return KeyJoin(LIMIT_REDIS_PREFIX, limitType, "config")
}
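A sketch of how a limiter key is typically combined with the IncrBy/Expire helpers added above; the concrete user id, task code and expiry are examples, and the resulting key layout depends on LimitTypeTask.Name() and the configured scope.

// Fragment, not from this diff:
key := redis_key.LimitCount(7, string(models.TaskCreateIssue), models.LimitTypeTask.Name(), models.LimitScopeAllUsers.Name(), nil)
count, _ := redis_client.IncrBy(key, 1)   // bump the counter for this window
_ = redis_client.Expire(key, 24*time.Hour) // make sure the counter eventually expires
_ = count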

+ 21
- 0
modules/redis/redis_key/reward_redis_key.go View File

@@ -0,0 +1,21 @@
package redis_key

import (
"code.gitea.io/gitea/modules/setting"
"fmt"
"strings"
)

const REWARD_REDIS_PREFIX = "reward"

func RewardOperateLock(requestId string, sourceType string, operateType string) string {
return KeyJoin(REWARD_REDIS_PREFIX, requestId, sourceType, operateType, "send")
}

func RewardOperateNotification() string {
return KeyJoin(REWARD_REDIS_PREFIX, "operate", strings.ReplaceAll(setting.AppURL, "/", ""), "notification")
}

func RewardTaskRunningLock(taskId int64) string {
return KeyJoin(REWARD_REDIS_PREFIX, "periodic_task", fmt.Sprint(taskId), "lock")
}
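A sketch of how this lock key is meant to be used together with the reworked DistributeLock API further down, where Lock now returns (bool, error) so a Redis failure is surfaced instead of being silently treated as a failed lock; the 30 second expiry is an example.

// Fragment, not from this diff; taskId comes from the periodic task being run.
lock := redis_lock.NewDistributeLock(redis_key.RewardTaskRunningLock(taskId))
ok, err := lock.Lock(30 * time.Second)
if err != nil {
	return err // Redis unavailable: report it instead of assuming the lock failed
}
if !ok {
	return nil // another instance holds the lock; skip this round
}
defer lock.UnLock()
// ... do the periodic work under the lock ...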

+ 10
- 0
modules/redis/redis_key/serial_redis_key.go View File

@@ -0,0 +1,10 @@
package redis_key

import "time"

const SERIAL_REDIS_PREFIX = "serial"

func RewardSerialCounter(now time.Time) string {
h := now.Format("200601021504")
return KeyJoin(SERIAL_REDIS_PREFIX, "reward_operate", h, "counter")
}

+ 14
- 0
modules/redis/redis_key/task_redis_key.go View File

@@ -0,0 +1,14 @@
package redis_key

const TASK_REDIS_PREFIX = "task"

func TaskAccomplishLock(sourceId string, taskType string) string {
return KeyJoin(TASK_REDIS_PREFIX, sourceId, taskType, "accomplish")
}

func TaskConfigList() string {
return KeyJoin(TASK_REDIS_PREFIX, "config", "list")
}
func TaskConfigOperateLock(taskCode, rewardType string) string {
return KeyJoin(TASK_REDIS_PREFIX, "config", "operate", "lock")
}

+ 16
- 9
modules/redis/redis_lock/lock.go View File

@@ -1,8 +1,9 @@
package redis_lock

import (
	"code.gitea.io/gitea/modules/redis/redis_client"
	"time"

	"code.gitea.io/gitea/modules/redis/redis_client"
)

type DistributeLock struct {
@@ -13,26 +14,32 @@ func NewDistributeLock(lockKey string) *DistributeLock {
	return &DistributeLock{lockKey: lockKey}
}

func (lock *DistributeLock) Lock(expireTime time.Duration) bool {
	isOk, _ := redis_client.Setnx(lock.lockKey, "", expireTime)
	return isOk
func (lock *DistributeLock) Lock(expireTime time.Duration) (bool, error) {
	isOk, err := redis_client.Setnx(lock.lockKey, "", expireTime)
	if err != nil {
		return false, err
	}
	return isOk, nil
}

func (lock *DistributeLock) LockWithWait(expireTime time.Duration, waitTime time.Duration) bool {
func (lock *DistributeLock) LockWithWait(expireTime time.Duration, waitTime time.Duration) (bool, error) {
	start := time.Now().Unix() * 1000
	duration := waitTime.Milliseconds()
	for {
		isOk, _ := redis_client.Setnx(lock.lockKey, "", expireTime)
		isOk, err := redis_client.Setnx(lock.lockKey, "", expireTime)
		if err != nil {
			return false, err
		}
		if isOk {
			return true
			return true, nil
		}
		if time.Now().Unix()*1000-start > duration {
			return false
			return false, nil
		}
		time.Sleep(50 * time.Millisecond)
	}

	return false
	return false, nil
}

func (lock *DistributeLock) UnLock() error {


+ 45
- 24
modules/setting/setting.go View File

@@ -66,9 +66,10 @@ const (
)

type C2NetSequenceInfo struct {
ID int `json:"id"`
Name string `json:"name"`
Content string `json:"content"`
ID int `json:"id"`
Name string `json:"name"`
Content string `json:"content"`
ContentEN string `json:"content_en"`
}

type C2NetSqInfos struct {
@@ -214,10 +215,11 @@ var (
		UseServiceWorker bool

		Notification struct {
MinTimeout time.Duration
TimeoutStep time.Duration
MaxTimeout time.Duration
EventSourceUpdateTime time.Duration
MinTimeout time.Duration
TimeoutStep time.Duration
MaxTimeout time.Duration
EventSourceUpdateTime time.Duration
RewardNotifyUpdateTime time.Duration
		} `ini:"ui.notification"`

		Admin struct {
@@ -251,15 +253,17 @@ var (
		Themes: []string{`gitea`, `arc-green`},
		Reactions: []string{`+1`, `-1`, `laugh`, `hooray`, `confused`, `heart`, `rocket`, `eyes`},
		Notification: struct {
MinTimeout time.Duration
TimeoutStep time.Duration
MaxTimeout time.Duration
EventSourceUpdateTime time.Duration
MinTimeout time.Duration
TimeoutStep time.Duration
MaxTimeout time.Duration
EventSourceUpdateTime time.Duration
RewardNotifyUpdateTime time.Duration
		}{
MinTimeout: 10 * time.Second,
TimeoutStep: 10 * time.Second,
MaxTimeout: 60 * time.Second,
EventSourceUpdateTime: 10 * time.Second,
MinTimeout: 10 * time.Second,
TimeoutStep: 10 * time.Second,
MaxTimeout: 60 * time.Second,
EventSourceUpdateTime: 10 * time.Second,
RewardNotifyUpdateTime: 2 * time.Second,
		},
		Admin: struct {
			UserPagingNum int
@@ -583,12 +587,13 @@ var (


	//grampus config
	Grampus = struct {
Env string
Host string
UserName string
Password string
SpecialPools string
C2NetSequence string
Env string
Host string
UserName string
Password string
SpecialPools string
C2NetSequence string
SyncScriptProject string
	}{}

	C2NetInfos *C2NetSqInfos
@@ -610,6 +615,13 @@ var (
	WechatQRCodeExpireSeconds int
	WechatAuthSwitch bool


//point config
CloudBrainPaySwitch bool
CloudBrainPayDelay time.Duration
CloudBrainPayInterval time.Duration
DeductTaskRange time.Duration
DeductTaskRangeForFirst time.Duration

	//wechat auto reply config
	UserNameOfWechatReply string
	RepoNameOfWechatReply string
@@ -1464,7 +1476,7 @@ func NewContext() {
	FlavorInfos = sec.Key("FLAVOR_INFOS").MustString("")
	TrainJobFLAVORINFOS = sec.Key("TrainJob_FLAVOR_INFOS").MustString("")
	ModelArtsSpecialPools = sec.Key("SPECIAL_POOL").MustString("")
ModelArtsMultiNode=sec.Key("MULTI_NODE").MustString("")
ModelArtsMultiNode = sec.Key("MULTI_NODE").MustString("")


sec = Cfg.Section("elk") sec = Cfg.Section("elk")
ElkUrl = sec.Key("ELKURL").MustString("") ElkUrl = sec.Key("ELKURL").MustString("")
@@ -1481,12 +1493,13 @@ func NewContext() {
	WechatAppId = sec.Key("APP_ID").MustString("wxba77b915a305a57d")
	WechatAppSecret = sec.Key("APP_SECRET").MustString("")
	WechatQRCodeExpireSeconds = sec.Key("QR_CODE_EXPIRE_SECONDS").MustInt(120)
WechatAuthSwitch = sec.Key("AUTH_SWITCH").MustBool(true)
WechatAuthSwitch = sec.Key("AUTH_SWITCH").MustBool(false)
	UserNameOfWechatReply = sec.Key("AUTO_REPLY_USER_NAME").MustString("OpenIOSSG")
	RepoNameOfWechatReply = sec.Key("AUTO_REPLY_REPO_NAME").MustString("promote")
	RefNameOfWechatReply = sec.Key("AUTO_REPLY_REF_NAME").MustString("master")
	TreePathOfAutoMsgReply = sec.Key("AUTO_REPLY_TREE_PATH").MustString("wechat/auto_reply.json")
	TreePathOfSubscribe = sec.Key("SUBSCRIBE_TREE_PATH").MustString("wechat/subscribe_reply.json")
WechatAuthSwitch = sec.Key("AUTH_SWITCH").MustBool(false)
	CloudbrainStartedTemplateId = sec.Key("CLOUDBRAIN_STARTED_TEMPLATE_ID").MustString("")
	CloudbrainStartedNotifyList = strings.Split(sec.Key("CLOUDBRAIN_STARTED_NOTIFY_LIST").MustString("DEBUG"), ",")
	CloudbrainStartedTitle = sec.Key("CLOUDBRAIN_STARTED_TITLE").MustString("您好,您提交的算力资源申请已通过,任务已启动,请您关注运行情况。")
@@ -1496,6 +1509,12 @@ func NewContext() {
	CloudbrainStoppedTitle = sec.Key("CLOUDBRAIN_STOPPED_TITLE").MustString("您好,您申请的算力资源已结束使用,任务已完成运行,状态为%s,请您关注运行结果")
	CloudbrainStoppedRemark = sec.Key("CLOUDBRAIN_STOPPED_REMARK").MustString("感谢您的耐心等待。")


sec = Cfg.Section("point")
CloudBrainPaySwitch = sec.Key("CLOUDBRAIN_PAY_SWITCH").MustBool(false)
CloudBrainPayDelay = sec.Key("CLOUDBRAIN_PAY_DELAY").MustDuration(30 * time.Minute)
CloudBrainPayInterval = sec.Key("CLOUDBRAIN_PAY_INTERVAL").MustDuration(60 * time.Minute)
DeductTaskRange = sec.Key("DEDUCT_TASK_RANGE").MustDuration(30 * time.Minute)
DeductTaskRangeForFirst = sec.Key("DEDUCT_TASK_RANGE_FOR_FIRST").MustDuration(3 * time.Hour)
	SetRadarMapConfig()

	sec = Cfg.Section("warn_mail")
@@ -1552,12 +1571,14 @@ func getGrampusConfig() {
	Grampus.UserName = sec.Key("USERNAME").MustString("")
	Grampus.Password = sec.Key("PASSWORD").MustString("")
	Grampus.SpecialPools = sec.Key("SPECIAL_POOL").MustString("")
Grampus.C2NetSequence = sec.Key("C2NET_SEQUENCE").MustString("{\"sequence\":[{\"id\":1,\"name\":\"cloudbrain_one\",\"content\":\"鹏城云脑一号\"},{\"id\":2,\"name\":\"cloudbrain_two\",\"content\":\"鹏城云脑二号\"},{\"id\":3,\"name\":\"beida\",\"content\":\"北大人工智能集群系统\"},{\"id\":4,\"name\":\"hefei\",\"content\":\"合肥类脑智能开放平台\"},{\"id\":5,\"name\":\"wuhan\",\"content\":\"武汉人工智能计算中心\"},{\"id\":6,\"name\":\"xian\",\"content\":\"西安未来人工智能计算中心\"},{\"id\":7,\"pclcci\":\"more\",\"content\":\"鹏城云计算所\"},{\"id\":8,\"name\":\"xuchang\",\"content\":\"中原人工智能计算中心\"},{\"id\":9,\"name\":\"chengdu\",\"content\":\"成都人工智能计算中心\"},{\"id\":10,\"name\":\"more\",\"content\":\"横琴先进智能计算中心\"},{\"id\":11,\"name\":\"more\",\"content\":\"国家超级计算济南中心\"}]}")
Grampus.C2NetSequence = sec.Key("C2NET_SEQUENCE").MustString("{\"sequence\":[{\"id\":1,\"name\":\"cloudbrain_one\",\"content\":\"鹏城云脑一号\",\"content_en\":\"Pencheng Cloudbrain Ⅰ\"},{\"id\":2,\"name\":\"cloudbrain_two\",\"content\":\"鹏城云脑二号\",\"content_en\":\"Pencheng Cloudbrain Ⅱ\"},{\"id\":3,\"name\":\"beida\",\"content\":\"北大人工智能集群系统\",\"content_en\":\"Peking University AI Center\"},{\"id\":4,\"name\":\"hefei\",\"content\":\"合肥类脑智能开放平台\",\"content_en\":\"Hefei AI Center\"},{\"id\":5,\"name\":\"wuhan\",\"content\":\"武汉人工智能计算中心\",\"content_en\":\"Wuhan AI Center\"},{\"id\":6,\"name\":\"xian\",\"content\":\"西安未来人工智能计算中心\",\"content_en\":\"Xi'an AI Center\"},{\"id\":7,\"pclcci\":\"more\",\"content\":\"鹏城云计算所\",\"content_en\":\"Pengcheng Cloud Computing Institute\"},{\"id\":8,\"name\":\"xuchang\",\"content\":\"中原人工智能计算中心\",\"content_en\":\"Zhongyuan AI Center\"},{\"id\":9,\"name\":\"chengdu\",\"content\":\"成都人工智能计算中心\",\"content_en\":\"Chengdu AI Center\"},{\"id\":10,\"name\":\"more\",\"content\":\"横琴先进智能计算中心\",\"content_en\":\"Hengqin AI Center\"},{\"id\":11,\"name\":\"more\",\"content\":\"国家超级计算济南中心\",\"content_en\":\"HPC & AI Center\"}]}")
if Grampus.C2NetSequence != "" {
if err := json.Unmarshal([]byte(Grampus.C2NetSequence), &C2NetInfos); err != nil {
log.Error("Unmarshal(C2NetSequence) failed:%v", err)
}
}
Grampus.SyncScriptProject = sec.Key("SYNC_SCRIPT_PROJECT").MustString("script_for_grampus")

}


func SetRadarMapConfig() {
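The new [point] keys above are read with the same go-ini style accessors NewContext() uses for the other sections. A minimal, standalone sketch of how those defaults resolve, assuming the gopkg.in/ini.v1 package and an app.ini fragment that only overrides CLOUDBRAIN_PAY_DELAY (both are assumptions, not part of this patch):

```go
package main

import (
	"fmt"
	"time"

	ini "gopkg.in/ini.v1"
)

func main() {
	// Hypothetical app.ini fragment; every key that is absent falls back to
	// the Must* default, as in the hunk above.
	src := []byte("[point]\nCLOUDBRAIN_PAY_DELAY = 45m\n")
	cfg, err := ini.Load(src)
	if err != nil {
		panic(err)
	}
	sec := cfg.Section("point")
	paySwitch := sec.Key("CLOUDBRAIN_PAY_SWITCH").MustBool(false)
	payDelay := sec.Key("CLOUDBRAIN_PAY_DELAY").MustDuration(30 * time.Minute)
	payInterval := sec.Key("CLOUDBRAIN_PAY_INTERVAL").MustDuration(60 * time.Minute)
	fmt.Println(paySwitch, payDelay, payInterval) // false 45m0s 1h0m0s
}
```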


+ 1
- 1
modules/templates/helper.go

@@ -791,7 +791,7 @@ func GetRefName(ref string) string {
return reg.ReplaceAllString(ref, "")
}


func MB2GB(size int64) string {
func MB2GB(size int) string {
s := strconv.FormatFloat(float64(size)/float64(1024), 'f', 2, 64)
for strings.HasSuffix(s, "0") {
s = strings.TrimSuffix(s, "0")
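The hunk stops before MB2GB returns. A standalone sketch of the whole helper (shown here with the int parameter variant); the trailing-dot trim and the bare return value are assumptions about the part of the function that is not in the diff:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// mb2GB mirrors the helper above: format a size in MB as GB with two
// decimals, then strip trailing zeros and, by assumption, a trailing dot.
func mb2GB(size int) string {
	s := strconv.FormatFloat(float64(size)/float64(1024), 'f', 2, 64)
	for strings.HasSuffix(s, "0") {
		s = strings.TrimSuffix(s, "0")
	}
	s = strings.TrimSuffix(s, ".")
	return s
}

func main() {
	fmt.Println(mb2GB(1024)) // "1"
	fmt.Println(mb2GB(1536)) // "1.5"
}
```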


+ 10
- 0
modules/util/uuid_util.go

@@ -0,0 +1,10 @@
package util

import (
gouuid "github.com/satori/go.uuid"
"strings"
)

func UUID() string {
return strings.ReplaceAll(gouuid.NewV4().String(), "-", "")
}
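For reference, a tiny usage sketch of the new helper. It assumes the vendored satori/go.uuid release in which NewV4 returns a UUID without an error, matching the call above:

```go
package main

import (
	"fmt"
	"strings"

	gouuid "github.com/satori/go.uuid"
)

// Mirrors util.UUID above: a random v4 UUID with the dashes stripped,
// leaving 32 hex characters.
func newID() string {
	return strings.ReplaceAll(gouuid.NewV4().String(), "-", "")
}

func main() {
	id := newID()
	fmt.Println(id, len(id)) // e.g. 0d0675ad... 32
}
```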

+ 23
- 5
options/locale/locale_en-US.ini

@@ -23,6 +23,7 @@ signed_in_as = Signed in as
enable_javascript = This website works better with JavaScript. enable_javascript = This website works better with JavaScript.
toc = Table of Contents toc = Table of Contents
return=Back OpenI return=Back OpenI
calculation_points = Calculation Points


username = Username username = Username
email = Email Address email = Email Address
@@ -1063,7 +1064,7 @@ image_delete_fail=Failed to delete image, please try again later.
image_overwrite=You had submitted the same name image before, are you sure to overwrite the original image? image_overwrite=You had submitted the same name image before, are you sure to overwrite the original image?
download=Download download=Download
score=Score score=Score
wait_count_start = There are currently
wait_count_start = There are currently
wait_count_end = tasks queued wait_count_end = tasks queued
file_limit_100 = Display up to 100 files or folders in a single directory file_limit_100 = Display up to 100 files or folders in a single directory
images.name = Image Tag images.name = Image Tag
@@ -1100,6 +1101,7 @@ cloudbrain_operate = Operate
cloudbrain_status_createtime = Status/Createtime cloudbrain_status_createtime = Status/Createtime
cloudbrain_status_runtime = Running Time cloudbrain_status_runtime = Running Time
cloudbrain_jobname_err=Name must start with a lowercase letter or number,can include lowercase letter,number,_ and -,can not end with _, and can be up to 36 characters long. cloudbrain_jobname_err=Name must start with a lowercase letter or number,can include lowercase letter,number,_ and -,can not end with _, and can be up to 36 characters long.
cloudbrain_samejob_err=A task with the same name has been created, the system is processing it, please wait a minute.
cloudbrain_bootfile_err=The bootfile does not exist in the repository cloudbrain_bootfile_err=The bootfile does not exist in the repository
cloudbrain_query_fail=Failed to query cloudbrain information. cloudbrain_query_fail=Failed to query cloudbrain information.
cloudbrain.mirror_tag = Mirror Tag cloudbrain.mirror_tag = Mirror Tag
@@ -1274,7 +1276,7 @@ model.manage.modellabel=Model label
model.manage.modeldesc=Model description model.manage.modeldesc=Model description
model.manage.baseinfo=Base Information model.manage.baseinfo=Base Information
modelconvert.notcreate=No model conversion task has been created. modelconvert.notcreate=No model conversion task has been created.
modelconvert.importfirst1=Please import the
modelconvert.importfirst1=Please import the
modelconvert.importfirst2=model modelconvert.importfirst2=model
modelconvert.importfirst3=first, then converts it. modelconvert.importfirst3=first, then converts it.
modelconvert.download=Download modelconvert.download=Download
@@ -1299,6 +1301,7 @@ modelconvert.taskurlname=Model transformation task
log_scroll_start=Scroll to top log_scroll_start=Scroll to top
log_scroll_end=Scroll to bottom log_scroll_end=Scroll to bottom
modelconvert.tasknameempty=Please enter a task name. modelconvert.tasknameempty=Please enter a task name.
modelconvert.modelfileempty=Please choose a model file.
modelconvert.inputshapeerror=Format input error, please input such as: 1,1,32,32, corresponding to the input data format. modelconvert.inputshapeerror=Format input error, please input such as: 1,1,32,32, corresponding to the input data format.


modelconvert.manage.create_error1=A model transformation task with the same name already exists. modelconvert.manage.create_error1=A model transformation task with the same name already exists.
@@ -3080,6 +3083,11 @@ task_createmodel=`created new model <a href="%s/modelmanage/show_model_info?name
task_gputrainjob=`created CPU/GPU training task <a href="%s/cloudbrain/train-job/%s">%s</a>` task_gputrainjob=`created CPU/GPU training task <a href="%s/cloudbrain/train-job/%s">%s</a>`
task_c2netnputrainjob=`created NPU training task <a href="%s/grampus/train-job/%s">%s</a>` task_c2netnputrainjob=`created NPU training task <a href="%s/grampus/train-job/%s">%s</a>`
task_c2netgputrainjob=`created CPU/GPU training task <a href="%s/grampus/train-job/%s">%s</a>` task_c2netgputrainjob=`created CPU/GPU training task <a href="%s/grampus/train-job/%s">%s</a>`
binded_wechat=binded WeChat
dataset_recommended=`created dataset <a href="%s/datasets">%s</a> was set as recommended dataset`
create_image=`committed image <span style="font-weight:bold;">%s</span>`
image_recommended=`committed image <span style="font-weight:bold;">%s</span> was set as recommended image`
update_user_avatar=updated avatar


[tool] [tool]
ago = %s ago ago = %s ago
@@ -3199,7 +3207,7 @@ wrong_specification=You cannot use this specification, please choose another ite
resource_use=Resource Occupancy resource_use=Resource Occupancy


job_name_rule = Please enter letters, numbers, _ and - up to 64 characters and cannot end with a dash (-). job_name_rule = Please enter letters, numbers, _ and - up to 64 characters and cannot end with a dash (-).
train_dataset_path_rule = The dataset location is stored in the environment variable <strong style="color:#010101">data_url</strong>, and the output path is stored in the environment variable <strong style="color:#010101">train_url</strong>.
train_dataset_path_rule = The dataset location is stored in the environment variable <strong style="color:#010101">data_url</strong>, the pre-trained model is storaged in the environment <strong style="color:#010101">ckpt_url</strong>, and the output path is stored in the environment variable <strong style="color:#010101">train_url</strong>.
infer_dataset_path_rule = The dataset location is stored in the environment variable <strong style="color:#010101">data_url</strong>, and the output path is stored in the environment variable <strong style="color:#010101">result_url</strong>. infer_dataset_path_rule = The dataset location is stored in the environment variable <strong style="color:#010101">data_url</strong>, and the output path is stored in the environment variable <strong style="color:#010101">result_url</strong>.
view_sample = View sample view_sample = View sample
inference_output_path_rule = The inference output path is stored in the environment variable result_url. inference_output_path_rule = The inference output path is stored in the environment variable result_url.
@@ -3236,5 +3244,15 @@ Stopped_success_update_status_fail=Succeed in stopping th job, but failed to upd
load_code_failed=Fail to load code, please check if the right branch is selected. load_code_failed=Fail to load code, please check if the right branch is selected.


error.dataset_select = dataset select error:the count exceed the limit or has same name error.dataset_select = dataset select error:the count exceed the limit or has same name
new_train_gpu_tooltips = The code is storaged in <strong style="color:#010101">%s</strong>, the dataset is storaged in <strong style="color:#010101">%s</strong>, and please put your model into <strong style="color:#010101">%s</strong> then you can download it online
new_infer_gpu_tooltips = The dataset is stored in <strong style="color:#010101">%s</strong>, the model file is stored in <strong style="color:#010101">%s</strong>, please store the inference output in <strong style="color:#010101">%s</strong> for subsequent downloads.
new_train_gpu_tooltips = The code is storaged in <strong style="color:#010101">%s</strong>, the dataset is storaged in <strong style="color:#010101">%s</strong>, the pre-trained model is storaged in the environment <strong style="color:#010101">%s</strong>, and please put your model into <strong style="color:#010101">%s</strong> then you can download it online
new_infer_gpu_tooltips = The dataset is stored in <strong style="color:#010101">%s</strong>, the model file is stored in <strong style="color:#010101">%s</strong>, please store the inference output in <strong style="color:#010101">%s</strong> for subsequent downloads.

[points]
points = points
free = Free
points_hour = Points/hour
balance_of_points = Balance of Points:
hours = Hours
expected_time = , expected to be available for
points_acquisition_instructions = Points Acquisition Instructions
insufficient_points_balance = Insufficient points balance

+ 23
- 3
options/locale/locale_zh-CN.ini

@@ -23,6 +23,7 @@ signed_in_as=已登录用户
enable_javascript=使用 JavaScript能使本网站更好的工作。 enable_javascript=使用 JavaScript能使本网站更好的工作。
toc=目录 toc=目录
return=返回OpenI return=返回OpenI
calculation_points=算力积分


username=用户名 username=用户名
email=电子邮件地址 email=电子邮件地址
@@ -1104,6 +1105,7 @@ cloudbrain_operate=操作
cloudbrain_status_createtime=状态/创建时间 cloudbrain_status_createtime=状态/创建时间
cloudbrain_status_runtime = 运行时长 cloudbrain_status_runtime = 运行时长
cloudbrain_jobname_err=只能以小写字母或数字开头且只包含小写字母、数字、_和-,不能以_结尾,最长36个字符。 cloudbrain_jobname_err=只能以小写字母或数字开头且只包含小写字母、数字、_和-,不能以_结尾,最长36个字符。
cloudbrain_samejob_err=同名任务已经被创建,系统处理中,请您稍候。
cloudbrain_bootfile_err=仓库中不存在启动文件 cloudbrain_bootfile_err=仓库中不存在启动文件
cloudbrain_query_fail=查询云脑任务失败。 cloudbrain_query_fail=查询云脑任务失败。
cloudbrain.mirror_tag = 镜像标签 cloudbrain.mirror_tag = 镜像标签
@@ -1315,6 +1317,7 @@ log_scroll_start=滚动到顶部
log_scroll_end=滚动到底部 log_scroll_end=滚动到底部
modelconvert.tasknameempty=请输入任务名称。 modelconvert.tasknameempty=请输入任务名称。
modelconvert.inputshapeerror=格式输入错误,请输入如:1,1,32,32,与输入数据格式对应。 modelconvert.inputshapeerror=格式输入错误,请输入如:1,1,32,32,与输入数据格式对应。
modelconvert.modelfileempty=请选择模型文件。


modelconvert.manage.create_error1=相同的名称模型转换任务已经存在。 modelconvert.manage.create_error1=相同的名称模型转换任务已经存在。
modelconvert.manage.create_error2=只能创建一个正在运行的模型转换任务。 modelconvert.manage.create_error2=只能创建一个正在运行的模型转换任务。
@@ -3097,6 +3100,11 @@ task_createmodel=`导入了新模型 <a href="%s/modelmanage/show_model_info?nam
task_gputrainjob=`创建了CPU/GPU类型训练任务 <a href="%s/cloudbrain/train-job/%s">%s</a>` task_gputrainjob=`创建了CPU/GPU类型训练任务 <a href="%s/cloudbrain/train-job/%s">%s</a>`
task_c2netnputrainjob=`创建了NPU类型训练任务 <a href="%s/grampus/train-job/%s">%s</a>` task_c2netnputrainjob=`创建了NPU类型训练任务 <a href="%s/grampus/train-job/%s">%s</a>`
task_c2netgputrainjob=`创建了CPU/GPU类型训练任务 <a href="%s/grampus/train-job/%s">%s</a>` task_c2netgputrainjob=`创建了CPU/GPU类型训练任务 <a href="%s/grampus/train-job/%s">%s</a>`
binded_wechat=绑定微信
dataset_recommended=`创建的数据集<a href="%s/datasets">%s</a>被设置为推荐数据集`
create_image=`提交了镜像<span style="font-weight:bold;">%s</span>`
image_recommended=`提交的镜像<span style="font-weight:bold;">%s</span>被设置为推荐镜像`
update_user_avatar=更新了头像


[tool] [tool]
ago=%s前 ago=%s前
@@ -3217,7 +3225,7 @@ card_type = 卡类型
wrong_specification=您目前不能使用这个资源规格,请选择其他资源规格。 wrong_specification=您目前不能使用这个资源规格,请选择其他资源规格。


job_name_rule = 请输入字母、数字、_和-,最长64个字符,且不能以中划线(-)结尾。 job_name_rule = 请输入字母、数字、_和-,最长64个字符,且不能以中划线(-)结尾。
train_dataset_path_rule = 数据集位置存储在环境变量<strong style="color:#010101">data_url</strong>中,训练输出路径存储在环境变量<strong style="color:#010101">train_url</strong>中。
train_dataset_path_rule = 数据集位置存储在环境变量<strong style="color:#010101">data_url</strong>中,预训练模型存放在环境变量<strong style="color:#010101">ckpt_url</strong>中,训练输出路径存储在环境变量<strong style="color:#010101">train_url</strong>中。
infer_dataset_path_rule = 数据集位置存储在环境变量<strong style="color:#010101">data_url</strong>中,推理输出路径存储在环境变量<strong style="color:#010101">result_url</strong>中。 infer_dataset_path_rule = 数据集位置存储在环境变量<strong style="color:#010101">data_url</strong>中,推理输出路径存储在环境变量<strong style="color:#010101">result_url</strong>中。
view_sample = 查看样例 view_sample = 查看样例
inference_output_path_rule = 推理输出路径存储在环境变量result_url中。 inference_output_path_rule = 推理输出路径存储在环境变量result_url中。
@@ -3255,5 +3263,17 @@ load_code_failed=代码加载失败,请确认选择了正确的分支。




error.dataset_select = 数据集选择错误:数量超过限制或者有同名数据集 error.dataset_select = 数据集选择错误:数量超过限制或者有同名数据集
new_train_gpu_tooltips =训练脚本存储在<strong style="color:#010101">%s</strong>中,数据集存储在<strong style="color:#010101">%s</strong>中,训练输出请存储在<strong style="color:#010101">%s</strong>中以供后续下载。
new_infer_gpu_tooltips = 数据集存储在<strong style="color:#010101">%s</strong>中,模型文件存储在<strong style="color:#010101">%s</strong>中,推理输出请存储在<strong style="color:#010101">%s</strong>中以供后续下载。
new_train_gpu_tooltips =训练脚本存储在<strong style="color:#010101">%s</strong>中,数据集存储在<strong style="color:#010101">%s</strong>中,预训练模型存放在环境变量<strong style="color:#010101">%s</strong>中,训练输出请存储在<strong style="color:#010101">%s</strong>中以供后续下载。
new_infer_gpu_tooltips = 数据集存储在<strong style="color:#010101">%s</strong>中,模型文件存储在<strong style="color:#010101">%s</strong>中,推理输出请存储在<strong style="color:#010101">%s</strong>中以供后续下载。

[points]
points = 积分
free = 免费
points_hour = 积分/每小时
balance_of_points = 积分余额:
hours = 小时
expected_time = ,预计可用
points_acquisition_instructions = 积分获取说明
insufficient_points_balance = 积分余额不足



+ 1
- 1
package.json

@@ -80,4 +80,4 @@
"browserslist": [ "browserslist": [
"defaults" "defaults"
] ]
}
}

+ 13
- 2
public/home/home.js

@@ -163,6 +163,11 @@ document.onreadystatechange = function () {
html += recordPrefix + actionName; html += recordPrefix + actionName;
html += " <a href=\"" + getTaskLink(record) + "\" rel=\"nofollow\">" + record.RefName + "</a>" html += " <a href=\"" + getTaskLink(record) + "\" rel=\"nofollow\">" + record.RefName + "</a>"
} }
else if(record.OpType == "35"){
var datasetLink = "<a href=\"" + getRepoLink(record) + "/datasets" + "\" rel=\"nofollow\">" + record.Content.split('|')[1] + "</a>";
actionName = actionName.replace('{dataset}', datasetLink);
html += recordPrefix + actionName;
}
else{ else{
continue; continue;
} }
@@ -354,7 +359,10 @@ var actionNameZH={
"30":"导入了新模型", "30":"导入了新模型",
"31":"创建了CPU/GPU类型训练任务", "31":"创建了CPU/GPU类型训练任务",
"32":"创建了NPU类型训练任务", "32":"创建了NPU类型训练任务",
"33":"创建了CPU/GPU类型训练任务"
"33":"创建了CPU/GPU类型训练任务",
"35":"创建的数据集 {dataset} 被设置为推荐数据集",
"36":"提交了镜像 {image}",
"37":"提交的镜像 {image} 被设置为推荐镜像",
}; };


var actionNameEN={ var actionNameEN={
@@ -382,7 +390,10 @@ var actionNameEN={
"30":" created new model", "30":" created new model",
"31":" created CPU/GPU type training task", "31":" created CPU/GPU type training task",
"32":" created NPU type training task", "32":" created NPU type training task",
"33":" created CPU/GPU type training task"
"33":" created CPU/GPU type training task",
"35":" created dataset {dataset} was set as recommended dataset",
"36":"committed image {image}",
"37":"committed image {image} was set as recommended image",
}; };


var repoAndOrgZH={ var repoAndOrgZH={


+ 3
- 0
routers/admin/dataset.go

@@ -1,6 +1,7 @@
package admin


import (
"code.gitea.io/gitea/modules/notification"
"net/http"
"strconv"
"strings"
@@ -111,6 +112,8 @@ func DatasetAction(ctx *context.Context) {
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action"))))
} else {
d, _ := models.GetDatasetByID(datasetId)
notification.NotifyDatasetRecommend(ctx.User, d, ctx.Params(":action"))
ctx.JSON(http.StatusOK, models.BaseOKMessage)
}
}
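NotifyDatasetRecommend fans the recommend/unrecommend event out to the registered notifiers. A toy sketch of that dispatch pattern; all type names here are hypothetical stand-ins, since the notification package internals are not part of this hunk:

```go
package main

import "fmt"

// Hypothetical stand-ins for models.User / models.Dataset.
type User struct{ Name string }
type Dataset struct{ Title string }

// Notifier is the assumed shape: each implementation handles the events it cares about.
type Notifier interface {
	NotifyDatasetRecommend(doer *User, dataset *Dataset, action string)
}

type actionNotifier struct{}

func (actionNotifier) NotifyDatasetRecommend(doer *User, d *Dataset, action string) {
	fmt.Printf("%s %sed dataset %q\n", doer.Name, action, d.Title)
}

var notifiers = []Notifier{actionNotifier{}}

// NotifyDatasetRecommend plays the role of the package-level helper called above.
func NotifyDatasetRecommend(doer *User, d *Dataset, action string) {
	for _, n := range notifiers {
		n.NotifyDatasetRecommend(doer, d, action)
	}
}

func main() {
	NotifyDatasetRecommend(&User{Name: "admin"}, &Dataset{Title: "MNIST"}, "recommend")
}
```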


+ 1
- 0
routers/api/v1/api.go

@@ -980,6 +980,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("", repo.GetModelArtsTrainJobVersion)
m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.GrampusStopJob)
m.Get("/log", repo_ext.GrampusGetLog)
m.Get("/download_log", cloudbrain.AdminOrJobCreaterRightForTrain, repo_ext.GrampusDownloadLog)
})
})
}, reqRepoReader(models.UnitTypeCloudBrain))
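The new /download_log route follows the same pattern as its neighbours: a permission handler runs before the real handler and is expected to end the request when the caller lacks the right, since macaron stops invoking later handlers once a response has been written. A toy sketch using gopkg.in/macaron.v1 directly; the query-parameter check is purely illustrative:

```go
package main

import (
	"net/http"

	"gopkg.in/macaron.v1"
)

func main() {
	m := macaron.Classic()

	// Stand-in for cloudbrain.AdminOrJobCreaterRightForTrain: write an error
	// response so the rest of the chain is skipped when the right is missing.
	requireTrainRight := func(ctx *macaron.Context) {
		if ctx.Query("admin") != "1" { // hypothetical check
			http.Error(ctx.Resp, "forbidden", http.StatusForbidden)
		}
	}

	// Stand-in for repo_ext.GrampusDownloadLog.
	m.Get("/download_log", requireTrainRight, func() string {
		return "log content"
	})

	m.Run() // listens on :4000 by default
}
```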


+ 2
- 0
routers/api/v1/repo/cloudbrain_dashboard.go

@@ -733,6 +733,7 @@ func GetCloudbrainsDetailData(ctx *context.Context) {
ctx.ServerError("Get job failed:", err)
return
}
models.LoadSpecs4CloudbrainInfo(ciTasks)
nilTime := time.Time{}
tasks := []models.TaskDetail{}
for i, task := range ciTasks {
@@ -769,6 +770,7 @@ func GetCloudbrainsDetailData(ctx *context.Context) {
} else {
taskDetail.IsDelete = false
}
taskDetail.Spec = ciTasks[i].Spec
tasks = append(tasks, taskDetail)
}
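LoadSpecs4CloudbrainInfo fills the Spec field for the whole page of tasks in one pass, and the loop then copies it onto each TaskDetail. A toy sketch of that batch-load-then-assign shape; the types and the map-based lookup are assumptions about what the real loader does:

```go
package main

import "fmt"

type Spec struct{ AccCardsNum int }

type CloudbrainInfo struct {
	ID   int64
	Spec *Spec
}

type TaskDetail struct{ Spec *Spec }

// loadSpecs stands in for models.LoadSpecs4CloudbrainInfo: one lookup for all
// rows instead of one query per row.
func loadSpecs(tasks []*CloudbrainInfo) {
	specByTask := map[int64]*Spec{1: {AccCardsNum: 1}, 2: {AccCardsNum: 4}} // pretend query result
	for _, t := range tasks {
		t.Spec = specByTask[t.ID]
	}
}

func main() {
	ciTasks := []*CloudbrainInfo{{ID: 1}, {ID: 2}}
	loadSpecs(ciTasks)

	tasks := make([]TaskDetail, 0, len(ciTasks))
	for i := range ciTasks {
		tasks = append(tasks, TaskDetail{Spec: ciTasks[i].Spec})
	}
	fmt.Println(tasks[0].Spec.AccCardsNum, tasks[1].Spec.AccCardsNum) // 1 4
}
```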




+ 1
- 0
routers/authentication/wechat.go

@@ -31,6 +31,7 @@ func GetQRCode4Bind(ctx *context.Context) {


r, err := createQRCode4Bind(userId)
if err != nil {
log.Error("GetQRCode4Bind failed,error=%v", err)
ctx.JSON(200, map[string]interface{}{
"code": "9999",
"msg": "Get QR code failed",


+ 1
- 1
routers/authentication/wechat_event.go

@@ -1,9 +1,9 @@
package authentication


import (
"code.gitea.io/gitea/modules/auth/wechat"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
wechat "code.gitea.io/gitea/services/wechat"
"encoding/xml"
"io/ioutil"
"time"


+ 5
- 0
routers/image/image.go

@@ -1,6 +1,7 @@
package image


import (
"code.gitea.io/gitea/modules/notification"
"net/http"
"strconv"


@@ -25,6 +26,10 @@ func Action(ctx *context.Context) {
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action"))))
} else {
image, err := models.GetImageByID(imageId)
if err == nil {
notification.NotifyImageRecommend(ctx.User, image, ctx.Params(":action"))
}
ctx.JSON(http.StatusOK, models.BaseOKMessage)
}
}

+ 49
- 24
routers/repo/ai_model_manage.go

@@ -27,6 +27,9 @@ const (
MODEL_LATEST = 1 MODEL_LATEST = 1
MODEL_NOT_LATEST = 0 MODEL_NOT_LATEST = 0
MODEL_MAX_SIZE = 1024 * 1024 * 1024 MODEL_MAX_SIZE = 1024 * 1024 * 1024
STATUS_COPY_MODEL = 1
STATUS_FINISHED = 0
STATUS_ERROR = 2
) )


func saveModelByParameters(jobId string, versionName string, name string, version string, label string, description string, engine int, ctx *context.Context) error { func saveModelByParameters(jobId string, versionName string, name string, version string, label string, description string, engine int, ctx *context.Context) error {
@@ -62,13 +65,9 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
modelSelectedFile := ctx.Query("modelSelectedFile") modelSelectedFile := ctx.Query("modelSelectedFile")
//download model zip //train type //download model zip //train type
if aiTask.ComputeResource == models.NPUResource { if aiTask.ComputeResource == models.NPUResource {
modelPath, modelSize, err = downloadModelFromCloudBrainTwo(id, aiTask.JobName, "", aiTask.TrainUrl, modelSelectedFile)
if err != nil {
log.Info("download model from CloudBrainTwo faild." + err.Error())
return err
}
cloudType = models.TypeCloudBrainTwo cloudType = models.TypeCloudBrainTwo
} else if aiTask.ComputeResource == models.GPUResource { } else if aiTask.ComputeResource == models.GPUResource {
cloudType = models.TypeCloudBrainOne
var ResourceSpecs *models.ResourceSpecs var ResourceSpecs *models.ResourceSpecs
json.Unmarshal([]byte(setting.ResourceSpecs), &ResourceSpecs) json.Unmarshal([]byte(setting.ResourceSpecs), &ResourceSpecs)
for _, tmp := range ResourceSpecs.ResourceSpec { for _, tmp := range ResourceSpecs.ResourceSpec {
@@ -77,24 +76,8 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
aiTask.FlavorName = flaverName aiTask.FlavorName = flaverName
} }
} }
modelPath, modelSize, err = downloadModelFromCloudBrainOne(id, aiTask.JobName, "", aiTask.TrainUrl, modelSelectedFile)
if err != nil {
log.Info("download model from CloudBrainOne faild." + err.Error())
return err
}
cloudType = models.TypeCloudBrainOne
} }
// else if cloudType == models.TypeC2Net {
// if aiTask.ComputeResource == models.NPUResource {
// modelPath, modelSize, err = downloadModelFromCloudBrainTwo(id, aiTask.JobName, "", aiTask.TrainUrl, modelSelectedFile)
// if err != nil {
// log.Info("download model from CloudBrainTwo faild." + err.Error())
// return err
// }
// } else if aiTask.ComputeResource == models.GPUResource {

// }
// }

accuracy := make(map[string]string) accuracy := make(map[string]string)
accuracy["F1"] = "" accuracy["F1"] = ""
accuracy["Recall"] = "" accuracy["Recall"] = ""
@@ -123,6 +106,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
Engine: int64(engine), Engine: int64(engine),
TrainTaskInfo: string(aiTaskJson), TrainTaskInfo: string(aiTaskJson),
Accuracy: string(accuracyJson), Accuracy: string(accuracyJson),
Status: STATUS_COPY_MODEL,
} }


err = models.SaveModelToDb(model) err = models.SaveModelToDb(model)
@@ -146,11 +130,44 @@ func saveModelByParameters(jobId string, versionName string, name string, versio


models.UpdateRepositoryUnits(ctx.Repo.Repository, units, deleteUnitTypes) models.UpdateRepositoryUnits(ctx.Repo.Repository, units, deleteUnitTypes)


go asyncToCopyModel(aiTask, id, modelSelectedFile)

log.Info("save model end.") log.Info("save model end.")
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, name, models.ActionCreateNewModelTask) notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, name, models.ActionCreateNewModelTask)
return nil return nil
} }


func asyncToCopyModel(aiTask *models.Cloudbrain, id string, modelSelectedFile string) {
if aiTask.ComputeResource == models.NPUResource {
modelPath, modelSize, err := downloadModelFromCloudBrainTwo(id, aiTask.JobName, "", aiTask.TrainUrl, modelSelectedFile)
if err != nil {
updateStatus(id, 0, STATUS_ERROR, modelPath, err.Error())
log.Info("download model from CloudBrainTwo faild." + err.Error())
} else {
updateStatus(id, modelSize, STATUS_FINISHED, modelPath, "")
}
} else if aiTask.ComputeResource == models.GPUResource {

modelPath, modelSize, err := downloadModelFromCloudBrainOne(id, aiTask.JobName, "", aiTask.TrainUrl, modelSelectedFile)
if err != nil {
updateStatus(id, 0, STATUS_ERROR, modelPath, err.Error())
log.Info("download model from CloudBrainOne faild." + err.Error())
} else {
updateStatus(id, modelSize, STATUS_FINISHED, modelPath, "")
}
}
}

func updateStatus(id string, modelSize int64, status int, modelPath string, statusDesc string) {
if len(statusDesc) > 400 {
statusDesc = statusDesc[0:400]
}
err := models.ModifyModelStatus(id, modelSize, status, modelPath, statusDesc)
if err != nil {
log.Info("update status error." + err.Error())
}
}

func SaveNewNameModel(ctx *context.Context) { func SaveNewNameModel(ctx *context.Context) {
if !ctx.Repo.CanWrite(models.UnitTypeModelManage) { if !ctx.Repo.CanWrite(models.UnitTypeModelManage) {
ctx.Error(403, ctx.Tr("repo.model_noright")) ctx.Error(403, ctx.Tr("repo.model_noright"))
@@ -331,6 +348,7 @@ func QueryModelByParameters(repoId int64, page int) ([]*models.AiModelManage, in
RepoID: repoId, RepoID: repoId,
Type: -1, Type: -1,
New: MODEL_LATEST, New: MODEL_LATEST,
Status: -1,
}) })
} }


@@ -642,7 +660,6 @@ func queryUserName(intSlice []int64) map[int64]*models.User {
result[user.ID] = user result[user.ID] = user
} }
} }

return result return result
} }


@@ -685,6 +702,7 @@ func SetModelCount(ctx *context.Context) {
RepoID: repoId, RepoID: repoId,
Type: Type, Type: Type,
New: MODEL_LATEST, New: MODEL_LATEST,
Status: -1,
}) })
ctx.Data["MODEL_COUNT"] = count ctx.Data["MODEL_COUNT"] = count
} }
@@ -758,6 +776,7 @@ func ShowModelPageInfo(ctx *context.Context) {
RepoID: repoId, RepoID: repoId,
Type: Type, Type: Type,
New: MODEL_LATEST, New: MODEL_LATEST,
Status: -1,
}) })
if err != nil { if err != nil {
ctx.ServerError("Cloudbrain", err) ctx.ServerError("Cloudbrain", err)
@@ -835,6 +854,7 @@ func QueryModelListForPredict(ctx *context.Context) {
RepoID: repoId, RepoID: repoId,
Type: ctx.QueryInt("type"), Type: ctx.QueryInt("type"),
New: -1, New: -1,
Status: 0,
}) })
if err != nil { if err != nil {
ctx.ServerError("Cloudbrain", err) ctx.ServerError("Cloudbrain", err)
@@ -896,12 +916,17 @@ func QueryOneLevelModelFile(ctx *context.Context) {
log.Info("TypeCloudBrainTwo list model file.") log.Info("TypeCloudBrainTwo list model file.")
prefix := model.Path[len(setting.Bucket)+1:] prefix := model.Path[len(setting.Bucket)+1:]
fileinfos, _ := storage.GetOneLevelAllObjectUnderDir(setting.Bucket, prefix, parentDir) fileinfos, _ := storage.GetOneLevelAllObjectUnderDir(setting.Bucket, prefix, parentDir)
if fileinfos == nil {
fileinfos = make([]storage.FileInfo, 0)
}
ctx.JSON(http.StatusOK, fileinfos) ctx.JSON(http.StatusOK, fileinfos)
} else if model.Type == models.TypeCloudBrainOne { } else if model.Type == models.TypeCloudBrainOne {
log.Info("TypeCloudBrainOne list model file.") log.Info("TypeCloudBrainOne list model file.")
prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:] prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:]
fileinfos, _ := storage.GetOneLevelAllObjectUnderDirMinio(setting.Attachment.Minio.Bucket, prefix, parentDir) fileinfos, _ := storage.GetOneLevelAllObjectUnderDirMinio(setting.Attachment.Minio.Bucket, prefix, parentDir)
if fileinfos == nil {
fileinfos = make([]storage.FileInfo, 0)
}
ctx.JSON(http.StatusOK, fileinfos) ctx.JSON(http.StatusOK, fileinfos)
} }

} }
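The model copy in this file now runs off the request path: saveModelByParameters stores the record with Status 1 (STATUS_COPY_MODEL), asyncToCopyModel performs the download in a goroutine, and updateStatus flips the row to 0 (finished) or 2 (error). A toy sketch of that lifecycle with the download and persistence layers faked out:

```go
package main

import (
	"errors"
	"fmt"
	"sync"
)

// Same status codes as the constants added above.
const (
	statusFinished  = 0
	statusCopyModel = 1
	statusError     = 2
)

// download stands in for downloadModelFromCloudBrainOne/Two.
func download(fail bool) (string, int64, error) {
	if fail {
		return "", 0, errors.New("obs timeout")
	}
	return "models/job1/output/", 90 << 20, nil
}

func main() {
	status := statusCopyModel // record is saved in the "copying" state first

	var wg sync.WaitGroup
	wg.Add(1)
	go func() { // asyncToCopyModel equivalent
		defer wg.Done()
		if _, _, err := download(false); err != nil {
			status = statusError // updateStatus(id, 0, STATUS_ERROR, ...)
		} else {
			status = statusFinished // updateStatus(id, size, STATUS_FINISHED, ...)
		}
	}()

	wg.Wait()
	fmt.Println("final status:", status) // 0
}
```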

+ 153
- 11
routers/repo/cloudbrain.go

@@ -2,7 +2,6 @@ package repo


import ( import (
"bufio" "bufio"
"code.gitea.io/gitea/services/cloudbrain/resource"
"encoding/json" "encoding/json"
"errors" "errors"
"fmt" "fmt"
@@ -16,6 +15,11 @@ import (
"time" "time"
"unicode/utf8" "unicode/utf8"


"code.gitea.io/gitea/modules/dataset"

"code.gitea.io/gitea/services/cloudbrain/resource"
"code.gitea.io/gitea/services/reward/point/account"

"code.gitea.io/gitea/modules/notification" "code.gitea.io/gitea/modules/notification"


"code.gitea.io/gitea/modules/grampus" "code.gitea.io/gitea/modules/grampus"
@@ -31,6 +35,8 @@ import (
"code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts" "code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/util"
@@ -137,6 +143,29 @@ func cloudBrainNewDataPrepare(ctx *context.Context) error {


ctx.Data["benchmarkMode"] = ctx.Query("benchmarkMode") ctx.Data["benchmarkMode"] = ctx.Query("benchmarkMode")


if ctx.Cloudbrain != nil {
ctx.Data["branch_name"] = ctx.Cloudbrain.BranchName
ctx.Data["image"] = ctx.Cloudbrain.Image
ctx.Data["image_id"] = ctx.Cloudbrain.ImageID
ctx.Data["boot_file"] = ctx.Cloudbrain.BootFile
ctx.Data["description"] = ctx.Cloudbrain.Description
spec, _ := resource.GetCloudbrainSpec(ctx.Cloudbrain.ID)
if spec != nil {
ctx.Data["spec_id"] = spec.ID
}
ctx.Data["run_para_list"] = ctx.Cloudbrain.Parameters
ctx.Data["model_name"] = ctx.Cloudbrain.ModelName
ctx.Data["label_name"] = ctx.Cloudbrain.LabelName
ctx.Data["ckpt_name"] = ctx.Cloudbrain.CkptName
ctx.Data["model_version"] = ctx.Cloudbrain.ModelVersion
ctx.Data["pre_train_model_url"] = ctx.Cloudbrain.PreTrainModelUrl
ctx.Data["compute_resource"] = ctx.Cloudbrain.ComputeResource
uuids, datasetNames := dataset.GetFilterDeletedAttachments(ctx.Cloudbrain.Uuid)
ctx.Data["attachment"] = uuids
ctx.Data["dataset_name"] = datasetNames
ctx.Data["cluster_type"] = models.OpenICluster
}

return nil return nil
} }


@@ -183,8 +212,12 @@ func CloudBrainNew(ctx *context.Context) {
ctx.Data["PageIsGPUDebug"] = true ctx.Data["PageIsGPUDebug"] = true
ctx.HTML(200, tplCloudBrainNew) ctx.HTML(200, tplCloudBrainNew)
} }

func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
ctx.Data["IsCreate"] = true
cloudBrainCreate(ctx, form)
}

func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
ctx.Data["PageIsCloudBrain"] = true ctx.Data["PageIsCloudBrain"] = true
displayJobName := form.DisplayJobName displayJobName := form.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName) jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
@@ -201,6 +234,16 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
tpl = tplCloudBrainTrainJobNew tpl = tplCloudBrainTrainJobNew
} }


lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), jobType, displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tpl, &form)
return
}
defer lock.UnLock()

tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName) tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName)
if err == nil { if err == nil {
if len(tasks) != 0 { if len(tasks) != 0 {
@@ -302,6 +345,13 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
return return
} }


if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tpl, &form)
return
}

req := cloudbrain.GenerateCloudBrainTaskReq{ req := cloudbrain.GenerateCloudBrainTaskReq{
Ctx: ctx, Ctx: ctx,
DisplayJobName: displayJobName, DisplayJobName: displayJobName,
@@ -328,13 +378,22 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
Spec: spec, Spec: spec,
} }


if form.ModelName != "" { //使用预训练模型训练
req.ModelName = form.ModelName
req.LabelName = form.LabelName
req.CkptName = form.CkptName
req.ModelVersion = form.ModelVersion
req.PreTrainModelPath = setting.Attachment.Minio.RealPath + form.PreTrainModelUrl
req.PreTrainModelUrl = form.PreTrainModelUrl

}

err = cloudbrain.GenerateTask(req) err = cloudbrain.GenerateTask(req)
if err != nil { if err != nil {
cloudBrainNewDataPrepare(ctx) cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(err.Error(), tpl, &form) ctx.RenderWithErr(err.Error(), tpl, &form)
return return
} }

if jobType == string(models.JobTypeTrain) { if jobType == string(models.JobTypeTrain) {
ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job?listType=all") ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job?listType=all")
} else { } else {
@@ -342,6 +401,11 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
} }
} }


func CloudBrainTrainJobVersionCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
ctx.Data["IsCreate"] = false
cloudBrainCreate(ctx, form)
}

func loadCodeAndMakeModelPath(repo *models.Repository, codePath string, branchName string, jobName string, resultPath string) string { func loadCodeAndMakeModelPath(repo *models.Repository, codePath string, branchName string, jobName string, resultPath string) string {
err := downloadCode(repo, codePath, branchName) err := downloadCode(repo, codePath, branchName)
if err != nil { if err != nil {
@@ -378,10 +442,20 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra
bootFile := strings.TrimSpace(form.BootFile) bootFile := strings.TrimSpace(form.BootFile)
labelName := form.LabelName labelName := form.LabelName
repo := ctx.Repo.Repository repo := ctx.Repo.Repository
tpl := tplCloudBrainInferenceJobNew

lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), jobType, displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tpl, &form)
return
}
defer lock.UnLock()


ckptUrl := setting.Attachment.Minio.RealPath + form.TrainUrl + form.CkptName ckptUrl := setting.Attachment.Minio.RealPath + form.TrainUrl + form.CkptName
log.Info("ckpt url:" + ckptUrl) log.Info("ckpt url:" + ckptUrl)
tpl := tplCloudBrainInferenceJobNew
command, err := getInferenceJobCommand(form) command, err := getInferenceJobCommand(form)
if err != nil { if err != nil {
log.Error("getTrainJobCommand failed: %v", err) log.Error("getTrainJobCommand failed: %v", err)
@@ -465,6 +539,12 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra
ctx.RenderWithErr("Resource specification not available", tpl, &form) ctx.RenderWithErr("Resource specification not available", tpl, &form)
return return
} }
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tpl, &form)
return
}
req := cloudbrain.GenerateCloudBrainTaskReq{ req := cloudbrain.GenerateCloudBrainTaskReq{
Ctx: ctx, Ctx: ctx,
DisplayJobName: displayJobName, DisplayJobName: displayJobName,
@@ -589,6 +669,13 @@ func CloudBrainRestart(ctx *context.Context) {
} }
task.Spec = spec task.Spec = spec


if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
resultCode = "-1"
errorMsg = ctx.Tr("points.insufficient_points_balance")
break
}

count, err := models.GetCloudbrainCountByUserID(ctx.User.ID, string(models.JobTypeDebug)) count, err := models.GetCloudbrainCountByUserID(ctx.User.ID, string(models.JobTypeDebug))
if err != nil { if err != nil {
log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"]) log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"])
@@ -1005,7 +1092,7 @@ func CloudBrainAdminCommitImage(ctx *context.Context, form auth.CommitAdminImage
UID: ctx.User.ID, UID: ctx.User.ID,
Type: models.GetRecommondType(form.IsRecommend), Type: models.GetRecommondType(form.IsRecommend),
Place: form.Place, Place: form.Place,
})
}, ctx.User)
if err != nil { if err != nil {
log.Error("CommitImagefailed") log.Error("CommitImagefailed")
if models.IsErrImageTagExist(err) { if models.IsErrImageTagExist(err) {
@@ -1052,7 +1139,7 @@ func CloudBrainCommitImage(ctx *context.Context, form auth.CommitImageCloudBrain
CloudBrainType: form.Type, CloudBrainType: form.Type,
Topics: validTopics, Topics: validTopics,
UID: ctx.User.ID, UID: ctx.User.ID,
})
}, ctx.User)
if err != nil { if err != nil {
log.Error("CommitImage(%s) failed:%v", ctx.Cloudbrain.JobName, err.Error(), ctx.Data["msgID"]) log.Error("CommitImage(%s) failed:%v", ctx.Cloudbrain.JobName, err.Error(), ctx.Data["msgID"])
if models.IsErrImageTagExist(err) { if models.IsErrImageTagExist(err) {
@@ -1066,7 +1153,6 @@ func CloudBrainCommitImage(ctx *context.Context, form auth.CommitImageCloudBrain


return return
} }

ctx.JSON(200, models.BaseOKMessage) ctx.JSON(200, models.BaseOKMessage)
} }


@@ -1103,6 +1189,7 @@ func CloudBrainStop(ctx *context.Context) {
log.Error("the job(%s) has been stopped", task.JobName, ctx.Data["msgID"]) log.Error("the job(%s) has been stopped", task.JobName, ctx.Data["msgID"])
resultCode = "-1" resultCode = "-1"
errorMsg = "cloudbrain.Already_stopped" errorMsg = "cloudbrain.Already_stopped"
resultCode = task.Status
break break
} }


@@ -1129,7 +1216,6 @@ func CloudBrainStop(ctx *context.Context) {
errorMsg = "cloudbrain.Stopped_success_update_status_fail" errorMsg = "cloudbrain.Stopped_success_update_status_fail"
break break
} }

status = task.Status status = task.Status
break break
} }
@@ -1184,7 +1270,7 @@ func StopJobs(cloudBrains []*models.Cloudbrain) {
}) })


logErrorAndUpdateJobStatus(err, taskInfo) logErrorAndUpdateJobStatus(err, taskInfo)
} else {
} else if taskInfo.Type == models.TypeCloudBrainTwo {
if taskInfo.JobType == string(models.JobTypeTrain) { if taskInfo.JobType == string(models.JobTypeTrain) {
err := retry(3, time.Second*30, func() error { err := retry(3, time.Second*30, func() error {
_, err := modelarts.StopTrainJob(taskInfo.JobID, strconv.FormatInt(taskInfo.VersionID, 10)) _, err := modelarts.StopTrainJob(taskInfo.JobID, strconv.FormatInt(taskInfo.VersionID, 10))
@@ -1201,8 +1287,16 @@ func StopJobs(cloudBrains []*models.Cloudbrain) {
}) })
logErrorAndUpdateJobStatus(err, taskInfo) logErrorAndUpdateJobStatus(err, taskInfo)
} }
}
} else if taskInfo.Type == models.TypeC2Net {
if taskInfo.JobType == string(models.JobTypeTrain) {
err := retry(3, time.Second*30, func() error {
_, err := grampus.StopJob(taskInfo.JobID)
return err
})
logErrorAndUpdateJobStatus(err, taskInfo)
}


}
} }
} }


@@ -1812,6 +1906,10 @@ func SyncCloudbrainStatus() {
oldStatus := task.Status oldStatus := task.Status
task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status) task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
task.Duration = result.JobInfo.RunSec task.Duration = result.JobInfo.RunSec

if task.Duration < 0 {
task.Duration = 0
}
task.TrainJobDuration = models.ConvertDurationToStr(task.Duration) task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)


if task.StartTime == 0 && result.JobInfo.StartedAt > 0 { if task.StartTime == 0 && result.JobInfo.StartedAt > 0 {
@@ -2206,12 +2304,21 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo
codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
benchmarkTypeID := form.BenchmarkTypeID benchmarkTypeID := form.BenchmarkTypeID
benchmarkChildTypeID := form.BenchmarkChildTypeID benchmarkChildTypeID := form.BenchmarkChildTypeID
repo := ctx.Repo.Repository


ctx.Data["description"] = form.Description ctx.Data["description"] = form.Description
ctx.Data["benchmarkTypeID"] = benchmarkTypeID ctx.Data["benchmarkTypeID"] = benchmarkTypeID
ctx.Data["benchmark_child_types_id_hidden"] = benchmarkChildTypeID ctx.Data["benchmark_child_types_id_hidden"] = benchmarkChildTypeID


repo := ctx.Repo.Repository
lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), form.JobType, displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tplCloudBrainBenchmarkNew, &form)
return
}
defer lock.UnLock()


tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeBenchmark), displayJobName) tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeBenchmark), displayJobName)
if err == nil { if err == nil {
@@ -2254,6 +2361,12 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo
ctx.RenderWithErr("Resource specification not available", tplCloudBrainBenchmarkNew, &form) ctx.RenderWithErr("Resource specification not available", tplCloudBrainBenchmarkNew, &form)
return return
} }
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tplCloudBrainBenchmarkNew, &form)
return
}


count, err := models.GetBenchmarkCountByUserID(ctx.User.ID) count, err := models.GetBenchmarkCountByUserID(ctx.User.ID)
if err != nil { if err != nil {
@@ -2387,6 +2500,16 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
tpl := tplCloudBrainBenchmarkNew tpl := tplCloudBrainBenchmarkNew
command := cloudbrain.GetCloudbrainDebugCommand() command := cloudbrain.GetCloudbrainDebugCommand()


lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), jobType, displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tpl, &form)
return
}
defer lock.UnLock()

tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName) tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName)
if err == nil { if err == nil {
if len(tasks) != 0 { if len(tasks) != 0 {
@@ -2472,6 +2595,13 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
ctx.RenderWithErr("Resource specification not available", tpl, &form) ctx.RenderWithErr("Resource specification not available", tpl, &form)
return return
} }

if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tpl, &form)
return
}
log.Info("Command=" + command) log.Info("Command=" + command)
log.Info("ModelPath=" + storage.GetMinioPath(jobName, cloudbrain.ModelMountPath+"/")) log.Info("ModelPath=" + storage.GetMinioPath(jobName, cloudbrain.ModelMountPath+"/"))
req := cloudbrain.GenerateCloudBrainTaskReq{ req := cloudbrain.GenerateCloudBrainTaskReq{
@@ -2540,6 +2670,15 @@ func BenchmarkDel(ctx *context.Context) {
} }


func CloudBrainTrainJobNew(ctx *context.Context) { func CloudBrainTrainJobNew(ctx *context.Context) {
ctx.Data["IsCreate"] = true
cloudBrainTrainJobCreate(ctx)
}
func CloudBrainTrainJobVersionNew(ctx *context.Context) {
ctx.Data["IsCreate"] = false
cloudBrainTrainJobCreate(ctx)
}

func cloudBrainTrainJobCreate(ctx *context.Context) {
err := cloudBrainNewDataPrepare(ctx) err := cloudBrainNewDataPrepare(ctx)
if err != nil { if err != nil {
ctx.ServerError("get new train-job info failed", err) ctx.ServerError("get new train-job info failed", err)
@@ -2629,6 +2768,9 @@ func getTrainJobCommand(form auth.CreateCloudBrainForm) (string, error) {
param += " --" + parameter.Label + "=" + parameter.Value param += " --" + parameter.Label + "=" + parameter.Value
} }
} }
if form.CkptName != "" {
param += " --ckpt_url" + "=" + "/pretrainmodel/" + form.CkptName
}


command += "python /code/" + bootFile + param + " > " + cloudbrain.ModelMountPath + "/" + form.DisplayJobName + "-" + cloudbrain.LogFile command += "python /code/" + bootFile + param + " > " + cloudbrain.ModelMountPath + "/" + form.DisplayJobName + "-" + cloudbrain.LogFile
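Every create path in this file (debug, train, inference, benchmark) now follows the same guard sequence before GenerateTask: take a lock keyed by repo, job type and job name so a double submit cannot pass the duplicate-name check twice, then refuse the request when the user's point balance cannot cover the spec's unit price. A toy, in-process sketch of that sequence; the key layout and the Redis SETNX-with-TTL semantics of the real redis_lock are assumptions:

```go
package main

import (
	"fmt"
	"sync"
	"time"
)

// nameLock is an in-memory stand-in for redis_lock.NewDistributeLock, which in
// production is assumed to be a Redis SETNX with a TTL (CloudbrainKeyDuration).
type nameLock struct {
	mu   sync.Mutex
	held map[string]bool
}

func (l *nameLock) Lock(key string, _ time.Duration) bool {
	l.mu.Lock()
	defer l.mu.Unlock()
	if l.held[key] {
		return false
	}
	l.held[key] = true
	return true
}

func (l *nameLock) UnLock(key string) {
	l.mu.Lock()
	defer l.mu.Unlock()
	delete(l.held, key)
}

// isPointBalanceEnough stands in for account.IsPointBalanceEnough.
func isPointBalanceEnough(balance, unitPrice int64) bool { return balance >= unitPrice }

func createTask(locks *nameLock, key string, balance, unitPrice int64) string {
	if !locks.Lock(key, 30*time.Second) {
		return "repo.cloudbrain_samejob_err" // a same-name task is already being processed
	}
	defer locks.UnLock(key)

	if !isPointBalanceEnough(balance, unitPrice) {
		return "points.insufficient_points_balance"
	}
	return "GenerateTask" // duplicate-name check and task generation would follow here
}

func main() {
	locks := &nameLock{held: map[string]bool{}}
	key := "CLOUDBRAIN_JOB_NAME:42:TRAIN:my-job" // hypothetical key layout
	fmt.Println(createTask(locks, key, 100, 10)) // GenerateTask
	fmt.Println(createTask(locks, key, 5, 10))   // points.insufficient_points_balance
}
```

When a pre-trained model is attached, the hunk directly above also appends a --ckpt_url=/pretrainmodel/<CkptName> argument to the python boot command before redirecting its output to the log file.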




+ 311
- 84
routers/repo/grampus.go

@@ -1,7 +1,6 @@
package repo package repo


import ( import (
"code.gitea.io/gitea/services/cloudbrain/resource"
"encoding/json" "encoding/json"
"errors" "errors"
"fmt" "fmt"
@@ -13,11 +12,19 @@ import (
"strings" "strings"
"time" "time"


"code.gitea.io/gitea/modules/dataset"

"code.gitea.io/gitea/services/cloudbrain/resource"

"code.gitea.io/gitea/services/reward/point/account"

"code.gitea.io/gitea/modules/auth" "code.gitea.io/gitea/modules/auth"
"code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/grampus" "code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/modelarts" "code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/notification" "code.gitea.io/gitea/modules/notification"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/util"
"github.com/unknwon/com" "github.com/unknwon/com"
@@ -41,7 +48,7 @@ const (
) )


func GrampusTrainJobGPUNew(ctx *context.Context) { func GrampusTrainJobGPUNew(ctx *context.Context) {
ctx.Data["datasetType"] = models.TypeCloudBrainOne
ctx.Data["IsCreate"] = true
err := grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) err := grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
if err != nil { if err != nil {
ctx.ServerError("get new train-job info failed", err) ctx.ServerError("get new train-job info failed", err)
@@ -52,7 +59,7 @@ func GrampusTrainJobGPUNew(ctx *context.Context) {
} }


func GrampusTrainJobNPUNew(ctx *context.Context) { func GrampusTrainJobNPUNew(ctx *context.Context) {
ctx.Data["datasetType"] = models.TypeCloudBrainTwo
ctx.Data["IsCreate"] = true
err := grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU) err := grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
if err != nil { if err != nil {
ctx.ServerError("get new train-job info failed", err) ctx.ServerError("get new train-job info failed", err)
@@ -134,9 +141,57 @@ func grampusTrainJobNewDataPrepare(ctx *context.Context, processType string) err
ctx.Data["WaitCount"] = waitCount ctx.Data["WaitCount"] = waitCount
} }


if ctx.Cloudbrain != nil {
uuids, datasetNames := dataset.GetFilterDeletedAttachments(ctx.Cloudbrain.Uuid)
ctx.Data["attachment"] = uuids
ctx.Data["boot_file"] = ctx.Cloudbrain.BootFile
ctx.Data["image_id"] = ctx.Cloudbrain.ImageID
ctx.Data["run_para_list"] = ctx.Cloudbrain.Parameters
ctx.Data["description"] = ctx.Cloudbrain.Description
ctx.Data["branch_name"] = ctx.Cloudbrain.BranchName
ctx.Data["engine_name"] = ctx.Cloudbrain.EngineName
ctx.Data["work_server_number"] = ctx.Cloudbrain.WorkServerNumber
if ctx.Cloudbrain.Image != "" {
ctx.Data["image"] = ctx.Cloudbrain.Image
} else {
ctx.Data["image"] = ctx.Cloudbrain.EngineName
}
ctx.Data["dataset_name"] = datasetNames
ctx.Data["model_name"] = ctx.Cloudbrain.ModelName

ctx.Data["model_version"] = ctx.Cloudbrain.ModelVersion
ctx.Data["ckpt_name"] = ctx.Cloudbrain.CkptName
ctx.Data["label_names"] = ctx.Cloudbrain.LabelName
ctx.Data["pre_train_model_url"] = ctx.Cloudbrain.PreTrainModelUrl
spec, _ := resource.GetCloudbrainSpec(ctx.Cloudbrain.ID)
if spec != nil {
ctx.Data["spec_id"] = spec.ID
}

}
return nil return nil
} }


func GrampusTrainJobVersionNew(ctx *context.Context) {
task := ctx.Cloudbrain
ctx.Data["IsCreate"] = false
if task.ComputeResource == models.GPUResource {
err := grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
if err != nil {
ctx.ServerError("get new train-job version info failed", err)
return
}
ctx.HTML(http.StatusOK, tplGrampusTrainJobGPUNew)
} else if task.ComputeResource == models.NPUResource {
err := grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
if err != nil {
ctx.ServerError("get new train-job version info failed", err)
return
}
ctx.HTML(200, tplGrampusTrainJobNPUNew)
}
}

func prepareGrampusTrainSpecs(ctx *context.Context, computeResource string) { func prepareGrampusTrainSpecs(ctx *context.Context, computeResource string) {
noteBookSpecs, _ := resource.FindAvailableSpecs(ctx.User.ID, models.FindSpecsOptions{ noteBookSpecs, _ := resource.FindAvailableSpecs(ctx.User.ID, models.FindSpecsOptions{
JobType: models.JobTypeTrain, JobType: models.JobTypeTrain,
@@ -201,6 +256,12 @@ func grampusParamCheckCreateTrainJob(form auth.CreateGrampusTrainJobForm) error
} }


func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrainJobForm) { func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrainJobForm) {
ctx.Data["IsCreate"] = true
grampusTrainJobGpuCreate(ctx, form)
}

func grampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrainJobForm) {

displayJobName := form.DisplayJobName displayJobName := form.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName) jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
uuid := form.Attachment uuid := form.Attachment
@@ -210,28 +271,31 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
repo := ctx.Repo.Repository repo := ctx.Repo.Repository
codeLocalPath := setting.JobPath + jobName + cloudbrain.CodeMountPath + "/" codeLocalPath := setting.JobPath + jobName + cloudbrain.CodeMountPath + "/"
codeMinioPath := setting.CBCodePathPrefix + jobName + cloudbrain.CodeMountPath + "/" codeMinioPath := setting.CBCodePathPrefix + jobName + cloudbrain.CodeMountPath + "/"
dataMinioPath := setting.Attachment.Minio.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid
branchName := form.BranchName branchName := form.BranchName
image := strings.TrimSpace(form.Image) image := strings.TrimSpace(form.Image)
tpl := tplGrampusTrainJobGPUNew


if !jobNamePattern.MatchString(displayJobName) {
lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeTrain), displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tplGrampusTrainJobGPUNew, &form)
return return
} }
defer lock.UnLock()


bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err, ctx.Data["MsgID"])
if !jobNamePattern.MatchString(displayJobName) {
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_bootfile_err"), tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tpl, &form)
return return
} }


errStr := checkSpecialPool(ctx, "GPU")
if errStr != "" {
bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr(errStr, tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_bootfile_err"), tpl, &form)
return return
} }


@@ -240,13 +304,13 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if err != nil { if err != nil {
log.Error("GetGrampusCountByUserID failed:%v", err, ctx.Data["MsgID"]) log.Error("GetGrampusCountByUserID failed:%v", err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr("system error", tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr("system error", tpl, &form)
return return
} else { } else {
if count >= 1 { if count >= 1 {
log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr("you have already a running or waiting task, can not create more", tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr("you have already a running or waiting task, can not create more", tpl, &form)
return return
} }
} }
@@ -255,7 +319,7 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if err := grampusParamCheckCreateTrainJob(form); err != nil { if err := grampusParamCheckCreateTrainJob(form); err != nil {
log.Error("paramCheckCreateTrainJob failed:(%v)", err, ctx.Data["MsgID"]) log.Error("paramCheckCreateTrainJob failed:(%v)", err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr(err.Error(), tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr(err.Error(), tpl, &form)
return return
} }


@@ -265,14 +329,14 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if len(tasks) != 0 { if len(tasks) != 0 {
log.Error("the job name did already exist", ctx.Data["MsgID"]) log.Error("the job name did already exist", ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr("the job name did already exist", tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr("the job name did already exist", tpl, &form)
return return
} }
} else { } else {
if !models.IsErrJobNotExist(err) { if !models.IsErrJobNotExist(err) {
log.Error("system error, %v", err, ctx.Data["MsgID"]) log.Error("system error, %v", err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr("system error", tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr("system error", tpl, &form)
return return
} }
} }
@@ -285,16 +349,24 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
})
if err != nil || spec == nil {
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr("Resource specification not available", tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr("Resource specification not available", tpl, &form)
return
}

if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tplGrampusTrainJobGPUNew, &form)
return
}

//check dataset
attachment, err := models.GetAttachmentByUUID(uuid)
datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid, models.GPU)
if err != nil {
log.Error("GetAttachmentByUUID failed:", err.Error(), ctx.Data["MsgID"])
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr("dataset is not exist", tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
return
}


@@ -307,7 +379,7 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if err := downloadZipCode(ctx, codeLocalPath, branchName); err != nil {
log.Error("downloadZipCode failed, server timed out: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
return
}


@@ -316,7 +388,7 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if err := uploadCodeToMinio(codeLocalPath+"/", jobName, cloudbrain.CodeMountPath+"/"); err != nil {
log.Error("Failed to uploadCodeToMinio: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
return
}


@@ -324,7 +396,7 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if err := mkModelPath(modelPath); err != nil {
log.Error("Failed to mkModelPath: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
return
}


@@ -332,52 +404,102 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if err := uploadCodeToMinio(modelPath, jobName, cloudbrain.ModelMountPath+"/"); err != nil {
log.Error("Failed to uploadCodeToMinio: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
return
}


var datasetRemotePath, allFileName string
for _, datasetInfo := range datasetInfos {
if datasetRemotePath == "" {
datasetRemotePath = datasetInfo.DataLocalPath
allFileName = datasetInfo.FullName
} else {
datasetRemotePath = datasetRemotePath + ";" + datasetInfo.DataLocalPath
allFileName = allFileName + ";" + datasetInfo.FullName
}

}
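The loop above concatenates every selected dataset's storage path and archive name into semicolon-separated strings that are later handed to generateCommand. A minimal, self-contained sketch of the same joining logic; the field names follow the usage above and the sample values are hypothetical:

package main

import (
	"fmt"
	"strings"
)

// datasetInfo mirrors only the two fields used by the loop above.
type datasetInfo struct {
	DataLocalPath string
	FullName      string
}

// joinDatasets returns the ";"-separated remote paths and file names.
func joinDatasets(infos []datasetInfo) (remotePath, allFileName string) {
	paths := make([]string, 0, len(infos))
	names := make([]string, 0, len(infos))
	for _, d := range infos {
		paths = append(paths, d.DataLocalPath)
		names = append(names, d.FullName)
	}
	return strings.Join(paths, ";"), strings.Join(names, ";")
}

func main() {
	p, n := joinDatasets([]datasetInfo{
		{DataLocalPath: "attachment/a/b/uuid-1/", FullName: "mnist.zip"},
		{DataLocalPath: "attachment/c/d/uuid-2/", FullName: "cifar10.zip"},
	})
	fmt.Println(p) // attachment/a/b/uuid-1/;attachment/c/d/uuid-2/
	fmt.Println(n) // mnist.zip;cifar10.zip
}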

//prepare command
command, err := generateCommand(repo.Name, grampus.ProcessorTypeGPU, codeMinioPath+cloudbrain.DefaultBranchName+".zip", dataMinioPath, bootFile, params, setting.CBCodePathPrefix+jobName+cloudbrain.ModelMountPath+"/", attachment.Name)
preTrainModelPath := getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName)

command, err := generateCommand(repo.Name, grampus.ProcessorTypeGPU, codeMinioPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CBCodePathPrefix+jobName+cloudbrain.ModelMountPath+"/", allFileName, preTrainModelPath, form.CkptName)
if err != nil {
log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr("Create task failed, internal error", tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr("Create task failed, internal error", tpl, &form)
return
}


commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName)

req := &grampus.GenerateTrainJobReq{
JobName: jobName,
DisplayJobName: displayJobName,
ComputeResource: models.GPUResource,
ProcessType: grampus.ProcessorTypeGPU,
Command: command,
ImageUrl: image,
Description: description,
BootFile: bootFile,
Uuid: uuid,
CommitID: commitID,
BranchName: branchName,
Params: form.Params,
EngineName: image,
DatasetName: attachment.Name,
JobName: jobName,
DisplayJobName: displayJobName,
ComputeResource: models.GPUResource,
ProcessType: grampus.ProcessorTypeGPU,
Command: command,
ImageUrl: image,
Description: description,
BootFile: bootFile,
Uuid: uuid,
CommitID: commitID,
BranchName: branchName,
Params: form.Params,
EngineName: image,
DatasetNames: datasetNames,
DatasetInfos: datasetInfos,

IsLatestVersion: modelarts.IsLatestVersion,
VersionCount: modelarts.VersionCountOne,
WorkServerNumber: 1,
Spec: spec,
}


if form.ModelName != "" { //使用预训练模型训练
req.ModelName = form.ModelName
req.LabelName = form.LabelName
req.CkptName = form.CkptName
req.ModelVersion = form.ModelVersion
req.PreTrainModelUrl = form.PreTrainModelUrl

}

err = grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error(), ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
ctx.RenderWithErr(err.Error(), tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr(err.Error(), tpl, &form)
return
}
ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job")
}


func getPreTrainModelPath(pretrainModelDir string, fileName string) string {
index := strings.Index(pretrainModelDir, "/")
if index > 0 {
filterBucket := pretrainModelDir[index+1:]
return filterBucket + fileName
} else {
return ""
}

}
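getPreTrainModelPath above keeps only the part of PreTrainModelUrl after the first "/" (dropping the bucket segment) and appends the checkpoint file name; an input without a "/" yields an empty string. A self-contained illustration with hypothetical values:

package main

import (
	"fmt"
	"strings"
)

// Same logic as getPreTrainModelPath above, reproduced here so the example runs on its own.
func preTrainModelPath(pretrainModelDir, fileName string) string {
	if i := strings.Index(pretrainModelDir, "/"); i > 0 {
		return pretrainModelDir[i+1:] + fileName // drop "bucket/", keep the object prefix
	}
	return ""
}

func main() {
	// "my-bucket/pretrain/resnet50/" is a made-up bucket/prefix value.
	fmt.Println(preTrainModelPath("my-bucket/pretrain/resnet50/", "model_best.ckpt"))
	// Output: pretrain/resnet50/model_best.ckpt
	fmt.Println(preTrainModelPath("no-slash-here", "model_best.ckpt") == "")
	// Output: true
}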

func GrampusTrainJobVersionCreate(ctx *context.Context, form auth.CreateGrampusTrainJobForm) {
ctx.Data["IsCreate"] = false
computeResource := ctx.Query("compute_resource")
if computeResource == models.GPUResource {
grampusTrainJobGpuCreate(ctx, form)
} else if computeResource == models.NPUResource {
grampusTrainJobNpuCreate(ctx, form)
} else {
ctx.ServerError("resource error", errors.New("compute resource is not support"))
return
}

}

func checkSpecialPool(ctx *context.Context, resourceType string) string {
grampus.InitSpecialPool()
if grampus.SpecialPools != nil {
@@ -401,6 +523,12 @@ func checkSpecialPool(ctx *context.Context, resourceType string) string {
}


func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrainJobForm) {
ctx.Data["IsCreate"] = true
grampusTrainJobNpuCreate(ctx, form)
}

func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrainJobForm) {

displayJobName := form.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
uuid := form.Attachment
@@ -410,30 +538,34 @@ func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
repo := ctx.Repo.Repository
codeLocalPath := setting.JobPath + jobName + modelarts.CodePath
codeObsPath := grampus.JobPath + jobName + modelarts.CodePath
dataObsPath := setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + "/"
//dataObsPath := setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + "/"
branchName := form.BranchName
isLatestVersion := modelarts.IsLatestVersion
versionCount := modelarts.VersionCountOne
engineName := form.EngineName
tpl := tplGrampusTrainJobNPUNew

if !jobNamePattern.MatchString(displayJobName) {
lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeTrain), displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tplGrampusTrainJobNPUNew, &form)
return
}
defer lock.UnLock()


bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err, ctx.Data["MsgID"])
if !jobNamePattern.MatchString(displayJobName) {
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_bootfile_err"), tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tpl, &form)
return
}

errStr := checkSpecialPool(ctx, "NPU")
if errStr != "" {
bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr(errStr, tplGrampusTrainJobGPUNew, &form)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_bootfile_err"), tpl, &form)
return
}


@@ -442,13 +574,13 @@ func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if err != nil {
log.Error("GetGrampusCountByUserID failed:%v", err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr("system error", tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr("system error", tpl, &form)
return
} else {
if count >= 1 {
log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr("you have already a running or waiting task, can not create more", tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr("you have already a running or waiting task, can not create more", tpl, &form)
return
}
}
@@ -457,7 +589,7 @@ func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if err := grampusParamCheckCreateTrainJob(form); err != nil {
log.Error("paramCheckCreateTrainJob failed:(%v)", err)
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr(err.Error(), tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr(err.Error(), tpl, &form)
return
}


@@ -467,14 +599,14 @@ func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if len(tasks) != 0 {
log.Error("the job name did already exist", ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr("the job name did already exist", tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr("the job name did already exist", tpl, &form)
return
}
} else {
if !models.IsErrJobNotExist(err) {
log.Error("system error, %v", err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr("system error", tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr("system error", tpl, &form)
return
}
}
@@ -487,16 +619,22 @@ func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
})
if err != nil || spec == nil {
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr("Resource specification not available", tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr("Resource specification not available", tpl, &form)
return
}
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tplGrampusTrainJobNPUNew, &form)
return
}

//check dataset
attachment, err := models.GetAttachmentByUUID(uuid)
datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid, models.NPU)
if err != nil {
log.Error("GetAttachmentByUUID failed:", err.Error(), ctx.Data["MsgID"])
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr("dataset is not exist", tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
return
}


@@ -509,7 +647,7 @@ func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if err := downloadZipCode(ctx, codeLocalPath, branchName); err != nil {
log.Error("downloadZipCode failed, server timed out: %s (%v)", repo.FullName(), err)
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
return
}


@@ -517,23 +655,36 @@ func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.OutputPath); err != nil {
log.Error("Failed to obsMkdir_output: %s (%v)", repo.FullName(), err)
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
return
}

if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil {
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err)
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
return
}


var datasetRemotePath, allFileName string
for _, datasetInfo := range datasetInfos {
if datasetRemotePath == "" {
datasetRemotePath = datasetInfo.DataLocalPath + "'" + datasetInfo.FullName + "'"
allFileName = datasetInfo.FullName
} else {
datasetRemotePath = datasetRemotePath + ";" + datasetInfo.DataLocalPath + "'" + datasetInfo.FullName + "'"
allFileName = allFileName + ";" + datasetInfo.FullName
}

}

//prepare command
command, err := generateCommand(repo.Name, grampus.ProcessorTypeNPU, codeObsPath+cloudbrain.DefaultBranchName+".zip", dataObsPath+"'"+attachment.Name+"'", bootFile, params, setting.CodePathPrefix+jobName+modelarts.OutputPath, attachment.Name)
preTrainModelPath := getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName)
command, err := generateCommand(repo.Name, grampus.ProcessorTypeNPU, codeObsPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CodePathPrefix+jobName+modelarts.OutputPath, allFileName, preTrainModelPath, form.CkptName)
if err != nil {
log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr("Create task failed, internal error", tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr("Create task failed, internal error", tpl, &form)
return
}


@@ -546,7 +697,6 @@ func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
ProcessType: grampus.ProcessorTypeNPU,
Command: command,
ImageId: form.ImageID,
DataUrl: dataObsPath,
Description: description,
CodeObsPath: codeObsPath,
BootFileUrl: codeObsPath + bootFile,
@@ -560,15 +710,24 @@ func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
EngineName: engineName,
VersionCount: versionCount,
TotalVersionCount: modelarts.TotalVersionCount,
DatasetName: attachment.Name,
DatasetNames: datasetNames,
DatasetInfos: datasetInfos,
Spec: spec,
}
if form.ModelName != "" { //使用预训练模型训练
req.ModelName = form.ModelName
req.LabelName = form.LabelName
req.CkptName = form.CkptName
req.ModelVersion = form.ModelVersion
req.PreTrainModelUrl = form.PreTrainModelUrl
req.PreTrainModelPath = preTrainModelPath
}


err = grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
ctx.RenderWithErr(err.Error(), tplGrampusTrainJobNPUNew, &form)
ctx.RenderWithErr(err.Error(), tpl, &form)
return
}
ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job")
@@ -695,6 +854,9 @@ func GrampusTrainJobShow(ctx *context.Context) {
task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
if task.Status != result.JobInfo.Status || result.JobInfo.Status == models.GrampusStatusRunning {
task.Duration = result.JobInfo.RunSec
if task.Duration < 0 {
task.Duration = 0
}
task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)


if task.StartTime == 0 && result.JobInfo.StartedAt > 0 {
@@ -752,7 +914,7 @@ func GrampusTrainJobShow(ctx *context.Context) {
ctx.HTML(http.StatusOK, tplGrampusTrainJobShow)
}


func GrampusGetLog(ctx *context.Context) {
func GrampusDownloadLog(ctx *context.Context) {
jobID := ctx.Params(":jobid")
job, err := models.GetCloudbrainByJobID(jobID)
if err != nil {
@@ -764,19 +926,46 @@ func GrampusGetLog(ctx *context.Context) {
content, err := grampus.GetTrainJobLog(job.JobID)
if err != nil {
log.Error("GetTrainJobLog failed: %v", err, ctx.Data["MsgID"])
content = ""
}
fileName := job.JobName + "-log.txt"
ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+fileName)
ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
var b []byte = []byte(content)
ctx.Resp.Write(b)
}

func GrampusGetLog(ctx *context.Context) {
jobID := ctx.Params(":jobid")
job, err := models.GetCloudbrainByJobID(jobID)
if err != nil {
log.Error("GetCloudbrainByJobID failed: %v", err, ctx.Data["MsgID"])
ctx.ServerError(err.Error(), err)
return
}


content, err := grampus.GetTrainJobLog(job.JobID)
if err != nil {
log.Error("GetTrainJobLog failed: %v", err, ctx.Data["MsgID"])
ctx.ServerError(err.Error(), err)
return
}
var canLogDownload bool
if err != nil {
canLogDownload = false
} else {
canLogDownload = true
}
ctx.JSON(http.StatusOK, map[string]interface{}{
"JobName": job.JobName,
"Content": content,
"JobName": job.JobName,
"Content": content,
"CanLogDownload": canLogDownload,
})


return
}
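GrampusGetLog above responds with a small JSON object. A client-side sketch of the payload shape, with the struct inferred from the map keys used above (it is not a type declared in the codebase) and a made-up log line:

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical client-side mirror of the JSON written by GrampusGetLog.
type grampusLogResponse struct {
	JobName        string `json:"JobName"`
	Content        string `json:"Content"`
	CanLogDownload bool   `json:"CanLogDownload"`
}

func main() {
	raw := []byte(`{"JobName":"demo-job","Content":"epoch 1/10 ...","CanLogDownload":true}`)
	var resp grampusLogResponse
	if err := json.Unmarshal(raw, &resp); err != nil {
		panic(err)
	}
	fmt.Println(resp.JobName, resp.CanLogDownload) // demo-job true
}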


func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName string) (string, error) {
func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName, pretrainModelPath, pretrainModelFileName string) (string, error) {
var command string


workDir := grampus.NpuWorkDir
@@ -784,23 +973,26 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo
workDir = grampus.GpuWorkDir
}


command += "pwd;cd " + workDir + grampus.CommandPrepareScript
command += "pwd;cd " + workDir + fmt.Sprintf(grampus.CommandPrepareScript, setting.Grampus.SyncScriptProject, setting.Grampus.SyncScriptProject)
//download code & dataset
if processorType == grampus.ProcessorTypeNPU {
commandDownload := "./downloader_for_obs " + setting.Bucket + " " + codeRemotePath + " " + grampus.CodeArchiveName + " " + dataRemotePath + " '" + datasetName + "';"
commandDownload := "./downloader_for_obs " + setting.Bucket + " " + codeRemotePath + " " + grampus.CodeArchiveName + ";"
command += commandDownload
} else if processorType == grampus.ProcessorTypeGPU {
commandDownload := "./downloader_for_minio " + setting.Grampus.Env + " " + codeRemotePath + " " + grampus.CodeArchiveName + " " + dataRemotePath + " '" + datasetName + "';"
commandDownload := "./downloader_for_minio " + setting.Grampus.Env + " " + codeRemotePath + " " + grampus.CodeArchiveName + " '" + dataRemotePath + "' '" + datasetName + "'"
commandDownload = processPretrainModelParameter(pretrainModelPath, pretrainModelFileName, commandDownload)
command += commandDownload
}


//unzip code & dataset
toolUnzip := "unzip -q '"
if strings.HasSuffix(datasetName, ".tar.gz") {
toolUnzip = "tar -zxvf '"
if processorType == grampus.ProcessorTypeNPU {
commandUnzip := "cd " + workDir + "code;unzip -q master.zip;"
command += commandUnzip
} else if processorType == grampus.ProcessorTypeGPU {
unZipDatasetCommand := generateDatasetUnzipCommand(datasetName)
commandUnzip := "cd " + workDir + "code;unzip -q master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand
command += commandUnzip
}
commandUnzip := "cd " + workDir + "code;unzip -q master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + toolUnzip + datasetName + "';"
command += commandUnzip


command += "echo \"unzip finished;start to exec code;\";" command += "echo \"unzip finished;start to exec code;\";"


@@ -834,6 +1026,9 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo
if processorType == grampus.ProcessorTypeNPU {
commandCode = "/bin/bash /home/work/run_train_for_openi.sh " + workDir + "code/" + strings.ToLower(repoName) + "/" + bootFile + " /tmp/log/train.log" + paramCode + ";"
} else if processorType == grampus.ProcessorTypeGPU {
if pretrainModelFileName != "" {
paramCode += " --ckpt_url" + "=" + workDir + "pretrainmodel/" + pretrainModelFileName
}
commandCode = "cd " + workDir + "code/" + strings.ToLower(repoName) + ";python " + bootFile + paramCode + ";" commandCode = "cd " + workDir + "code/" + strings.ToLower(repoName) + ";python " + bootFile + paramCode + ";"
} }


@@ -845,10 +1040,10 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo


//upload models
if processorType == grampus.ProcessorTypeNPU {
commandUpload := "cd " + workDir + "script_for_grampus/;./uploader_for_npu " + setting.Bucket + " " + outputRemotePath + " " + workDir + "output/;"
commandUpload := "cd " + workDir + setting.Grampus.SyncScriptProject + "/;./uploader_for_npu " + setting.Bucket + " " + outputRemotePath + " " + workDir + "output/;"
command += commandUpload
} else if processorType == grampus.ProcessorTypeGPU {
commandUpload := "cd " + workDir + "script_for_grampus/;./uploader_for_gpu " + setting.Grampus.Env + " " + outputRemotePath + " " + workDir + "output/;"
commandUpload := "cd " + workDir + setting.Grampus.SyncScriptProject + "/;./uploader_for_gpu " + setting.Grampus.Env + " " + outputRemotePath + " " + workDir + "output/;"
command += commandUpload
}


@@ -859,6 +1054,38 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo
return command, nil
}


func processPretrainModelParameter(pretrainModelPath string, pretrainModelFileName string, commandDownload string) string {
commandDownloadTemp := commandDownload
if pretrainModelPath != "" {
commandDownloadTemp += " '" + pretrainModelPath + "' '" + pretrainModelFileName + "'"
}
commandDownloadTemp += ";"
return commandDownloadTemp
}
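processPretrainModelParameter above appends the pre-trained model path and file name as two extra single-quoted arguments to the download command, and always terminates the command with ";". With hypothetical values:

// The base command string and model path below are made up for illustration.
base := "./downloader_for_minio test code/master.zip master.zip 'data/path' 'mnist.zip'"
withModel := processPretrainModelParameter("pretrain/resnet50/", "model_best.ckpt", base)
// withModel == base + " 'pretrain/resnet50/' 'model_best.ckpt';"
withoutModel := processPretrainModelParameter("", "", base)
// withoutModel == base + ";"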

func generateDatasetUnzipCommand(datasetName string) string {
var unZipDatasetCommand string

datasetNameArray := strings.Split(datasetName, ";")
if len(datasetNameArray) == 1 { //单数据集
unZipDatasetCommand = "unzip -q '" + datasetName + "';"
if strings.HasSuffix(datasetNameArray[0], ".tar.gz") {
unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';"
}

} else { //多数据集
for _, datasetNameTemp := range datasetNameArray {
if strings.HasSuffix(datasetNameTemp, ".tar.gz") {
unZipDatasetCommand = unZipDatasetCommand + "tar -zxvf '" + datasetNameTemp + "';"
} else {
unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';"
}
}

}
return unZipDatasetCommand
}
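generateDatasetUnzipCommand above produces the shell fragment that unpacks the GPU datasets: a single archive is extracted in place (tar for .tar.gz, unzip otherwise), while a semicolon-separated list extracts each zip into its own sub-directory. With hypothetical dataset names:

// Single dataset
generateDatasetUnzipCommand("mnist.zip")       // "unzip -q 'mnist.zip';"
generateDatasetUnzipCommand("imagenet.tar.gz") // "tar --strip-components=1 -zxvf 'imagenet.tar.gz';"
// Multiple datasets
generateDatasetUnzipCommand("mnist.zip;cifar10.zip")
// "unzip -q 'mnist.zip' -d './mnist';unzip -q 'cifar10.zip' -d './cifar10';"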

func downloadZipCode(ctx *context.Context, codePath, branchName string) error {
archiveType := git.ZIP
archivePath := codePath


+158
-254
routers/repo/modelarts.go

@@ -2,8 +2,6 @@ package repo


import (
"archive/zip"
"code.gitea.io/gitea/modules/modelarts_cd"
"code.gitea.io/gitea/services/cloudbrain/resource"
"encoding/json" "encoding/json"
"errors" "errors"
"fmt" "fmt"
@@ -17,6 +15,12 @@ import (
"time" "time"
"unicode/utf8" "unicode/utf8"


"code.gitea.io/gitea/modules/dataset"

"code.gitea.io/gitea/modules/modelarts_cd"
"code.gitea.io/gitea/services/cloudbrain/resource"
"code.gitea.io/gitea/services/reward/point/account"

"code.gitea.io/gitea/models" "code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/auth" "code.gitea.io/gitea/modules/auth"
"code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/base"
@@ -27,6 +31,8 @@ import (
"code.gitea.io/gitea/modules/modelarts" "code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/notification" "code.gitea.io/gitea/modules/notification"
"code.gitea.io/gitea/modules/obs" "code.gitea.io/gitea/modules/obs"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/timeutil"
@@ -209,6 +215,16 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm
imageId := form.ImageId
repo := ctx.Repo.Repository


lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeDebug), displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
notebookNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tplModelArtsNotebookNew, &form)
return
}
defer lock.UnLock()
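The block above is the duplicate-submission guard that this change adds in front of every creation handler (the same pattern appears again in TrainJobCreate and TrainJobCreateVersion below): take a repo- and name-scoped distributed lock, reject the request with repo.cloudbrain_samejob_err if it is already held, and release it when the handler returns. A hypothetical helper that factors the flow out could look like the sketch below; withJobNameLock is not a function in the codebase, only an illustration of the locking flow using the same calls as above:

// Hypothetical wrapper around the distributed lock pattern used above.
func withJobNameLock(repoID int64, jobType, displayJobName string, fn func()) bool {
	lock := redis_lock.NewDistributeLock(
		redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repoID), jobType, displayJobName))
	if isOk, _ := lock.Lock(models.CloudbrainKeyDuration); !isOk {
		return false // another request with the same job name is in flight
	}
	defer lock.UnLock()
	fn()
	return true
}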

count, err := models.GetCloudbrainNotebookCountByUserID(ctx.User.ID)
if err != nil {
log.Error("GetCloudbrainNotebookCountByUserID failed:%v", err, ctx.Data["MsgID"])
@@ -254,6 +270,13 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm
ctx.RenderWithErr("Resource specification not available", tplModelArtsNotebookNew, &form) ctx.RenderWithErr("Resource specification not available", tplModelArtsNotebookNew, &form)
return return
} }
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d ", ctx.User.ID, spec.ID)
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tplModelArtsNotebookNew, &form)
return
}

if setting.ModelartsCD.Enabled {
err = modelarts_cd.GenerateNotebook(ctx, displayJobName, jobName, uuid, description, imageId, spec)
} else {
@@ -461,7 +484,11 @@ func NotebookRestart(ctx *context.Context) {
errorMsg = "Resource specification not support any more" errorMsg = "Resource specification not support any more"
break break
} }

if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
errorMsg = ctx.Tr("points.insufficient_points_balance")
break
}
createTime := timeutil.TimeStampNow()
param := models.NotebookAction{
Action: models.ActionStart,
@@ -834,84 +861,6 @@ func setSpecBySpecialPoolConfig(ctx *context.Context, jobType string) {
}
}


func trainJobErrorNewDataPrepare(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) error {
ctx.Data["PageIsCloudBrain"] = true

//can, err := canUserCreateTrainJob(ctx.User.ID)
//if err != nil {
// ctx.ServerError("canUserCreateTrainJob", err)
// return
//}
//
//if !can {
// log.Error("the user can not create train-job")
// ctx.ServerError("the user can not create train-job", fmt.Errorf("the user can not create train-job"))
// return
//}

t := time.Now()
var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:]
ctx.Data["display_job_name"] = displayJobName

attachs, err := models.GetModelArtsTrainAttachments(ctx.User.ID)
if err != nil {
ctx.ServerError("GetAllUserAttachments failed:", err)
return err
}
ctx.Data["attachments"] = attachs

var resourcePools modelarts.ResourcePool
if err = json.Unmarshal([]byte(setting.ResourcePools), &resourcePools); err != nil {
ctx.ServerError("json.Unmarshal failed:", err)
return err
}
ctx.Data["resource_pools"] = resourcePools.Info

var engines modelarts.Engine
if err = json.Unmarshal([]byte(setting.Engines), &engines); err != nil {
ctx.ServerError("json.Unmarshal failed:", err)
return err
}
ctx.Data["engines"] = engines.Info

var versionInfos modelarts.VersionInfo
if err = json.Unmarshal([]byte(setting.EngineVersions), &versionInfos); err != nil {
ctx.ServerError("json.Unmarshal failed:", err)
return err
}
ctx.Data["engine_versions"] = versionInfos.Version

prepareCloudbrainTwoTrainSpecs(ctx)

configList, err := getConfigList(modelarts.PerPage, 1, modelarts.SortByCreateTime, "desc", "", modelarts.ConfigTypeCustom)
if err != nil {
ctx.ServerError("getConfigList failed:", err)
return err
}
var Parameters modelarts.Parameters
if err = json.Unmarshal([]byte(form.Params), &Parameters); err != nil {
ctx.ServerError("json.Unmarshal failed:", err)
return err
}
ctx.Data["params"] = Parameters.Parameter
ctx.Data["config_list"] = configList.ParaConfigs
ctx.Data["bootFile"] = form.BootFile
ctx.Data["uuid"] = form.Attachment
_, datasetNames, err := models.GetDatasetInfo(form.Attachment)
if err != nil {
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
return nil
}
ctx.Data["dataset_name"] = datasetNames
ctx.Data["branch_name"] = form.BranchName
ctx.Data["datasetType"] = models.TypeCloudBrainTwo
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "")
ctx.Data["WaitCount"] = waitCount
setMultiNodeIfConfigureMatch(ctx)

return nil
}

func TrainJobNewVersion(ctx *context.Context) {


err := trainJobNewVersionDataPrepare(ctx)
@@ -977,26 +926,18 @@ func trainJobNewVersionDataPrepare(ctx *context.Context) error {
ctx.Data["spec_id"] = spec.ID ctx.Data["spec_id"] = spec.ID
} }


var Parameters modelarts.Parameters
if err = json.Unmarshal([]byte(task.Parameters), &Parameters); err != nil {
ctx.ServerError("json.Unmarshal failed:", err)
return err
}
ctx.Data["params"] = Parameters.Parameter
ctx.Data["run_para_list"] = task.Parameters


branches, _, err := ctx.Repo.GitRepo.GetBranches(0, 0)
if err != nil {
ctx.ServerError("GetBranches error:", err)
return err
}
_, _, datasetNames, _, err := getDatasUrlListByUUIDS(task.Uuid)
if err != nil {
log.Info("query dataset error," + err.Error())
//ctx.ServerError("GetAllUserAttachments failed:", err)
//return err
} else {
ctx.Data["dataset_name"] = datasetNames
}

uuids, datasetNames := dataset.GetFilterDeletedAttachments(task.Uuid)

ctx.Data["dataset_name"] = datasetNames

ctx.Data["branches"] = branches ctx.Data["branches"] = branches
ctx.Data["branch_name"] = task.BranchName ctx.Data["branch_name"] = task.BranchName
ctx.Data["description"] = task.Description ctx.Data["description"] = task.Description
@@ -1005,104 +946,24 @@ func trainJobNewVersionDataPrepare(ctx *context.Context) error {
ctx.Data["work_server_number"] = task.WorkServerNumber ctx.Data["work_server_number"] = task.WorkServerNumber
ctx.Data["flavor_name"] = task.FlavorName ctx.Data["flavor_name"] = task.FlavorName
ctx.Data["engine_name"] = task.EngineName ctx.Data["engine_name"] = task.EngineName
ctx.Data["uuid"] = task.Uuid
ctx.Data["attachment"] = uuids
ctx.Data["flavor_code"] = task.FlavorCode ctx.Data["flavor_code"] = task.FlavorCode
ctx.Data["engine_id"] = task.EngineID ctx.Data["engine_id"] = task.EngineID
ctx.Data["datasetType"] = models.TypeCloudBrainTwo ctx.Data["datasetType"] = models.TypeCloudBrainTwo


configList, err := getConfigList(modelarts.PerPage, 1, modelarts.SortByCreateTime, "desc", "", modelarts.ConfigTypeCustom)
if err != nil {
ctx.ServerError("getConfigList failed:", err)
return err
}
ctx.Data["config_list"] = configList.ParaConfigs
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "")
ctx.Data["WaitCount"] = waitCount

return nil
}

func versionErrorDataPrepare(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) error {
ctx.Data["PageIsCloudBrain"] = true
var jobID = ctx.Params(":jobid")
// var versionName = ctx.Params(":version-name")
var versionName = ctx.Query("version_name")

task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, versionName)
if err != nil {
log.Error("GetCloudbrainByJobIDAndVersionName(%s) failed:%v", jobID, err.Error())
return err
}

t := time.Now()
var jobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:]
ctx.Data["job_name"] = task.JobName

attachs, err := models.GetModelArtsTrainAttachments(ctx.User.ID)
if err != nil {
ctx.ServerError("GetAllUserAttachments failed:", err)
return err
}
ctx.Data["attachments"] = attachs

var resourcePools modelarts.ResourcePool
if err = json.Unmarshal([]byte(setting.ResourcePools), &resourcePools); err != nil {
ctx.ServerError("json.Unmarshal failed:", err)
return err
}
ctx.Data["resource_pools"] = resourcePools.Info

var engines modelarts.Engine
if err = json.Unmarshal([]byte(setting.Engines), &engines); err != nil {
ctx.ServerError("json.Unmarshal failed:", err)
return err
}
ctx.Data["engines"] = engines.Info

var versionInfos modelarts.VersionInfo
if err = json.Unmarshal([]byte(setting.EngineVersions), &versionInfos); err != nil {
ctx.ServerError("json.Unmarshal failed:", err)
return err
}
ctx.Data["engine_versions"] = versionInfos.Version

prepareCloudbrainTwoTrainSpecs(ctx)

var Parameters modelarts.Parameters
if err = json.Unmarshal([]byte(form.Params), &Parameters); err != nil {
ctx.ServerError("json.Unmarshal failed:", err)
return err
}
ctx.Data["params"] = Parameters.Parameter
//pretrain model
ctx.Data["model_name"] = task.ModelName
ctx.Data["model_version"] = task.ModelVersion
ctx.Data["ckpt_name"] = task.CkptName
ctx.Data["label_names"] = task.LabelName
ctx.Data["pre_train_model_url"] = task.PreTrainModelUrl


outputObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.OutputPath
ctx.Data["train_url"] = outputObsPath

branches, _, err := ctx.Repo.GitRepo.GetBranches(0, 0)
if err != nil {
ctx.ServerError("GetBranches error:", err)
return err
}
ctx.Data["branches"] = branches
ctx.Data["description"] = form.Description
ctx.Data["dataset_name"] = task.DatasetName
ctx.Data["work_server_number"] = form.WorkServerNumber
ctx.Data["flavor_name"] = form.FlavorName
ctx.Data["engine_name"] = form.EngineName
ctx.Data["flavor_code"] = task.FlavorCode
ctx.Data["engine_id"] = task.EngineID
ctx.Data["version_name"] = form.VersionName

ctx.Data["bootFile"] = form.BootFile
ctx.Data["uuid"] = form.Attachment
ctx.Data["branch_name"] = form.BranchName
configList, err := getConfigList(modelarts.PerPage, 1, modelarts.SortByCreateTime, "desc", "", modelarts.ConfigTypeCustom)
if err != nil {
ctx.ServerError("getConfigList failed:", err)
return err
}
ctx.Data["config_list"] = configList.ParaConfigs
ctx.Data["datasetType"] = models.TypeCloudBrainTwo
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "")
ctx.Data["WaitCount"] = waitCount


@@ -1136,21 +997,31 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)


errStr := checkMultiNode(ctx.User.ID, form.WorkServerNumber)
if errStr != "" {
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr(errStr), tplModelArtsTrainJobNew, &form) ctx.RenderWithErr(ctx.Tr(errStr), tplModelArtsTrainJobNew, &form)
return return
} }


lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeTrain), displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tplModelArtsTrainJobNew, &form)
return
}
defer lock.UnLock()

count, err := models.GetCloudbrainTrainJobCountByUserID(ctx.User.ID)
if err != nil {
log.Error("GetCloudbrainTrainJobCountByUserID failed:%v", err, ctx.Data["MsgID"])
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr("system error", tplModelArtsTrainJobNew, &form)
return
} else {
if count >= 1 {
log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr("you have already a running or waiting task, can not create more", tplModelArtsTrainJobNew, &form)
return
}
@@ -1158,7 +1029,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)


if err := paramCheckCreateTrainJob(form); err != nil {
log.Error("paramCheckCreateTrainJob failed:(%v)", err)
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr(err.Error(), tplModelArtsTrainJobNew, &form)
return
}
@@ -1166,7 +1037,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err)
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_bootfile_err"), tplModelArtsTrainJobNew, &form)
return
}
@@ -1177,23 +1048,30 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
Cluster: models.OpenICluster,
AiCenterCode: models.AICenterOfCloudBrainTwo})
if err != nil || spec == nil {
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr("Resource specification not available", tplModelArtsTrainJobNew, &form)
return
}
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice*form.WorkServerNumber) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tplModelArtsTrainJobNew, &form)
return
}

//Determine whether the task name of the task in the project is duplicated
tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeTrain), displayJobName)
if err == nil {
if len(tasks) != 0 {
log.Error("the job name did already exist", ctx.Data["MsgID"])
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr("the job name did already exist", tplModelArtsTrainJobNew, &form)
return
}
} else {
if !models.IsErrJobNotExist(err) {
log.Error("system error, %v", err, ctx.Data["MsgID"])
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr("system error", tplModelArtsTrainJobNew, &form)
return
}
@@ -1210,7 +1088,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)


if err := downloadCode(repo, codeLocalPath, branchName); err != nil {
log.Error("downloadCode failed, server timed out: %s (%v)", repo.FullName(), err)
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplModelArtsTrainJobNew, &form)
return
}
@@ -1218,14 +1096,14 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
//todo: upload code (send to file_server todo this work?)
if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.OutputPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_output: %s (%v)", repo.FullName(), err)
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr("Failed to obsMkdir_output", tplModelArtsTrainJobNew, &form)
return
}

if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.LogPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_log: %s (%v)", repo.FullName(), err)
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr("Failed to obsMkdir_log", tplModelArtsTrainJobNew, &form)
return
}
@@ -1234,7 +1112,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil { if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil {
// if err := uploadCodeToObs(codeLocalPath, jobName, parentDir); err != nil { // if err := uploadCodeToObs(codeLocalPath, jobName, parentDir); err != nil {
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err) log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err)
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplModelArtsTrainJobNew, &form) ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplModelArtsTrainJobNew, &form)
return return
} }
@@ -1246,7 +1124,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
err := json.Unmarshal([]byte(params), &parameters) err := json.Unmarshal([]byte(params), &parameters)
if err != nil { if err != nil {
log.Error("Failed to Unmarshal params: %s (%v)", params, err) log.Error("Failed to Unmarshal params: %s (%v)", params, err)
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr("运行参数错误", tplModelArtsTrainJobNew, &form) ctx.RenderWithErr("运行参数错误", tplModelArtsTrainJobNew, &form)
return return
} }
@@ -1272,7 +1150,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
datasUrlList, dataUrl, datasetNames, isMultiDataset, err := getDatasUrlListByUUIDS(uuid)
if err != nil {
log.Error("Failed to getDatasUrlListByUUIDS: %v", err)
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr("Failed to getDatasUrlListByUUIDS:"+err.Error(), tplModelArtsTrainJobNew, &form)
return
}
@@ -1280,7 +1158,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
jsondatas, err := json.Marshal(datasUrlList)
if err != nil {
log.Error("Failed to Marshal: %v", err)
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr("json error:"+err.Error(), tplModelArtsTrainJobNew, &form)
return
}
@@ -1290,6 +1168,13 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
Value: string(jsondatas),
})
}
if form.ModelName != "" { //使用预训练模型训练
ckptUrl := "/" + form.PreTrainModelUrl + form.CkptName
param = append(param, models.Parameter{
Label: modelarts.CkptUrl,
Value: "s3:/" + ckptUrl,
})
}
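With a hypothetical PreTrainModelUrl of "my-bucket/pretrain/resnet50/" and CkptName of "model_best.ckpt", the ckpt_url parameter assembled above resolves like this (the leading "/" added to ckptUrl is what turns the "s3:/" literal into the usual "s3://" prefix):

// Hypothetical values, same concatenation as above.
preTrainModelUrl := "my-bucket/pretrain/resnet50/"
ckptName := "model_best.ckpt"
ckptUrl := "/" + preTrainModelUrl + ckptName // "/my-bucket/pretrain/resnet50/model_best.ckpt"
value := "s3:/" + ckptUrl                    // "s3://my-bucket/pretrain/resnet50/model_best.ckpt"
_ = value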


//save param config
// if isSaveParam == "on" {
@@ -1358,6 +1243,15 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
DatasetName: datasetNames,
Spec: spec,
}
if form.ModelName != "" { //使用预训练模型训练
req.ModelName = form.ModelName
req.LabelName = form.LabelName
req.CkptName = form.CkptName
req.ModelVersion = form.ModelVersion
req.PreTrainModelUrl = form.PreTrainModelUrl

}

userCommand, userImageUrl := getUserCommand(engineID, req)
req.UserCommand = userCommand
req.UserImageUrl = userImageUrl
@@ -1372,7 +1266,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
err = modelarts.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
trainJobErrorNewDataPrepare(ctx, form)
trainJobNewDataPrepare(ctx)
ctx.RenderWithErr(err.Error(), tplModelArtsTrainJobNew, &form)
return
}
@@ -1457,7 +1351,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ


errStr := checkMultiNode(ctx.User.ID, form.WorkServerNumber)
if errStr != "" {
versionErrorDataPrepare(ctx, form)
trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr(errStr), tplModelArtsTrainJobVersionNew, &form)
return
}
@@ -1465,13 +1359,13 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
count, err := models.GetCloudbrainTrainJobCountByUserID(ctx.User.ID)
if err != nil {
log.Error("GetCloudbrainTrainJobCountByUserID failed:%v", err, ctx.Data["MsgID"])
versionErrorDataPrepare(ctx, form)
trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr("system error", tplModelArtsTrainJobVersionNew, &form)
return
} else {
if count >= 1 {
log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
versionErrorDataPrepare(ctx, form)
trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr("you have already a running or waiting task, can not create more", tplModelArtsTrainJobVersionNew, &form)
return
}
@@ -1506,16 +1400,26 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
EngineName := form.EngineName
isLatestVersion := modelarts.IsLatestVersion


lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeTrain), displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tplModelArtsTrainJobVersionNew, &form)
return
}
defer lock.UnLock()

canNewJob, _ := canUserCreateTrainJobVersion(ctx, latestTask.UserID)
if !canNewJob {
versionErrorDataPrepare(ctx, form)
trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr("user cann't new trainjob", tplModelArtsTrainJobVersionNew, &form)
return
}

if err := paramCheckCreateTrainJob(form); err != nil {
log.Error("paramCheckCreateTrainJob failed:(%v)", err)
versionErrorDataPrepare(ctx, form)
trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr(err.Error(), tplModelArtsTrainJobVersionNew, &form)
return
}
@@ -1523,7 +1427,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err)
versionErrorDataPrepare(ctx, form)
trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_bootfile_err"), tplModelArtsTrainJobVersionNew, &form)
return
}
@@ -1534,10 +1438,16 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
Cluster: models.OpenICluster,
AiCenterCode: models.AICenterOfCloudBrainTwo})
if err != nil || spec == nil {
-versionErrorDataPrepare(ctx, form)
+trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr("Resource specification not available", tplModelArtsTrainJobVersionNew, &form)
return
}
+if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
+log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
+trainJobNewVersionDataPrepare(ctx)
+ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tplModelArtsTrainJobVersionNew, &form)
+return
+}

//todo: del the codeLocalPath
_, err = ioutil.ReadDir(codeLocalPath)
@@ -1549,7 +1459,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
commitID, _ := gitRepo.GetBranchCommitID(branchName)
if err := downloadCode(repo, codeLocalPath, branchName); err != nil {
log.Error("Failed git clone repo to local(!: %s (%v)", repo.FullName(), err)
-versionErrorDataPrepare(ctx, form)
+trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplModelArtsTrainJobVersionNew, &form)
return
}
@@ -1557,14 +1467,14 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
//todo: upload code (send to file_server todo this work?)
if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.OutputPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_output: %s (%v)", repo.FullName(), err)
-versionErrorDataPrepare(ctx, form)
+trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr("Failed to obsMkdir_output", tplModelArtsTrainJobVersionNew, &form)
return
}

if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.LogPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_log: %s (%v)", repo.FullName(), err)
-versionErrorDataPrepare(ctx, form)
+trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr("Failed to obsMkdir_log", tplModelArtsTrainJobVersionNew, &form)
return
}
@@ -1574,7 +1484,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
// if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil {
if err := uploadCodeToObs(codeLocalPath, jobName, parentDir); err != nil {
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err)
-versionErrorDataPrepare(ctx, form)
+trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplModelArtsTrainJobVersionNew, &form)
return
}
@@ -1588,7 +1498,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
err := json.Unmarshal([]byte(params), &parameters)
if err != nil {
log.Error("Failed to Unmarshal params: %s (%v)", params, err)
-versionErrorDataPrepare(ctx, form)
+trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr("运行参数错误", tplModelArtsTrainJobVersionNew, &form)
return
}
@@ -1614,7 +1524,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
datasUrlList, dataUrl, datasetNames, isMultiDataset, err := getDatasUrlListByUUIDS(uuid)
if err != nil {
log.Error("Failed to getDatasUrlListByUUIDS: %v", err)
-versionErrorDataPrepare(ctx, form)
+trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr("Failed to getDatasUrlListByUUIDS:"+err.Error(), tplModelArtsTrainJobVersionNew, &form)
return
}
@@ -1622,7 +1532,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
jsondatas, err := json.Marshal(datasUrlList)
if err != nil {
log.Error("Failed to Marshal: %v", err)
-versionErrorDataPrepare(ctx, form)
+trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr("json error:"+err.Error(), tplModelArtsTrainJobVersionNew, &form)
return
}
@@ -1633,46 +1543,13 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
})
}

-// //save param config
-// if isSaveParam == "on" {
-// saveparams := append(param, models.Parameter{
-// Label: modelarts.TrainUrl,
-// Value: outputObsPath,
-// }, models.Parameter{
-// Label: modelarts.DataUrl,
-// Value: dataPath,
-// })
-// if form.ParameterTemplateName == "" {
-// log.Error("ParameterTemplateName is empty")
-// versionErrorDataPrepare(ctx, form)
-// ctx.RenderWithErr("保存作业参数时,作业参数名称不能为空", tplModelArtsTrainJobVersionNew, &form)
-// return
-// }
-
-// _, err := modelarts.CreateTrainJobConfig(models.CreateConfigParams{
-// ConfigName: form.ParameterTemplateName,
-// Description: form.PrameterDescription,
-// DataUrl: dataPath,
-// AppUrl: codeObsPath,
-// BootFileUrl: codeObsPath + bootFile,
-// TrainUrl: outputObsPath,
-// Flavor: models.Flavor{
-// Code: flavorCode,
-// },
-// WorkServerNum: workServerNumber,
-// EngineID: int64(engineID),
-// LogUrl: logObsPath,
-// PoolID: poolID,
-// Parameter: saveparams,
-// })
-
-// if err != nil {
-// log.Error("Failed to CreateTrainJobConfig: %v", err)
-// versionErrorDataPrepare(ctx, form)
-// ctx.RenderWithErr("保存作业参数失败:"+err.Error(), tplModelArtsTrainJobVersionNew, &form)
-// return
-// }
-// }
+if form.ModelName != "" { // train with a pre-trained model
+ckptUrl := "/" + form.PreTrainModelUrl + form.CkptName
+param = append(param, models.Parameter{
+Label: modelarts.CkptUrl,
+Value: "s3:/" + ckptUrl,
+})
+}

task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, PreVersionName)
if err != nil {
@@ -1707,6 +1584,15 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
DatasetName: datasetNames,
Spec: spec,
}

+if form.ModelName != "" { // train with a pre-trained model
+req.ModelName = form.ModelName
+req.LabelName = form.LabelName
+req.CkptName = form.CkptName
+req.ModelVersion = form.ModelVersion
+req.PreTrainModelUrl = form.PreTrainModelUrl
+}
userCommand, userImageUrl := getUserCommand(engineID, req)
req.UserCommand = userCommand
req.UserImageUrl = userImageUrl
@@ -1714,7 +1600,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
err = modelarts.GenerateTrainJobVersion(ctx, req, jobID)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
-versionErrorDataPrepare(ctx, form)
+trainJobNewVersionDataPrepare(ctx)
ctx.RenderWithErr(err.Error(), tplModelArtsTrainJobVersionNew, &form)
return
}
@@ -2003,7 +1889,6 @@ func TrainJobStop(ctx *context.Context) {
ctx.RenderWithErr(err.Error(), tplModelArtsTrainJobIndex, nil)
return
}
-
ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job?listType=" + listType)
}

@@ -2112,6 +1997,16 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
return
}

+lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeInference), displayJobName))
+isOk, err := lock.Lock(models.CloudbrainKeyDuration)
+if !isOk {
+log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
+inferenceJobErrorNewDataPrepare(ctx, form)
+ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tplModelArtsInferenceJobNew, &form)
+return
+}
+defer lock.UnLock()

count, err := models.GetCloudbrainInferenceJobCountByUserID(ctx.User.ID)
if err != nil {
log.Error("GetCloudbrainInferenceJobCountByUserID failed:%v", err, ctx.Data["MsgID"])
@@ -2170,6 +2065,13 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
ctx.RenderWithErr("Resource specification not available", tplModelArtsInferenceJobNew, &form) ctx.RenderWithErr("Resource specification not available", tplModelArtsInferenceJobNew, &form)
return return
} }
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d ", ctx.User.ID, spec.ID)
inferenceJobErrorNewDataPrepare(ctx, form)
ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tplModelArtsInferenceJobNew, &form)
return
}

//todo: del the codeLocalPath //todo: del the codeLocalPath
_, err = ioutil.ReadDir(codeLocalPath) _, err = ioutil.ReadDir(codeLocalPath)
if err == nil { if err == nil {
@@ -2419,6 +2321,7 @@ func InferenceJobIndex(ctx *context.Context) {
RepoID: repoId,
Type: Type,
New: MODEL_LATEST,
+Status: 0,
})
ctx.Data["MODEL_COUNT"] = model_count


@@ -2499,6 +2402,7 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error {
RepoID: repoId,
Type: Type,
New: MODEL_LATEST,
+Status: 0,
})
ctx.Data["MODEL_COUNT"] = model_count
ctx.Data["datasetType"] = models.TypeCloudBrainTwo


+24 -0 routers/reward/point/account.go

@@ -0,0 +1,24 @@
package point

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/reward/point/account"
"net/http"
)

func SearchPointAccount(ctx *context.Context) {
q := ctx.Query("q")
page := ctx.QueryInt("page")
res, err := account.SearchPointAccount(models.SearchPointAccountOpts{ListOptions: models.ListOptions{Page: page, PageSize: 20}, Keyword: q})
if err != nil {
log.Error("SearchPointAccount error.%v", err)
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}

ctx.JSON(http.StatusOK, response.SuccessWithData(res))
return
}

+45 -0 routers/reward/point/limit.go

@@ -0,0 +1,45 @@
package point

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/reward/limiter"
"net/http"
)

func GetSingleDailyPointLimitConfig(ctx *context.Context) {
r, err := limiter.GetSingleDailyPointLimitConfig()
if err != nil {
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}
resultMap := make(map[string]interface{}, 0)
if r == nil {
resultMap["LimitNum"] = ""
} else {
resultMap["LimitNum"] = r.LimitNum
}
ctx.JSON(http.StatusOK, response.SuccessWithData(resultMap))
}

func SetSingleDailyPointLimitConfig(ctx *context.Context, config models.LimitConfigVO) {
err := limiter.SetSingleDailyPointLimitConfig(config.LimitNum, ctx.User)
if err != nil {
log.Error("Set single daily point limit config error. %v", err)
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}
ctx.JSON(http.StatusOK, response.Success())
}

func DeletePointLimitConfig(ctx *context.Context) {
id := ctx.QueryInt64("id")
err := limiter.DeleteLimitConfig(id, ctx.User)
if err != nil {
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}
ctx.JSON(http.StatusOK, response.Success())
}
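These handlers back the admin endpoints registered later in this diff under /operation/reward/point/limiter. SetSingleDailyPointLimitConfig reads only LimitNum from the bound models.LimitConfigVO and fills in the scope, refresh rate and limit type server-side. A hedged sketch of an admin call, assuming the endpoint accepts a JSON body (the binding middleware may also expect form encoding) and using a hypothetical host:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Only LimitNum is read by SetSingleDailyPointLimitConfig; other fields are derived.
	body, _ := json.Marshal(map[string]interface{}{"LimitNum": 500})

	// Hypothetical host and auth: a real call needs an admin session or token.
	resp, err := http.Post(
		"https://example.com/operation/reward/point/limiter/single-daily",
		"application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}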

+170 -0 routers/reward/point/point.go

@@ -0,0 +1,170 @@
package point

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/reward"
"code.gitea.io/gitea/services/reward/point/account"
"code.gitea.io/gitea/services/task"
"errors"
"net/http"
)

const tplPoint base.TplName = "reward/point"
const tplPointRule base.TplName = "reward/point/rule"

type AccountResponse struct {
Balance int64
TotalEarned int64
TotalConsumed int64
}

func GetPointAccount(ctx *context.Context) {
userId := ctx.User.ID
a, err := account.GetAccount(userId)
if err != nil {
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}
res := &AccountResponse{
Balance: a.Balance,
TotalEarned: a.TotalEarned,
TotalConsumed: a.TotalConsumed,
}
ctx.JSON(http.StatusOK, response.SuccessWithData(res))
}

func GetPointRecordList(ctx *context.Context) {
operateType := ctx.Query("Operate")
page := ctx.QueryInt("Page")
var orderBy models.RewardOperateOrderBy
switch ctx.Query("sort") {
default:
orderBy = models.RewardOrderByIDDesc
}
t := models.GetRewardOperateTypeInstance(operateType)
if t == "" {
ctx.JSON(http.StatusOK, response.ServerError("param error"))
return
}

r, err := reward.GetRewardRecordList(&models.RewardRecordListOpts{
ListOptions: models.ListOptions{PageSize: 10, Page: page},
UserId: ctx.User.ID,
OperateType: t,
RewardType: models.RewardTypePoint,
OrderBy: orderBy,
IsAdmin: false,
UserName: ctx.User.Name,
})
if err != nil {
log.Error("GetPointRecordList error.%v", err)
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}

ctx.JSON(http.StatusOK, response.SuccessWithData(r))
return
}

func OperatePointAccountBalance(ctx *context.Context, req models.AdminRewardOperateReq) {
req.RewardType = models.RewardTypePoint
if req.OperateType.Name() == "" || req.Remark == "" {
ctx.JSON(http.StatusOK, "param error")
return
}
err := reward.AdminBalanceOperate(req, ctx.User)
if err != nil {
log.Error("OperatePointAccountBalance error.%v", err)
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}
ctx.JSON(http.StatusOK, response.Success())
}

func GetPointPage(ctx *context.Context) {
ctx.HTML(200, tplPoint)
}

func GetRulePage(ctx *context.Context) {
ctx.HTML(200, tplPointRule)
}

func GetRuleConfig(ctx *context.Context) {
r, err := task.GetPointRule()
if err != nil {
log.Error("GetRuleConfig error.%v", err)
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}

ctx.JSON(http.StatusOK, response.SuccessWithData(r))
}

func GetAdminRewardList(ctx *context.Context) {
opts, err := buildAdminRewardRecordListOpts(ctx)
if err != nil {
log.Error("buildAdminRewardRecordListOpts error.%v", err)
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}

username := ctx.Query("userName")
if username != "" {
user, err := models.GetUserByName(username)
if err != nil {
log.Error("GetUserByName error.%v", err)
if models.IsErrUserNotExist(err) {
ctx.JSON(http.StatusOK, response.ServerError("user not exist"))
} else {
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
}
return
}
opts.UserId = user.ID
opts.UserName = user.Name
}

r, err := reward.GetRewardRecordList(opts)
if err != nil {
log.Error("GetRewardRecordList error.%v", err)
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}

ctx.JSON(http.StatusOK, response.SuccessWithData(r))
}

func buildAdminRewardRecordListOpts(ctx *context.Context) (*models.RewardRecordListOpts, error) {
operateType := ctx.Query("operate")
sourceType := ctx.Query("source")
taskType := ctx.Query("action")
serialNo := ctx.Query("serialNo")
status := ctx.Query("status")

page := ctx.QueryInt("page")
var orderBy models.RewardOperateOrderBy
switch ctx.Query("sort") {
default:
orderBy = models.RewardOrderByIDDesc
}
t := models.GetRewardOperateTypeInstance(operateType)
if t == "" {
return nil, errors.New("param error")
}
opts := &models.RewardRecordListOpts{
ListOptions: models.ListOptions{PageSize: 10, Page: page},
OperateType: t,
RewardType: models.RewardTypePoint,
OrderBy: orderBy,
SourceType: sourceType,
TaskType: taskType,
SerialNo: serialNo,
IsAdmin: true,
Status: status,
}
return opts, nil
}

+41 -10 routers/routes/routes.go

@@ -6,6 +6,9 @@ package routes


import (
"bytes"
+"code.gitea.io/gitea/routers/reward/point"
+"code.gitea.io/gitea/routers/task"
+"code.gitea.io/gitea/services/reward"
"encoding/gob"
"net/http"
"path"
@@ -328,6 +331,8 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/", routers.Home) m.Get("/", routers.Home)
m.Get("/dashboard", routers.Dashboard) m.Get("/dashboard", routers.Dashboard)
go routers.SocketManager.Run() go routers.SocketManager.Run()
go task.RunTask()
go reward.AcceptStatusChangeAction()
m.Get("/action/notification", routers.ActionNotification) m.Get("/action/notification", routers.ActionNotification)
m.Get("/recommend/home", routers.RecommendHomeInfo) m.Get("/recommend/home", routers.RecommendHomeInfo)
m.Get("/dashboard/invitation", routers.GetMapInfo) m.Get("/dashboard/invitation", routers.GetMapInfo)
@@ -643,6 +648,20 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/operation", func() { m.Group("/operation", func() {
m.Get("/config/recommend_org", operation.Organizations) m.Get("/config/recommend_org", operation.Organizations)
m.Post("/config/recommend_org", bindIgnErr(operation.OrgInfos{}), operation.UpdateRecommendOrganizations) m.Post("/config/recommend_org", bindIgnErr(operation.OrgInfos{}), operation.UpdateRecommendOrganizations)

m.Group("/reward/point", func() {
m.Combo("/limiter/single-daily").Get(point.GetSingleDailyPointLimitConfig).Post(bindIgnErr(models.LimitConfigVO{}), point.SetSingleDailyPointLimitConfig)
m.Post("/limiter/delete", point.DeletePointLimitConfig)
m.Get("/account/search", point.SearchPointAccount)
m.Post("/account/operate", binding.Bind(models.AdminRewardOperateReq{}), point.OperatePointAccountBalance)
m.Get("/list", point.GetAdminRewardList)
})

m.Group("/task/config", func() {
m.Get("/list", task.GetTaskConfigList)
m.Post("/add/batch", bindIgnErr(models.BatchLimitConfigVO{}), task.BatchAddTaskConfig)
m.Post("/^:action(new|edit|del)$", bindIgnErr(models.TaskConfigWithLimit{}), task.OperateTaskConfig)
})
}, operationReq) }, operationReq)
// ***** END: Operation ***** // ***** END: Operation *****


@@ -1116,7 +1135,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/models", reqRepoCloudBrainReader, repo.CloudBrainShowModels) m.Get("/models", reqRepoCloudBrainReader, repo.CloudBrainShowModels)
m.Get("/download_model", cloudbrain.AdminOrJobCreaterRight, repo.CloudBrainDownloadModel) m.Get("/download_model", cloudbrain.AdminOrJobCreaterRight, repo.CloudBrainDownloadModel)
}) })
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, repo.CloudBrainNew)
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.CloudBrainNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainCreate) m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainCreate)


m.Group("/benchmark", func() { m.Group("/benchmark", func() {
@@ -1127,7 +1146,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.BenchmarkDel) m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.BenchmarkDel)
m.Get("/rate", reqRepoCloudBrainReader, repo.GetRate) m.Get("/rate", reqRepoCloudBrainReader, repo.GetRate)
}) })
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, repo.CloudBrainBenchmarkNew)
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.CloudBrainBenchmarkNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainBenchmarkCreate) m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainBenchmarkCreate)
m.Get("/get_child_types", repo.GetChildTypes) m.Get("/get_child_types", repo.GetChildTypes)
}) })
@@ -1140,8 +1159,10 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/download_model", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.CloudBrainDownloadModel) m.Get("/download_model", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.CloudBrainDownloadModel)
//m.Get("/get_log", cloudbrain.AdminOrJobCreaterRightForTrain, repo.GetLogFromModelDir) //m.Get("/get_log", cloudbrain.AdminOrJobCreaterRightForTrain, repo.GetLogFromModelDir)
//m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateModelArtsTrainJobForm{}), repo.TrainJobCreateVersion) //m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateModelArtsTrainJobForm{}), repo.TrainJobCreateVersion)
m.Get("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, repo.CloudBrainTrainJobVersionNew)
m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainTrainJobVersionCreate)
}) })
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, repo.CloudBrainTrainJobNew)
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.CloudBrainTrainJobNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainCreate) m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainCreate)
}) })
m.Group("/inference-job", func() { m.Group("/inference-job", func() {
@@ -1151,7 +1172,7 @@ func RegisterRoutes(m *macaron.Macaron) {


m.Get("/downloadall", repo.DownloadInferenceResultFile) m.Get("/downloadall", repo.DownloadInferenceResultFile)
}) })
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, repo.InferenceCloudBrainJobNew)
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.InferenceCloudBrainJobNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainInferencForm{}), repo.CloudBrainInferenceJobCreate) m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainInferencForm{}), repo.CloudBrainInferenceJobCreate)
}) })
}, context.RepoRef()) }, context.RepoRef())
@@ -1162,13 +1183,15 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.GrampusStopJob) m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.GrampusStopJob)
m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.GrampusTrainJobDel) m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.GrampusTrainJobDel)
m.Get("/model_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.ModelDownload) m.Get("/model_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.ModelDownload)
m.Get("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, repo.GrampusTrainJobVersionNew)
m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateGrampusTrainJobForm{}), repo.GrampusTrainJobVersionCreate)
}) })
m.Group("/gpu", func() { m.Group("/gpu", func() {
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, repo.GrampusTrainJobGPUNew)
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.GrampusTrainJobGPUNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateGrampusTrainJobForm{}), repo.GrampusTrainJobGpuCreate) m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateGrampusTrainJobForm{}), repo.GrampusTrainJobGpuCreate)
}) })
m.Group("/npu", func() { m.Group("/npu", func() {
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, repo.GrampusTrainJobNPUNew)
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.GrampusTrainJobNPUNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateGrampusTrainJobForm{}), repo.GrampusTrainJobNpuCreate) m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateGrampusTrainJobForm{}), repo.GrampusTrainJobNpuCreate)
}) })
}) })
@@ -1225,7 +1248,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.NotebookStop) m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.NotebookStop)
m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.NotebookDel) m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.NotebookDel)
}) })
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, repo.NotebookNew)
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.NotebookNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateModelArtsNotebookForm{}), repo.Notebook2Create) m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateModelArtsNotebookForm{}), repo.Notebook2Create)
}) })


@@ -1237,10 +1260,10 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.TrainJobDel) m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.TrainJobDel)
m.Get("/model_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.ModelDownload) m.Get("/model_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.ModelDownload)
m.Get("/download_log_file", cloudbrain.AdminOrJobCreaterRightForTrain, repo.TrainJobDownloadLogFile) m.Get("/download_log_file", cloudbrain.AdminOrJobCreaterRightForTrain, repo.TrainJobDownloadLogFile)
m.Get("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, repo.TrainJobNewVersion)
m.Get("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, context.PointAccount(), repo.TrainJobNewVersion)
m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateModelArtsTrainJobForm{}), repo.TrainJobCreateVersion) m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateModelArtsTrainJobForm{}), repo.TrainJobCreateVersion)
}) })
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, repo.TrainJobNew)
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.TrainJobNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateModelArtsTrainJobForm{}), repo.TrainJobCreate) m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateModelArtsTrainJobForm{}), repo.TrainJobCreate)


m.Get("/para-config-list", reqRepoCloudBrainReader, repo.TrainJobGetConfigList) m.Get("/para-config-list", reqRepoCloudBrainReader, repo.TrainJobGetConfigList)
@@ -1253,7 +1276,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/result_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.ResultDownload) m.Get("/result_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.ResultDownload)
m.Get("/downloadall", repo.DownloadMultiResultFile) m.Get("/downloadall", repo.DownloadMultiResultFile)
}) })
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, repo.InferenceJobNew)
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.InferenceJobNew)
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateModelArtsInferenceJobForm{}), repo.InferenceJobCreate) m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateModelArtsInferenceJobForm{}), repo.InferenceJobCreate)
}) })
}, context.RepoRef()) }, context.RepoRef())
@@ -1413,6 +1436,14 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/purge", user.NotificationPurgePost) m.Post("/purge", user.NotificationPurgePost)
}, reqSignIn) }, reqSignIn)


m.Group("/reward/point", func() {
m.Get("", point.GetPointPage)
m.Get("/rule", point.GetRulePage)
m.Get("/rule/config", point.GetRuleConfig)
m.Get("/account", point.GetPointAccount)
m.Get("/record/list", point.GetPointRecordList)
}, reqSignIn)

if setting.API.EnableSwagger { if setting.API.EnableSwagger {
m.Get("/swagger.v1.json", templates.JSONRenderer(), routers.SwaggerV1Json) m.Get("/swagger.v1.json", templates.JSONRenderer(), routers.SwaggerV1Json)
} }
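The signed-in routes added above expose the point account to end users. A hedged client sketch for GET /reward/point/account, assuming cookie-based session auth and a hypothetical host; the payload is the AccountResponse shown earlier (Balance, TotalEarned, TotalConsumed) wrapped by routers/response, whose exact JSON envelope is not part of this diff:

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	req, _ := http.NewRequest(http.MethodGet, "https://example.com/reward/point/account", nil)
	// Reuse a signed-in browser session; the cookie name and value here are placeholders.
	req.AddCookie(&http.Cookie{Name: "i_like_gitea", Value: "<session>"})

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	raw, _ := io.ReadAll(resp.Body)
	fmt.Println(string(raw)) // expect Balance, TotalEarned, TotalConsumed in the data
}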


+68 -0 routers/task/config.go

@@ -0,0 +1,68 @@
package task

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/task"
"errors"
"net/http"
)

func GetTaskConfigList(ctx *context.Context) {
page := ctx.QueryInt("Page")
status := ctx.QueryInt("Status")
action := ctx.Query("Action")
r, err := task.GetTaskConfigWithLimitList(models.GetTaskConfigOpts{
ListOptions: models.ListOptions{PageSize: 20, Page: page},
Status: status,
TaskType: action,
})
if err != nil {
log.Error("GetTaskConfigList error.%v", err)
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}
ctx.JSON(http.StatusOK, response.SuccessWithData(r))
}

func OperateTaskConfig(ctx *context.Context, config models.TaskConfigWithLimit) {
action := ctx.Params(":action")

var err error
switch action {
case "edit":
err = task.EditTaskConfig(config, ctx.User)
case "new":
err = task.AddTaskConfig(config, ctx.User)
case "del":
err = task.DelTaskConfig(config.ID, ctx.User)
default:
err = errors.New("action type error")
}

if err != nil {
log.Error("OperateTaskConfig error ,%v", err)
ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
return
}
ctx.JSON(http.StatusOK, response.Success())
}
func BatchAddTaskConfig(ctx *context.Context, list models.BatchLimitConfigVO) {
successCount := 0
failCount := 0
for _, config := range list.ConfigList {
err := task.AddTaskConfig(config, ctx.User)
if err != nil {
failCount++
} else {
successCount++
}
}
r := make(map[string]int, 2)
r["successCount"] = successCount
r["failCount"] = failCount
log.Debug("BatchAddTaskConfig success.result=%v", r)
ctx.JSON(http.StatusOK, response.SuccessWithData(r))
}
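BatchAddTaskConfig deliberately does not fail the whole batch; it adds each entry independently and reports a successCount/failCount map. A small sketch of decoding that result on the caller side; the outer envelope's field name ("data") is an assumption, since the routers/response package is not shown in this diff:

package main

import (
	"encoding/json"
	"fmt"
)

// batchAddResult mirrors the map built in BatchAddTaskConfig; the "data" wrapper is assumed.
type batchAddResult struct {
	Data struct {
		SuccessCount int `json:"successCount"`
		FailCount    int `json:"failCount"`
	} `json:"data"`
}

func main() {
	raw := []byte(`{"data":{"successCount":3,"failCount":1}}`) // illustrative response body
	var r batchAddResult
	if err := json.Unmarshal(raw, &r); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Printf("added %d configs, %d failed\n", r.Data.SuccessCount, r.Data.FailCount)
}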

+15 -0 routers/task/task.go

@@ -0,0 +1,15 @@
package task

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/services/task"
)

func RunTask() {
for {
select {
case action := <-models.ActionChan4Task:
task.Accomplish(action)
}
}
}
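RunTask is a single long-lived consumer: actions emitted by the notification layer are pushed onto models.ActionChan4Task and drained here, one task.Accomplish call per action. The same pattern with an explicit stop channel looks like the sketch below, using the same imports as task.go above; the stop channel and the *models.Action element type are assumptions for illustration, not part of this PR:

// Sketch only: RunTask above is this loop without the shutdown case.
func runTaskUntilStopped(actions <-chan *models.Action, stop <-chan struct{}) {
	for {
		select {
		case action := <-actions:
			task.Accomplish(action) // same handoff as RunTask
		case <-stop:
			return
		}
	}
}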

+2 -0 routers/user/setting/profile.go

@@ -6,6 +6,7 @@
package setting

import (
+"code.gitea.io/gitea/modules/notification"
"errors"
"fmt"
"io/ioutil"
@@ -179,6 +180,7 @@ func AvatarPost(ctx *context.Context, form auth.AvatarForm) {
if err := UpdateAvatarSetting(ctx, form, ctx.User); err != nil {
ctx.Flash.Error(err.Error())
} else {
+notification.NotifyChangeUserAvatar(ctx.User, form)
ctx.Flash.Success(ctx.Tr("settings.update_avatar_success"))
} }




+1 -1 services/phone/phone.go

@@ -46,7 +46,7 @@ func SendVerifyCode(conn redis.Conn, phoneNumber string) error {
if err != nil {
return err
}
-err = redis_client.Expire(conn, timesKey, getRemainSecondOfDay(time.Now()))
+err = redis_client.EXPIRE(conn, timesKey, getRemainSecondOfDay(time.Now()))
if err != nil {
return err
}


+50 -0 services/reward/admin_operate.go

@@ -0,0 +1,50 @@
package reward

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
)

func AdminBalanceOperate(req models.AdminRewardOperateReq, doer *models.User) error {
logId := util.UUID()
_, err := models.InsertRewardAdminLog(&models.RewardAdminLog{
LogId: logId,
Amount: req.Amount,
RewardType: req.RewardType.Name(),
TargetUserId: req.TargetUserId,
CreatorId: doer.ID,
CreatorName: doer.Name,
Remark: req.Remark,
Status: models.RewardAdminLogProcessing,
})
if err != nil {
log.Error("AdminBalanceOperate InsertRewardAdminLog error.%v", err)
return err
}

//reward
err = Operate(&models.RewardOperateContext{
SourceType: models.SourceTypeAdminOperate,
SourceId: logId,
Title: "管理员操作",
Reward: models.Reward{
Amount: req.Amount,
Type: req.RewardType,
},
TargetUserId: req.TargetUserId,
RequestId: logId,
OperateType: req.OperateType,
Remark: req.Remark,
RejectPolicy: models.JustReject,
PermittedNegative: true,
})

if err != nil {
log.Error("AdminBalanceOperate operate error.%v", err)
models.UpdateRewardAdminLogStatus(logId, models.RewardAdminLogProcessing, models.RewardAdminLogFailed)
return err
}
models.UpdateRewardAdminLogStatus(logId, models.RewardAdminLogProcessing, models.RewardAdminLogSuccess)
return nil
}
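AdminBalanceOperate is the service behind the admin route /operation/reward/point/account/operate: it writes an admin log row, then funnels the change through the generic Operate pipeline with JustReject and PermittedNegative set. A minimal direct-call sketch, limited to the AdminRewardOperateReq fields this file actually reads (any other fields on that struct are not shown here):

// Sketch: credit 100 points to a user from server-side code (not part of this PR).
func grantPoints(doer *models.User, targetUserId int64) error {
	req := models.AdminRewardOperateReq{
		TargetUserId: targetUserId,
		Amount:       100,
		RewardType:   models.RewardTypePoint,
		OperateType:  models.OperateTypeIncrease,
		Remark:       "manual adjustment",
	}
	return AdminBalanceOperate(req, doer)
}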

+145 -0 services/reward/cloudbrain_deduct.go

@@ -0,0 +1,145 @@
package reward

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"fmt"
"time"
)

var (
ResourceSpecs *models.ResourceSpecs
TrainResourceSpecs *models.ResourceSpecs
)

const RUN_CLOUDBRAIN_TASK_TITTLE = "运行云脑任务"

func AcceptStatusChangeAction() {
for {
select {
case task := <-models.StatusChangeChan:
DeductPoint4Cloudbrain(*task, time.Now())
}
}
}

func StartAndGetCloudBrainPointDeductTask(task models.Cloudbrain) (*models.RewardPeriodicTask, error) {
sourceId := getCloudBrainPointTaskSourceId(task)
r, err := GetPeriodicTask(models.SourceTypeRunCloudbrainTask, sourceId, sourceId, models.OperateTypeDecrease)
if err != nil {
return nil, err
}

if r != nil {
log.Debug("PeriodicTask is already exist.cloudbrain.ID = %d", task.ID)
return r, nil
}

if !setting.CloudBrainPaySwitch {
log.Debug("CloudBrainPaySwitch is off")
return nil, nil
}

unitPrice, err := models.GetCloudbrainTaskUnitPrice(task)
if err != nil {
return nil, err
}
if unitPrice == 0 {
log.Debug("Finish startAndGetCloudBrainPointDeductTask, UnitPrice = 0 task.ID=%d", task.ID)
return nil, nil
}

return StartPeriodicTask(&models.StartPeriodicTaskOpts{
SourceType: models.SourceTypeRunCloudbrainTask,
SourceId: getCloudBrainPointTaskSourceId(task),
TargetUserId: task.UserID,
RequestId: getCloudBrainPointTaskSourceId(task),
OperateType: models.OperateTypeDecrease,
Delay: setting.CloudBrainPayDelay,
Interval: setting.CloudBrainPayInterval,
UnitAmount: unitPrice,
RewardType: models.RewardTypePoint,
StartTime: time.Unix(int64(task.StartTime), 0),
Title: RUN_CLOUDBRAIN_TASK_TITTLE,
})
}

func StopCloudBrainPointDeductTask(task models.Cloudbrain) {
StopPeriodicTask(models.SourceTypeRunCloudbrainTask, getCloudBrainPointTaskSourceId(task), models.OperateTypeDecrease)
}

func getCloudBrainPointTaskSourceId(task models.Cloudbrain) string {
return fmt.Sprint(task.ID)
}

var firstTimeFlag = true

func StartCloudbrainPointDeductTask() {
defer func() {
if err := recover(); err != nil {
combinedErr := fmt.Errorf("%s\n%s", err, log.Stack(2))
log.Error("PANIC:%v", combinedErr)
}
}()
log.Debug("try to run CloudbrainPointDeductTask")
end := time.Now()
start := end.Add(-1 * setting.DeductTaskRange)
if firstTimeFlag {
//When it is executed for the first time, it needs to process the tasks of the last 3 hours.
//This is done to prevent the application from hanging for a long time
start = end.Add(-1 * setting.DeductTaskRangeForFirst)
firstTimeFlag = false
}
taskList, err := models.GetStartedCloudbrainTaskByUpdatedUnix(start, end)
if err != nil {
log.Error("GetStartedCloudbrainTaskByUpdatedUnix error. %v", err)
return
}
if taskList == nil || len(taskList) == 0 {
log.Debug("No cloudbrain task need handled")
return
}
for _, t := range taskList {
DeductPoint4Cloudbrain(t, end)
}
log.Debug("CloudbrainPointDeductTask completed")
}

func DeductPoint4Cloudbrain(t models.Cloudbrain, now time.Time) error {
defer func() {
if err := recover(); err != nil {
combinedErr := fmt.Errorf("%s\n%s", err, log.Stack(2))
log.Error("PANIC:%v", combinedErr)
}
}()
log.Debug("start to deduct point for cloudbrain[%d]", t.ID)
if t.StartTime == 0 {
log.Debug("cloudbrain[%d] task not start", t.ID)
return nil
}

task, err := StartAndGetCloudBrainPointDeductTask(t)
if err != nil {
log.Error("run cloudbrain point deduct task error,err=%v", err)
return err
}
if task == nil {
log.Debug("cloudbrain[%d] deduct task is nil")
return nil
}
if task.Status == models.PeriodicTaskStatusFinished {
log.Info("Periodic task is finished")
return nil
}

if t.EndTime > 0 {
endTime := time.Unix(int64(t.EndTime), 0)
RunRewardTask(*task, endTime)
models.StopPeriodicTask(task.ID, task.OperateSerialNo, endTime)
} else {
RunRewardTask(*task, now)
}
log.Debug("finished deduct point for cloudbrain[%d]", t.ID)
return nil
}
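StartCloudbrainPointDeductTask and DeductPoint4Cloudbrain are meant to be driven periodically; the scheduler itself lives outside this file. A minimal ticker-based sketch of that wiring; the one-minute interval and the stop channel are illustrative values, not settings taken from this PR:

// Sketch only: run the deduction sweep on a fixed interval until stop is closed.
func scheduleCloudbrainPointDeduct(stop <-chan struct{}) {
	ticker := time.NewTicker(time.Minute)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			StartCloudbrainPointDeductTask()
		case <-stop:
			return
		}
	}
}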

+100 -0 services/reward/limiter/config.go

@@ -0,0 +1,100 @@
package limiter

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_client"
"code.gitea.io/gitea/modules/redis/redis_key"
)

func GetSingleDailyPointLimitConfig() (*models.LimitConfigVO, error) {
r, err := GetLimitConfigList(models.LimitConfigQueryOpts{
RefreshRate: models.PeriodDaily,
Scope: models.LimitScopeSingleUser,
LimitCode: models.SourceTypeAccomplishTask.Name(),
LimitType: models.LimitTypeRewardPoint,
})
if err != nil {
return nil, err
}
if r == nil || len(r) == 0 {
return nil, nil
}
return r[0], nil
}

func SetSingleDailyPointLimitConfig(limitNum int64, doer *models.User) error {
l := &models.LimitConfigVO{
RefreshRate: models.PeriodDaily,
Scope: models.LimitScopeSingleUser.Name(),
LimitCode: models.SourceTypeAccomplishTask.Name(),
LimitType: models.LimitTypeRewardPoint.Name(),
LimitNum: limitNum,
}
return AddLimitConfig(l, doer)
}

func GetLimitConfigList(opts models.LimitConfigQueryOpts) ([]*models.LimitConfigVO, error) {
r, err := GetLimitersByLimitType(opts.LimitType)
if err != nil {
log.Error("GetLimitConfigList error when getting limiters by limit type.err=%v", err)
return nil, err
}
result := make([]*models.LimitConfigVO, 0)
for _, v := range r {
if opts.LimitCode != "" && opts.LimitCode != v.LimitCode {
continue
}
if opts.Scope != "" && opts.Scope.Name() != v.Scope {
continue
}
if opts.RefreshRate != "" && opts.RefreshRate != v.RefreshRate {
continue
}
if opts.LimitType != "" && opts.LimitType.Name() != v.LimitType {
continue
}
result = append(result, v.ToLimitConfigVO())
}
return result, nil
}
func GetLimitConfigById(id int64) (*models.LimitConfig, error) {
return models.GetLimitConfigById(id)
}

func AddLimitConfig(config *models.LimitConfigVO, doer *models.User) error {
r := &models.LimitConfig{
Title: config.Title,
RefreshRate: config.RefreshRate,
Scope: config.Scope,
LimitNum: config.LimitNum,
LimitCode: config.LimitCode,
LimitType: config.LimitType,
CreatorId: doer.ID,
CreatorName: doer.Name,
}
err := models.AddLimitConfig(r)

if err != nil {
log.Error("add limit config error,config:%v err:%v", config, err)
return err
}
redis_client.Del(redis_key.LimitConfig(config.LimitType))
return nil
}

func DeleteLimitConfig(id int64, doer *models.User) error {
config, err := GetLimitConfigById(id)
if err != nil {
log.Error("GetLimitConfigById err,e=%v", err)
return err
}
err = models.DeleteLimitConfig(*config, doer.ID, doer.Name)

if err != nil {
log.Error("add limit config error,config:%v err:%v", config, err)
return err
}
redis_client.Del(redis_key.LimitConfig(config.LimitType))
return nil
}

+258 -0 services/reward/limiter/limiter.go

@@ -0,0 +1,258 @@
package limiter

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_client"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/services/task/period"
"encoding/json"
"errors"
"fmt"
"time"
)

type limiterRunner struct {
limiters []models.LimitConfig
index int
userId int64
amount int64
limitCode string
limitType models.LimitType
rejectPolicy models.LimiterRejectPolicy
resultMap map[int]limitResult
minRealAmount int64
}

type limitResult struct {
isLoss bool
planAmount int64
realAmount int64
}

func newLimitResult(isLoss bool, planAmount int64, realAmount int64) limitResult {
return limitResult{
isLoss: isLoss,
planAmount: planAmount,
realAmount: realAmount,
}
}

func newLimiterRunner(limitCode string, limitType models.LimitType, userId, amount int64, policy models.LimiterRejectPolicy) *limiterRunner {
return &limiterRunner{
userId: userId,
amount: amount,
limitCode: limitCode,
limitType: limitType,
index: 0,
rejectPolicy: policy,
resultMap: make(map[int]limitResult, 0),
}
}

//Run run all limiters
//return real used amount(when choose the FillUp reject policy, amount may only be partially used)
func (l *limiterRunner) Run() error {
if err := l.LoadLimiters(); err != nil {
return err
}
l.minRealAmount = l.amount
for l.index < len(l.limiters) {
err := l.limit(l.limiters[l.index])
if err != nil {
log.Info("limiter check failed,%v", err)
l.Rollback()
return err
}
result := l.resultMap[l.index]
if result.isLoss {
//find the minimum real amount
if l.minRealAmount > result.realAmount {
l.minRealAmount = result.realAmount
}
}
l.index += 1
}

//post process
l.PostProcess()
return nil
}

//Rollback rollback the usedNum from limiters[0] to limiters[index]
func (l *limiterRunner) Rollback() error {
for i := l.index - 1; i >= 0; i-- {
l.rollback(l.limiters[i], l.resultMap[i])
}
return nil
}

func (l *limiterRunner) rollback(r models.LimitConfig, result limitResult) error {
p, err := period.GetPeriod(r.RefreshRate)
if err != nil {
return err
}
redisKey := redis_key.LimitCount(l.userId, r.LimitCode, r.LimitType, r.Scope, p)
redis_client.IncrBy(redisKey, -1*result.realAmount)
return nil
}

//PostProcess process loss,if realAmount < planAmount
func (l *limiterRunner) PostProcess() error {
for i := l.index - 1; i >= 0; i-- {
l.postProcess(l.limiters[i], l.resultMap[i])
}
return nil
}

func (l *limiterRunner) postProcess(r models.LimitConfig, result limitResult) error {
if result.realAmount == l.minRealAmount {
return nil
}
p, err := period.GetPeriod(r.RefreshRate)
if err != nil {
return err
}
diff := result.realAmount - l.minRealAmount
redisKey := redis_key.LimitCount(l.userId, r.LimitCode, r.LimitType, r.Scope, p)
redis_client.IncrBy(redisKey, -1*diff)
return nil
}

func (l *limiterRunner) limit(r models.LimitConfig) error {
p, err := period.GetPeriod(r.RefreshRate)
if err != nil {
return err
}
redisKey := redis_key.LimitCount(l.userId, r.LimitCode, r.LimitType, r.Scope, p)
usedNum, err := redis_client.IncrBy(redisKey, l.amount)
if err != nil {
return err
}
//if usedNum equals amount,it is the first operation in period or redis cache deleted
//count in database to distinguish the two cases
if usedNum == l.amount {
n, err := l.countInPeriod(r, p)
if err != nil {
return err
}
if n > 0 {
//means redis cache deleted,incr the cache with real value
usedNum, err = redis_client.IncrBy(redisKey, n)
}
if p != nil {
redis_client.Expire(redisKey, p.LeftTime)
} else {
//add default expire time if no period set
redis_client.Expire(redisKey, 24*time.Hour)
}
}
if usedNum > r.LimitNum {
if usedNum-r.LimitNum >= l.amount {
redis_client.IncrBy(redisKey, -1*l.amount)
return errors.New(fmt.Sprintf("over limit,congfigId=%d", r.ID))
}
switch l.rejectPolicy {
case models.FillUp:
exceed := usedNum - r.LimitNum
realAmount := l.amount - exceed
redis_client.IncrBy(redisKey, -1*exceed)
l.resultMap[l.index] = newLimitResult(true, l.amount, realAmount)
return nil
case models.JustReject:
redis_client.IncrBy(redisKey, -1*l.amount)
return errors.New(fmt.Sprintf("over limit,congfigId=%d", r.ID))
case models.PermittedOnce:
l.resultMap[l.index] = newLimitResult(false, l.amount, l.amount)
return nil
}

}
l.resultMap[l.index] = newLimitResult(false, l.amount, l.amount)
return nil
}

func (l *limiterRunner) LoadLimiters() error {
limiters, err := GetLimiters(l.limitCode, l.limitType)
if err != nil {
return err
}
if limiters != nil {
l.limiters = limiters
}
return nil
}

func (l *limiterRunner) countInPeriod(r models.LimitConfig, p *models.PeriodResult) (int64, error) {
switch r.LimitType {
case models.LimitTypeTask.Name():
return models.CountTaskAccomplishLogInTaskPeriod(r.LimitCode, l.userId, p)
case models.LimitTypeRewardPoint.Name():
return models.SumRewardAmountInTaskPeriod(models.RewardTypePoint.Name(), r.LimitCode, l.userId, p)
default:
return 0, nil

}
}

func CheckLimit(limitCode string, limitType models.LimitType, userId, amount int64, rejectPolicy models.LimiterRejectPolicy) (int64, error) {
if rejectPolicy == "" {
rejectPolicy = models.JustReject
}
r := newLimiterRunner(limitCode, limitType, userId, amount, rejectPolicy)
err := r.Run()
if err != nil {
return 0, err
}
return r.minRealAmount, nil
}

func GetLimiters(limitCode string, limitType models.LimitType) ([]models.LimitConfig, error) {
limiters, err := GetLimitersByLimitType(limitType)
if err != nil {
return nil, err
}
result := make([]models.LimitConfig, 0)
for i, v := range limiters {
if v.LimitCode == "" || v.LimitCode == limitCode {
result = append(result, limiters[i])
}
}
return result, nil
}

func GetLimitersByLimitType(limitType models.LimitType) ([]models.LimitConfig, error) {
redisKey := redis_key.LimitConfig(limitType.Name())
val, _ := redis_client.Get(redisKey)
if val != "" {
if val == redis_key.EMPTY_REDIS_VAL {
return nil, nil
}
limiters := make([]models.LimitConfig, 0)
json.Unmarshal([]byte(val), &limiters)
return limiters, nil
}
limiters, err := models.GetLimitConfigByLimitType(limitType)
if err != nil {
if models.IsErrRecordNotExist(err) {
redis_client.Setex(redisKey, redis_key.EMPTY_REDIS_VAL, 5*time.Second)
return nil, nil
}
return nil, err
}
jsonStr, _ := json.Marshal(limiters)
redis_client.Setex(redisKey, string(jsonStr), 30*24*time.Hour)

return limiters, nil
}

func GetLimitersByRelatedIdWithDeleted(limitType models.LimitType) ([]models.LimitConfig, error) {
limiters, err := models.GetLimitersByRelatedIdWithDeleted(limitType)
if err != nil {
if models.IsErrRecordNotExist(err) {
return nil, nil
}
return nil, err
}
return limiters, nil
}
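CheckLimit is the public entry point: it loads every matching LimitConfig, applies them in order against Redis counters, and returns the amount actually granted, which under the FillUp policy may be smaller than the request. A usage sketch mirroring how the point limiter is configured above (the limit code and numbers are illustrative):

// Sketch: reserve up to 50 daily reward points for a user; with models.FillUp
// the granted amount is trimmed to the remaining quota instead of being rejected.
func reserveDailyPoints(userId int64) (int64, error) {
	granted, err := CheckLimit(
		models.SourceTypeAccomplishTask.Name(), // limit code, as in config.go above
		models.LimitTypeRewardPoint,
		userId,
		50,
		models.FillUp,
	)
	if err != nil {
		return 0, err // over the limit and nothing could be granted
	}
	return granted, nil
}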

+54 -0 services/reward/notify.go

@@ -0,0 +1,54 @@
package reward

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_client"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/timeutil"
"encoding/json"
"fmt"
"time"
)

func NotifyRewardOperation(userId, amount int64, sourceType models.SourceType, rewardType models.RewardType, operateType models.RewardOperateType) {
switch sourceType {
case models.SourceTypeRunCloudbrainTask:
return
}
data := &models.UserRewardOperationRedis{
UserId: userId,
Amount: amount,
RewardType: rewardType,
OperateType: operateType,
}
b, _ := json.Marshal(data)
redis_client.ZAdd(redis_key.RewardOperateNotification(), string(b), float64(time.Now().Unix()))
}

func GetRewardOperation(since, until timeutil.TimeStamp) []models.UserRewardOperation {
list, err := redis_client.ZRangeByScore(redis_key.RewardOperateNotification(), float64(since), float64(until))
if err != nil {
log.Error("GetRewardOperation ZRangeByScore error. %v", err)
return nil
}
if len(list) == 0 {
log.Debug("GetRewardOperation list length = 0")
return nil
}
r := make([]models.UserRewardOperation, 0, len(list))
for _, v := range list {
t := models.UserRewardOperationRedis{}
json.Unmarshal([]byte(v), &t)
r = append(r, models.UserRewardOperation{
UserId: t.UserId,
Msg: v,
})
}
redis_client.ZRemRangeByScore(redis_key.RewardOperateNotification(), float64(since), float64(until))
return r
}

func GetRewardOperateMsg(u models.UserRewardOperationRedis) string {
return u.OperateType.Show() + fmt.Sprint(u.Amount) + u.RewardType.Show()
}

+278 -0 services/reward/operator.go

@@ -0,0 +1,278 @@
package reward

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/services/reward/point"
"errors"
"fmt"
"time"
)

var RewardOperatorMap = map[string]RewardOperator{
fmt.Sprint(models.RewardTypePoint): new(point.PointOperator),
}

type RewardOperator interface {
IsLimited(ctx *models.RewardOperateContext) error
Operate(ctx *models.RewardOperateContext) error
}

func Operate(ctx *models.RewardOperateContext) error {
defer func() {
if err := recover(); err != nil {
combinedErr := fmt.Errorf("%s\n%s", err, log.Stack(2))
log.Error("PANIC:%v", combinedErr)
}
}()
if !checkRewardOperationParam(ctx) {
log.Error("send reward error,param incorrect")
return errors.New("param incorrect")
}
//add lock
var rewardLock = redis_lock.NewDistributeLock(redis_key.RewardOperateLock(ctx.RequestId, ctx.SourceType.Name(), ctx.OperateType.Name()))
isOk, err := rewardLock.Lock(3 * time.Second)
if err != nil {
return err
}
if !isOk {
log.Info("duplicated reward request,targetUserId=%d requestId=%s", ctx.TargetUserId, ctx.RequestId)
return nil
}
defer rewardLock.UnLock()

//is handled before?
isHandled, err := isHandled(ctx.SourceType.Name(), ctx.RequestId, ctx.OperateType.Name())
if err != nil {
log.Error("reward is handled error,%v", err)
return err
}
if isHandled {
log.Info("reward has been handled,ctx=%+v", ctx)
return nil
}

//get operator
operator := GetOperator(ctx.Reward.Type)
if operator == nil {
log.Error("operator of reward type is not exist,ctx=%v", ctx)
return errors.New("operator of reward type is not exist")
}

if ctx.OperateType == models.OperateTypeIncrease {
//is limited?
if err := operator.IsLimited(ctx); err != nil {
log.Info("operator IsLimited, err=%v", err)
return err
}
}

//new reward operate record
recordId, err := initRewardOperateRecord(ctx)
if err != nil {
log.Error("initRewardOperateRecord error,err=%v", err)
return err
}

ctx.SourceId = recordId

//operate
if err := operator.Operate(ctx); err != nil {
log.Error("operator Operate error,err=%v", err)
UpdateRewardRecordToFinalStatus(ctx.SourceType.Name(), ctx.RequestId, models.OperateStatusFailed)
return err
}

UpdateRewardRecordToFinalStatus(ctx.SourceType.Name(), ctx.RequestId, models.OperateStatusSucceeded)
NotifyRewardOperation(ctx.TargetUserId, ctx.Reward.Amount, ctx.SourceType, ctx.Reward.Type, ctx.OperateType)
return nil
}

func checkRewardOperationParam(ctx *models.RewardOperateContext) bool {
if ctx.Reward.Type == "" {
return false
}
return true
}

func GetOperator(rewardType models.RewardType) RewardOperator {
return RewardOperatorMap[rewardType.Name()]
}

func isHandled(sourceType string, requestId string, operateType string) (bool, error) {
_, err := models.GetPointOperateRecordBySourceTypeAndRequestId(sourceType, requestId, operateType)
if err != nil {
log.Error("operator isHandled error. %v", err)
if models.IsErrRecordNotExist(err) {
return false, nil
}
log.Error("GetPointOperateRecordBySourceTypeAndRequestId ZRangeByScore error. %v", err)
return false, err
}
return true, nil

}

func initRewardOperateRecord(ctx *models.RewardOperateContext) (string, error) {
sn, err := generateOperateSerialNo()
if err != nil {
log.Error("generateOperateSerialNo error. %v", err)
return "", err
}
record := &models.RewardOperateRecord{
UserId: ctx.TargetUserId,
Amount: ctx.Reward.Amount,
LossAmount: ctx.LossAmount,
RewardType: ctx.Reward.Type.Name(),
SourceType: ctx.SourceType.Name(),
SourceId: ctx.SourceId,
SourceTemplateId: ctx.SourceTemplateId,
RequestId: ctx.RequestId,
OperateType: ctx.OperateType.Name(),
Status: models.OperateStatusOperating,
Remark: ctx.Remark,
Title: ctx.Title,
SerialNo: sn,
}
_, err = models.InsertRewardOperateRecord(record)
if err != nil {
log.Error("InsertRewardOperateRecord error. %v", err)
return "", err
}
return record.SerialNo, nil
}

func createPeriodicRewardOperateRecord(ctx *models.StartPeriodicTaskOpts) (string, error) {
sn, err := generateOperateSerialNo()
if err != nil {
log.Error("createPeriodic generateOperateSerialNo error. %v", err)
return "", err
}
record := &models.RewardOperateRecord{
UserId: ctx.TargetUserId,
Amount: 0,
RewardType: ctx.RewardType.Name(),
SourceType: ctx.SourceType.Name(),
SourceId: ctx.SourceId,
RequestId: ctx.RequestId,
OperateType: ctx.OperateType.Name(),
Status: models.OperateStatusOperating,
Remark: ctx.Remark,
Title: ctx.Title,
SerialNo: sn,
}
_, err = models.InsertRewardOperateRecord(record)
if err != nil {
log.Error("createPeriodic InsertRewardOperateRecord error. %v", err)
return "", err
}
return record.SerialNo, nil
}

func UpdateRewardRecordToFinalStatus(sourceType, requestId, newStatus string) error {
_, err := models.UpdateRewardRecordToFinalStatus(sourceType, requestId, newStatus)
if err != nil {
log.Error("UpdateRewardRecord UpdateRewardRecordToFinalStatus error. %v", err)
return err
}
return nil
}

func GetPeriodicTask(sourceType models.SourceType, sourceId, requestId string, operateType models.RewardOperateType) (*models.RewardPeriodicTask, error) {
_, err := models.GetPointOperateRecordBySourceTypeAndRequestId(sourceType.Name(), requestId, operateType.Name())
if err == nil {
task, err := models.GetPeriodicTaskBySourceIdAndType(sourceType, sourceId, operateType)
if err != nil {
log.Error("GetPeriodicTaskBySourceIdAndType error,%v", err)
return nil, err
}
return task, nil
}

if err != nil && !models.IsErrRecordNotExist(err) {
log.Error("GetPointOperateRecordBySourceTypeAndRequestId error,%v", err)
return nil, err
}
return nil, nil
}

func StartPeriodicTask(opts *models.StartPeriodicTaskOpts) (*models.RewardPeriodicTask, error) {
//add lock
var rewardLock = redis_lock.NewDistributeLock(redis_key.RewardOperateLock(opts.RequestId, opts.SourceType.Name(), opts.OperateType.Name()))
isOk, err := rewardLock.Lock(3 * time.Second)
if !isOk {
log.Info("duplicated operate request,targetUserId=%d requestId=%s", opts.TargetUserId, opts.RequestId)
return nil, nil
}
defer rewardLock.UnLock()

r, err := GetPeriodicTask(opts.SourceType, opts.SourceId, opts.RequestId, opts.OperateType)
if err != nil {
return nil, err
}

if r != nil {
return r, nil
}

//new reward operate record
recordId, err := createPeriodicRewardOperateRecord(opts)
if err != nil {
log.Error("StartAndGetPeriodicTask createPeriodicRewardOperateRecord error. %v", err)
return nil, err
}

if err = NewRewardPeriodicTask(recordId, opts); err != nil {
log.Error("StartAndGetPeriodicTask NewRewardPeriodicTask error. %v", err)
UpdateRewardRecordToFinalStatus(opts.SourceType.Name(), opts.RequestId, models.OperateStatusFailed)
return nil, err
}

task, err := models.GetPeriodicTaskBySourceIdAndType(opts.SourceType, opts.SourceId, opts.OperateType)
if err != nil {
log.Error("GetPeriodicTaskBySourceIdAndType error,%v", err)
return nil, err
}
return task, nil
}

func StopPeriodicTaskAsyn(sourceType models.SourceType, sourceId string, operateType models.RewardOperateType) {
go StopPeriodicTask(sourceType, sourceId, operateType)
}

func StopPeriodicTask(sourceType models.SourceType, sourceId string, operateType models.RewardOperateType) error {
defer func() {
if err := recover(); err != nil {
combinedErr := fmt.Errorf("%s\n%s", err, log.Stack(2))
log.Error("PANIC:%v", combinedErr)
}
}()
task, err := models.GetPeriodicTaskBySourceIdAndType(sourceType, sourceId, operateType)
if err != nil {
log.Error("StopPeriodicTask. GetPeriodicTaskBySourceIdAndType error. %v", err)
return err
}
if task == nil {
log.Info("Periodic task is not exist")
return nil
}
if task.Status == models.PeriodicTaskStatusFinished {
log.Info("Periodic task is finished")
return nil
}
now := time.Now()
RunRewardTask(*task, now)
return models.StopPeriodicTask(task.ID, task.OperateSerialNo, now)
}

func generateOperateSerialNo() (string, error) {
s, err := GetSerialNoByRedis()
if err != nil {
log.Error("generateOperateSerialNo error. %v", err)

return "", err
}
return s, nil
}

+ 131
- 0
services/reward/period_task.go View File

@@ -0,0 +1,131 @@
package reward

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/routers/repo"
"errors"
"fmt"
"time"
)

func NewRewardPeriodicTask(operateRecordId string, opts *models.StartPeriodicTaskOpts) error {
task := &models.RewardPeriodicTask{}
task.DelaySeconds = int64(opts.Delay.Seconds())
task.IntervalSeconds = int64(opts.Interval.Seconds())
task.Amount = int64(opts.UnitAmount)
task.OperateSerialNo = operateRecordId
task.Status = models.PeriodicTaskStatusRunning
task.NextExecuteTime = timeutil.TimeStamp(opts.StartTime.Add(opts.Delay).Unix())

_, err := models.InsertPeriodicTask(task)
return err
}

func StartRewardTask() {
defer func() {
if err := recover(); err != nil {
combinedErr := fmt.Errorf("%s\n%s", err, log.Stack(2))
log.Error("PANIC:%v", combinedErr)
}
}()
log.Debug("try to run reward tasks")
now := time.Now()
taskList, err := models.GetRunningRewardTask(now)
if err != nil {
log.Error("GetRunningRewardTask error. %v", err)
return
}
if len(taskList) == 0 {
log.Debug("no running reward task needs to be handled")
return
}
for _, t := range taskList {
RunRewardTask(t, now)
}
}

func RunRewardTask(t models.RewardPeriodicTask, now time.Time) error {
lock := redis_lock.NewDistributeLock(redis_key.RewardTaskRunningLock(t.ID))
isOk, _ := lock.LockWithWait(5*time.Second, 5*time.Second)
if !isOk {
log.Error("get RewardTaskRunningLock failed,t=%+v", t)
return errors.New("get RewardTaskRunningLock failed")
}
defer lock.UnLock()
record, err := models.GetPointOperateRecordBySerialNo(t.OperateSerialNo)
if err != nil {
log.Error("RunRewardTask. GetPointOperateRecordBySerialNo error. %v", err)
return errors.New("GetPointOperateRecordBySerialNo error")
}
if record.Status != models.OperateStatusOperating {
log.Info("RunRewardTask. operate record is finished,record=%+v", record)
return nil
}
n, _ := countExecuteTimes(t, now)
if n == 0 {
log.Info("countExecuteTimes result is 0")
return nil
}

//get operator
operator := GetOperator(models.GetRewardTypeInstance(record.RewardType))
if operator == nil {
log.Error("RunRewardTask. operator of reward type is not exist")
return errors.New("operator of reward type is not exist")
}
nextTime := timeutil.TimeStamp(int64(t.NextExecuteTime) + t.IntervalSeconds)
log.Debug("RunRewardTask n=%d", n)
for i := 1; int64(i) <= n; i++ {
log.Debug("operator.Operate i=%d n=%d", i, n)
err = operator.Operate(&models.RewardOperateContext{
SourceType: models.SourceTypeRunCloudbrainTask,
SourceId: t.OperateSerialNo,
Reward: models.Reward{
Amount: t.Amount,
Type: models.GetRewardTypeInstance(record.RewardType),
},
TargetUserId: record.UserId,
OperateType: models.GetRewardOperateTypeInstance(record.OperateType),
})
if err != nil {
log.Error("RunRewardTask.operator operate error.%v", err)
if models.IsErrInsufficientPointsBalance(err) {
task, err := models.GetCloudbrainByID(record.SourceId)
if err != nil {
log.Error("RunRewardTask GetCloudbrainByID error. %v", err)
return err
}
repo.StopJobs([]*models.Cloudbrain{task})
models.StopPeriodicTask(task.ID, t.OperateSerialNo, time.Now())
return nil
}
return nil
}
models.IncrRewardTaskSuccessCount(t, 1, nextTime)
nextTime = timeutil.TimeStamp(int64(nextTime) + t.IntervalSeconds)
}
return nil

}

func countExecuteTimes(t models.RewardPeriodicTask, now time.Time) (int64, timeutil.TimeStamp) {
interval := t.IntervalSeconds
nextTime := int64(t.NextExecuteTime)
if nextTime > now.Unix() {
return 0, 0
}
diff := now.Unix() - nextTime
var n int64
if diff%interval == 0 {
n = diff / interval
} else {
n = diff/interval + 1
}

newNextTime := timeutil.TimeStamp(nextTime + n*interval)
return n, newNextTime
}
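A worked example of the catch-up arithmetic above, as a standalone sketch (interval, timestamps and counter values are illustrative; the helper itself is unexported, so the formula is repeated inline):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Illustrative scenario: an hourly task whose next execution was due at
	// 10:00 UTC, but the cron runner only wakes up at 12:30 UTC.
	interval := int64(3600)
	nextTime := time.Date(2022, 9, 26, 10, 0, 0, 0, time.UTC).Unix()
	now := time.Date(2022, 9, 26, 12, 30, 0, 0, time.UTC).Unix()

	diff := now - nextTime // 9000 seconds behind schedule
	var n int64
	if diff%interval == 0 {
		n = diff / interval
	} else {
		n = diff/interval + 1 // round up, so the 10:00, 11:00 and 12:00 runs are all caught up
	}
	newNextTime := nextTime + n*interval

	fmt.Println(n)                               // 3
	fmt.Println(time.Unix(newNextTime, 0).UTC()) // 2022-09-26 13:00:00 +0000 UTC
}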

+ 150
- 0
services/reward/point/account/point_account.go View File

@@ -0,0 +1,150 @@
package account

import (
"bytes"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_client"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"encoding/json"
"strings"
"time"
)

func GetAccount(userId int64) (*models.PointAccount, error) {
redisKey := redis_key.PointAccountInfo(userId)
val, _ := redis_client.Get(redisKey)
if val != "" {
account := &models.PointAccount{}
json.Unmarshal([]byte(val), account)
return account, nil
}
account, err := models.GetAccountByUserId(userId)
if err != nil {
if models.IsErrRecordNotExist(err) {
a, err := InitAccount(userId)
if err != nil {
log.Error("InitAccount error,err=%v", err)
return nil, err
}
return a, nil
}
log.Error("GetAccountByUserId error,err=%v", err)
return nil, err
}
jsonStr, _ := json.Marshal(account)
redis_client.Setex(redisKey, string(jsonStr), 24*time.Hour)
return account, nil
}

func InitAccount(userId int64) (*models.PointAccount, error) {
lock := redis_lock.NewDistributeLock(redis_key.PointAccountInitLock(userId))
isOk, err := lock.LockWithWait(3*time.Second, 3*time.Second)
if err != nil {
log.Error("PointAccountInitLock error,err=%v", err)
return nil, err
}
if isOk {
defer lock.UnLock()
account, _ := models.GetAccountByUserId(userId)
if account == nil {
models.InsertAccount(&models.PointAccount{
Balance: 0,
TotalEarned: 0,
TotalConsumed: 0,
UserId: userId,
Status: models.PointAccountNormal,
Version: 0,
AccountCode: util.UUID(),
})
return models.GetAccountByUserId(userId)
}
return account, nil
}
return nil, nil

}

//IsPointBalanceEnough checks whether the user's point balance is at least the task unit price
func IsPointBalanceEnough(targetUserId int64, unitPrice int) bool {
if !setting.CloudBrainPaySwitch {
return true
}
if unitPrice == 0 {
return true
}
a, err := GetAccount(targetUserId)
if err != nil {
log.Error("IsPointBalanceEnough GetAccount error,err=%v", err)
return false
}
return a.Balance >= int64(unitPrice)
}
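A minimal usage sketch of IsPointBalanceEnough above (not part of this file; the helper name and the unitPrice source are assumptions for illustration):

package example

import (
	"fmt"

	"code.gitea.io/gitea/services/reward/point/account"
)

// canSubmitPaidTask is a hypothetical pre-check before creating a paid
// cloudbrain task; unitPrice is assumed to come from the selected resource
// specification and to be 0 for free specs.
func canSubmitPaidTask(userId int64, unitPrice int) error {
	if !account.IsPointBalanceEnough(userId, unitPrice) {
		return fmt.Errorf("user %d has insufficient points for unit price %d", userId, unitPrice)
	}
	return nil
}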

func SearchPointAccount(opt models.SearchPointAccountOpts) (*models.SearchPointAccountResponse, error) {
var result = &models.SearchPointAccountResponse{
Records: make([]*models.UserPointAccount, 0),
PageSize: opt.PageSize,
Page: opt.Page,
Total: 0,
}

userSearch := &models.SearchUserOptions{
Type: models.UserTypeIndividual,
ListOptions: models.ListOptions{
PageSize: 20,
},
SearchByEmail: true,
OrderBy: models.SearchOrderByAlphabetically,
}

userSearch.Page = opt.Page
if userSearch.Page <= 0 {
userSearch.Page = 1
}
userSearch.Keyword = strings.Trim(opt.Keyword, " ")
if len(userSearch.Keyword) == 0 || isKeywordValid(userSearch.Keyword) {
users, count, err := models.SearchUsers(userSearch)
if err != nil {
log.Error("SearchPointAccount SearchUsers error.%v", err)
return nil, err
}
userIds := make([]int64, 0)
for _, v := range users {
userIds = append(userIds, v.ID)
}
accountMap, err := models.GetPointAccountMapByUserIds(userIds)
if err != nil {
return nil, err
}

records := make([]*models.UserPointAccount, 0)
for _, v := range users {
upa := &models.UserPointAccount{
UserId: v.ID,
UserName: v.Name,
Email: v.Email,
Balance: 0,
TotalEarned: 0,
TotalConsumed: 0,
}
a := accountMap[v.ID]
if a != nil {
upa.Balance = a.Balance
upa.TotalConsumed = a.TotalConsumed
upa.TotalEarned = a.TotalEarned
}
records = append(records, upa)
}
result.Records = records
result.Total = count
}
return result, nil
}

func isKeywordValid(keyword string) bool {
return !bytes.Contains([]byte(keyword), []byte{0x00})
}

+ 65
- 0
services/reward/point/point_operate.go View File

@@ -0,0 +1,65 @@
package point

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_client"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/services/reward/limiter"
"code.gitea.io/gitea/services/reward/point/account"
"errors"
"time"
)

type PointOperator struct {
}

func (operator *PointOperator) IsLimited(ctx *models.RewardOperateContext) error {
realAmount, err := limiter.CheckLimit(ctx.SourceType.Name(), models.LimitTypeRewardPoint, ctx.TargetUserId, ctx.Reward.Amount, ctx.RejectPolicy)
if err != nil {
log.Error("PointOperator IsLimited error,err=%v", err)
return err
}
if realAmount < ctx.Reward.Amount {
ctx.LossAmount = ctx.Reward.Amount - realAmount
ctx.Reward.Amount = realAmount
}
return nil
}

func (operator *PointOperator) Operate(ctx *models.RewardOperateContext) error {
lock := redis_lock.NewDistributeLock(redis_key.PointAccountOperateLock(ctx.TargetUserId))
isOk, err := lock.LockWithWait(3*time.Second, 3*time.Second)
if err != nil {
log.Error("Get PointAccountOperateLock error,err=%v", err)
return err
}
if isOk {
defer lock.UnLock()
na, err := account.GetAccount(ctx.TargetUserId)
if err != nil || na == nil {
log.Error("operator get account error error,err=%v", err)
return errors.New("get account error")
}
if ctx.OperateType == models.OperateTypeIncrease {
err = na.Increase(ctx.Reward.Amount, ctx.SourceId)
} else if ctx.OperateType == models.OperateTypeDecrease {
if !ctx.PermittedNegative && na.Balance < ctx.Reward.Amount {
log.Info("account balance is not enough,ctx=%v", ctx)
return models.ErrInsufficientPointsBalance{}
}
err = na.Decrease(ctx.Reward.Amount, ctx.SourceId)
}
if err != nil {
log.Error("operate account balance error,err=%v", err)
return err
}
redis_client.Del(redis_key.PointAccountInfo(ctx.TargetUserId))

} else {
log.Error("Get account operate lock failed,ctx=%v", ctx)
return errors.New("Get account operate lock failed")
}
return nil
}
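For reference, a sketch of crediting points directly through the operator above (not part of this file, and illustrative only: real callers go through the reward service, and the "POINT" reward-type code is an assumption, the actual value comes from the task config's AwardType):

package example

import (
	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/services/reward/point"
)

// awardPoints credits amount points to a user; the context fields mirror the
// ones built in services/task/task.go and services/reward/period_task.go.
func awardPoints(userId, amount int64, sourceId string) error {
	operator := &point.PointOperator{}
	return operator.Operate(&models.RewardOperateContext{
		SourceType:   models.SourceTypeAccomplishTask,
		SourceId:     sourceId,
		TargetUserId: userId,
		OperateType:  models.OperateTypeIncrease,
		Reward: models.Reward{
			Amount: amount,
			Type:   models.GetRewardTypeInstance("POINT"), // assumed code; real value comes from the task config
		},
	})
}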

+ 47
- 0
services/reward/record.go View File

@@ -0,0 +1,47 @@
package reward

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
)

type RecordResponse struct {
Records []*models.RewardOperateRecordShow
Total int64
PageSize int
Page int
}

func GetRewardRecordList(opts *models.RewardRecordListOpts) (*RecordResponse, error) {
var l models.RewardRecordShowList
var n int64
var err error
if opts.IsAdmin {
l, n, err = models.GetAdminRewardRecordShowList(opts)
} else {
l, n, err = models.GetRewardRecordShowList(opts)
}
if err != nil {
log.Error("GetRewardRecordList error. %v", err)

return nil, err
}
if len(l) == 0 {
return &RecordResponse{Records: make([]*models.RewardOperateRecordShow, 0), Total: n, Page: opts.Page, PageSize: opts.PageSize}, nil
}
return &RecordResponse{Records: l, Total: n, Page: opts.Page, PageSize: opts.PageSize}, nil
}

func handleRecordResponse(opts *models.RewardRecordListOpts, list models.RewardRecordShowList) {
if opts.IsAdmin {
for _, v := range list {
v.UserName = opts.UserName
}
} else {
for _, v := range list {
if v.Cloudbrain != nil {
v.Cloudbrain.AiCenter = ""
}
}
}
}

+ 28
- 0
services/reward/serial.go View File

@@ -0,0 +1,28 @@
package reward

import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_client"
"code.gitea.io/gitea/modules/redis/redis_key"
"fmt"
"math/rand"
"time"
)

func GetSerialNoByRedis() (string, error) {
now := time.Now()
r := int64(rand.Intn(3)) + 1
n, err := redis_client.IncrBy(redis_key.RewardSerialCounter(now), r)
if err != nil {
log.Error("GetSerialNoByRedis RewardSerialCounter error. %v", err)
return "", err
}
if n == r {
redis_client.Expire(redis_key.RewardSerialCounter(now), 2*time.Minute)
}
//when the counter n reaches 1000, the serial number becomes one digit longer
if n >= 1000 {
return now.Format("200601021504") + fmt.Sprintf("%d", n) + fmt.Sprint(rand.Intn(10)), nil
}
return now.Format("200601021504") + fmt.Sprintf("%03d", n) + fmt.Sprint(rand.Intn(10)), nil
}
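The "200601021504" layout is Go's reference time written as yyyyMMddHHmm, so a serial number is the current minute prefix, a zero-padded counter, and one random digit. A small sketch with illustrative values:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Illustrative values: the redis counter returned 7 for the current
	// minute and the trailing random digit happens to be 4.
	now := time.Date(2022, 9, 26, 14, 5, 0, 0, time.Local)
	n := int64(7)
	serial := now.Format("200601021504") + fmt.Sprintf("%03d", n) + fmt.Sprint(4)
	fmt.Println(serial) // 2022092614050074
}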

+ 14
- 12
services/socketwrap/clientManager.go View File

@@ -10,7 +10,7 @@ import (
 "github.com/elliotchance/orderedmap"
 )

-var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33}
+var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35}

 type ClientsManager struct {
 Clients *orderedmap.OrderedMap
@@ -107,16 +107,18 @@ func initActionQueue() {

 func filterUserPrivateInfo(action *models.Action) {
 action.Comment = nil
-action.ActUser.Email = ""
-action.ActUser.Passwd = ""
-action.ActUser.PasswdHashAlgo = ""
-action.ActUser.PrivateKey = ""
-action.ActUser.PublicKey = ""
-action.ActUser.Salt = ""
-action.ActUser.FullName = ""
-action.ActUser.AvatarEmail = ""
-action.ActUser.IsAdmin = false
-action.ActUser.EmailNotificationsPreference = ""
-action.ActUser.IsOperator = false
+if action.ActUser != nil {
+action.ActUser.Email = ""
+action.ActUser.Passwd = ""
+action.ActUser.PasswdHashAlgo = ""
+action.ActUser.PrivateKey = ""
+action.ActUser.PublicKey = ""
+action.ActUser.Salt = ""
+action.ActUser.FullName = ""
+action.ActUser.AvatarEmail = ""
+action.ActUser.IsAdmin = false
+action.ActUser.EmailNotificationsPreference = ""
+action.ActUser.IsOperator = false
+}

 }

+ 50
- 0
services/task/period/handler.go View File

@@ -0,0 +1,50 @@
package period

import (
"code.gitea.io/gitea/models"
"errors"
"time"
)

var PeriodHandlerMap = map[string]PeriodHandler{
models.PeriodNotCycle: new(NoCycleHandler),
models.PeriodDaily: new(DailyHandler),
}

type PeriodHandler interface {
GetCurrentPeriod() *models.PeriodResult
}

type NoCycleHandler struct {
}

func (l *NoCycleHandler) GetCurrentPeriod() *models.PeriodResult {
return nil
}

type DailyHandler struct {
}

func (l *DailyHandler) GetCurrentPeriod() *models.PeriodResult {
t := time.Now()
startTime := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
endTime := startTime.Add(24 * time.Hour)
leftTime := endTime.Sub(t)
return &models.PeriodResult{
StartTime: startTime,
EndTime: endTime,
LeftTime: leftTime,
}
}

func getPeriodHandler(refreshRateType string) PeriodHandler {
return PeriodHandlerMap[refreshRateType]
}

func GetPeriod(refreshRate string) (*models.PeriodResult, error) {
handler := getPeriodHandler(refreshRate)
if handler == nil {
return nil, errors.New("task config incorrect")
}
return handler.GetCurrentPeriod(), nil
}
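A short usage sketch of the period handler above (not part of this file); models.PeriodDaily is presumably the refresh rate stored on a limiter config, and a nil result means the limit is not bound to a time window:

package example

import (
	"fmt"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/services/task/period"
)

// printDailyWindow prints today's limit window; for PeriodNotCycle the
// handler returns nil, meaning the limit applies without a time window.
func printDailyWindow() error {
	p, err := period.GetPeriod(models.PeriodDaily)
	if err != nil {
		return err
	}
	if p == nil {
		fmt.Println("limit is not time-boxed")
		return nil
	}
	fmt.Printf("window %s -> %s, %s left\n", p.StartTime, p.EndTime, p.LeftTime)
	return nil
}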

+ 145
- 0
services/task/task.go View File

@@ -0,0 +1,145 @@
package task

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/services/reward"
"code.gitea.io/gitea/services/reward/limiter"
"fmt"
"strconv"
"strings"
)

func Accomplish(action models.Action) {
defer func() {
if err := recover(); err != nil {
combinedErr := fmt.Errorf("%s\n%s", err, log.Stack(2))
log.Error("PANIC:%v", combinedErr)
}
}()
taskType := models.GetTaskTypeFromAction(action.OpType)
if taskType == "" {
log.Info("Accomplish finished.taskType is not exist.action.ID=%d", action.ID)
return
}
actions := make([]models.Action, 0)
actions = append(actions, action)
switch taskType {
//only creating public repo can be rewarded
case models.TaskCreatePublicRepo:
if action.Repo.IsPrivate {
return
}
//only creating public image can be rewarded
case models.TaskCreateImage:
if action.IsPrivate {
return
}
case models.TaskBindWechat:
n, err := models.CountWechatBindLog(action.Content, models.WECHAT_BIND)
if err != nil {
log.Error("CountWechatBindLog error when accomplish task,err=%v", err)
return
}
//if the wechatOpenId has been bound before, the action cannot be rewarded
if n > 1 && models.IsWechatOpenIdRewarded(action.Content) {

log.Debug("the wechat account has been bound before,wechatOpenId = %s", action.Content)
return
}
case models.TaskDatasetRecommended:
datasetIdStr := strings.Split(action.Content, "|")[0]
datasetId, _ := strconv.ParseInt(datasetIdStr, 10, 64)
users, err := models.GetAllDatasetContributorByDatasetId(datasetId)
if err != nil {
log.Error("GetAllDatasetContributorByDatasetId error,err=%v", err)
return
}
for _, user := range users {
if user.ID == action.ActUserID {
continue
}
actions = append(actions, models.Action{
ID: action.ID,
OpType: models.ActionDatasetRecommended,
ActUserID: action.UserID,
UserID: user.ID,
RepoID: action.RepoID,
Content: action.Content,
})
}

}
batchAccomplish(taskType, actions...)
}

func batchAccomplish(taskType models.TaskType, actions ...models.Action) {
for _, act := range actions {
go accomplish(act, taskType)
}
}

func accomplish(action models.Action, taskType models.TaskType) error {
defer func() {
if err := recover(); err != nil {
combinedErr := fmt.Errorf("%s\n%s", err, log.Stack(2))
log.Error("PANIC:%v", combinedErr)
}
}()
userId := action.UserID

//get task config
config, err := GetTaskConfig(string(taskType))
if err != nil {
log.Error("GetTaskConfig error,%v", err)
return err
}
if config == nil {
log.Info("task config not exist,userId=%d taskType=%s", userId, taskType)
return nil
}

//is limited?
if isLimited(userId, config, models.JustReject) {
log.Info("task accomplish maximum times are reached,userId=%d taskType=%s", userId, taskType)
return nil
}

//add log
_, err = models.InsertTaskAccomplishLog(&models.TaskAccomplishLog{
ConfigId: config.ID,
TaskCode: config.TaskCode,
UserId: userId,
ActionId: action.ID,
})
if err != nil {
log.Error("InsertTaskAccomplishLog error,%v", err)
return err
}

//reward
reward.Operate(&models.RewardOperateContext{
SourceType: models.SourceTypeAccomplishTask,
SourceId: fmt.Sprint(action.ID),
SourceTemplateId: string(taskType),
Title: config.Title,
Reward: models.Reward{
Amount: config.AwardAmount,
Type: models.GetRewardTypeInstance(config.AwardType),
},
TargetUserId: userId,
RequestId: fmt.Sprint(action.ID),
OperateType: models.OperateTypeIncrease,
RejectPolicy: models.FillUp,
})
log.Debug("accomplish success,action=%v", action)
return nil
}

func isLimited(userId int64, config *models.TaskConfig, rejectPolicy models.LimiterRejectPolicy) bool {
if _, err := limiter.CheckLimit(config.TaskCode, models.LimitTypeTask, userId, 1, rejectPolicy); err != nil {
log.Error(" isLimited CheckLimit error. %v", err)
return true
}
return false

}

+ 228
- 0
services/task/task_config.go View File

@@ -0,0 +1,228 @@
package task

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_client"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/services/reward/limiter"
"encoding/json"
"errors"
"fmt"
"time"
)

//GetTaskConfig gets the task config from the redis cache first;
//if it is not present in redis, it falls back to the DB and refreshes the redis key
func GetTaskConfig(taskType string) (*models.TaskConfig, error) {
list, err := GetTaskConfigList()
if err != nil {
log.Error(" GetTaskConfigList error. %v", err)
return nil, err
}
for _, v := range list {
if v.TaskCode == taskType {
return v, nil
}
}
return nil, nil
}

func GetTaskConfigList() ([]*models.TaskConfig, error) {
redisKey := redis_key.TaskConfigList()
configStr, _ := redis_client.Get(redisKey)
if configStr != "" {
if configStr == redis_key.EMPTY_REDIS_VAL {
return nil, nil
}
config := make([]*models.TaskConfig, 0)
json.Unmarshal([]byte(configStr), &config)
return config, nil
}
config, err := models.GetTaskConfigList()
if err != nil {
log.Error(" GetTaskConfigList from model error. %v", err)
if models.IsErrRecordNotExist(err) {
redis_client.Setex(redisKey, redis_key.EMPTY_REDIS_VAL, 5*time.Second)
return nil, nil
}
return nil, err
}
jsonStr, _ := json.Marshal(config)
redis_client.Setex(redisKey, string(jsonStr), 30*24*time.Hour)
return config, nil
}

func GetTaskConfigPageWithDeleted(opt models.GetTaskConfigOpts) ([]*models.TaskAndLimiterConfig, int64, error) {
config, count, err := models.GetTaskConfigPageWithDeleted(opt)
if err != nil {
log.Error(" GetTaskConfigPageWithDeleted from model error. %v", err)
if models.IsErrRecordNotExist(err) {
return nil, 0, nil
}
return nil, 0, err
}
return config, count, nil
}

func GetTaskConfigWithLimitList(opt models.GetTaskConfigOpts) (*models.TaskConfigWithLimitResponse, error) {
list, n, err := GetTaskConfigPageWithDeleted(opt)
if err != nil {
return nil, err
}
if len(list) == 0 {
return nil, nil
}
r := make([]*models.TaskConfigWithSingleLimit, 0)
for i := 0; i < len(list); i++ {
li := list[i]
t := &models.TaskConfigWithSingleLimit{
ID: li.TaskConfig.ID,
TaskCode: li.TaskConfig.TaskCode,
AwardType: li.TaskConfig.AwardType,
AwardAmount: li.TaskConfig.AwardAmount,
Creator: li.TaskConfig.CreatorName,
CreatedUnix: li.TaskConfig.CreatedUnix,
IsDeleted: li.TaskConfig.DeletedAt > 0,
DeleteAt: li.TaskConfig.DeletedAt,
LimitNum: li.LimitConfig.LimitNum,
RefreshRate: li.LimitConfig.RefreshRate,
}
r = append(r, t)
}

return &models.TaskConfigWithLimitResponse{
Records: r,
Page: opt.Page,
PageSize: opt.PageSize,
Total: n,
}, nil
}

func AddTaskConfig(config models.TaskConfigWithLimit, doer *models.User) error {
if config.TaskCode == "" || config.AwardType == "" {
log.Error(" EditTaskConfig param error")
return errors.New("param error")
}
var lock = redis_lock.NewDistributeLock(redis_key.TaskConfigOperateLock(config.TaskCode, config.AwardType))
isOk, _ := lock.LockWithWait(3*time.Second, 3*time.Second)
if !isOk {
return errors.New("Get lock failed")
}
defer lock.UnLock()

t, err := models.GetTaskConfigByTaskCode(config.TaskCode)
if err != nil && !models.IsErrRecordNotExist(err) {
return err
}
if t != nil {
return errors.New("task config is exist")
}

for i, l := range config.Limiters {
if l.Scope == "" {
config.Limiters[i].Scope = models.LimitScopeSingleUser.Name()
}
}
err = models.NewTaskConfig(config, doer)
if err != nil {
log.Error("add task config error,config:%v err:%v", config, err)
return err
}
redis_client.Del(redis_key.LimitConfig(models.LimitTypeTask.Name()))
redis_client.Del(redis_key.TaskConfigList())
return nil
}

func EditTaskConfig(config models.TaskConfigWithLimit, doer *models.User) error {
if config.TaskCode == "" || config.AwardType == "" || config.ID <= 0 {
log.Error(" EditTaskConfig param error")
return errors.New("param error")
}
var lock = redis_lock.NewDistributeLock(redis_key.TaskConfigOperateLock(config.TaskCode, config.AwardType))
isOk, _ := lock.LockWithWait(3*time.Second, 3*time.Second)
if !isOk {
return errors.New("Get lock failed")
}
defer lock.UnLock()
t, err := models.GetTaskConfigByID(config.ID)
if err != nil {
return err
}
if t == nil {
return errors.New("task config is not exist")
}

for i, l := range config.Limiters {
if l.Scope == "" {
config.Limiters[i].Scope = models.LimitScopeSingleUser.Name()
}
}
err = models.EditTaskConfig(config, doer)
if err != nil {
log.Error("add task config error,config:%v err:%v", config, err)
return err
}
redis_client.Del(redis_key.LimitConfig(models.LimitTypeTask.Name()))
redis_client.Del(redis_key.TaskConfigList())
return nil
}

func DelTaskConfig(id int64, doer *models.User) error {
if id == 0 {
log.Error(" EditTaskConfig param error")
return errors.New("param error")
}
err := models.DelTaskConfig(id, doer)
if err != nil {
log.Error("del task config error,err:%v", err)
return err
}
redis_client.Del(redis_key.LimitConfig(models.LimitTypeTask.Name()))
redis_client.Del(redis_key.TaskConfigList())
return nil
}

func GetPointRule() (*models.PointRule, error) {
r, err := limiter.GetSingleDailyPointLimitConfig()
if err != nil {
return nil, err
}
limiters, err := limiter.GetLimitersByLimitType(models.LimitTypeTask)
if err != nil {
return nil, err
}
limiterMap := make(map[string]*models.LimitConfig)
for i := 0; i < len(limiters); i++ {
limiterMap[limiters[i].LimitCode] = &limiters[i]
}

taskConfigs, err := GetTaskConfigList()
if err != nil {
return nil, err
}
taskRules := make([]models.TaskRule, len(taskConfigs))

for i, taskConfig := range taskConfigs {
rule := models.TaskRule{
TaskCode: taskConfig.TaskCode,
AwardType: taskConfig.AwardType,
AwardAmount: taskConfig.AwardAmount,
}
limitCfg := limiterMap[fmt.Sprint(taskConfig.TaskCode)]
if limitCfg != nil {
rule.RefreshRate = limitCfg.RefreshRate
rule.LimitNum = limitCfg.LimitNum
}
taskRules[i] = rule
}

pointRule := &models.PointRule{
TaskRules: taskRules,
}
if r != nil {
pointRule.UserDailyLimit = r.LimitNum
}
return pointRule, nil
}

modules/auth/wechat/auto_reply.go → services/wechat/auto_reply.go View File


modules/auth/wechat/event_handle.go → services/wechat/event_handle.go View File

@@ -1,6 +1,9 @@
 package wechat

 import (
+"code.gitea.io/gitea/models"
+"code.gitea.io/gitea/modules/auth/wechat"
+"code.gitea.io/gitea/modules/notification"
 "code.gitea.io/gitea/modules/redis/redis_client"
 "code.gitea.io/gitea/modules/redis/redis_key"
 "encoding/json"
@@ -142,22 +145,26 @@ func HandleScanEvent(we WechatMsg) string {
 if val == "" {
 return ""
 }
-qrCache := new(QRCode4BindCache)
+qrCache := new(wechat.QRCode4BindCache)
 json.Unmarshal([]byte(val), qrCache)
-if qrCache.Status == BIND_STATUS_UNBIND {
-err := BindWechat(qrCache.UserId, we.FromUserName)
+if qrCache.Status == wechat.BIND_STATUS_UNBIND {
+err := wechat.BindWechat(qrCache.UserId, we.FromUserName)
 if err != nil {
-if err, ok := err.(WechatBindError); ok {
+if err, ok := err.(wechat.WechatBindError); ok {
 return err.Reply
 }
-return BIND_REPLY_FAILED_DEFAULT
+return wechat.BIND_REPLY_FAILED_DEFAULT
 }
-qrCache.Status = BIND_STATUS_BOUND
+qrCache.Status = wechat.BIND_STATUS_BOUND
 jsonStr, _ := json.Marshal(qrCache)
 redis_client.Setex(redis_key.WechatBindingUserIdKey(sceneStr), string(jsonStr), 60*time.Second)
 }
+u, err := models.GetUserByID(qrCache.UserId)
+if err == nil {
+notification.NotifyWechatBind(u, we.FromUserName)
+}

-return BIND_REPLY_SUCCESS
+return wechat.BIND_REPLY_SUCCESS
 }

 func HandleSubscribeEvent(we WechatMsg) *WechatReplyContent {

+ 8
- 0
templates/admin/cloudbrain/list.tmpl View File

@@ -259,6 +259,14 @@
 </a>
 {{end}}
 </div>
+<!-- Modify task -->
+{{if eq .JobType "TRAIN"}}
+<div class="ui compact buttons">
+<a style="padding: 0.5rem 1rem;" class="ui basic blue button" href="{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Name}}{{if eq .Cloudbrain.Type 1}}/modelarts/train-job/{{.JobID}}{{else if eq .Cloudbrain.Type 0}}/cloudbrain/train-job/{{.JobID}}{{else if eq .Cloudbrain.Type 2}}/grampus/train-job/{{.JobID}}{{end}}/create_version{{if .VersionName}}?version_name={{.VersionName}}{{end}}">
+{{$.i18n.Tr "repo.modelarts.modify"}}
+</a>
+</div>
+{{end}}
 <!-- Delete task -->
 <form class="ui compact buttons" id="delForm-{{$JobID}}"
 action='{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Name}}{{if eq .JobType "BENCHMARK"}}/cloudbrain/benchmark{{else if or (eq .JobType "SNN4IMAGENET") (eq .JobType "BRAINSCORE")}}/cloudbrain{{else if eq .JobType "DEBUG"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}{{else if eq .JobType "TRAIN"}}{{if eq .Cloudbrain.Type 1}}/modelarts/train-job{{else if eq .Cloudbrain.Type 0}}/cloudbrain/train-job{{else if eq .Cloudbrain.Type 2}}/grampus/train-job{{end}}{{else if eq .JobType "INFERENCE"}}{{if eq .Cloudbrain.Type 0}}/cloudbrain/train-job{{end}}{{end}}/{{$JobID}}/del?isadminpage=true'


+ 11
- 1
templates/base/footer.tmpl View File

@@ -52,6 +52,12 @@
 <script src="/rotation3D/rotation3D.js?v={{MD5 AppVer}}"></script>
 <script>
 var jobTask={};
+var html =document.documentElement;
+var lang = html.attributes["lang"]
+var isZh = true;
+if(lang != null && lang.nodeValue =="en-US" ){
+isZh=false;
+}
 function queryAiCenterInfo(){
 $.ajax({
 type:"GET",
@@ -96,7 +102,11 @@
 tmp["type"]="green";
 }
 tmp["icon"]="";
-tmp["content"]=json[i].content;
+if(isZh){
+tmp["content"]=json[i].content;
+}else{
+tmp["content"]=json[i].content_en;
+}
 serverItemList.push(tmp);
 }
 }


+ 5
- 5
templates/base/footer_content.tmpl View File

@@ -20,7 +20,7 @@
 <div class="column ui vertical text menu">
 <div class="header item">{{.i18n.Tr "custom.foot.help"}}</div>
 <div class="ui language bottom floating slide up dropdown link item">
-<i class="world icon"></i>
+<i class="globe icon"></i>
 <div class="text">{{.LangName}}</div>
 <div class="menu">
 {{range .AllLangs}}
@@ -29,12 +29,12 @@
 </div>
 </div>

-<a href="https://git.openi.org.cn/zeizei/OpenI_Learning" class=" item a_margin" target="_blank"><i class="ri-creative-commons-by-line footer_icon" ></i><p class="footer_icon">{{.i18n.Tr "custom.Platform_Tutorial"}}</p> </a>
-{{if .EnableSwagger}}<a href="/api/swagger" class=" item a_margin"><i class="ri-exchange-line footer_icon" > </i><p class="footer_icon">API</p> </a>{{end}}
+<a href="https://git.openi.org.cn/zeizei/OpenI_Learning" class="item" target="_blank"><i class="compass icon" ></i> {{.i18n.Tr "custom.Platform_Tutorial"}}</a>
+{{if .EnableSwagger}}<a href="/api/swagger" class="item"><i class="plug icon"></i> API</a>{{end}}
 {{if .IsSigned}}
-<a href="https://git.openi.org.cn/zeizei/OpenI_Learning/issues/new" class=" item a_margin" target="_blank"><i class="ri-mail-send-line footer_icon"></i><p class="footer_icon">{{.i18n.Tr "custom.foot.advice_feedback"}}</p></a>
+<a href="https://git.openi.org.cn/zeizei/OpenI_Learning/issues/new" class="item" target="_blank"><i class="envelope icon"></i> {{.i18n.Tr "custom.foot.advice_feedback"}}</a>
 {{else}}
-<a href="{{AppSubUrl}}/user/login" class=" item a_margin" ><i class="ri-mail-send-line footer_icon" ></i><p class="footer_icon">{{.i18n.Tr "custom.foot.advice_feedback"}}</p></a>
+<a href="{{AppSubUrl}}/user/login" class="item"><i class="envelope icon"></i> {{.i18n.Tr "custom.foot.advice_feedback"}}</a>
 {{end}}

 {{template "custom/extra_links_footer" .}}


+ 5
- 5
templates/base/footer_content_fluid.tmpl View File

@@ -18,7 +18,7 @@
 <div class="column ui vertical text menu">
 <div class="header item">{{.i18n.Tr "custom.foot.help"}}</div>
 <div class="ui language bottom floating slide up dropdown link item">
-<i class="world icon"></i>
+<i class="globe icon"></i>
 <div class="text">{{.LangName}}</div>
 <div class="menu">
 {{range .AllLangs}}
@@ -26,12 +26,12 @@
 {{end}}
 </div>
 </div>
-<a href="https://git.openi.org.cn/zeizei/OpenI_Learning" class=" item a_margin" target="_blank"><i class="ri-creative-commons-by-line footer_icon" ></i><p class="footer_icon">{{.i18n.Tr "custom.Platform_Tutorial"}}</p> </a>
-{{if .EnableSwagger}}<a href="/api/swagger" class=" item a_margin"><i class="ri-exchange-line footer_icon" > </i><p class="footer_icon">API</p> </a>{{end}}
+<a href="https://git.openi.org.cn/zeizei/OpenI_Learning" class="item" target="_blank"><i class="compass icon"></i> {{.i18n.Tr "custom.Platform_Tutorial"}} </a>
+{{if .EnableSwagger}}<a href="/api/swagger" class="item"><i class="plug icon" ></i> API</a>{{end}}
 {{if .IsSigned}}
-<a href="https://git.openi.org.cn/zeizei/OpenI_Learning/issues/new" class=" item a_margin" target="_blank"><i class="ri-mail-send-line footer_icon"></i><p class="footer_icon">{{.i18n.Tr "custom.foot.advice_feedback"}}</p></a>
+<a href="https://git.openi.org.cn/zeizei/OpenI_Learning/issues/new" class="item" target="_blank"><i class="envelope icon"></i> {{.i18n.Tr "custom.foot.advice_feedback"}}</a>
 {{else}}
-<a href="{{AppSubUrl}}/user/login" class=" item a_margin" ><i class="ri-mail-send-line footer_icon" ></i><p class="footer_icon footer_icon">{{.i18n.Tr "custom.foot.advice_feedback"}}</p></a>
+<a href="{{AppSubUrl}}/user/login" class="item"><i class="envelope icon"></i> {{.i18n.Tr "custom.foot.advice_feedback"}}</a>
 {{end}}
 {{template "custom/extra_links_footer" .}}
 </div>


+ 4
- 2
templates/base/head_navbar.tmpl View File

@@ -27,7 +27,8 @@
 <a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/issues">{{.i18n.Tr "issues"}}</a>
 <a style="border: none;color: #000; white-space: nowrap;" class=" item" href="{{AppSubUrl}}/pulls">{{.i18n.Tr "pull_requests"}}</a>
 <a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/milestones">{{.i18n.Tr "milestones"}}</a>
-<a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/cloudbrains">{{.i18n.Tr "repo.cloudbrain.task"}}</a>
+<a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/cloudbrains">{{.i18n.Tr "repo.cloudbrain.task"}}</a>
+<a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/reward/point">{{.i18n.Tr "calculation_points"}}</a>
 </div>
 </div>
 </div>
@@ -59,7 +60,8 @@
 <a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/user/login">{{.i18n.Tr "issues"}}</a>
 <a style="border: none;color: #000; white-space: nowrap;" class=" item" href="{{AppSubUrl}}/user/login">{{.i18n.Tr "pull_requests"}}</a>
 <a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/user/login">{{.i18n.Tr "milestones"}}</a>
-<a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/cloudbrains">{{.i18n.Tr "repo.cloudbrain.task"}}</a>
+<a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/cloudbrains">{{.i18n.Tr "repo.cloudbrain.task"}}</a>
+<a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/reward/point">{{.i18n.Tr "calculation_points"}}</a>
 </div>
 </div>
 </div>


+ 2
- 0
templates/base/head_navbar_fluid.tmpl View File

@@ -28,6 +28,7 @@
 <a style="border: none;color: #000; white-space: nowrap;" class=" item" href="{{AppSubUrl}}/pulls">{{.i18n.Tr "pull_requests"}}</a>
 <a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/milestones">{{.i18n.Tr "milestones"}}</a>
 <a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/cloudbrains">{{.i18n.Tr "repo.cloudbrain.task"}}</a>
+<a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/reward/point">{{.i18n.Tr "calculation_points"}}</a>
 </div>
 </div>
 </div>
@@ -59,6 +60,7 @@
 <a style="border: none;color: #000; white-space: nowrap;" class=" item" href="{{AppSubUrl}}/user/login">{{.i18n.Tr "pull_requests"}}</a>
 <a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/user/login">{{.i18n.Tr "milestones"}}</a>
 <a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/cloudbrains">{{.i18n.Tr "repo.cloudbrain.task"}}</a>
+<a style="border: none;color: #000;" class=" item" href="{{AppSubUrl}}/reward/point">{{.i18n.Tr "calculation_points"}}</a>
 </div>
 </div>
 </div>


Some files were not shown because too many files changed in this diff
