diff --git a/custom/public/css/git.openi.css b/custom/public/css/git.openi.css
index 3d7e15ada..3a4eff8e4 100755
--- a/custom/public/css/git.openi.css
+++ b/custom/public/css/git.openi.css
@@ -72,9 +72,7 @@
     z-index: 10;
 }
 .ui.secondary.c2net.segment{
-    /* background: #f8faff;
-    border: none;*/
-    margin-bottom: 5em;
+    padding-bottom: 3em;
     padding-top: 2em;
     color: rgba(0,0,0,.87);
     background-image: linear-gradient(to bottom left,var(--tw-gradient-stops));
@@ -176,7 +174,7 @@
 }
 .homeorg, .homepro, .homemodel, .i-env{
     position: relative;
-    padding-bottom: 5em;
+    padding-bottom: 3em;
 }
 .homenews::before{
     content: '';
@@ -203,7 +201,7 @@
     padding: 0;
 }
 .newslist{
-    height: 300px;
+    height: 260px;
     overflow: hidden;
 }
@@ -278,7 +276,7 @@
     z-index: 9;
 }
 .homeorg-list .card{
-    background-image: linear-gradient(#FFF, #FFF 60%, #DFF0EF) !important;
+    /* background-image: linear-gradient(#FFF, #FFF 60%, #DFF0EF) !important; */
     box-shadow: none !important;
 }
 .homeorg-list .card .ui.small.header .content{
@@ -299,7 +297,7 @@
     background-color: #FFF;
     box-shadow: 0px 5px 10px 0px rgba(105, 192, 255, .3);
     border: 1px solid rgba(105, 192, 255, .4);
-    min-height: 10.8em;
+    /* min-height: 10.8em; */
 }
 .homepro-list .ui.card>.content>.header{
     line-height: 40px !important;
@@ -307,7 +305,7 @@
 .homepro-list .swiper-pagination-bullet-active,
 .homeorg-list .swiper-pagination-bullet-active{
     width: 40px;
-    border-radius: 4px;
+    border-radius: 4px;
 }
 .i-env > div{
     position: relative;
@@ -317,6 +315,15 @@
 }
 @media only screen and (max-width: 767px) {
+    .mobile-margin-left-20 {
+        margin-left: 20px !important;
+    }
+    .mobile-text-align-center {
+        text-align: center !important;
+    }
+    .mobile-justify-content-center {
+        justify-content: center !important;
+    }
     .am-mt-30{ margin-top: 1.5rem !important;}
     .ui.secondary.hometop.segment{
         margin-bottom: 5.0rem;
@@ -341,7 +348,7 @@
     background: #FFF;
 }
 .homeorg{
-    padding-left: 3.5em;
+    /* padding-left: 3.5em; */
 }
 .homeorg-tit::after {
     left: -2.3em;
diff --git a/custom/public/img/home-banner-01-en.jpg b/custom/public/img/home-banner-01-en.jpg
new file mode 100644
index 000000000..59001d4f0
Binary files /dev/null and b/custom/public/img/home-banner-01-en.jpg differ
diff --git a/custom/public/img/home-banner-01.jpg b/custom/public/img/home-banner-01.jpg
new file mode 100644
index 000000000..1aa0563c4
Binary files /dev/null and b/custom/public/img/home-banner-01.jpg differ
diff --git a/custom/public/img/home-bg-ps.png b/custom/public/img/home-bg-ps.png
new file mode 100644
index 000000000..27e6ae1a1
Binary files /dev/null and b/custom/public/img/home-bg-ps.png differ
diff --git a/custom/public/img/logo-footer.svg b/custom/public/img/logo-footer.svg
new file mode 100644
index 000000000..3be0e5b18
--- /dev/null
+++ b/custom/public/img/logo-footer.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/custom/public/img/logo-w.svg b/custom/public/img/logo-w.svg
index 867acc1e2..133f63d23 100644
--- a/custom/public/img/logo-w.svg
+++ b/custom/public/img/logo-w.svg
@@ -1 +1,45 @@
-logo-w
\ No newline at end of file
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/custom/public/rotation3D/img/baseimg.png b/custom/public/rotation3D/img/baseimg.png
new file mode 100644
index 000000000..960f1c772
Binary files /dev/null and b/custom/public/rotation3D/img/baseimg.png differ
diff --git a/index.html b/index.html
index 643c31b06..0b804a669 100644
--- a/index.html
+++ b/index.html
@@ -107,7 +107,7 @@
-
+
@@ -250,7 +250,7 @@ var _hmt = _hmt || [];
-
+ 7月中下旬登录启智AI协作平台,需登记手机号码啦>>>
@@ -308,7 +308,7 @@ var
_hmt = _hmt || []; isNewNotice=false; } let isShowNoticeTag = false; - let notices= [{"Title":"“我为开源打榜狂”上榜领奖者名单公示1周,10万奖金被瓜分,请大家自行确认\u003e\u003e\u003e","Link":"https://openi.org.cn/html/2022/notices_0701/636.html","Visible":1},{"Title":"7月中下旬登录启智AI协作平台,需登记手机号码啦\u003e\u003e\u003e","Link":"https://git.openi.org.cn/OpenIOSSG/promote/src/branch/master/notice/Other_notes/RegisterMobileNumber.md","Visible":1},{"Title":"智算网络Beta版本上线,大大缩短算力排队时间,速来体验吧~\u003e\u003e\u003e","Link":"https://openi.org.cn/html/2022/dongtai_0628/634.html","Visible":1},{"Title":"启智AI协作平台问卷调查,邀请您参加\u003e\u003e\u003e","Link":"https://wj.qq.com/s2/10362208/5c0c","Visible":1}] + let notices= [{"Title":"“我为开源打榜狂”上榜领奖者名单公示1周,10万奖金被瓜分,请大家自行确认\u003e\u003e\u003e","Link":"https://openi.org.cn/html/2022/notices_0701/636.html","Visible":1},{"Title":"7月中下旬登录启智AI协作平台,需登记手机号码啦\u003e\u003e\u003e","Link":"https://openi.pcl.ac.cn/OpenIOSSG/promote/src/branch/master/notice/Other_notes/RegisterMobileNumber.md","Visible":1},{"Title":"智算网络Beta版本上线,大大缩短算力排队时间,速来体验吧~\u003e\u003e\u003e","Link":"https://openi.org.cn/html/2022/dongtai_0628/634.html","Visible":1},{"Title":"启智AI协作平台问卷调查,邀请您参加\u003e\u003e\u003e","Link":"https://wj.qq.com/s2/10362208/5c0c","Visible":1}] if(notices != null && notices!=''){ for (i =0;i - + diff --git a/models/cloudbrain.go b/models/cloudbrain.go index b8384f746..c4a6260ef 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -2365,6 +2365,61 @@ func CloudbrainAllStatic(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, er return cloudbrains, count, nil } +func CloudbrainAllKanBan(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { + sess := x.NewSession() + defer sess.Close() + + var cond = builder.NewCond() + + if (opts.Type) >= 0 { + cond = cond.And( + builder.Eq{"cloudbrain.type": opts.Type}, + ) + } + if opts.BeginTimeUnix > 0 && opts.EndTimeUnix > 0 { + cond = cond.And( + builder.And(builder.Gte{"cloudbrain.created_unix": opts.BeginTimeUnix}, builder.Lte{"cloudbrain.created_unix": opts.EndTimeUnix}), + ) + } + var count int64 + var err error + count, err = sess.Unscoped().Where(cond).Count(new(Cloudbrain)) + + if err != nil { + return nil, 0, fmt.Errorf("Count: %v", err) + } + + if opts.Page >= 0 && opts.PageSize > 0 { + var start int + if opts.Page == 0 { + start = 0 + } else { + start = (opts.Page - 1) * opts.PageSize + } + sess.Limit(opts.PageSize, start) + } + // sess.OrderBy("cloudbrain.created_unix DESC") + cloudbrains := make([]*CloudbrainInfo, 0, setting.UI.IssuePagingNum) + if err := sess.Cols("id", "type", "work_server_number", "duration", "train_job_duration", "ai_center", "cluster").Table(&Cloudbrain{}).Unscoped().Where(cond). + Find(&cloudbrains); err != nil { + return nil, 0, fmt.Errorf("Find: %v", err) + } + if opts.NeedRepoInfo { + var ids []int64 + for _, task := range cloudbrains { + ids = append(ids, task.RepoID) + } + repositoryMap, err := GetRepositoriesMapByIDs(ids) + if err == nil { + for _, task := range cloudbrains { + task.Repo = repositoryMap[task.RepoID] + } + } + + } + return cloudbrains, count, nil +} + func GetStartedCloudbrainTaskByUpdatedUnix(startTime, endTime time.Time) ([]Cloudbrain, error) { r := make([]Cloudbrain, 0) err := x.Where("updated_unix >= ? and updated_unix <= ? 
and start_time > 0", startTime.Unix(), endTime.Unix()).Unscoped().Find(&r) diff --git a/models/cloudbrain_static.go b/models/cloudbrain_static.go index a7678b267..a213f179c 100644 --- a/models/cloudbrain_static.go +++ b/models/cloudbrain_static.go @@ -46,7 +46,8 @@ type CloudbrainDurationStatistic struct { ComputeResource string AccCardType string `xorm:"INDEX"` - DateTime timeutil.TimeStamp `xorm:"INDEX"` + DateTime timeutil.TimeStamp `xorm:"INDEX DEFAULT 0"` + DateTimeUnix timeutil.TimeStamp `xorm:"INDEX DEFAULT 0"` DayTime string `xorm:"INDEX"` HourTime int `xorm:"INDEX"` CardsUseDuration int @@ -274,7 +275,6 @@ func GetCloudbrainByTime(beginTime int64, endTime int64) ([]*CloudbrainInfo, err sess := x.NewSession() defer sess.Close() var cond = builder.NewCond() - sess.Exec("if ") cond = cond.Or( builder.And(builder.Gte{"cloudbrain.end_time": beginTime}, builder.Lte{"cloudbrain.start_time": beginTime}, builder.Gt{"cloudbrain.start_time": 0}), ) @@ -284,11 +284,12 @@ func GetCloudbrainByTime(beginTime int64, endTime int64) ([]*CloudbrainInfo, err cond = cond.Or( builder.And(builder.Eq{"cloudbrain.status": string(JobRunning)}), ) - sess.OrderBy("cloudbrain.created_unix ASC") + sess.OrderBy("cloudbrain.id ASC") cloudbrains := make([]*CloudbrainInfo, 0, 10) if err := sess.Table(&Cloudbrain{}).Unscoped().Where(cond). Find(&cloudbrains); err != nil { - log.Info("find error.") + log.Error("find error.") + return nil, err } return cloudbrains, nil } @@ -303,7 +304,8 @@ func GetSpecByAiCenterCodeAndType(aiCenterCode string, accCardType string) ([]*C cloudbrainSpecs := make([]*CloudbrainSpec, 0, 10) if err := sess.Table(&CloudbrainSpec{}).Where(cond). Find(&cloudbrainSpecs); err != nil { - log.Info("find error.") + log.Error("find error.") + return nil, err } return cloudbrainSpecs, nil } @@ -328,7 +330,8 @@ func GetCanUseCardInfo() ([]*ResourceQueue, error) { sess.OrderBy("resource_queue.cluster DESC, resource_queue.ai_center_code ASC") ResourceQueues := make([]*ResourceQueue, 0, 10) if err := sess.Table(&ResourceQueue{}).Find(&ResourceQueues); err != nil { - log.Info("find error.") + log.Error("find error.") + return nil, err } return ResourceQueues, nil } @@ -339,7 +342,7 @@ func GetCardDurationStatistics(opts *DurationStatisticOptions) ([]*CloudbrainDur var cond = builder.NewCond() if opts.BeginTime.Unix() > 0 && opts.EndTime.Unix() > 0 { cond = cond.And( - builder.And(builder.Gte{"cloudbrain_duration_statistic.date_time": opts.BeginTime.Unix()}, builder.Lt{"cloudbrain_duration_statistic.date_time": opts.EndTime.Unix()}), + builder.And(builder.Gte{"cloudbrain_duration_statistic.date_time_unix": opts.BeginTime.Unix()}, builder.Lt{"cloudbrain_duration_statistic.date_time_unix": opts.EndTime.Unix()}), ) } if opts.AiCenterCode != "" { @@ -350,7 +353,8 @@ func GetCardDurationStatistics(opts *DurationStatisticOptions) ([]*CloudbrainDur CloudbrainDurationStatistics := make([]*CloudbrainDurationStatistic, 0, 10) if err := sess.Table(&CloudbrainDurationStatistic{}).Where(cond). 
Find(&CloudbrainDurationStatistics); err != nil { - log.Info("find error.") + log.Error("find error.") + return nil, err } return CloudbrainDurationStatistics, nil } @@ -358,10 +362,18 @@ func GetCardDurationStatistics(opts *DurationStatisticOptions) ([]*CloudbrainDur func GetDurationRecordBeginTime() ([]*CloudbrainDurationStatistic, error) { sess := xStatistic.NewSession() defer sess.Close() - sess.OrderBy("cloudbrain_duration_statistic.date_time ASC limit 1") + + var cond = builder.NewCond() + + cond = cond.And( + builder.Gt{"cloudbrain_duration_statistic.date_time_unix": 0}, + ) + + sess.OrderBy("cloudbrain_duration_statistic.date_time_unix ASC limit 1") CloudbrainDurationStatistics := make([]*CloudbrainDurationStatistic, 0) - if err := sess.Table(&CloudbrainDurationStatistic{}).Find(&CloudbrainDurationStatistics); err != nil { - log.Info("find error.") + if err := sess.Table(&CloudbrainDurationStatistic{}).Where(cond).Find(&CloudbrainDurationStatistics); err != nil { + log.Error("find error.") + return nil, err } return CloudbrainDurationStatistics, nil } @@ -369,10 +381,16 @@ func GetDurationRecordBeginTime() ([]*CloudbrainDurationStatistic, error) { func GetDurationRecordUpdateTime() ([]*CloudbrainDurationStatistic, error) { sess := xStatistic.NewSession() defer sess.Close() - sess.OrderBy("cloudbrain_duration_statistic.date_time DESC limit 1") + var cond = builder.NewCond() + + cond = cond.And( + builder.Gt{"cloudbrain_duration_statistic.date_time_unix": 1577808000}, + ) + sess.OrderBy("cloudbrain_duration_statistic.date_time_unix DESC limit 1") CloudbrainDurationStatistics := make([]*CloudbrainDurationStatistic, 0) - if err := sess.Table(&CloudbrainDurationStatistic{}).Find(&CloudbrainDurationStatistics); err != nil { - log.Info("find error.") + if err := sess.Table(&CloudbrainDurationStatistic{}).Where(cond).Find(&CloudbrainDurationStatistics); err != nil { + log.Error("find error.") + return nil, err } return CloudbrainDurationStatistics, nil } @@ -380,8 +398,8 @@ func GetDurationRecordUpdateTime() ([]*CloudbrainDurationStatistic, error) { func DeleteCloudbrainDurationStatistic(beginTime timeutil.TimeStamp, endTime timeutil.TimeStamp) error { sess := xStatistic.NewSession() defer sess.Close() - if _, err := sess.Exec("DELETE FROM cloudbrain_duration_statistic WHERE cloudbrain_duration_statistic.date_time BETWEEN ? AND ?", beginTime, endTime); err != nil { - log.Info("DELETE cloudbrain_duration_statistic data error.") + if _, err := sess.Exec("DELETE FROM cloudbrain_duration_statistic WHERE cloudbrain_duration_statistic.date_time_unix BETWEEN ? 
AND ?", beginTime, endTime); err != nil { + log.Error("DELETE cloudbrain_duration_statistic data error.") return err } return nil diff --git a/models/dataset.go b/models/dataset.go index 972503641..4c1dc24db 100755 --- a/models/dataset.go +++ b/models/dataset.go @@ -122,22 +122,22 @@ func (datasets DatasetList) loadAttachmentAttributes(opts *SearchDatasetOptions) for i := range datasets { if attachment.DatasetID == datasets[i].ID { - if !attachment.IsPrivate{ + if !attachment.IsPrivate { datasets[i].Attachments = append(datasets[i].Attachments, attachment) - }else{ + } else { permission, ok := permissionMap[datasets[i].ID] if !ok { permission = false datasets[i].Repo.GetOwner() if !permission { - if datasets[i].Repo.OwnerID==opts.User.ID{ + if datasets[i].Repo.OwnerID == opts.User.ID { permission = true - }else{ + } else { isCollaborator, _ := datasets[i].Repo.IsCollaborator(opts.User.ID) - isInRepoTeam,_:=datasets[i].Repo.IsInRepoTeam(opts.User.ID) + isInRepoTeam, _ := datasets[i].Repo.IsInRepoTeam(opts.User.ID) - if isCollaborator ||isInRepoTeam { + if isCollaborator || isInRepoTeam { permission = true } } @@ -603,3 +603,11 @@ func UpdateDatasetCreateUser(ID int64, user *User) error { } return nil } + +func QueryDatasetGroupByTask() ([]map[string]interface{}, error) { + rows, err := x.QueryInterface("SELECT count(*) as total,task FROM public.dataset where task <>'' group by task order by total desc limit 7") + if err != nil { + return nil, err + } + return rows, nil +} diff --git a/models/repo.go b/models/repo.go index b35b56a6f..393cda70a 100755 --- a/models/repo.go +++ b/models/repo.go @@ -1329,7 +1329,7 @@ func CreateRepository(ctx DBContext, doer, u *User, repo *Repository, opts ...Cr } if setting.Service.AutoWatchNewRepos { - if err = watchRepo(ctx.e, doer.ID, repo.ID, true); err != nil { + if err = watchRepo(ctx.e, doer.ID, repo.ID, true, ReceiveAllNotification); err != nil { return fmt.Errorf("watchRepo: %v", err) } } diff --git a/models/repo_activity_custom.go b/models/repo_activity_custom.go index b6fffca0e..26b2ea14f 100644 --- a/models/repo_activity_custom.go +++ b/models/repo_activity_custom.go @@ -263,7 +263,11 @@ func GetAllUserKPIStats(startTime time.Time, endTime time.Time) (map[string]*git log.Warn("get user kpi status err:"+repository.RepoPath(), err1.Error()) continue } - + // if repository.Name == "yolov5" { + // log.Info("repoName=" + repository.Name + " owner=" + repository.RepoPath()) + // authorsOneRepoJson, _ := json.Marshal(authorsOneRepo) + // log.Info("authorsOneRepoJson=" + string(authorsOneRepoJson)) + // } for key, value := range authorsOneRepo { if _, ok := authors[key]; !ok { authors[key] = &git.UserKPIStats{ diff --git a/models/repo_watch.go b/models/repo_watch.go index 573a2d78a..7c43ee352 100644 --- a/models/repo_watch.go +++ b/models/repo_watch.go @@ -24,6 +24,14 @@ const ( RepoWatchModeAuto // 3 ) +// NotifyType specifies what kind of watch the user has on a repository +type NotifyType int8 + +const ( + RejectAllNotification NotifyType = 0 + ReceiveAllNotification NotifyType = 9 +) + var ActionChan = make(chan *Action, 200) var ActionChan4Task = make(chan Action, 200) @@ -34,6 +42,7 @@ type Watch struct { RepoID int64 `xorm:"UNIQUE(watch)"` Mode RepoWatchMode `xorm:"SMALLINT NOT NULL DEFAULT 1"` CreatedUnix int64 `xorm:"created"` + NotifyType NotifyType `xorm:"SMALLINT NOT NULL DEFAULT 0"` } // getWatch gets what kind of subscription a user has on a given repository; returns dummy record if none found @@ -60,8 +69,20 @@ func IsWatching(userID, repoID 
int64) bool { return err == nil && isWatchMode(watch.Mode) } +// GetWatchNotifyType +func GetWatchNotifyType(userID, repoID int64) NotifyType { + watch, err := getWatch(x, userID, repoID) + if err != nil { + return RejectAllNotification + } + return watch.NotifyType +} + func watchRepoMode(e Engine, watch Watch, mode RepoWatchMode) (err error) { if watch.Mode == mode { + if _, err := e.ID(watch.ID).Cols("notify_type").Update(watch); err != nil { + return err + } return nil } if mode == RepoWatchModeAuto && (watch.Mode == RepoWatchModeDont || isWatchMode(watch.Mode)) { @@ -109,7 +130,7 @@ func WatchRepoMode(userID, repoID int64, mode RepoWatchMode) (err error) { return watchRepoMode(x, watch, mode) } -func watchRepo(e Engine, userID, repoID int64, doWatch bool) (err error) { +func watchRepo(e Engine, userID, repoID int64, doWatch bool, notifyTypes ...NotifyType) (err error) { var watch Watch if watch, err = getWatch(e, userID, repoID); err != nil { return err @@ -119,14 +140,19 @@ func watchRepo(e Engine, userID, repoID int64, doWatch bool) (err error) { } else if !doWatch { err = watchRepoMode(e, watch, RepoWatchModeNone) } else { + notifyType := RejectAllNotification + if len(notifyTypes) > 0 { + notifyType = notifyTypes[0] + } + watch.NotifyType = notifyType err = watchRepoMode(e, watch, RepoWatchModeNormal) } return err } // WatchRepo watch or unwatch repository. -func WatchRepo(userID, repoID int64, watch bool) (err error) { - return watchRepo(x, userID, repoID, watch) +func WatchRepo(userID, repoID int64, watch bool, notifyType ...NotifyType) (err error) { + return watchRepo(x, userID, repoID, watch, notifyType...) } func getWatchers(e Engine, repoID int64) ([]*Watch, error) { @@ -156,6 +182,7 @@ func getRepoWatchersIDs(e Engine, repoID int64) ([]int64, error) { return ids, e.Table("watch"). Where("watch.repo_id=?", repoID). And("watch.mode<>?", RepoWatchModeDont). + And("watch.notify_type > ?", RejectAllNotification). Select("user_id"). Find(&ids) } diff --git a/models/resource_specification.go b/models/resource_specification.go index 7a11edd05..2f815818b 100644 --- a/models/resource_specification.go +++ b/models/resource_specification.go @@ -298,6 +298,15 @@ func ResourceSpecOffShelf(id int64) (int64, error) { return n, err } +func GetResourceSpecificationByIds(ids []int64) ([]*Specification, error) { + r := make([]*Specification, 0) + err := x.In("resource_specification.id", ids). + Join("INNER", "resource_queue", "resource_queue.id = resource_specification.queue_id"). + Find(&r) + return r, err + +} + func GetResourceSpecification(r *ResourceSpecification) (*ResourceSpecification, error) { has, err := x.Get(r) if err != nil { diff --git a/models/user.go b/models/user.go index e4eed1f9a..dad252d92 100755 --- a/models/user.go +++ b/models/user.go @@ -380,7 +380,7 @@ func (u *User) DashboardLink() string { if u.IsOrganization() { return setting.AppSubURL + "/org/" + u.Name + "/dashboard/" } - return setting.AppSubURL + "/" + return setting.AppSubURL + "/dashboard" } // HomeLink returns the user or organization home page link. diff --git a/models/user_mail.go b/models/user_mail.go index 8bf74b81b..8388da068 100755 --- a/models/user_mail.go +++ b/models/user_mail.go @@ -216,6 +216,27 @@ func (email *EmailAddress) updateActivation(e Engine, activate bool) error { return updateUserCols(e, user, "rands") } +// UpdateEmailAddress update an email address of given user. 
+func (email *EmailAddress) UpdateEmailAddress(newEmailAddress string) error { + return email.updateEmailAddress(x, newEmailAddress) +} +func (email *EmailAddress) updateEmailAddress(e Engine, newEmailAddress string) error { + user, err := getUserByID(e, email.UID) + if err != nil { + return err + } + if user.Rands, err = GetUserSalt(); err != nil { + return err + } + user.Email = newEmailAddress + user.AvatarEmail = newEmailAddress + email.Email = newEmailAddress + if _, err := e.ID(email.ID).Cols("email").Update(email); err != nil { + return err + } + return updateUserCols(e, user, "email", "avatar_email") +} + // DeleteEmailAddress deletes an email address of given user. func DeleteEmailAddress(email *EmailAddress) (err error) { var deleted int64 diff --git a/modules/auth/user_form.go b/modules/auth/user_form.go index ad78607ab..521585264 100755 --- a/modules/auth/user_form.go +++ b/modules/auth/user_form.go @@ -88,6 +88,10 @@ type RegisterForm struct { Agree bool } +type UpdateEmailForm struct { + NewEmail string `binding:"Required;MaxSize(254)"` +} + // Validate valideates the fields func (f *RegisterForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { return validate(errs, ctx.Data, f, ctx.Locale) diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go index 8d4e57670..6111cf460 100755 --- a/modules/cloudbrain/cloudbrain.go +++ b/modules/cloudbrain/cloudbrain.go @@ -145,7 +145,7 @@ func isAdminOrImageCreater(ctx *context.Context, image *models.Image, err error) func AdminOrOwnerOrJobCreaterRight(ctx *context.Context) { var id = ctx.Params(":id") - job, err := models.GetCloudbrainByID(id) + job, err := GetCloudBrainByIdOrJobId(id) if err != nil { log.Error("GetCloudbrainByID failed:%v", err.Error()) ctx.NotFound(ctx.Req.URL.RequestURI(), nil) @@ -161,7 +161,7 @@ func AdminOrOwnerOrJobCreaterRight(ctx *context.Context) { func AdminOrJobCreaterRight(ctx *context.Context) { var id = ctx.Params(":id") - job, err := models.GetCloudbrainByID(id) + job, err := GetCloudBrainByIdOrJobId(id) if err != nil { log.Error("GetCloudbrainByID failed:%v", err.Error()) ctx.NotFound(ctx.Req.URL.RequestURI(), nil) @@ -177,7 +177,7 @@ func AdminOrJobCreaterRight(ctx *context.Context) { func AdminOrOwnerOrJobCreaterRightForTrain(ctx *context.Context) { var jobID = ctx.Params(":jobid") - job, err := models.GetCloudbrainByJobID(jobID) + job, err := GetCloudBrainByIdOrJobId(jobID) if err != nil { log.Error("GetCloudbrainByJobID failed:%v", err.Error()) ctx.NotFound(ctx.Req.URL.RequestURI(), nil) @@ -193,7 +193,7 @@ func AdminOrOwnerOrJobCreaterRightForTrain(ctx *context.Context) { func AdminOrJobCreaterRightForTrain(ctx *context.Context) { var jobID = ctx.Params(":jobid") - job, err := models.GetCloudbrainByJobID(jobID) + job, err := GetCloudBrainByIdOrJobId(jobID) if err != nil { log.Error("GetCloudbrainByJobID failed:%v", err.Error()) ctx.NotFound(ctx.Req.URL.RequestURI(), nil) @@ -652,3 +652,19 @@ func IsElementExist(s []string, str string) bool { } return false } + +func GetCloudBrainByIdOrJobId(id string) (*models.Cloudbrain,error) { + _, err := strconv.ParseInt(id, 10, 64) + var job *models.Cloudbrain + if err != nil { + + job, err = models.GetCloudbrainByJobID(id) + } else { + job, err = models.GetCloudbrainByID(id) + if err!=nil{ + job, err = models.GetCloudbrainByJobID(id) + } + + } + return job,err +} diff --git a/modules/context/repo.go b/modules/context/repo.go index 7c425c8c0..3bdc34f0d 100755 --- a/modules/context/repo.go +++ 
b/modules/context/repo.go @@ -474,6 +474,7 @@ func RepoAssignment() macaron.Handler { if ctx.IsSigned { ctx.Data["IsWatchingRepo"] = models.IsWatching(ctx.User.ID, repo.ID) + ctx.Data["WatchNotifyType"] = models.GetWatchNotifyType(ctx.User.ID, repo.ID) ctx.Data["IsStaringRepo"] = models.IsStaring(ctx.User.ID, repo.ID) ctx.Data["IsStaringDataset"] = models.IsDatasetStaringByRepoId(ctx.User.ID, repo.ID) diff --git a/modules/grampus/grampus.go b/modules/grampus/grampus.go index b6f62560a..280407240 100755 --- a/modules/grampus/grampus.go +++ b/modules/grampus/grampus.go @@ -37,7 +37,7 @@ var ( SpecialPools *models.SpecialPools - CommandPrepareScriptGpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/%s/archive/master.zip;" + + CommandPrepareScriptGpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://openi.pcl.ac.cn/OpenIOSSG/%s/archive/master.zip;" + "echo \"finish loading script\";unzip -q master.zip;cd %s;chmod 777 downloader_for_obs uploader_for_npu downloader_for_minio uploader_for_gpu;" ) diff --git a/modules/modelarts/modelarts.go b/modules/modelarts/modelarts.go index dd502dfd0..b59be307b 100755 --- a/modules/modelarts/modelarts.go +++ b/modules/modelarts/modelarts.go @@ -1,13 +1,18 @@ package modelarts import ( + "encoding/base64" "encoding/json" "errors" "fmt" + "io/ioutil" + "net/http" "path" "strconv" "strings" + "code.gitea.io/gitea/modules/cloudbrain" + "code.gitea.io/gitea/modules/modelarts_cd" "code.gitea.io/gitea/models" @@ -23,7 +28,7 @@ const ( //notebook storageTypeOBS = "obs" autoStopDuration = 4 * 60 * 60 - autoStopDurationMs = 4 * 60 * 60 * 1000 + AutoStopDurationMs = 4 * 60 * 60 * 1000 MORDELART_USER_IMAGE_ENGINE_ID = -1 DataSetMountPath = "/home/ma-user/work" NotebookEnv = "Python3" @@ -276,7 +281,7 @@ func GenerateTask(ctx *context.Context, jobName, uuid, description, flavor strin return nil } -func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification) error { +func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification, bootFile string,autoStopDurationInMs int64) (string, error) { if poolInfos == nil { json.Unmarshal([]byte(setting.PoolInfos), &poolInfos) } @@ -284,14 +289,14 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc imageName, err := GetNotebookImageName(imageId) if err != nil { log.Error("GetNotebookImageName failed: %v", err.Error()) - return err + return "", err } createTime := timeutil.TimeStampNow() jobResult, err := createNotebook2(models.CreateNotebook2Params{ JobName: jobName, Description: description, Flavor: spec.SourceSpecId, - Duration: autoStopDurationMs, + Duration: autoStopDurationInMs, ImageID: imageId, PoolID: poolInfos.PoolInfo[0].PoolId, Feature: models.NotebookFeature, @@ -316,10 +321,10 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc }) if errTemp != nil { log.Error("InsertCloudbrainTemp failed: %v", errTemp.Error()) - return errTemp + return "", errTemp } } - return err + return "", err } task := &models.Cloudbrain{ Status: jobResult.Status, @@ -334,6 +339,7 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc Uuid: uuid, ComputeResource: models.NPUResource, Image: imageName, + BootFile: bootFile, Description: description, 
CreatedUnix: createTime, UpdatedUnix: createTime, @@ -342,12 +348,12 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc err = models.CreateCloudbrain(task) if err != nil { - return err + return "", err } stringId := strconv.FormatInt(task.ID, 10) notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, displayJobName, models.ActionCreateDebugNPUTask) - return nil + return jobResult.ID, nil } func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) { @@ -907,6 +913,11 @@ func HandleNotebookInfo(task *models.Cloudbrain) error { if task.FlavorCode == "" { task.FlavorCode = result.Flavor } + + if oldStatus != task.Status && task.Status == string(models.ModelArtsRunning) && task.BootFile != "" { + uploadNoteBookFile(task, result) + + } err = models.UpdateJob(task) if err != nil { log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err) @@ -917,6 +928,81 @@ func HandleNotebookInfo(task *models.Cloudbrain) error { return nil } +func uploadNoteBookFile(task *models.Cloudbrain, result *models.GetNotebook2Result) { + jupyterUrl := result.Url + "?token=" + result.Token + + cookies, xsrf := getCookiesAndCsrf(jupyterUrl) + if xsrf == "" { + log.Error("browser jupyterUrl failed:%v", task.DisplayJobName) + } else { + + codePath := setting.JobPath + task.JobName + cloudbrain.CodeMountPath + fileContents, err := ioutil.ReadFile(codePath + "/" + task.BootFile) + if err != nil { + log.Error("read jupyter file failed:%v", task.DisplayJobName, err) + } + + base64Content := base64.StdEncoding.EncodeToString(fileContents) + client := getRestyClient() + uploadUrl := getJupyterBaseUrl(result.Url) + "api/contents/" + path.Base(task.BootFile) + res, err := client.R(). + SetCookies(cookies). + SetHeader("X-XSRFToken", xsrf). + SetBody(map[string]interface{}{ + "type": "file", + "format": "base64", + "name": path.Base(task.BootFile), + "path": path.Base(task.BootFile), + "content": base64Content}). + Put(uploadUrl) + if err != nil { + log.Error("upload jupyter file failed:%v", task.DisplayJobName, err) + } else if res.StatusCode() != http.StatusCreated { + log.Error("upload jupyter file failed:%v", task.DisplayJobName, err) + } + + } + +} + +func getJupyterBaseUrl(url string) string { + jupyterUrlLength := len(url) + baseUrl := url[0 : jupyterUrlLength-len(path.Base(url))] + return baseUrl +} + +func getCookiesAndCsrf(jupyterUrl string) ([]*http.Cookie, string) { + log.Info("jupyter url:"+jupyterUrl) + var cookies []*http.Cookie + const retryTimes = 10 + for i := 0; i < retryTimes; i++ { + res, err := http.Get(jupyterUrl) + if err != nil { + log.Error("browser jupyterUrl failed.",err) + if i==retryTimes-1{ + return cookies, "" + } + + } else { + cookies = res.Cookies() + xsrf := "" + for _, cookie := range cookies { + if cookie.Name == "_xsrf" { + xsrf = cookie.Value + break + } + + } + if xsrf != "" { + return cookies, xsrf + } + + } + } + return cookies, "" + +} + func SyncTempStatusJob() { jobs, err := models.GetCloudBrainTempJobs() if err != nil { diff --git a/modules/modelarts/resty.go b/modules/modelarts/resty.go index c38300606..3ccba9011 100755 --- a/modules/modelarts/resty.go +++ b/modules/modelarts/resty.go @@ -280,7 +280,7 @@ sendjob: SetHeader("Content-Type", "application/json"). SetAuthToken(TOKEN). SetResult(&result). 
- Post(HOST + "/v1/" + setting.ProjectID + urlNotebook2 + "/" + jobID + "/" + param.Action + "?duration=" + strconv.Itoa(autoStopDurationMs)) + Post(HOST + "/v1/" + setting.ProjectID + urlNotebook2 + "/" + jobID + "/" + param.Action + "?duration=" + strconv.Itoa(AutoStopDurationMs)) if err != nil { return &result, fmt.Errorf("resty ManageNotebook2: %v", err) diff --git a/modules/modelarts_cd/modelarts.go b/modules/modelarts_cd/modelarts.go index 330b048ca..93032fa89 100755 --- a/modules/modelarts_cd/modelarts.go +++ b/modules/modelarts_cd/modelarts.go @@ -88,18 +88,18 @@ type Parameters struct { } `json:"parameter"` } -func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification) error { +func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification, bootFile string,autoStopDurationInMs int64) (string, error) { imageName, err := GetNotebookImageName(imageId) if err != nil { log.Error("GetNotebookImageName failed: %v", err.Error()) - return err + return "", err } createTime := timeutil.TimeStampNow() jobResult, err := createNotebook(models.CreateNotebookWithoutPoolParams{ JobName: jobName, Description: description, Flavor: spec.SourceSpecId, - Duration: autoStopDurationMs, + Duration: autoStopDurationInMs, ImageID: imageId, Feature: models.NotebookFeature, Volume: models.VolumeReq{ @@ -123,10 +123,10 @@ func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, descr }) if errTemp != nil { log.Error("InsertCloudbrainTemp failed: %v", errTemp.Error()) - return errTemp + return "", errTemp } } - return err + return "", err } task := &models.Cloudbrain{ Status: jobResult.Status, @@ -145,16 +145,17 @@ func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, descr CreatedUnix: createTime, UpdatedUnix: createTime, Spec: spec, + BootFile: bootFile, } err = models.CreateCloudbrain(task) if err != nil { - return err + return "", err } stringId := strconv.FormatInt(task.ID, 10) notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, displayJobName, models.ActionCreateDebugNPUTask) - return nil + return jobResult.ID, nil } func GetNotebookImageName(imageId string) (string, error) { @@ -175,41 +176,3 @@ func GetNotebookImageName(imageId string) (string, error) { return imageName, nil } - -/* -func HandleNotebookInfo(task *models.Cloudbrain) error { - - result, err := GetNotebook(task.JobID) - if err != nil { - log.Error("GetNotebook2(%s) failed:%v", task.DisplayJobName, err) - return err - } - - if result != nil { - oldStatus := task.Status - task.Status = result.Status - if task.StartTime == 0 && result.Lease.UpdateTime > 0 { - task.StartTime = timeutil.TimeStamp(result.Lease.UpdateTime / 1000) - } - if task.EndTime == 0 && models.IsModelArtsDebugJobTerminal(task.Status) { - task.EndTime = timeutil.TimeStampNow() - } - task.CorrectCreateUnix() - task.ComputeAndSetDuration() - if oldStatus != task.Status { - notification.NotifyChangeCloudbrainStatus(task, oldStatus) - } - if task.FlavorCode == "" { - task.FlavorCode = result.Flavor - } - err = models.UpdateJob(task) - if err != nil { - log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err) - return err - } - } - - return nil -} - -*/ diff --git a/modules/repository/hooks.go b/modules/repository/hooks.go index 82d02b3f1..91fb418ad 100644 --- a/modules/repository/hooks.go +++ b/modules/repository/hooks.go @@ -36,7 +36,7 @@ func getHookTemplates() (hookNames, 
hookTpls, giteaHookTpls, sizeLimitTpls []str fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf), } sizeLimitTpls = []string{ - fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=1\n\n\nstatus=\"$EXIT_SUCCESS\"\n\n# skip this hook entirely if shell check is not open\ncheck_flag=${PUSH_SIZE_CHECK_FLAG}\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"${REPO_MAX_FILE_SIZE}\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. are referenced by\n# another branch or tag).\n\nnew_branch_flag=0\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\n new_branch_flag=1\n echo \"You are creating a new remote branch,openI will check all files in commit history to find oversize files\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)' | \\\n awk -F ' ' -v maxbytes=\"$maxsize\" 'BEGIN {totalIn=0} {if( $3 > maxbytes && $2 == \"blob\") { totalIn+=$3; print $4} else { totalIn+=$3}} END { printf (\"totalIn=\\t%%s\",totalIn)}' )\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\nIFS=$'\\n'\n# rewrite IFS to seperate line in $files\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n \n if [[ ${file} == totalIn=* ]]; then\n\tIFS=$'\\t'\n\ttemp_array=(${file})\n\tpush_size=${temp_array[1]}\n\tcontinue\n fi\n\tunset IFS\n if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"help document -- https://git.openi.org.cn/zeizei/OpenI_Learning/src/branch/master/docs/git/repository_capacity_help.md\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\t\n fi\n echo -e \"\\033[31m- ${file}\\033[0m \"\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\n# if create new branch or tag,use count-objects -v to get pack size\nif [[ $new_branch_flag -eq 1 ]]; then\n size_kb=`git count-objects -v | grep 
'size-pack' | sed 's/.*\\(size-pack:\\).//'`\n size_pack_kb=`git count-objects -v | grep 'size:' | sed 's/.*\\(size:\\).//'`\n\ttotal_kb=`expr $size_kb + $size_pack_kb`\n\tlet push_size=$total_kb*1024\nfi\n\nsizelimit_mb=\"${REPO_MAX_SIZE}\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nreposize_b=${REPO_CURRENT_SIZE}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n echo \"see the help document--https://git.openi.org.cn/zeizei/OpenI_Learning/src/branch/master/docs/git/repository_capacity_help.md\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS", setting.ScriptType), + fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=1\n\n\nstatus=\"$EXIT_SUCCESS\"\n\n# skip this hook entirely if shell check is not open\ncheck_flag=${PUSH_SIZE_CHECK_FLAG}\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"${REPO_MAX_FILE_SIZE}\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. 
are referenced by\n# another branch or tag).\n\nnew_branch_flag=0\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\n new_branch_flag=1\n echo \"You are creating a new remote branch,openI will check all files in commit history to find oversize files\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)' | \\\n awk -F ' ' -v maxbytes=\"$maxsize\" 'BEGIN {totalIn=0} {if( $3 > maxbytes && $2 == \"blob\") { totalIn+=$3; print $4} else { totalIn+=$3}} END { printf (\"totalIn=\\t%%s\",totalIn)}' )\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\nIFS=$'\\n'\n# rewrite IFS to seperate line in $files\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n \n if [[ ${file} == totalIn=* ]]; then\n\tIFS=$'\\t'\n\ttemp_array=(${file})\n\tpush_size=${temp_array[1]}\n\tcontinue\n fi\n\tunset IFS\n if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"help document -- https://openi.pcl.ac.cn/zeizei/OpenI_Learning/src/branch/master/docs/git/repository_capacity_help.md\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\t\n fi\n echo -e \"\\033[31m- ${file}\\033[0m \"\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\n# if create new branch or tag,use count-objects -v to get pack size\nif [[ $new_branch_flag -eq 1 ]]; then\n size_kb=`git count-objects -v | grep 'size-pack' | sed 's/.*\\(size-pack:\\).//'`\n size_pack_kb=`git count-objects -v | grep 'size:' | sed 's/.*\\(size:\\).//'`\n\ttotal_kb=`expr $size_kb + $size_pack_kb`\n\tlet push_size=$total_kb*1024\nfi\n\nsizelimit_mb=\"${REPO_MAX_SIZE}\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nreposize_b=${REPO_CURRENT_SIZE}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n echo \"see the help document--https://openi.pcl.ac.cn/zeizei/OpenI_Learning/src/branch/master/docs/git/repository_capacity_help.md\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS", setting.ScriptType), fmt.Sprintf(""), fmt.Sprintf(""), } diff --git a/modules/setting/setting.go b/modules/setting/setting.go index 0d623eb3b..cf5a4d513 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -584,6 +584,8 @@ var ( TrainJobFLAVORINFOS string ModelArtsSpecialPools string ModelArtsMultiNode string + //kanban + IsCloudbrainTimingEnabled bool // modelarts-cd config ModelartsCD = struct { @@ -719,6 +721,21 @@ var ( TeamName string }{} + FileNoteBook = struct { + ProjectName string + ImageGPU string + SpecIdGPU int64 + SpecIdCPU int64 + ImageIdNPU string + SpecIdNPU int64 + ImageIdNPUCD string + SpecIdNPUCD int64 + 
ImageCPUDescription string + ImageGPUDescription string + ImageNPUDescription string + ImageNPUCDDescription string + }{} + ModelConvert = struct { GPU_PYTORCH_IMAGE string GpuQueue string @@ -1433,7 +1450,7 @@ func NewContext() { DecompressOBSTaskName = sec.Key("DecompressOBSTaskName").MustString("LabelDecompressOBSQueue") sec = Cfg.Section("homepage") - RecommentRepoAddr = sec.Key("Address").MustString("https://git.openi.org.cn/OpenIOSSG/promote/raw/branch/master/") + RecommentRepoAddr = sec.Key("Address").MustString("https://openi.pcl.ac.cn/OpenIOSSG/promote/raw/branch/master/") ESSearchURL = sec.Key("ESSearchURL").MustString("http://192.168.207.94:9200") INDEXPOSTFIX = sec.Key("INDEXPOSTFIX").MustString("") @@ -1588,6 +1605,23 @@ func NewContext() { Course.OrgName = sec.Key("org_name").MustString("") Course.TeamName = sec.Key("team_name").MustString("") + sec = Cfg.Section("file_notebook") + FileNoteBook.ProjectName = sec.Key("project_name").MustString("openi-notebook") + FileNoteBook.ImageIdNPU = sec.Key("imageid_npu").MustString("") + FileNoteBook.ImageGPU = sec.Key("image_gpu").MustString("") + FileNoteBook.SpecIdCPU = sec.Key("specid_cpu").MustInt64(-1) + FileNoteBook.SpecIdGPU = sec.Key("specid_gpu").MustInt64(-1) + FileNoteBook.SpecIdNPU = sec.Key("specid_npu").MustInt64(-1) + FileNoteBook.ImageIdNPUCD = sec.Key("imageid_npu_cd").MustString("") + FileNoteBook.SpecIdNPUCD = sec.Key("specid_npu_cd").MustInt64(-1) + FileNoteBook.ImageCPUDescription = sec.Key("image_cpu_desc").MustString("") + FileNoteBook.ImageGPUDescription = sec.Key("image_gpu_desc").MustString("") + FileNoteBook.ImageNPUDescription = sec.Key("image_npu_desc").MustString("") + FileNoteBook.ImageNPUCDDescription = sec.Key("image_npu_cd_desc").MustString("") + + sec = Cfg.Section("kanban") + IsCloudbrainTimingEnabled = sec.Key("ENABLED").MustBool(false) + getGrampusConfig() getModelartsCDConfig() getModelConvertConfig() @@ -1620,7 +1654,7 @@ func getModelConvertConfig() { ModelConvert.MindsporeBootFile = sec.Key("MindsporeBootFile").MustString("convert_mindspore.py") ModelConvert.TensorFlowNpuBootFile = sec.Key("TensorFlowNpuBootFile").MustString("convert_tensorflow.py") ModelConvert.TensorFlowGpuBootFile = sec.Key("TensorFlowGpuBootFile").MustString("convert_tensorflow_gpu.py") - ModelConvert.ConvertRepoPath = sec.Key("ConvertRepoPath").MustString("https://git.openi.org.cn/zouap/npu_test") + ModelConvert.ConvertRepoPath = sec.Key("ConvertRepoPath").MustString("https://openi.pcl.ac.cn/zouap/npu_test") ModelConvert.GPU_Resource_Specs_ID = sec.Key("GPU_Resource_Specs_ID").MustInt(1) ModelConvert.NPU_FlavorCode = sec.Key("NPU_FlavorCode").MustString("modelarts.bm.910.arm.public.1") ModelConvert.NPU_PoolID = sec.Key("NPU_PoolID").MustString("pool7908321a") diff --git a/modules/structs/cloudbrain.go b/modules/structs/cloudbrain.go index 866c85dad..9ea5601c9 100644 --- a/modules/structs/cloudbrain.go +++ b/modules/structs/cloudbrain.go @@ -41,6 +41,14 @@ type CreateTrainJobOption struct { SpecId int64 `json:"spec_id" binding:"Required"` } +type CreateFileNotebookJobOption struct { + Type int `json:"type"` //0 CPU 1 GPU 2 NPU + File string `json:"file" binding:"Required"` + BranchName string `json:"branch_name" binding:"Required"` + OwnerName string `json:"owner_name" binding:"Required"` + ProjectName string `json:"project_name" binding:"Required"` +} + type Cloudbrain struct { ID int64 `json:"id"` JobID string `json:"job_id"` diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 3e424454b..c314127f1 
100755
--- a/modules/templates/helper.go
+++ b/modules/templates/helper.go
@@ -151,6 +151,9 @@ func NewFuncMap() []template.FuncMap {
 		"EscapePound": func(str string) string {
 			return strings.NewReplacer("%", "%25", "#", "%23", " ", "%20", "?", "%3F").Replace(str)
 		},
+		"IpynbBool": func(str string) bool {
+			return strings.Contains(str, ".ipynb")
+		},
 		"nl2br": func(text string) template.HTML {
 			return template.HTML(strings.Replace(template.HTMLEscapeString(text), "\n", "
", -1)) }, diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 9a16ae0ff..647bdb1ad 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -24,6 +24,7 @@ enable_javascript = This website works better with JavaScript. toc = Table of Contents return=Back OpenI calculation_points = Calculation Points +notice_announcement = Notice Announcement username = Username email = Email Address @@ -236,7 +237,7 @@ page_title=Explore Better AI page_small_title=OpenI AI Development Cooperation Platform page_description=The one-stop collaborative development environment for AI field provides AI development pipeline integrating code development, data management, model debugging, reasoning and evaluation page_use=Use Now -page_only_dynamic=Only show the dynamics of open source projects +page_only_dynamic=The dynamics of open source projects page_recommend_org=Recommended Organizations page_recommend_org_desc=These excellent organizations are using the OpenI AI Collaboration Platform for collaborative development of projects. To show your organization here, page_recommend_org_commit=Click here to submit. @@ -260,7 +261,7 @@ page_dev_env_desc3_title=Once Configuration, Multiple Reuse page_dev_env_desc3_desc=Provide execution environment sharing, Once Configuration, Multiple Reuse. Lower the threshold of model development, and avoid spending repetitive time configuring complex environments. page_dev_yunlao=OpenI AI Collaboration Platform page_dev_yunlao_desc1=OpenI AI collaboration platform has cooperated with Pengcheng cloud brain and China computing power network (C²NET) can be used to complete AI development tasks by using the rich computing resources of Pengcheng cloud brain and China computing network. -page_dev_yunlao_desc2=Pengcheng CloudBrain's existing AI computing power is 100p FLOPS@FP16 (billions of half precision floating-point calculations per second), the main hardware infrastructure consists of GPU servers equipped with NVIDIA Tesla V100 and A100, and Atlas 900 AI clusters equipped with Kunpeng and shengteng processors. +page_dev_yunlao_desc2=Pengcheng CloudBrain is existing AI computing power is 100p FLOPS@FP16 (billions of half precision floating-point calculations per second), the main hardware infrastructure consists of GPU servers equipped with NVIDIA Tesla V100 and A100, and Atlas 900 AI clusters equipped with Kunpeng and shengteng processors. page_dev_yunlao_desc3=China computing power network (C²NET) phase I can realize high-speed network interconnection between different artificial intelligence computing centers, and realize reasonable scheduling of computing power and flexible allocation of resources. At present, 11 intelligent computing centers have been connected, and the total scale of computing power is 1924p OPS@FP16. OpenI AI collaboration platform has been connected to Pengcheng Cloud Computing Institute, Chengdu Intelligent Computing Center, Zhongyuan Intelligent Computing Center, Hefei brain and other nodes. page_dev_yunlao_desc4=Developers can freely select the corresponding computing resources according to the use needs, and can test the adaptability, performance, stability, etc. of the model in different hardware environments. page_dev_yunlao_desc5=If your model requires more computing resources, you can also apply for it separately. @@ -283,6 +284,7 @@ search_ge= wecome_AI_plt = Welcome to OpenI AI Collaboration Platform! 
explore_AI = Explore better AI, come here to find more interesting datasets = Datasets +datasets_descr = Open source dataset base, seamlessly integrated with your project. View all repositories = Repositories use_plt__fuction = To use the AI collaboration functions provided by this platform, such as: hosting code, sharing data, debugging algorithms or training models, start with provide_resoure = Computing resources of CPU/GPU/NPU are provided freely for various types of AI tasks. @@ -290,6 +292,12 @@ activity = Activity no_events = There are no events related or_t = or powerdby=Powered_by Pengcheng CloudBrain、China Computing NET(C²NET)、 +experience_officer=Experience Officer +openi_experience_officer_plan=OpenI AI experience officer growth plan +more_benefits=, More benefits +org_see=See +more_notice=More notices +vedio_detail=Video details [explore] repos = Repositories @@ -345,8 +353,10 @@ account_activated = Account has been activated prohibit_login = Sign In Prohibited prohibit_login_desc = Your account is prohibited to sign in, please contact your site administrator. resent_limit_prompt = You have already requested an activation email recently. Please wait 3 minutes and try again. -has_unconfirmed_mail = Hi %s, you have an unconfirmed email address (%s). If you haven't received a confirmation email or need to resend a new one, please click on the button below. -resend_mail = Click here to resend your activation email +has_unconfirmed_mail = Hi %s, you have an unconfirmed email address (%s). +has_unconfirmed_mail_resend = If you did not receive the activation email, or need to resend it, please click the "Resend your activation email" button below. +has_unconfirmed_mail_change =If you need to change your email address before sending an activation email, please click the "Change email" button below. +resend_mail = Resend your activation email email_not_associate = The email address is not associated with any account. email_not_main=The email address is wrong, please input your primary email address. email_not_right=The email address is not associated with any account, please input the right email address. @@ -383,15 +393,19 @@ openid_register_desc = The chosen OpenID URI is unknown. Associate it with a new openid_signin_desc = Enter your OpenID URI. For example: https://anne.me, bob.openid.org.cn or gnusocial.net/carry. disable_forgot_password_mail = Account recovery is disabled. Please contact your site administrator. email_domain_blacklisted = You cannot register with this kind of email address. +email_domain_blacklisted_change = This type of email address is not currently supported. authorize_application = Authorize Application authorize_redirect_notice = You will be redirected to %s if you authorize this application. authorize_application_created_by = This application was created by %s. authorize_application_description = If you grant the access, it will be able to access and write to all your account information, including private repos and organisations. authorize_title = Authorize "%s" to access your account? authorization_failed = Authorization failed -authorization_failed_desc = The authorization failed because we detected an invalid request. Please contact the maintainer of the app you've tried to authorize. +authorization_failed_desc = The authorization failed because we detected an invalid request. Please contact the maintainer of the app you have tried to authorize. disable_forgot_password_mail = Account recovery is disabled. Please contact your site administrator. 
sspi_auth_failed = SSPI authentication failed +change_email = Change email +change_email_address = Change email address +new_email_address = New email address [phone] format_err=The format of phone number is wrong. query_err=Fail to query phone number, please try again later. @@ -1007,6 +1021,8 @@ readme = README readme_helper = Select a README file template. auto_init = Initialize Repository (Adds .gitignore, License and README) create_repo = Create Repository +failed_to_create_repo=Failed to create repository, please try again later. +failed_to_create_notebook_repo=Failed to create %s repository, please check whether you have the same name project, if yes please update or delete it first. create_course = Publish Course failed_to_create_course=Failed to publish course, please try again later. default_branch = Default Branch @@ -1041,6 +1057,10 @@ model_experience = Model Experience model_noright=You have no right to do the operation. model_rename=Duplicate model name, please modify model name. +notebook_file_not_exist=Notebook file does not exist. +notebook_select_wrong=Please select a Notebook(.ipynb) file first. +notebook_file_no_right=You have no right to access the Notebook(.ipynb) file. + date=Date repo_add=Project Increment repo_total=Project Total @@ -1217,10 +1237,10 @@ cloudbrain.benchmark.evaluate_child_type=Child Type cloudbrain.benchmark.evaluate_mirror=Mirror cloudbrain.benchmark.evaluate_train=Train Script cloudbrain.benchmark.evaluate_test=Test Script -cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"Target detection","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"Target re-identification","second":[{"id":1,"value":"Vehicle re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"Image-based person re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"Multi-target tracking","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} +cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=detection","first":"Target detection","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=reid","first":"Target re-identification","second":[{"id":1,"value":"Vehicle re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"Image-based person re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=tracking","first":"Multi-target tracking","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} cloudbrain.morethanonejob=You 
already have a running or waiting task, create it after that task is over. cloudbrain.morethanonejob1=You have created an equivalent task that is waiting or running, please wait for the task to finish before creating it. -cloudbrain.morethanonejob2=You can view all your Cloud Brain tasks in Home > Cloudbrain Task . +cloudbrain.morethanonejob2=You can view all your Cloud Brain tasks in Home > Cloudbrain Task . modelarts.infer_job_model = Model modelarts.infer_job_model_file = Model File @@ -1243,7 +1263,7 @@ model_Evaluation_not_created = Model evaluation has not been created repo_not_initialized = Code version: You have not initialized the code repository, please initialized first ; debug_task_running_limit =Running time: no more than 4 hours, it will automatically stop if it exceeds 4 hours; dataset_desc = Dataset: Cloud Brain 1 provides CPU/GPU,Cloud Brain 2 provides Ascend NPU.And dataset also needs to be uploaded to the corresponding environment; -platform_instructions = Instructions for use: You can refer to the OpenI_Learning course of Qizhi AI collaboration platform. +platform_instructions = Instructions for use: You can refer to the OpenI_Learning course of Qizhi AI collaboration platform. platform_instructions1 = Instructions for use: You can refer to the platform_instructions2 = OpenI_Learning platform_instructions3 = course of Openi AI collaboration platform. @@ -1394,6 +1414,11 @@ star = Star fork = Fork download_archive = Download Repository star_fail=Failed to %s the dataset. +watched=Watched +notWatched=Not watched +un_watch=Unwatch +watch_all=Watch all +watch_no_notify=Watch but not notify no_desc = No Description no_label = No labels @@ -1435,6 +1460,7 @@ blame = Blame normal_view = Normal View line = line lines = lines +notebook_open = Open in Notebook editor.new_file = New File editor.upload_file = Upload File @@ -3188,6 +3214,9 @@ foot.copyright= Copyright: New Generation Artificial Intelligence Open Source Op Platform_Tutorial = Tutorial foot.advice_feedback = Feedback resource_description = Resource Note +foot.openi_subscription_number = OpenI subscription number +foot.user_communication_group = User communication group + [cloudbrain] all_resource_cluster=All Cluster all_ai_center=All Computing NET diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index ce179949a..8f9e6b664 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -24,6 +24,7 @@ enable_javascript=使用 JavaScript能使本网站更好的工作。 toc=目录 return=返回OpenI calculation_points=算力积分 +notice_announcement=通知公告 username=用户名 email=电子邮件地址 @@ -238,7 +239,7 @@ page_title=探索更好的AI page_small_title=启智AI协作平台 page_description=面向AI领域的一站式协同开发环境,提供集代码开发、数据管理、模型调试、推理和评测为一体的AI开发流水线 page_use=立即使用 -page_only_dynamic=仅展示开源项目动态 +page_only_dynamic=社区开源项目动态 page_recommend_org=推荐组织 page_recommend_org_desc=这些优秀的组织正在使用启智AI开发协作平台;你的组织也想展示到这里, page_recommend_org_commit=点此提交 @@ -285,6 +286,7 @@ search_ge=个 wecome_AI_plt=欢迎来到启智AI协作平台! 
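
Note on the watch strings above: watched / notWatched / un_watch / watch_all / watch_no_notify introduce a three-way watch choice, and the repository Action handler changed later in this patch (routers/repo/repo.go) maps the "watch", "watch_but_reject" and "unwatch" actions to models.WatchRepo with models.ReceiveAllNotification or models.RejectAllNotification. The self-contained sketch below mirrors that mapping with local stand-in types, since WatchRepo's new signature and the notification-mode constants are not defined in this hunk.

    package main

    import "fmt"

    // Stand-in for the notification mode implied by models.ReceiveAllNotification /
    // models.RejectAllNotification; the real constants live in the models package.
    type WatchMode int

    const (
        ReceiveAllNotification WatchMode = iota // "Watch all" (watch_all)
        RejectAllNotification                   // "Watch but not notify" (watch_no_notify)
    )

    // resolveWatchAction mirrors the switch in the Action handler: it returns
    // whether the user ends up watching the repository and, if watching,
    // which notification mode applies.
    func resolveWatchAction(action string) (watching bool, mode WatchMode) {
        switch action {
        case "watch":
            return true, ReceiveAllNotification
        case "watch_but_reject":
            return true, RejectAllNotification
        case "unwatch":
            return false, ReceiveAllNotification // mode is irrelevant when not watching
        default:
            return false, ReceiveAllNotification
        }
    }

    func main() {
        for _, a := range []string{"watch", "watch_but_reject", "unwatch"} {
            w, m := resolveWatchAction(a)
            fmt.Printf("%-16s watching=%v mode=%v\n", a, w, m)
        }
    }
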
explore_AI = 探索更好的AI,来这里发现更有意思的 datasets = 数据集 +datasets_descr=开源数据集大本营,同你的项目无缝集成。查看所有 repositories = 项目 use_plt__fuction = 使用本平台提供的AI协作功能,如:托管代码、共享数据、调试算法或训练模型,请先 provide_resoure = 平台目前提供CPU、GPU、NPU的普惠算力资源,可进行多种类型的AI任务。 @@ -293,7 +295,12 @@ activity = 活动 no_events = 还没有与您相关的活动 or_t = 或 powerdby=Powered_by 鹏城实验室云脑、中国算力网(C²NET)、 - +experience_officer=体验官 +openi_experience_officer_plan=启智社区体验官成长计划 +more_benefits=,超多福利大放送 +org_see=。查看 +more_notice=更多通知 +vedio_detail=详细介绍视频 [explore] repos=项目 @@ -349,8 +356,10 @@ account_activated=帐户已激活 prohibit_login=禁止登录 prohibit_login_desc=您的帐户被禁止登录,请与网站管理员联系。 resent_limit_prompt=您请求发送激活邮件过于频繁,请等待 3 分钟后再试! -has_unconfirmed_mail=%s 您好,系统检测到您有一封发送至 %s 但未被确认的邮件。如果您未收到激活邮件,或需要重新发送,请单击下方的按钮。 -resend_mail=单击此处重新发送确认邮件 +has_unconfirmed_mail=%s 您好,系统检测到您有一封发送至 %s 但未被确认的邮件。 +has_unconfirmed_mail_resend=如果您未收到激活邮件,或需要重新发送,请单击下方的 "重新发送确认邮件 " 按钮。 +has_unconfirmed_mail_change=如果您需要更改邮箱后再发送激活邮件,请单击下方的 "修改邮箱" 按钮。 +resend_mail=重新发送确认邮件 email_not_associate=您输入的邮箱地址未被关联到任何帐号! email_not_main=电子邮箱地址不正确,请输入您设置的主要邮箱地址。 email_not_right=您输入了不存在的邮箱地址,请输入正确的邮箱地址。 @@ -387,6 +396,7 @@ openid_register_desc=所选的 OpenID URI 未知。在这里关联一个新帐 openid_signin_desc=输入您的 OpenID URI。例如: https://anne.me、bob.openid.org.cn 或 gnusocial.net/carry。 disable_forgot_password_mail=帐户恢复功能已被禁用。请与网站管理员联系。 email_domain_blacklisted=暂不支持此类电子邮件地址注册。 +email_domain_blacklisted_change=暂不支持此类电子邮件地址。 authorize_application=应用授权 authorize_redirect_notice=如果您授权此应用,您将会被重定向到 %s。 authorize_application_created_by=此应用由%s创建。 @@ -396,6 +406,9 @@ authorization_failed=授权失败 authorization_failed_desc=授权失败,这是一个无效的请求。请联系尝试授权应用的管理员。 disable_forgot_password_mail = Account recovery is disabled. Please contact your site administrator. sspi_auth_failed=SSPI 认证失败 +change_email=修改邮箱 +change_email_address=修改邮箱地址 +new_email_address=新邮箱地址 [phone] format_err=手机号格式错误。 query_err=查询手机号失败,请稍后再试。 @@ -1013,6 +1026,8 @@ readme=自述 readme_helper=选择自述文件模板。 auto_init=初始化存储库 (添加. 
gitignore、许可证和自述文件) create_repo=创建项目 +failed_to_create_repo=创建项目失败,请稍后再试。 +failed_to_create_notebook_repo=创建项目%s失败,请检查您是否有同名的项目,如果有请先手工修改或删除后重试。 create_course=发布课程 failed_to_create_course=发布课程失败,请稍后再试。 default_branch=默认分支 @@ -1041,6 +1056,9 @@ model_experience = 模型体验 model_noright=您没有操作权限。 model_rename=模型名称重复,请修改模型名称 +notebook_file_not_exist=Notebook文件不存在。 +notebook_select_wrong=请先选择Notebook(.ipynb)文件。 +notebook_file_no_right=您没有这个Notebook文件的读权限。 date=日期 repo_add=新增项目 @@ -1230,8 +1248,8 @@ cloudbrain.benchmark.evaluate_child_type=子类型 cloudbrain.benchmark.evaluate_mirror=镜像 cloudbrain.benchmark.evaluate_train=训练程序 cloudbrain.benchmark.evaluate_test=测试程序 -cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} -cloudbrain.benchmark.model.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} +cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} 
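
Note on the Notebook strings above: notebook_file_not_exist / notebook_select_wrong / notebook_file_no_right accompany the new file_notebook API registered later in this patch (GET /file_notebook and POST /file_notebook/create in routers/api/v1/api.go). Below is a minimal client-side sketch of the info payload returned by GetFileNoteBookInfo; the field names are copied from that handler's ctx.JSON call, while the spec objects are left opaque because api.SpecificationShow is not defined in this patch, and the sample body is hypothetical.

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // fileNotebookInfo mirrors the JSON map built in GetFileNoteBookInfo.
    // Spec payloads are kept as json.RawMessage because their structure
    // (api.SpecificationShow) is not shown in this patch.
    type fileNotebookInfo struct {
        Code                int             `json:"code"`
        ProjectName         string          `json:"projectName"`
        SpecCPU             json.RawMessage `json:"specCpu"`
        SpecGPU             json.RawMessage `json:"specGpu"`
        SpecNPU             json.RawMessage `json:"specNpu"`
        WaitCountGPU        int64           `json:"waitCountGpu"`
        WaitCountNPU        int64           `json:"waitCountNpu"`
        ImageCPUDescription string          `json:"imageCpuDescription"`
        ImageGPUDescription string          `json:"imageGpuDescription"`
        ImageNPUDescription string          `json:"imageNpuDescription"`
    }

    func main() {
        // Hypothetical response body, for illustration only.
        body := []byte(`{"code":0,"projectName":"file-notebook","waitCountGpu":2,"waitCountNpu":0}`)

        var info fileNotebookInfo
        if err := json.Unmarshal(body, &info); err != nil {
            panic(err)
        }
        fmt.Printf("%s: %d GPU task(s) waiting, %d NPU task(s) waiting\n",
            info.ProjectName, info.WaitCountGPU, info.WaitCountNPU)
    }
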
+cloudbrain.benchmark.model.types={"type":[{"id":1,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} cloudbrain.morethanonejob=您已经创建了一个正在等待或运行中的同类任务,请等待任务结束再创建。 cloudbrain.morethanonejob1=您已经有 同类任务 正在等待或运行中,请等待任务结束再创建; cloudbrain.morethanonejob2=可以在 “个人中心 > 云脑任务” 查看您所有的云脑任务。 @@ -1257,7 +1275,7 @@ model_Evaluation_not_created = 未创建过评测任务 repo_not_initialized = 代码版本:您还没有初始化代码仓库,请先创建代码版本; debug_task_running_limit = 运行时长:最长不超过4个小时,超过4个小时将自动停止; dataset_desc = 数据集:云脑1提供 CPU / GPU 资源,云脑2提供 Ascend NPU 资源,调试使用的数据集也需要上传到对应的环境; -platform_instructions = 使用说明:可以参考启智AI协作平台小白训练营课程。 +platform_instructions = 使用说明:可以参考启智AI协作平台小白训练营课程。 platform_instructions1 = 使用说明:可以参考启智AI协作平台 platform_instructions2 = 小白训练营课程 platform_instructions3 = 。 @@ -1411,6 +1429,11 @@ star=点赞 fork=派生 download_archive=下载此项目 star_fail=%s失败。 +watched=已关注 +notWatched=未关注 +un_watch=不关注 +watch_all=关注所有动态 +watch_no_notify=关注但不提醒动态 no_desc=暂无描述 @@ -1454,6 +1477,8 @@ normal_view=普通视图 line=行 lines=行 +notebook_open = 在Notebook中打开 + editor.new_file=新建文件 editor.upload_file=上传文件 editor.edit_file=编辑文件 @@ -3206,6 +3231,8 @@ foot.copyright= 版权所有:新一代人工智能开源开放平台(OpenI Platform_Tutorial=新手指引 foot.advice_feedback = 意见反馈 resource_description = 资源说明 +foot.openi_subscription_number = 启智社区订阅号 +foot.user_communication_group = 用户交流群 [cloudbrain] all_resource_cluster=全部集群 diff --git a/public/home/home.js b/public/home/home.js index 853c3ef23..aeb51b184 100755 --- a/public/home/home.js +++ b/public/home/home.js @@ -9,16 +9,20 @@ if(isEmpty(token)){ var swiperNewMessage = new Swiper(".newslist", { direction: "vertical", - slidesPerView: 9, + slidesPerView: 6, loop: true, + spaceBetween: 8, autoplay: { delay: 2500, disableOnInteraction: false, }, }); -var swiperEvent = new Swiper(".event-list", { - slidesPerView: 3, - spaceBetween: 30, + +var swiperRepo = new Swiper(".homepro-list", { + slidesPerView: 1, + // slidesPerColumn: 2, + // slidesPerColumnFill:'row', + spaceBetween: 20, pagination: { el: ".swiper-pagination", clickable: true, @@ -27,49 +31,130 @@ var swiperEvent = new Swiper(".event-list", { delay: 2500, disableOnInteraction: false, }, + breakpoints: { + 768: { + slidesPerView: 2, + }, + 1024: { + slidesPerView: 2, + }, + 1200: { + slidesPerView: 3, + }, + 1440: { + slidesPerView: 3, + }, + 1840: { + slidesPerView: 3, + }, + 1920: { + slidesPerView: 3, + }, + }, }); -var swiperRepo = new Swiper(".homepro-list", { + +var swiperOrg = new Swiper(".homeorg-list", { slidesPerView: 1, slidesPerColumn: 2, slidesPerColumnFill:'row', - spaceBetween: 30, + spaceBetween: 25, pagination: { el: ".swiper-pagination", clickable: true, }, autoplay: { - delay: 2500, + delay: 4500, disableOnInteraction: false, }, breakpoints: { 768: { - slidesPerView: 2, + slidesPerView: 
3, + slidesPerColumn: 2, }, 1024: { slidesPerView: 3, + slidesPerColumn: 2, + }, + 1200: { + slidesPerView: 4, + slidesPerColumn: 2, + }, + 1440: { + slidesPerView: 4, + slidesPerColumn: 2, + }, + 1840: { + slidesPerView: 4, + slidesPerColumn: 2, + }, + 1920: { + slidesPerView: 4, + slidesPerColumn: 2, }, }, }); -var swiperOrg = new Swiper(".homeorg-list", { +var swiperUserExp = new Swiper(".home-user-exp-list", { slidesPerView: 1, - slidesPerColumn: 4, + spaceBetween: 0, + navigation: { + nextEl: '.homeuserexp .swiper-prev', + prevEl: '.homeuserexp .swiper-next', + }, + autoplay: { + delay: 2500, + disableOnInteraction: false, + }, + breakpoints: { + 768: { + slidesPerView: 2, + }, + 1200: { + slidesPerView: 3, + }, + 1440: { + slidesPerView: 4, + }, + 1840: { + slidesPerView: 4, + }, + 1920: { + slidesPerView: 5, + }, + }, +}); + +var swiperDataset = new Swiper(".home-dataset-list", { + slidesPerView: 2, + slidesPerColumn: 1, slidesPerColumnFill:'row', - spaceBetween: 15, + spaceBetween: 30, pagination: { el: ".swiper-pagination", clickable: true, }, autoplay: { - delay: 4500, + delay: 2500, disableOnInteraction: false, }, breakpoints: { + 676: { + slidesPerView: 3, + }, 768: { - slidesPerView: 2, + slidesPerView: 4, }, - 1024: { - slidesPerView: 3, + 1320: { + slidesPerView: 5, + }, + 1520: { + slidesPerView: 6, + }, + 1720: { + slidesPerView: 7, + }, + 1920: { + slidesPerView: 7, }, }, }); @@ -85,7 +170,7 @@ document.onreadystatechange = function () { if(document.readyState != "complete"){ return; } - console.log("Start to open WebSocket." + document.readyState); + console.log("Start to open WebSocket." + document.readyState); queryRecommendData(); var output = document.getElementById("newmessage"); @@ -101,6 +186,7 @@ document.onreadystatechange = function () { }; socket.onmessage = function (e) { + if (!output) return; var data =JSON.parse(e.data) var html = ""; if (data != null){ @@ -177,18 +263,17 @@ document.onreadystatechange = function () { var time = getTime(record.CreatedUnix,currentTime); html += " " + time; } - html += ""; + html += ""; html += ""; } } output.innerHTML = html; + $('#homenews p').show(); swiperNewMessage.updateSlides(); swiperNewMessage.updateProgress(); }; } - - function getTaskLink(record){ var re = getRepoLink(record); if(record.OpType == 24){ @@ -223,7 +308,7 @@ function getMsg(record){ }else{ console.log("act user is null."); } - html += " \"\"" + html += "
\"\"" html += "
" html += " " + name + "" return html; @@ -236,6 +321,7 @@ function getRepotext(record){ return record.Repo.OwnerName + "/" + record.Repo.Name; } } + function getRepoLink(record){ return encodeURI(record.Repo.OwnerName + "/" + record.Repo.Name); @@ -437,10 +523,6 @@ function getAction(opType,isZh){ } } - - - - function queryRecommendData(){ $.ajax({ type:"GET", @@ -453,7 +535,10 @@ function queryRecommendData(){ success:function(json){ displayOrg(json.org); displayRepo(json.repo); - displayActivity(json.image); + displayActivity(json.activity); + displayDataset(json.dataset); + displayUserExp(json.user_experience); + LetterAvatar && LetterAvatar.transform(); }, error:function(response) { } @@ -463,49 +548,99 @@ function queryRecommendData(){ function displayActivity(json){ var activityDiv = document.getElementById("recommendactivity"); + if (!activityDiv) return; var html = ""; if (json != null && json.length > 0){ for(var i = 0; i < json.length;i++){ - var record = json[i] - html += "
"; - html += ""; - html += "
" + var record = json[i]; + var name = isZh ? (record["name"] || '') : (record["name_en"] || record["name"]); + html += "
"; } + var swiperEvent = new Swiper(".event-list", { + slidesPerView: 1, + spaceBetween: 30, + // pagination: { + // el: ".swiper-pagination", + // clickable: true, + // }, + autoplay: { + delay: 2500, + disableOnInteraction: false, + }, + breakpoints: { + 768: { + slidesPerView: Math.min(2, json.length), + }, + 1024: { + slidesPerView: Math.min(3, json.length), + }, + 1200: { + slidesPerView: Math.min(3, json.length), + }, + 1440: { + slidesPerView: Math.min(4, json.length), + }, + 1840: { + slidesPerView: Math.min(4, json.length), + }, + 1920: { + slidesPerView: Math.min(4, json.length), + }, + }, + }); + activityDiv.innerHTML = html; + swiperEvent.updateSlides(); + swiperEvent.updateProgress(); } - activityDiv.innerHTML = html; - swiperEvent.updateSlides(); - swiperEvent.updateProgress(); } function displayRepo(json){ var orgRepo = document.getElementById("recommendrepo"); var html = ""; if (json != null && json.length > 0){ - for(var i = 0; i < json.length;i++){ - var record = json[i] - html += "
"; - html += "
"; - html += "
"; - html += " "; - html += " " + record["NumStars"] + "" + record["NumForks"]; - html += " "; - html += " "; - html += " " + record["Alias"] +""; - html += "
" + record["Description"] + "
"; - html += "
" - if(record["Topics"] != null){ - for(var j = 0; j < record["Topics"].length; j++){ - topic = record["Topics"][j]; - url = "/explore/repos?q=" + (topic) + "&topic=" - html += "" + topic + ""; - } + var repoMap = {}; + for (var i = 0, iLen = json.length; i < iLen; i++) { + var repo = json[i]; + var label = isZh ? repo.Label : repo.Label_en; + if (repoMap[label]) { + repoMap[label].push(repo); + } else { + repoMap[label] = [repo]; } - html += "
"; - html += "
"; - html += "
"; - html += "
"; + } + + for (var label in repoMap) { + var repos = repoMap[label]; + var labelSearch = repos[0].Label; + html += `
`; + for (var i = 0, iLen = repos.length; i < iLen; i++) { + if (i >= 4) break; + var repo = repos[i]; + // ${repo["NumStars"]}${repo["NumForks"]}
+ html += `
+
+ ${repo["Avatar"] ? `` : ``} + ${repo["Alias"]} +
${repo["Description"]}
+ `; + // if (repo["Topics"] != null) { + // for(var j = 0; j < repo["Topics"].length; j++){ + // var topic = repo["Topics"][j]; + // var url = "/explore/repos?q=" + (topic) + "&topic=" + // html += `${topic}`; + // } + // } + html += ` +
+
`; + } + html += '
' } } orgRepo.innerHTML = html; @@ -513,7 +648,6 @@ function displayRepo(json){ swiperRepo.updateProgress(); } - function getRepoOrOrg(key,isZhLang,numbers=1){ if(numbers > 1){ key+="1"; @@ -537,7 +671,7 @@ function displayOrg(json){ html += " "; html += "
"; html += " " + record["Name"] + " " + record["FullName"]; - html += "
" + record["NumRepos"] +" " + getRepoOrOrg(1,isZh,record["NumRepos"]) + " ・ " + record["NumMembers"] +" " + getRepoOrOrg(2,isZh,record["NumMembers"]) + " ・ " + record["NumTeams"] + " " + getRepoOrOrg(3,isZh,record["NumTeams"]) + "
"; + html += "
" + record["NumRepos"] +" " + getRepoOrOrg(1,isZh,record["NumRepos"]) + " ・ " + record["NumMembers"] +" " + getRepoOrOrg(2,isZh,record["NumMembers"]) + " ・ " + record["NumTeams"] + " " + getRepoOrOrg(3,isZh,record["NumTeams"]) + "
"; html += "
"; html += "
"; html += "
"; @@ -548,3 +682,187 @@ function displayOrg(json){ orgDiv.innerHTML = html; swiperOrg.updateSlides(); } + +function displayDataset(data) { + var homeDatasetEl = document.getElementById("home_dataset"); + if (!homeDatasetEl) return; + var html = ''; + var svgStrMap = { + '0': '', + '1': '', + '2': '', + '3': '', + '4': '', + '5': '', + '6': '', + } + for (var i = 0, iLen = data.length; i < iLen; i++) { + var dataI = data[i]; + html += `` + } + homeDatasetEl.innerHTML = html; + swiperDataset.updateSlides(); + swiperDataset.updateProgress(); +} + +function displayUserExp(data) { + var homeUserExpEl = document.getElementById("home_user-exp"); + if (!homeUserExpEl) return; + var html = ''; + for (var i = 0, iLen = data.length; i < iLen; i++) { + var dataI = data[i]; + html += `
+
+
+ +
+
+
+
${dataI.fullname || dataI.name}
+
${dataI.desc}
+
+
` + } + homeUserExpEl.innerHTML = html; + swiperUserExp.updateSlides(); + swiperUserExp.updateProgress(); +} + +function getNotice() { + $.ajax({ + type:"GET", + url:"/dashboard/invitation", + headers: { authorization:token, }, + dataType:"json", + data: { + filename: 'notice/notice.json', + }, + success:function(json){ + if (json) { + try { + var noticeList = JSON.parse(json).Notices || []; + var noticeEls = $('._hm-recommend-info-area-1 a._hm-notice'); + for (var i = 0, iLen = noticeEls.length; i < iLen; i++) { + var noticeEl = noticeEls.eq(i); + var noticeObj = noticeList[i]; + if (noticeObj) { + var title = isZh ? noticeObj.Title : (noticeObj.Title_en || noticeObj.Title); + noticeEl.attr('href', noticeObj.Link); + noticeEl.find('span').text(title).attr('title', title); + noticeEl.show(); + } else { + noticeEl.hide(); + } + } + } catch (e) { + console.info(e); + } + } + }, + error:function(response) { + } + }); +} + +function getRecommendModule() { + $.ajax({ + type:"GET", + url:"/dashboard/invitation", + headers: { authorization:token, }, + dataType:"json", + data: { + filename: 'home/newfunction', + }, + success:function(json){ + if (json) { + try { + var recommendModuleList = JSON.parse(json) || []; + var recommendModuleEls = $('._hm-recommend-info-area a._hm-link'); + for (var i = 0, iLen = recommendModuleEls.length; i < iLen; i++) { + var recommendModuleEl = recommendModuleEls.eq(i); + var recommendModuleObj = recommendModuleList[i]; + if (recommendModuleObj) { + recommendModuleEl.attr('href', recommendModuleObj.image_link); + recommendModuleEl.text(isZh ? recommendModuleObj.name : (recommendModuleObj.name_en || recommendModuleObj.name)); + } else { + } + } + } catch (e) { + console.info(e); + } + } + }, + error:function(response) { + } + }); +} + +function initHomeTopBanner() { + var homeSlideTimer = null; + var homeSlideDuration = 8000; + function homeSlide(direction, index) { + var slidePages = $('._hm-pg-c ._hm-pg'); + var currentPage = slidePages.filter('._hm-pg-show'); + var slidePagination = $('._hm-slide-pagination-c ._hm-slide-pagination-item'); + var currentIndex = currentPage.index(); + var next = 0; + if (direction) { + next = direction == 'left' ? 
currentIndex - 1 : currentIndex + 1; + } else { + next = index || 0; + } + if (next < 0) next = slidePages.length - 1; + if (next == slidePages.length) next = 0; + slidePages.removeClass('_hm-pg-show'); + slidePages.eq(next).addClass('_hm-pg-show'); + slidePagination.removeClass('_hm-slide-pagination-item-active'); + slidePagination.eq(next).addClass('_hm-slide-pagination-item-active'); + } + + function startSlide() { + homeSlideTimer && clearTimeout(homeSlideTimer); + homeSlideTimer = setTimeout(function() { + homeSlide('right'); + startSlide(); + }, homeSlideDuration); + } + + function stopSlide() { + homeSlideTimer && clearTimeout(homeSlideTimer); + } + + $('._hm-slide-btn').on('click', function () { + if ($(this).hasClass('_hm-slide-btn-left')) { + homeSlide('left'); + } else { + homeSlide('right'); + } + startSlide(); + }); + $('._hm-pg #homenews').on('mouseenter', function() { + stopSlide(); + }).on('mouseleave', function() { + startSlide(); + }); + $('._hm-slide-pagination-c ._hm-slide-pagination-item').on('click', function() { + var self = $(this); + if (self.hasClass('_hm-slide-pagination-item-active')) return; + homeSlide('', self.index()); + startSlide(); + }); + setTimeout(function() { startSlide(); }, 500); +} + +initHomeTopBanner(); +getNotice(); +getRecommendModule(); diff --git a/public/img/search.svg b/public/img/search.svg index ec91b07dd..a4d965f9a 100644 --- a/public/img/search.svg +++ b/public/img/search.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index 2afbb9b7d..8958c55a4 100755 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -614,6 +614,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/download_invitation_detail", operationReq, repo_ext.DownloadInvitationDetail) //cloudbrain board + m.Get("/cloudbrainboard/cloudbrain/resource_queues", repo.GetResourceQueues) m.Group("/cloudbrainboard", func() { m.Get("/downloadAll", repo.DownloadCloudBrainBoard) m.Group("/cloudbrain", func() { @@ -631,7 +632,6 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/overview_resource", repo.GetCloudbrainResourceOverview) m.Get("/resource_usage_statistic", repo.GetDurationRateStatistic) m.Get("/resource_usage_rate_detail", repo.GetCloudbrainResourceUsageDetail) - m.Get("/resource_queues", repo.GetResourceQueues) m.Get("/apitest_for_statistic", repo.CloudbrainDurationStatisticForTest) }) }, operationReq) @@ -737,6 +737,12 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/my_favorite", repo.MyFavoriteDatasetMultiple) }, reqToken(), repoAssignment()) + m.Group("/file_notebook", func() { + m.Get("", reqToken(), repo.GetFileNoteBookInfo) + m.Post("/create", reqToken(), reqWeChat(), bind(api.CreateFileNotebookJobOption{}), repo.CreateFileNoteBook) + + }) + m.Group("/repos", func() { m.Get("/search", repo.Search) diff --git a/routers/api/v1/repo/cloudbrain.go b/routers/api/v1/repo/cloudbrain.go index 68baf3287..cd8340c41 100755 --- a/routers/api/v1/repo/cloudbrain.go +++ b/routers/api/v1/repo/cloudbrain.go @@ -11,6 +11,7 @@ import ( "io" "net/http" "os" + "path" "sort" "strconv" "strings" @@ -78,6 +79,74 @@ func CloudBrainShow(ctx *context.APIContext) { ctx.JSON(http.StatusOK, models.BaseMessageWithDataApi{Code: 0, Message: "", Data: convert.ToCloudBrain(task)}) +} +func CreateFileNoteBook(ctx *context.APIContext, option api.CreateFileNotebookJobOption) { + cloudbrainTask.FileNotebookCreate(ctx.Context, option) +} + +func GetFileNoteBookInfo(ctx *context.APIContext) { + //image 
description spec description waiting count + + specs, err := models.GetResourceSpecificationByIds([]int64{setting.FileNoteBook.SpecIdCPU, setting.FileNoteBook.SpecIdGPU, setting.FileNoteBook.SpecIdNPU, setting.FileNoteBook.SpecIdNPUCD}) + if err != nil { + log.Error("Fail to query specifications", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_query_fail"))) + return + } + + var specCPU, specGpu, specNPU, specNPUCD *api.SpecificationShow + var specGpuQueueCode string + for _, spec := range specs { + if spec.ID == setting.FileNoteBook.SpecIdCPU { + specCPU = convert.ToSpecification(spec) + } else if spec.ID == setting.FileNoteBook.SpecIdGPU { + specGpu = convert.ToSpecification(spec) + specGpuQueueCode = spec.QueueCode + } else if spec.ID == setting.FileNoteBook.SpecIdNPU { + specNPU = convert.ToSpecification(spec) + } else if spec.ID == setting.FileNoteBook.SpecIdNPUCD { + specNPUCD = convert.ToSpecification(spec) + } + } + + waitCountNpu := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") + + queuesMap, err := cloudbrain.GetQueuesDetail() + if err != nil { + log.Error("Fail to query gpu queues waiting count", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_query_fail"))) + return + } + waitCountGPU := (*queuesMap)[specGpuQueueCode] + if !setting.ModelartsCD.Enabled { + ctx.JSON(http.StatusOK, map[string]interface{}{ + "code": 0, + "projectName": setting.FileNoteBook.ProjectName, + "specCpu": specCPU, + "specGpu": specGpu, + "specNpu": specNPU, + "waitCountGpu": waitCountGPU, + "waitCountNpu": waitCountNpu, + "imageCpuDescription": setting.FileNoteBook.ImageCPUDescription, + "imageGpuDescription": setting.FileNoteBook.ImageGPUDescription, + "imageNpuDescription": setting.FileNoteBook.ImageNPUDescription, + }) + } else { + ctx.JSON(http.StatusOK, map[string]interface{}{ + "code": 0, + "projectName": setting.FileNoteBook.ProjectName, + "specCpu": specCPU, + "specGpu": specGpu, + "specNpu": specNPUCD, + "waitCountGpu": waitCountGPU, + "waitCountNpu": waitCountNpu, + "imageCpuDescription": setting.FileNoteBook.ImageCPUDescription, + "imageGpuDescription": setting.FileNoteBook.ImageGPUDescription, + "imageNpuDescription": setting.FileNoteBook.ImageNPUCDDescription, + }) + + } + } func CreateCloudBrain(ctx *context.APIContext, option api.CreateTrainJobOption) { @@ -141,10 +210,11 @@ func GetCloudbrainTask(ctx *context.APIContext) { ) ID := ctx.Params(":id") - job, err := models.GetCloudbrainByID(ID) + + job, err := cloudbrain.GetCloudBrainByIdOrJobId(ID) + if err != nil { ctx.NotFound(err) - log.Error("GetCloudbrainByID failed:", err) return } if job.JobType == string(models.JobTypeModelSafety) { @@ -487,6 +557,12 @@ func ModelSafetyGetLog(ctx *context.APIContext) { }) return } + prefix := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, job.JobName, modelarts.LogPath, job.VersionName), "/") + "/job" + _, err = storage.GetObsLogFileName(prefix) + canLogDownload := isCanDownloadLog(ctx, job) + if err != nil { + canLogDownload = false + } ctx.Data["log_file_name"] = resultLogFile.LogFileList[0] ctx.JSON(http.StatusOK, map[string]interface{}{ "JobID": job.JobID, @@ -495,7 +571,7 @@ func ModelSafetyGetLog(ctx *context.APIContext) { "EndLine": result.EndLine, "Content": result.Content, "Lines": result.Lines, - "CanLogDownload": isCanDownloadLog(ctx, job), + "CanLogDownload": canLogDownload, "StartTime": job.StartTime, }) } @@ -566,7 +642,6 @@ func CloudbrainDownloadLogFile(ctx *context.Context) { url, err := 
storage.Attachments.PresignedGetURL(prefix+"/"+fileName, fileName) if err != nil { log.Error("Get minio get SignedUrl failed: %v", err.Error(), ctx.Data["msgID"]) - ctx.ServerError("Get minio get SignedUrl failed", err) return } log.Info("fileName=" + fileName) @@ -650,7 +725,7 @@ func CloudbrainGetLog(ctx *context.APIContext) { result = getLogFromModelDir(job.JobName, startLine, endLine, resultPath) if result == nil { log.Error("GetJobLog failed: %v", err, ctx.Data["MsgID"]) - ctx.ServerError(err.Error(), err) + //ctx.ServerError(err.Error(), err) return } } @@ -865,7 +940,7 @@ func CloudBrainModelConvertList(ctx *context.APIContext) { err = json.Unmarshal([]byte(dirs), &fileInfos) if err != nil { log.Error("json.Unmarshal failed:%v", err.Error(), ctx.Data["msgID"]) - ctx.ServerError("json.Unmarshal failed:", err) + //ctx.ServerError("json.Unmarshal failed:", err) return } @@ -896,7 +971,7 @@ func CloudBrainModelConvertList(ctx *context.APIContext) { models, err := storage.GetObsListObject(job.ID, "output/", parentDir, versionName) if err != nil { log.Info("get TrainJobListModel failed:", err) - ctx.ServerError("GetObsListObject:", err) + //ctx.ServerError("GetObsListObject:", err) return } @@ -941,7 +1016,7 @@ func CloudBrainModelList(ctx *context.APIContext) { err = json.Unmarshal([]byte(dirs), &fileInfos) if err != nil { log.Error("json.Unmarshal failed:%v", err.Error(), ctx.Data["msgID"]) - ctx.ServerError("json.Unmarshal failed:", err) + //ctx.ServerError("json.Unmarshal failed:", err) return } diff --git a/routers/api/v1/repo/cloudbrain_dashboard.go b/routers/api/v1/repo/cloudbrain_dashboard.go index 446522fc2..7fe5d603c 100755 --- a/routers/api/v1/repo/cloudbrain_dashboard.go +++ b/routers/api/v1/repo/cloudbrain_dashboard.go @@ -11,6 +11,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/routers/repo" cloudbrainService "code.gitea.io/gitea/services/cloudbrain" "code.gitea.io/gitea/services/cloudbrain/resource" @@ -58,42 +59,30 @@ func GetAllCloudbrainsOverview(ctx *context.Context) { return } cloudbrainTypeCount, err := models.GetCloudbrainTypeCount() - log.Info("cloudbrainTypeCount:", cloudbrainTypeCount) if err != nil { log.Error("Can not query cloudbrainTypeCount.", err) return } - cloudbrainTpyeDurationSum, err := models.GetCloudbrainTpyeDurationSum() - log.Info("cloudbrainTpyeDurationSum:", cloudbrainTpyeDurationSum) - if err != nil { - log.Error("Can not query cloudbrainTpyeDurationSum.", err) - return - } - todayCloudbrainCount, err := models.GetTodayCloudbrainCount(beginTime, endTime) - log.Info("todayCloudbrainCount:", todayCloudbrainCount) if err != nil { log.Error("Can not query todayCloudbrainCount.", err) return } todayRunningCount, err := models.GetTodayRunningCount(beginTime, endTime) - log.Info("todayRunningCount:", todayRunningCount) if err != nil { log.Error("Can not query todayRunningCount.", err) return } todayWaitingCount, err := models.GetTodayWaitingCount(beginTime, endTime) - log.Info("todayWaittingCount:", todayWaitingCount) if err != nil { log.Error("Can not query todayWaitingCount.", err) return } todayCompletedCount := todayCloudbrainCount - todayRunningCount - todayWaitingCount - log.Info("todayCompletedCount:", todayCompletedCount) creatorCount, err := models.GetCreatorCount() if err != nil { @@ -123,8 +112,9 @@ func GetOverviewDuration(ctx *context.Context) { recordBeginTime := recordCloudbrain[0].Cloudbrain.CreatedUnix now := 
time.Now() endTime := now - // worker_server_num := 1 - // cardNum := 1 + var workServerNumber int64 + var cardNum int64 + durationAllSum := int64(0) cardDuSum := int64(0) @@ -138,7 +128,7 @@ func GetOverviewDuration(ctx *context.Context) { c2NetDuration := int64(0) cDCenterDuration := int64(0) - cloudbrains, _, err := models.CloudbrainAllStatic(&models.CloudbrainsOptions{ + cloudbrains, _, err := models.CloudbrainAllKanBan(&models.CloudbrainsOptions{ Type: models.TypeCloudBrainAll, BeginTimeUnix: int64(recordBeginTime), EndTimeUnix: endTime.Unix(), @@ -151,22 +141,18 @@ func GetOverviewDuration(ctx *context.Context) { for _, cloudbrain := range cloudbrains { cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain) - CardDurationString := repo.GetCloudbrainCardDuration(cloudbrain.Cloudbrain) - CardDuration := models.ConvertStrToDuration(CardDurationString) - // if cloudbrain.Cloudbrain.WorkServerNumber >= 1 { - // worker_server_num = cloudbrain.Cloudbrain.WorkServerNumber - // } else { - // worker_server_num = 1 - // } - // if cloudbrain.Cloudbrain.Spec == nil { - // cardNum = 1 - // } else { - // cardNum = cloudbrain.Cloudbrain.Spec.AccCardsNum - // } - // duration := cloudbrain.Duration - // duration := cloudbrain.Duration + if cloudbrain.Cloudbrain.Spec != nil { + cardNum = int64(cloudbrain.Cloudbrain.Spec.AccCardsNum) + } else { + cardNum = 1 + } + if cloudbrain.Cloudbrain.WorkServerNumber >= 1 { + workServerNumber = int64(cloudbrain.Cloudbrain.WorkServerNumber) + } else { + workServerNumber = 1 + } duration := models.ConvertStrToDuration(cloudbrain.TrainJobDuration) - // CardDuration := cloudbrain.Duration * int64(worker_server_num) * int64(cardNum) + CardDuration := workServerNumber * int64(cardNum) * duration if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainOne { cloudBrainOneDuration += duration @@ -1479,12 +1465,17 @@ func getCloudbrainTimePeroid(ctx *context.Context, recordBeginTime time.Time) (t } func GetCloudbrainResourceOverview(ctx *context.Context) { + var recordBeginTime timeutil.TimeStamp recordCloudbrainDuration, err := models.GetDurationRecordBeginTime() if err != nil { log.Error("Can not get GetDurationRecordBeginTime", err) return } - recordBeginTime := recordCloudbrainDuration[0].DateTime + if len(recordCloudbrainDuration) > 0 && err == nil { + recordBeginTime = recordCloudbrainDuration[0].DateTimeUnix + } else { + recordBeginTime = timeutil.TimeStamp(time.Now().Unix()) + } recordUpdateTime := time.Now().Unix() resourceQueues, err := models.GetCanUseCardInfo() if err != nil { @@ -1611,6 +1602,7 @@ func getBeginAndEndTime(ctx *context.Context) (time.Time, time.Time) { now := time.Now() beginTimeStr := ctx.QueryTrim("beginTime") endTimeStr := ctx.QueryTrim("endTime") + var brainRecordBeginTime time.Time var beginTime time.Time var endTime time.Time @@ -1623,7 +1615,12 @@ func getBeginAndEndTime(ctx *context.Context) (time.Time, time.Time) { ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) return beginTime, endTime } - brainRecordBeginTime := recordCloudbrainDuration[0].DateTime.AsTime() + if len(recordCloudbrainDuration) > 0 && err == nil { + brainRecordBeginTime = recordCloudbrainDuration[0].DateTimeUnix.AsTime() + } else { + brainRecordBeginTime = now + } + beginTime = brainRecordBeginTime endTime = now } else if queryType == "today" { @@ -1665,7 +1662,11 @@ func getBeginAndEndTime(ctx *context.Context) (time.Time, time.Time) { ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) return beginTime, endTime 
} - brainRecordBeginTime := recordCloudbrainDuration[0].DateTime.AsTime() + if len(recordCloudbrainDuration) > 0 && err == nil { + brainRecordBeginTime = recordCloudbrainDuration[0].DateTimeUnix.AsTime() + } else { + brainRecordBeginTime = now + } beginTime = brainRecordBeginTime endTime = now } else { @@ -1696,7 +1697,7 @@ func getAiCenterUsageDuration(beginTime time.Time, endTime time.Time, cloudbrain usageRate := float64(0) for _, cloudbrainStatistic := range cloudbrainStatistics { - if int64(cloudbrainStatistic.DateTime) >= beginTime.Unix() && int64(cloudbrainStatistic.DateTime) < endTime.Unix() { + if int64(cloudbrainStatistic.DateTimeUnix) >= beginTime.Unix() && int64(cloudbrainStatistic.DateTimeUnix) < endTime.Unix() { totalDuration += cloudbrainStatistic.CardsTotalDuration usageDuration += cloudbrainStatistic.CardsUseDuration } @@ -1914,7 +1915,7 @@ func CloudbrainUpdateAiCenter(ctx *context.Context) { func GetResourceQueues(ctx *context.Context) { resourceQueues, err := models.GetCanUseCardInfo() if err != nil { - log.Info("GetCanUseCardInfo err: %v", err) + log.Error("GetCanUseCardInfo err: %v", err) return } Resource := make([]*models.ResourceQueue, 0) diff --git a/routers/api/v1/repo/modelarts.go b/routers/api/v1/repo/modelarts.go index e0db9eda3..127ddd835 100755 --- a/routers/api/v1/repo/modelarts.go +++ b/routers/api/v1/repo/modelarts.go @@ -6,6 +6,7 @@ package repo import ( + "code.gitea.io/gitea/modules/cloudbrain" "encoding/json" "net/http" "path" @@ -37,11 +38,14 @@ func GetModelArtsNotebook2(ctx *context.APIContext) { ) ID := ctx.Params(":id") - job, err := models.GetCloudbrainByID(ID) + + job,err := cloudbrain.GetCloudBrainByIdOrJobId(ID) + if err != nil { ctx.NotFound(err) return } + err = modelarts.HandleNotebookInfo(job) if err != nil { ctx.NotFound(err) diff --git a/routers/home.go b/routers/home.go index d54a0160f..7378fbc4f 100755 --- a/routers/home.go +++ b/routers/home.go @@ -7,6 +7,7 @@ package routers import ( "bytes" + "encoding/json" "code.gitea.io/gitea/routers/response" "net/http" "strconv" @@ -771,7 +772,7 @@ func NotFound(ctx *context.Context) { } func getRecommendOrg() ([]map[string]interface{}, error) { - url := setting.RecommentRepoAddr + "organizations" + url := setting.RecommentRepoAddr + "home/organizations" result, err := repository.RecommendFromPromote(url) if err != nil { @@ -844,7 +845,7 @@ func GetMapInfo(ctx *context.Context) { } func GetRankUser(index string) ([]map[string]interface{}, error) { - url := setting.RecommentRepoAddr + "user_rank_" + index + url := setting.RecommentRepoAddr + "user_rank/user_rank_" + index result, err := repository.RecommendFromPromote(url) if err != nil { @@ -855,13 +856,25 @@ func GetRankUser(index string) ([]map[string]interface{}, error) { tmpIndex := strings.Index(userRank, " ") userName := userRank score := 0 + label := "" if tmpIndex != -1 { userName = userRank[0:tmpIndex] - tmpScore, err := strconv.Atoi(userRank[tmpIndex+1:]) - if err != nil { - log.Info("convert to int error.") + left := userRank[tmpIndex+1:] + tmpIndex1 := strings.Index(left, " ") + if tmpIndex1 != -1 { + tmpScore, err := strconv.Atoi(left[0:tmpIndex1]) + if err != nil { + log.Info("convert to int error.") + } + score = tmpScore + label = left[tmpIndex1+1:] + } else { + tmpScore, err := strconv.Atoi(left[tmpIndex+1:]) + if err != nil { + log.Info("convert to int error.") + } + score = tmpScore } - score = tmpScore } user, err := models.GetUserByName(userName) if err == nil { @@ -871,6 +884,7 @@ func GetRankUser(index string) 
([]map[string]interface{}, error) { userMap["FullName"] = user.FullName userMap["HomeLink"] = user.HomeLink() userMap["ID"] = user.ID + userMap["Label"] = label userMap["Avatar"] = user.RelAvatarLink() userMap["Score"] = score resultOrg = append(resultOrg, userMap) @@ -891,25 +905,54 @@ func GetUserRankFromPromote(ctx *context.Context) { ctx.JSON(200, resultUserRank) } +func getMapContent(fileName string) []map[string]string { + url := setting.RecommentRepoAddr + fileName + result, err := repository.RecommendContentFromPromote(url) + remap := make([]map[string]string, 0) + if err == nil { + json.Unmarshal([]byte(result), &remap) + } + return remap +} + +func HomeNoticeTmpl(ctx *context.Context) { + ctx.Data["url_params"] = "" + ctx.HTML(200, "notice") +} + func RecommendHomeInfo(ctx *context.Context) { resultOrg, err := getRecommendOrg() if err != nil { log.Info("error." + err.Error()) } - resultRepo, err := repository.GetRecommendRepoFromPromote("projects") + repoMap := getMapContent("home/projects") + resultRepo, err := repository.GetRecommendRepoFromPromote(repoMap) if err != nil { log.Info("error." + err.Error()) } - resultImage, err := getImageInfo("picture_info") - if err != nil { - log.Info("error." + err.Error()) - } - + resultActivityInfo := getMapContent("home/activity_info") mapInterface := make(map[string]interface{}) mapInterface["org"] = resultOrg mapInterface["repo"] = resultRepo - mapInterface["image"] = resultImage - //mapInterface["cloudbrain"] = resultCloudBrain + mapInterface["activity"] = resultActivityInfo + + user_experience := getMapContent("home/user_experience") + for _, amap := range user_experience { + userId := amap["userid"] + userIntId, _ := strconv.Atoi(userId) + user, err := models.GetUserByID(int64(userIntId)) + if err == nil { + amap["name"] = user.Name + amap["fullname"] = user.FullName + amap["detail"] = user.Description + amap["avatar"] = user.AvatarLink() + } + } + mapInterface["user_experience"] = user_experience + dataset, err := models.QueryDatasetGroupByTask() + if err == nil { + mapInterface["dataset"] = dataset + } ctx.JSON(http.StatusOK, mapInterface) } @@ -923,4 +966,4 @@ func HomePrivacy(ctx *context.Context) { func HomeResoruceDesc(ctx *context.Context) { ctx.HTML(200, tplResoruceDesc) -} +} \ No newline at end of file diff --git a/routers/repo/ai_model_convert.go b/routers/repo/ai_model_convert.go index 962c76aae..4ba414bff 100644 --- a/routers/repo/ai_model_convert.go +++ b/routers/repo/ai_model_convert.go @@ -49,7 +49,7 @@ const ( //TensorFlowNpuBootFile = "convert_tensorflow.py" //TensorFlowGpuBootFile = "convert_tensorflow_gpu.py" - //ConvertRepoPath = "https://git.openi.org.cn/zouap/npu_test" + //ConvertRepoPath = "https://openi.pcl.ac.cn/zouap/npu_test" CONVERT_FORMAT_ONNX = 0 CONVERT_FORMAT_TRT = 1 @@ -828,5 +828,4 @@ func ModelConvertDownloadModel(ctx *context.Context) { http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusTemporaryRedirect) } } - } diff --git a/routers/repo/aisafety.go b/routers/repo/aisafety.go index b638a486b..6176fcda5 100644 --- a/routers/repo/aisafety.go +++ b/routers/repo/aisafety.go @@ -11,7 +11,8 @@ import ( "os" "strconv" "strings" - "time" + + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/aisafety" @@ -483,7 +484,6 @@ func isTaskNotFinished(status string) bool { } func AiSafetyCreateForGetGPU(ctx *context.Context) { - t := time.Now() ctx.Data["PageIsCloudBrain"] = true ctx.Data["IsCreate"] = true ctx.Data["type"] = 
models.TypeCloudBrainOne @@ -497,7 +497,7 @@ func AiSafetyCreateForGetGPU(ctx *context.Context) { log.Info("GPUBaseDataSetUUID=" + setting.ModelSafetyTest.GPUBaseDataSetUUID) log.Info("GPUCombatDataSetName=" + setting.ModelSafetyTest.GPUCombatDataSetName) log.Info("GPUCombatDataSetUUID=" + setting.ModelSafetyTest.GPUCombatDataSetUUID) - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName prepareCloudbrainOneSpecs(ctx) queuesDetail, _ := cloudbrain.GetQueuesDetail() @@ -514,12 +514,11 @@ func AiSafetyCreateForGetGPU(ctx *context.Context) { } func AiSafetyCreateForGetNPU(ctx *context.Context) { - t := time.Now() ctx.Data["PageIsCloudBrain"] = true ctx.Data["IsCreate"] = true ctx.Data["type"] = models.TypeCloudBrainTwo ctx.Data["compute_resource"] = models.NPUResource - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName ctx.Data["datasetType"] = models.TypeCloudBrainTwo ctx.Data["BaseDataSetName"] = setting.ModelSafetyTest.NPUBaseDataSetName diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 7d96d1b58..d3d76f440 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -15,6 +15,8 @@ import ( "time" "unicode/utf8" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" + "code.gitea.io/gitea/modules/urfs_client/urchin" "code.gitea.io/gitea/modules/dataset" @@ -92,28 +94,9 @@ func MustEnableCloudbrain(ctx *context.Context) { } } -func cutString(str string, lens int) string { - if len(str) < lens { - return str - } - return str[:lens] -} - -func jobNamePrefixValid(s string) string { - lowStr := strings.ToLower(s) - re := regexp.MustCompile(`[^a-z0-9_\\-]+`) - - removeSpecial := re.ReplaceAllString(lowStr, "") - - re = regexp.MustCompile(`^[_\\-]+`) - return re.ReplaceAllString(removeSpecial, "") - -} - func cloudBrainNewDataPrepare(ctx *context.Context, jobType string) error { ctx.Data["PageIsCloudBrain"] = true - t := time.Now() - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName ctx.Data["command"] = cloudbrain.GetCloudbrainDebugCommand() @@ -696,7 +679,7 @@ func CloudBrainRestart(ctx *context.Context) { } else { if count >= 1 { log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) - resultCode = "-1" + resultCode = "2" errorMsg = ctx.Tr("repo.cloudbrain.morethanonejob") break } @@ -759,43 +742,13 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo return } if task.Status == string(models.JobWaiting) || task.Status == string(models.JobRunning) { - result, err := cloudbrain.GetJob(task.JobID) + task, err = cloudbrainTask.SyncCloudBrainOneStatus(task) if err != nil { log.Info("error:" + err.Error()) ctx.NotFound(ctx.Req.URL.RequestURI(), nil) return } - if result != nil { - jobRes, _ := models.ConvertToJobResultPayload(result.Payload) - taskRoles := jobRes.TaskRoles - taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) - ctx.Data["taskRes"] = taskRes - 
ctx.Data["ExitDiagnostics"] = taskRes.TaskStatuses[0].ExitDiagnostics - oldStatus := task.Status - task.Status = taskRes.TaskStatuses[0].State - task.ContainerIp = "" - task.ContainerID = taskRes.TaskStatuses[0].ContainerID - models.ParseAndSetDurationFromCloudBrainOne(jobRes, task) - - if task.DeletedAt.IsZero() { //normal record - if oldStatus != task.Status { - notification.NotifyChangeCloudbrainStatus(task, oldStatus) - } - err = models.UpdateJob(task) - if err != nil { - ctx.Data["error"] = err.Error() - return - } - } else { //deleted record - - } - - ctx.Data["result"] = jobRes - } else { - log.Info("error:" + err.Error()) - return - } } user, err := models.GetUserByID(task.UserID) @@ -889,7 +842,13 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo func CloudBrainDebug(ctx *context.Context) { task := ctx.Cloudbrain debugUrl := setting.DebugServerHost + "jpylab_" + task.JobID + "_" + task.SubTaskName - ctx.Redirect(debugUrl) + if task.BootFile!=""{ + ctx.Redirect(getFileUrl(debugUrl,task.BootFile)) + + }else{ + ctx.Redirect(debugUrl) + } + } func prepareSpec4Show(ctx *context.Context, task *models.Cloudbrain) { diff --git a/routers/repo/cloudbrain_statistic.go b/routers/repo/cloudbrain_statistic.go index 8084614eb..de95babe9 100644 --- a/routers/repo/cloudbrain_statistic.go +++ b/routers/repo/cloudbrain_statistic.go @@ -8,6 +8,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" cloudbrainService "code.gitea.io/gitea/services/cloudbrain" ) @@ -19,32 +20,33 @@ func CloudbrainDurationStatisticHour() { return } }() - var statisticTime time.Time - var count int64 - recordDurationUpdateTime, err := models.GetDurationRecordUpdateTime() - if err != nil { - log.Error("Can not get GetDurationRecordBeginTime", err) - } - now := time.Now() - currentTime := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, now.Location()) - if err == nil && len(recordDurationUpdateTime) > 0 { - statisticTime = time.Unix(int64(recordDurationUpdateTime[0].DateTime), 0).Add(+1 * time.Hour) - } else { - statisticTime = currentTime - } - deleteBeginTime := time.Unix(int64(recordDurationUpdateTime[0].DateTime), 0) + if setting.IsCloudbrainTimingEnabled { + var statisticTime time.Time + var count int64 + recordDurationUpdateTime, err := models.GetDurationRecordUpdateTime() + if err != nil { + log.Error("Can not get GetDurationRecordBeginTime", err) + } + now := time.Now() + currentTime := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, now.Location()) + if err == nil && len(recordDurationUpdateTime) > 0 { + statisticTime = time.Unix(int64(recordDurationUpdateTime[0].DateTimeUnix), 0).Add(+1 * time.Hour) + } else { + statisticTime = currentTime + } - err = models.DeleteCloudbrainDurationStatistic(timeutil.TimeStamp(deleteBeginTime.Unix()), timeutil.TimeStamp(currentTime.Unix())) - if err != nil { - log.Error("DeleteCloudbrainDurationStatistic failed", err) - } + err = models.DeleteCloudbrainDurationStatistic(timeutil.TimeStamp(statisticTime.Add(-1*time.Hour).Unix()), timeutil.TimeStamp(currentTime.Unix())) + if err != nil { + log.Error("DeleteCloudbrainDurationStatistic failed", err) + } - for statisticTime.Before(currentTime) || statisticTime.Equal(currentTime) { - countEach := summaryDurationStat(statisticTime) - count += countEach - statisticTime = statisticTime.Add(+1 * time.Hour) + for 
statisticTime.Before(currentTime) || statisticTime.Equal(currentTime) { + countEach := summaryDurationStat(statisticTime) + count += countEach + statisticTime = statisticTime.Add(+1 * time.Hour) + } + log.Info("summaryDurationStat count: %v", count) } - log.Info("summaryDurationStat count: %v", count) } func UpdateDurationStatisticHistoryData(beginTime time.Time, endTime time.Time) int64 { var count int64 @@ -61,7 +63,7 @@ func UpdateDurationStatisticHistoryData(beginTime time.Time, endTime time.Time) //statisticTime是当前的时辰,比如当前是2019-01-01 12:01:01,那么statisticTime就是2019-01-01 12:00:00 func summaryDurationStat(statisticTime time.Time) int64 { var count int64 - dateTime := timeutil.TimeStamp(statisticTime.Add(-1 * time.Hour).Unix()) + dateTimeUnix := timeutil.TimeStamp(statisticTime.Add(-1 * time.Hour).Unix()) beginTime := statisticTime.Add(-1 * time.Hour).Unix() dayTime := statisticTime.Add(-1 * time.Hour).Format("2006-01-02") hourTime := statisticTime.Add(-1 * time.Hour).Hour() @@ -72,27 +74,8 @@ func summaryDurationStat(statisticTime time.Time) int64 { log.Info("GetCloudbrainByTime err: %v", err) return 0 } - cloudbrainMap := make(map[string]*models.Cloudbrain) models.LoadSpecs4CloudbrainInfo(ciTasks) - - for _, cloudbrain := range ciTasks { - if cloudbrain.Cloudbrain.StartTime == 0 { - cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix - } - if cloudbrain.Cloudbrain.EndTime == 0 { - cloudbrain.Cloudbrain.EndTime = cloudbrain.Cloudbrain.UpdatedUnix - } - cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain) - if cloudbrain.Cloudbrain.Spec != nil { - if _, ok := cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { - if cloudbrain.Cloudbrain.Spec != nil { - cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType] = &cloudbrain.Cloudbrain - } - } - } - } - - cloudBrainCenterCodeAndCardTypeInfo := getcloudBrainCenterCodeAndCardTypeInfo(ciTasks, beginTime, endTime) + cloudBrainCenterCodeAndCardTypeInfo, cloudbrainMap := getcloudBrainCenterCodeAndCardTypeInfo(ciTasks, beginTime, endTime) resourceQueues, err := models.GetCanUseCardInfo() if err != nil { @@ -117,7 +100,7 @@ func summaryDurationStat(statisticTime time.Time) int64 { cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType] = 0 } cloudbrainDurationStat := models.CloudbrainDurationStatistic{ - DateTime: dateTime, + DateTimeUnix: dateTimeUnix, DayTime: dayTime, HourTime: hourTime, Cluster: cloudbrainTable.Cluster, @@ -139,7 +122,7 @@ func summaryDurationStat(statisticTime time.Time) int64 { for key, cardsTotalDuration := range cardsTotalDurationMap { cloudbrainDurationStat := models.CloudbrainDurationStatistic{ - DateTime: dateTime, + DateTimeUnix: dateTimeUnix, DayTime: dayTime, HourTime: hourTime, Cluster: strings.Split(key, "/")[0], @@ -176,11 +159,27 @@ func GetAiCenterNameByCode(centerCode string, language string) string { return aiCenterName } -func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, beginTime int64, endTime int64) map[string]map[string]int { +func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, beginTime int64, endTime int64) (map[string]map[string]int, map[string]*models.Cloudbrain) { var WorkServerNumber int var AccCardsNum int + cloudbrainMap := make(map[string]*models.Cloudbrain) cloudBrainCenterCodeAndCardType := make(map[string]map[string]int) for _, cloudbrain := range ciTasks { + if cloudbrain.Cloudbrain.StartTime == 0 { + 
cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix + } + if cloudbrain.Cloudbrain.EndTime == 0 { + cloudbrain.Cloudbrain.EndTime = timeutil.TimeStamp(time.Now().Unix()) + } + cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain) + if cloudbrain.Cloudbrain.Spec != nil { + if _, ok := cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { + if cloudbrain.Cloudbrain.Spec != nil { + cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType] = &cloudbrain.Cloudbrain + } + } + } + cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain) if cloudbrain.Cloudbrain.StartTime == 0 { cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix @@ -202,7 +201,7 @@ func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, be cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter] = make(map[string]int) } if cloudbrain.Cloudbrain.Spec != nil { - if cloudbrain.Cloudbrain.Status == string(models.ModelArtsRunning) { + if cloudbrain.Cloudbrain.Status == string(models.ModelArtsRunning) && cloudbrain.Cloudbrain.DeletedAt.IsZero() { if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { if int64(cloudbrain.Cloudbrain.StartTime) < beginTime { cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime)) @@ -246,19 +245,26 @@ func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, be } } - return cloudBrainCenterCodeAndCardType + return cloudBrainCenterCodeAndCardType, cloudbrainMap } func CloudbrainUpdateHistoryData(ctx *context.Context) { beginTimeStr := ctx.QueryTrim("beginTime") endTimeStr := ctx.QueryTrim("endTime") - beginTime, _ := time.ParseInLocation("2006-01-02 15:04:05", beginTimeStr, time.Local) - endTime, _ := time.ParseInLocation("2006-01-02 15:04:05", endTimeStr, time.Local) - beginTimeUnix := timeutil.TimeStamp(beginTime.Unix()) - endTimeUnix := timeutil.TimeStamp(endTime.Unix()) + var count int64 + var err error + if beginTimeStr != "" && endTimeStr != "" { + beginTime, _ := time.ParseInLocation("2006-01-02 15:04:05", beginTimeStr, time.Local) + endTime, _ := time.ParseInLocation("2006-01-02 15:04:05", endTimeStr, time.Local) + if time.Now().Before(endTime) { + endTime = time.Now() + } + beginTimeUnix := timeutil.TimeStamp(beginTime.Unix()) + endTimeUnix := timeutil.TimeStamp(endTime.Unix()) - err := models.DeleteCloudbrainDurationStatistic(beginTimeUnix, endTimeUnix) - count := UpdateDurationStatisticHistoryData(beginTime, endTime) + err = models.DeleteCloudbrainDurationStatistic(beginTimeUnix, endTimeUnix) + count = UpdateDurationStatisticHistoryData(beginTime.Add(+1*time.Hour), endTime.Add(+1*time.Hour)) + } ctx.JSON(http.StatusOK, map[string]interface{}{ "message": 0, "count": count, diff --git a/routers/repo/grampus.go b/routers/repo/grampus.go index de7bb454d..8f3182758 100755 --- a/routers/repo/grampus.go +++ b/routers/repo/grampus.go @@ -10,7 +10,6 @@ import ( "path" "strconv" "strings" - "time" "code.gitea.io/gitea/modules/urfs_client/urchin" "code.gitea.io/gitea/routers/response" @@ -77,8 +76,7 @@ func GrampusTrainJobNPUNew(ctx *context.Context) { func grampusTrainJobNewDataPrepare(ctx *context.Context, processType string) error { ctx.Data["PageIsCloudBrain"] = true - t := time.Now() - var displayJobName = 
jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName //get valid images diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index fabf7e555..01d2e2fa4 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -15,6 +15,8 @@ import ( "time" "unicode/utf8" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" + "code.gitea.io/gitea/services/cloudbrain/cloudbrainTask" "code.gitea.io/gitea/modules/dataset" @@ -128,8 +130,7 @@ func NotebookNew(ctx *context.Context) { func notebookNewDataPrepare(ctx *context.Context) error { ctx.Data["PageIsCloudBrain"] = true - t := time.Now() - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName attachs, err := models.GetModelArtsUserAttachments(ctx.User.ID) @@ -239,9 +240,9 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm } if setting.ModelartsCD.Enabled { - err = modelarts_cd.GenerateNotebook(ctx, displayJobName, jobName, uuid, description, imageId, spec) + _, err = modelarts_cd.GenerateNotebook(ctx, displayJobName, jobName, uuid, description, imageId, spec, "", modelarts.AutoStopDurationMs) } else { - err = modelarts.GenerateNotebook2(ctx, displayJobName, jobName, uuid, description, imageId, spec) + _, err = modelarts.GenerateNotebook2(ctx, displayJobName, jobName, uuid, description, imageId, spec, "", modelarts.AutoStopDurationMs) } if err != nil { @@ -387,8 +388,31 @@ func NotebookDebug2(ctx *context.Context) { ctx.RenderWithErr(err.Error(), tplModelArtsNotebookIndex, nil) return } + if task.BootFile != "" { + ctx.Redirect(getFileUrl(result.Url, task.BootFile) + "?token=" + result.Token) + } else { + ctx.Redirect(result.Url + "?token=" + result.Token) + } - ctx.Redirect(result.Url + "?token=" + result.Token) +} + +func getFileUrl(url string, filename string) string { + middle := "" + if url[len(url)-3:] == "lab" || url[len(url)-4:] == "lab/" { + if url[len(url)-1] == '/' { + middle = "tree/" + } else { + middle = "/tree/" + } + } else { + if url[len(url)-1] == '/' { + middle = "lab/tree/" + } else { + middle = "/lab/tree/" + } + } + + return url + middle + path.Base(filename) } func NotebookRestart(ctx *context.Context) { @@ -420,7 +444,8 @@ func NotebookRestart(ctx *context.Context) { } else { if count >= 1 { log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) - errorMsg = "you have already a running or waiting task, can not create more" + resultCode = "2" + errorMsg = ctx.Tr("repo.cloudbrain.morethanonejob") break } } @@ -714,8 +739,7 @@ func trainJobNewDataPrepare(ctx *context.Context) error { // return //} - t := time.Now() - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName attachs, err := models.GetModelArtsTrainAttachments(ctx.User.ID) @@ -2351,8 +2375,7 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error { ctx.Data["PageIsCloudBrain"] = true ctx.Data["newInference"] = true - t := time.Now() - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + 
t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName attachs, err := models.GetModelArtsTrainAttachments(ctx.User.ID) diff --git a/routers/repo/repo.go b/routers/repo/repo.go index 2c8c2f45b..4919b2487 100644 --- a/routers/repo/repo.go +++ b/routers/repo/repo.go @@ -414,7 +414,9 @@ func Action(ctx *context.Context) { var err error switch ctx.Params(":action") { case "watch": - err = models.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, true) + err = models.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, true, models.ReceiveAllNotification) + case "watch_but_reject": + err = models.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, true, models.RejectAllNotification) case "unwatch": err = models.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, false) case "star": diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 8ecca32e2..b8ea5b2e6 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -359,6 +359,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/all/dosearch/", routers.SearchApi) m.Post("/user/login/kanban", user.SignInPostAPI) m.Get("/home/term", routers.HomeTerm) + m.Get("/home/notice", routers.HomeNoticeTmpl) m.Get("/home/privacy", routers.HomePrivacy) m.Get("/extension/tuomin/upload", modelapp.ProcessImageUI) m.Post("/extension/tuomin/upload", reqSignIn, modelapp.ProcessImage) @@ -529,6 +530,7 @@ func RegisterRoutes(m *macaron.Macaron) { // r.Get("/feeds", binding.Bind(auth.FeedsForm{}), user.Feeds) m.Any("/activate", user.Activate, reqSignIn) m.Any("/activate_email", user.ActivateEmail) + m.Post("/update_email", bindIgnErr(auth.UpdateEmailForm{}), user.UpdateEmailPost) m.Get("/avatar/:username/:size", user.Avatar) m.Get("/email2user", user.Email2User) m.Get("/recover_account", user.ResetPasswd) @@ -1283,8 +1285,8 @@ func RegisterRoutes(m *macaron.Macaron) { m.Group("/modelsafety", func() { m.Group("/:id", func() { - m.Get("/show", reqRepoCloudBrainWriter, repo.GetAiSafetyTaskTmpl) - m.Get("", reqRepoCloudBrainWriter, repo.GetAiSafetyTask) + m.Get("/show", reqRepoCloudBrainReader, repo.GetAiSafetyTaskTmpl) + m.Get("", reqRepoCloudBrainReader, repo.GetAiSafetyTask) m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.StopAiSafetyTask) m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.DelAiSafetyTask) }) diff --git a/routers/user/auth.go b/routers/user/auth.go index 57ffb1710..3d74b6ddd 100755 --- a/routers/user/auth.go +++ b/routers/user/auth.go @@ -1413,6 +1413,34 @@ func SignUpPost(ctx *context.Context, cpt *captcha.Captcha, form auth.RegisterFo handleSignInFull(ctx, u, false, true) } +//update user emailAddress +func UpdateEmailPost(ctx *context.Context, form auth.UpdateEmailForm) { + newEmailAddress := ctx.Query("NewEmail") + if newEmailAddress == "" { + log.Error("please input the newEmail") + return + } + if used, _ := models.IsEmailUsed(newEmailAddress); used { + ctx.RenderWithErr(ctx.Tr("form.email_been_used"), TplActivate, &form) + return + } + user := ctx.User + email, err := models.GetEmailAddressByIDAndEmail(user.ID, user.Email) + if err != nil { + ctx.ServerError("GetEmailAddressByIDAndEmail failed", err) + return + } + err = email.UpdateEmailAddress(newEmailAddress) + if err != nil { + ctx.ServerError("UpdateEmailAddress failed", err) + return + } + ctx.Data["SignedUser.Email"] = newEmailAddress + ctx.User.Email = newEmailAddress + Activate(ctx) + +} + // Activate render activate user page 
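The getFileUrl helper added to modelarts.go above only rewrites the JupyterLab URL so that a debug session opens directly on the task's boot file; the caller then appends "?token=..." exactly as before. Below is a standalone sketch of the same path logic; the example URLs and expected outputs are illustrative, not taken from a real notebook instance.

```go
package main

import (
	"fmt"
	"path"
)

// fileUrl mirrors getFileUrl: append "lab/tree/<file>" to the notebook base
// URL, or just "tree/<file>" when the URL already ends in "lab", taking care
// not to produce a double slash.
func fileUrl(base, bootFile string) string {
	middle := "/lab/tree/"
	if len(base) >= 4 && (base[len(base)-3:] == "lab" || base[len(base)-4:] == "lab/") {
		middle = "/tree/"
	}
	if base[len(base)-1] == '/' {
		middle = middle[1:] // drop the leading slash, the base already ends with one
	}
	return base + middle + path.Base(bootFile)
}

func main() {
	// hypothetical notebook URLs, for illustration only
	fmt.Println(fileUrl("https://notebook.example.com/abc/lab", "dir/train.ipynb"))
	// -> https://notebook.example.com/abc/lab/tree/train.ipynb
	fmt.Println(fileUrl("https://notebook.example.com/abc/", "train.ipynb"))
	// -> https://notebook.example.com/abc/lab/tree/train.ipynb
}
```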
func Activate(ctx *context.Context) { code := ctx.Query("code") diff --git a/services/cloudbrain/cloudbrainTask/notebook.go b/services/cloudbrain/cloudbrainTask/notebook.go new file mode 100644 index 000000000..6b2fcf707 --- /dev/null +++ b/services/cloudbrain/cloudbrainTask/notebook.go @@ -0,0 +1,362 @@ +package cloudbrainTask + +import ( + "fmt" + "net/http" + "path" + + "code.gitea.io/gitea/modules/modelarts" + "code.gitea.io/gitea/modules/modelarts_cd" + + "code.gitea.io/gitea/modules/git" + + "code.gitea.io/gitea/modules/cloudbrain" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/redis/redis_key" + "code.gitea.io/gitea/modules/redis/redis_lock" + "code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/services/cloudbrain/resource" + "code.gitea.io/gitea/services/reward/point/account" + + "code.gitea.io/gitea/modules/setting" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" + repo_service "code.gitea.io/gitea/services/repository" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/context" + api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" +) + +const NoteBookExtension = ".ipynb" + +func FileNotebookCreate(ctx *context.Context, option api.CreateFileNotebookJobOption) { + + if ctx.Written() { + return + } + + if path.Ext(option.File) != NoteBookExtension { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_select_wrong"))) + return + } + + isNotebookFileExist, _ := isNoteBookFileExist(ctx, option) + if !isNotebookFileExist { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_not_exist"))) + return + } + + sourceRepo, err := models.GetRepositoryByOwnerAndName(option.OwnerName, option.ProjectName) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_not_exist"))) + return + } + + permission, err := models.GetUserRepoPermission(sourceRepo, ctx.User) + if err != nil { + log.Error("Get permission failed", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_no_right"))) + return + } + + if !permission.CanRead(models.UnitTypeCode) { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_no_right"))) + return + } + + //create repo if not exist + repo, err := models.GetRepositoryByName(ctx.User.ID, setting.FileNoteBook.ProjectName) + if repo == nil { + repo, err = repo_service.CreateRepository(ctx.User, ctx.User, models.CreateRepoOptions{ + Name: setting.FileNoteBook.ProjectName, + Alias: "", + Description: "", + IssueLabels: "", + Gitignores: "", + License: "", + Readme: "Default", + IsPrivate: false, + AutoInit: true, + DefaultBranch: "master", + }) + } + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.failed_to_create_notebook_repo",setting.FileNoteBook.ProjectName))) + return + } + if option.Type <= 1 { + cloudBrainFileNoteBookCreate(ctx, option, repo, sourceRepo) + } else { + modelartsFileNoteBookCreate(ctx, option, repo, sourceRepo) + } + +} + +func cloudBrainFileNoteBookCreate(ctx *context.Context, option api.CreateFileNotebookJobOption, repo *models.Repository, sourceRepo *models.Repository) { + + displayJobName := cloudbrainService.GetDisplayJobName(ctx.User.Name) + jobName := util.ConvertDisplayJobNameToJobName(displayJobName) + jobType := string(models.JobTypeDebug) + + lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), jobType, displayJobName)) + defer lock.UnLock() + isOk, 
err := lock.Lock(models.CloudbrainKeyDuration) + if !isOk { + log.Error("lock processed failed:%v", err, ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err"))) + return + } + + tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName) + if err == nil { + if len(tasks) != 0 { + log.Error("the job name did already exist", ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err"))) + return + } + } else { + if !models.IsErrJobNotExist(err) { + log.Error("system error, %v", err, ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error.")) + return + } + } + + count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainOne, jobType) + if err != nil { + log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error.")) + return + } else { + if count >= 1 { + log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK,models.BaseMessageApi{ + Code: 2, + Message: ctx.Tr("repo.cloudbrain.morethanonejob"), + }) + return + } + } + + errStr := uploadCodeFile(sourceRepo, getCodePath(jobName), option.BranchName, option.File, jobName) + if errStr != "" { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_not_exist"))) + return + } + command := cloudbrain.GetCloudbrainDebugCommand() + specId := setting.FileNoteBook.SpecIdGPU + if option.Type == 0 { + specId = setting.FileNoteBook.SpecIdCPU + } + spec, err := resource.GetAndCheckSpec(ctx.User.ID, specId, models.FindSpecsOptions{ + JobType: models.JobType(jobType), + ComputeResource: models.GPU, + Cluster: models.OpenICluster, + AiCenterCode: models.AICenterOfCloudBrainOne}) + if err != nil || spec == nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.wrong_specification"))) + return + } + + if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) { + log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance"))) + return + } + ctx.Repo = &context.Repository{ + Repository: repo, + } + + req := cloudbrain.GenerateCloudBrainTaskReq{ + Ctx: ctx, + DisplayJobName: displayJobName, + JobName: jobName, + Image: setting.FileNoteBook.ImageGPU, + Command: command, + Uuids: "", + DatasetNames: "", + DatasetInfos: nil, + CodePath: storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"), + ModelPath: storage.GetMinioPath(jobName, cloudbrain.ModelMountPath+"/"), + BenchmarkPath: storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"), + Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"), + BrainScorePath: storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"), + JobType: jobType, + Description: getDescription(option), + BranchName: option.BranchName, + BootFile: option.File, + Params: "{\"parameter\":[]}", + CommitID: "", + BenchmarkTypeID: 0, + BenchmarkChildTypeID: 0, + ResultPath: storage.GetMinioPath(jobName, cloudbrain.ResultPath+"/"), + Spec: spec, + } + + jobId, err := cloudbrain.GenerateTask(req) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error())) + return + } + ctx.JSON(http.StatusOK, models.BaseMessageApi{ + Code: 0, + Message: jobId, + }) + +} + +func getCodePath(jobName string) string { + return 
setting.JobPath + jobName + cloudbrain.CodeMountPath +} + +func getDescription(option api.CreateFileNotebookJobOption) string { + return option.OwnerName + "/" + option.ProjectName + "/" + option.File +} + +func modelartsFileNoteBookCreate(ctx *context.Context, option api.CreateFileNotebookJobOption, repo *models.Repository, sourceRepo *models.Repository) { + displayJobName := cloudbrainService.GetDisplayJobName(ctx.User.Name) + jobName := util.ConvertDisplayJobNameToJobName(displayJobName) + + lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeDebug), displayJobName)) + isOk, err := lock.Lock(models.CloudbrainKeyDuration) + if !isOk { + log.Error("lock processed failed:%v", err, ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err"))) + return + } + defer lock.UnLock() + + count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainTwo, string(models.JobTypeDebug)) + + if err != nil { + log.Error("GetCloudbrainNotebookCountByUserID failed:%v", err, ctx.Data["MsgID"]) + + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error.")) + return + } else { + if count >= 1 { + log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK,models.BaseMessageApi{ + Code: 2, + Message: ctx.Tr("repo.cloudbrain.morethanonejob"), + }) + return + } + } + + tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeDebug), displayJobName) + if err == nil { + if len(tasks) != 0 { + log.Error("the job name did already exist", ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err"))) + return + } + } else { + if !models.IsErrJobNotExist(err) { + log.Error("system error, %v", err, ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error.")) + return + } + } + + err = downloadCode(sourceRepo, getCodePath(jobName), option.BranchName) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed"))) + return + } + + var aiCenterCode = models.AICenterOfCloudBrainTwo + var specId = setting.FileNoteBook.SpecIdNPU + if setting.ModelartsCD.Enabled { + aiCenterCode = models.AICenterOfChengdu + specId = setting.FileNoteBook.SpecIdNPUCD + } + spec, err := resource.GetAndCheckSpec(ctx.User.ID, specId, models.FindSpecsOptions{ + JobType: models.JobTypeDebug, + ComputeResource: models.NPU, + Cluster: models.OpenICluster, + AiCenterCode: aiCenterCode}) + if err != nil || spec == nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.wrong_specification"))) + return + } + if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) { + log.Error("point balance is not enough,userId=%d specId=%d ", ctx.User.ID, spec.ID) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance"))) + return + } + ctx.Repo = &context.Repository{ + Repository: repo, + } + + var jobId string + if setting.ModelartsCD.Enabled { + jobId, err = modelarts_cd.GenerateNotebook(ctx, displayJobName, jobName, "", getDescription(option), setting.FileNoteBook.ImageIdNPUCD, spec, option.File,modelarts.AutoStopDurationMs/4) + } else { + jobId, err = modelarts.GenerateNotebook2(ctx, displayJobName, jobName, "", getDescription(option), setting.FileNoteBook.ImageIdNPU, spec, option.File,modelarts.AutoStopDurationMs/4) + } + + if err != nil { + log.Error("GenerateNotebook2 
failed, %v", err, ctx.Data["MsgID"]) + + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error())) + + return + } + + ctx.JSON(http.StatusOK, models.BaseMessageApi{ + Code: 0, + Message: jobId, + }) + +} + +func isNoteBookFileExist(ctx *context.Context, option api.CreateFileNotebookJobOption) (bool, error) { + repoPathOfNoteBook := models.RepoPath(option.OwnerName, option.ProjectName) + + gitRepoOfNoteBook, err := git.OpenRepository(repoPathOfNoteBook) + if err != nil { + log.Error("RepoRef Invalid repo "+repoPathOfNoteBook, err.Error()) + return false, err + } + // We opened it, we should close it + defer func() { + // If it's been set to nil then assume someone else has closed it. + if gitRepoOfNoteBook != nil { + gitRepoOfNoteBook.Close() + } + }() + fileExist, err := fileExists(gitRepoOfNoteBook, option.File, option.BranchName) + if err != nil || !fileExist { + log.Error("Get file error:", err, ctx.Data["MsgID"]) + + return false, err + } + return true, nil +} + +func uploadCodeFile(repo *models.Repository, codePath string, branchName string, filePath string, jobName string) string { + err := downloadCode(repo, codePath, branchName) + if err != nil { + return "cloudbrain.load_code_failed" + } + + err = uploadOneFileToMinio(codePath, filePath, jobName, cloudbrain.CodeMountPath+"/") + if err != nil { + return "cloudbrain.load_code_failed" + } + return "" +} + +func fileExists(gitRepo *git.Repository, path string, branch string) (bool, error) { + + commit, err := gitRepo.GetBranchCommit(branch) + if err != nil { + return false, err + } + if _, err := commit.GetTreeEntryByPath(path); err != nil { + return false, err + } + return true, nil +} diff --git a/services/cloudbrain/cloudbrainTask/sync_status.go b/services/cloudbrain/cloudbrainTask/sync_status.go index 67dc4d3b7..973b9bbc2 100644 --- a/services/cloudbrain/cloudbrainTask/sync_status.go +++ b/services/cloudbrain/cloudbrainTask/sync_status.go @@ -1,20 +1,21 @@ package cloudbrainTask import ( - "net/http" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/cloudbrain" - "code.gitea.io/gitea/modules/httplib" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/notification" "code.gitea.io/gitea/modules/setting" + "net/http" + "strconv" ) var noteBookOKMap = make(map[int64]int, 20) +var noteBookFailMap = make(map[int64]int, 20) -//if a task notebook url can get two times, the notebook can browser. +//if a task notebook url can get successfulCount times, the notebook can browser. 
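The sync_status.go hunk above replaces the httplib probe with plain http.Get and adds a per-task success/failure counter: the debug URL must answer 200 at least successfulCount times before the notebook is reported as browsable, and up to maxSuccessfulCount times when no failure has been recorded since the first success. The sketch below condenses that counting rule; the readyTracker struct and Observe method are my own packaging of the package-level noteBookOKMap and noteBookFailMap from the diff, not names that exist in the codebase.

```go
package main

import "fmt"

const (
	successfulCount    = 3  // required 200s once a later failure has been seen
	maxSuccessfulCount = 10 // required 200s when the probe has never failed
)

// readyTracker folds noteBookOKMap / noteBookFailMap into one value.
// Observe records one probe result and reports whether the notebook is ready.
type readyTracker struct {
	ok   map[int64]int
	fail map[int64]int
}

func newReadyTracker() *readyTracker {
	return &readyTracker{ok: map[int64]int{}, fail: map[int64]int{}}
}

func (t *readyTracker) Observe(taskID int64, statusOK bool) bool {
	if !statusOK {
		t.fail[taskID]++
		return false
	}
	count := t.ok[taskID]
	if count == 0 {
		t.fail[taskID] = 0 // first success resets the failure counter, as in the diff
	}
	if count < successfulCount-1 || (t.fail[taskID] == 0 && count < maxSuccessfulCount-1) {
		t.ok[taskID]++
		return false
	}
	delete(t.ok, taskID)
	delete(t.fail, taskID)
	return true
}

func main() {
	t := newReadyTracker()
	t.Observe(7, true)  // first 200 resets the failure counter
	t.Observe(7, false) // a later failure shortens the requirement to successfulCount
	for i := 2; i <= 3; i++ {
		fmt.Printf("success #%d ready: %v\n", i, t.Observe(7, true))
	}
}
```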
const successfulCount = 3 +const maxSuccessfulCount=10 func SyncCloudBrainOneStatus(task *models.Cloudbrain) (*models.Cloudbrain, error) { jobResult, err := cloudbrain.GetJob(task.JobID) @@ -62,21 +63,29 @@ func isNoteBookReady(task *models.Cloudbrain) bool { return true } noteBookUrl := setting.DebugServerHost + "jpylab_" + task.JobID + "_" + task.SubTaskName - r := httplib.Get(noteBookUrl) - res, err := r.Response() + res,err := http.Get(noteBookUrl) if err != nil { return false } + log.Info("notebook success count:"+strconv.Itoa(noteBookOKMap[task.ID])+",fail count:"+strconv.Itoa(noteBookFailMap[task.ID])) if res.StatusCode == http.StatusOK { count := noteBookOKMap[task.ID] - if count < successfulCount-1 { + if count==0{ //如果是第一次成功,把失败数重置为0 + noteBookFailMap[task.ID]=0 + } + + if count < successfulCount-1 || (noteBookFailMap[task.ID]==0 && count < maxSuccessfulCount-1) { noteBookOKMap[task.ID] = count + 1 return false } else { + log.Info("notebook success count:"+strconv.Itoa(count)+",fail count:"+strconv.Itoa(noteBookFailMap[task.ID])) delete(noteBookOKMap, task.ID) + delete(noteBookFailMap, task.ID) return true } + }else{ + noteBookFailMap[task.ID]+=1 } return false diff --git a/services/cloudbrain/cloudbrainTask/train.go b/services/cloudbrain/cloudbrainTask/train.go index 8e4673d66..00d01a7ce 100644 --- a/services/cloudbrain/cloudbrainTask/train.go +++ b/services/cloudbrain/cloudbrainTask/train.go @@ -810,6 +810,18 @@ func uploadCodeToMinio(codePath, jobName, parentDir string) error { return nil } +func uploadOneFileToMinio(codePath, filePath, jobName, parentDir string) error { + destObject := setting.CBCodePathPrefix + jobName + parentDir + path.Base(filePath) + sourceFile := codePath + "/" + filePath + err := storage.Attachments.UploadObject(destObject, sourceFile) + if err != nil { + log.Error("UploadObject(%s) failed: %s", filePath, err.Error()) + return err + } + return nil + +} + func readDir(dirname string) ([]os.FileInfo, error) { f, err := os.Open(dirname) if err != nil { diff --git a/services/cloudbrain/util.go b/services/cloudbrain/util.go index dc9177ecf..0a3096e3f 100644 --- a/services/cloudbrain/util.go +++ b/services/cloudbrain/util.go @@ -1,7 +1,11 @@ package cloudbrain import ( + "regexp" + "strconv" "strings" + "time" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" @@ -33,6 +37,28 @@ func GetAiCenterShow(aiCenter string, ctx *context.Context) string { } +func GetDisplayJobName(username string) string { + t := time.Now() + return jobNamePrefixValid(cutString(username, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] +} + +func cutString(str string, lens int) string { + if len(str) < lens { + return str + } + return str[:lens] +} + +func jobNamePrefixValid(s string) string { + lowStr := strings.ToLower(s) + re := regexp.MustCompile(`[^a-z0-9_\\-]+`) + + removeSpecial := re.ReplaceAllString(lowStr, "") + + re = regexp.MustCompile(`^[_\\-]+`) + return re.ReplaceAllString(removeSpecial, "") +} + func GetAiCenterInfoByCenterCode(aiCenterCode string) *setting.C2NetSequenceInfo { if setting.AiCenterCodeAndNameMapInfo != nil { if info, ok := setting.AiCenterCodeAndNameMapInfo[aiCenterCode]; ok { diff --git a/services/repository/repository.go b/services/repository/repository.go index 02928d855..e9a8570d5 100644 --- a/services/repository/repository.go +++ b/services/repository/repository.go @@ -106,18 +106,13 @@ func GetRecommendCourseKeyWords() ([]string, error) { } -func GetRecommendRepoFromPromote(filename string) 
([]map[string]interface{}, error) { +func GetRecommendRepoFromPromote(repoMap []map[string]string) ([]map[string]interface{}, error) { resultRepo := make([]map[string]interface{}, 0) - url := setting.RecommentRepoAddr + filename - result, err := RecommendFromPromote(url) - - if err != nil { - - return resultRepo, err - } //resultRepo := make([]*models.Repository, 0) - for _, repoName := range result { + for _, record := range repoMap { + repoName := record["project_url"] + //log.Info("repoName=" + repoName + " tmpIndex1=" + fmt.Sprint(tmpIndex1) + " len(repoName)=" + fmt.Sprint(len(repoName))) tmpIndex := strings.Index(repoName, "/") if tmpIndex == -1 { log.Info("error repo name format.") @@ -130,7 +125,8 @@ func GetRecommendRepoFromPromote(filename string) ([]map[string]interface{}, err repoMap["ID"] = fmt.Sprint(repo.ID) repoMap["Name"] = repo.Name repoMap["Alias"] = repo.Alias - + repoMap["Label"] = record["class"] + repoMap["Label_en"] = record["class_en"] repoMap["OwnerName"] = repo.OwnerName repoMap["NumStars"] = repo.NumStars repoMap["NumForks"] = repo.NumForks diff --git a/templates/admin/cloudbrain/list.tmpl b/templates/admin/cloudbrain/list.tmpl index 20e704a4d..94f80c0fa 100755 --- a/templates/admin/cloudbrain/list.tmpl +++ b/templates/admin/cloudbrain/list.tmpl @@ -238,6 +238,7 @@ {{$.i18n.Tr "repo.debug"}} {{else}} + {{if not .BootFile}} {{end}} + {{end}}
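The repository.go hunk above switches GetRecommendRepoFromPromote from a flat list of repository names to structured records that carry a project_url plus class / class_en category labels, which end up as Label and Label_en in the response. A minimal sketch of how one such record is split and skipped when malformed follows; the record values are made-up examples, and the real function additionally loads the repository and copies its stars and forks into the result map.

```go
package main

import (
	"fmt"
	"strings"
)

// recommendEntry models one parsed entry of the repoMap slice passed to
// GetRecommendRepoFromPromote: an "owner/alias" path plus category labels.
type recommendEntry struct {
	Owner   string
	Alias   string
	Label   string // record["class"]
	LabelEn string // record["class_en"]
}

// parseRecord splits record["project_url"] at the first slash; entries without
// a slash are skipped, mirroring the "error repo name format" guard in the diff.
func parseRecord(record map[string]string) (recommendEntry, bool) {
	repoName := record["project_url"]
	idx := strings.Index(repoName, "/")
	if idx == -1 {
		return recommendEntry{}, false
	}
	return recommendEntry{
		Owner:   repoName[:idx],
		Alias:   repoName[idx+1:],
		Label:   record["class"],
		LabelEn: record["class_en"],
	}, true
}

func main() {
	// hypothetical promote records, for illustration only
	records := []map[string]string{
		{"project_url": "OpenIOSSG/promote", "class": "推荐", "class_en": "Recommended"},
		{"project_url": "badname", "class": "", "class_en": ""},
	}
	for _, r := range records {
		if e, ok := parseRecord(r); ok {
			fmt.Printf("%s/%s (%s / %s)\n", e.Owner, e.Alias, e.LabelEn, e.Label)
		}
	}
}
```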
{{end}} diff --git a/templates/base/footer_content.tmpl b/templates/base/footer_content.tmpl index b4c8518c4..3a35e69a3 100755 --- a/templates/base/footer_content.tmpl +++ b/templates/base/footer_content.tmpl @@ -1,15 +1,17 @@ -