{{.i18n.Tr "repo.images.task_delete_confirm"}}
+diff --git a/models/cloudbrain.go b/models/cloudbrain.go index 98b23b552..0465faf9a 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -1,13 +1,14 @@ package models import ( - "code.gitea.io/gitea/modules/util" "encoding/json" "fmt" "strconv" "strings" "time" + "code.gitea.io/gitea/modules/util" + "xorm.io/builder" "xorm.io/xorm" @@ -111,7 +112,7 @@ type Cloudbrain struct { SubTaskName string ContainerID string ContainerIp string - CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` + CreatedUnix timeutil.TimeStamp `xorm:"INDEX"` UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` Duration int64 `xorm:"DEFAULT 0"` //运行时长 单位秒 TrainJobDuration string `xorm:"DEFAULT '00:00:00'"` @@ -184,6 +185,12 @@ func (task *Cloudbrain) ComputeAndSetDuration() { task.TrainJobDuration = ConvertDurationToStr(d) } +func (task *Cloudbrain) CorrectCreateUnix() { + if task.StartTime > 0 && task.CreatedUnix > task.StartTime { + task.CreatedUnix = task.StartTime + } +} + func (task *Cloudbrain) IsTerminal() bool { status := task.Status return status == string(ModelArtsTrainJobCompleted) || status == string(ModelArtsTrainJobFailed) || status == string(ModelArtsTrainJobKilled) || status == string(ModelArtsStopped) || status == string(JobStopped) || status == string(JobFailed) || status == string(JobSucceeded) @@ -218,9 +225,22 @@ func ParseAndSetDurationFromCloudBrainOne(result JobResultPayload, task *Cloudbr task.EndTime = timeutil.TimeStamp(result.JobStatus.CompletedTime / 1000) } } + task.CorrectCreateUnix() task.ComputeAndSetDuration() } +func ParseAndSetDurationFromModelArtsNotebook(result *GetNotebook2Result, job *Cloudbrain) { + if job.StartTime == 0 && result.Lease.UpdateTime > 0 { + job.StartTime = timeutil.TimeStamp(result.Lease.UpdateTime / 1000) + } + job.Status = result.Status + if job.EndTime == 0 && IsModelArtsDebugJobTerminal(job.Status) { + job.EndTime = timeutil.TimeStampNow() + } + job.CorrectCreateUnix() + job.ComputeAndSetDuration() +} + type CloudbrainInfo struct { Cloudbrain `xorm:"extends"` User `xorm:"extends"` @@ -306,6 +326,7 @@ type CloudbrainsOptions struct { IsLatestVersion string JobTypeNot bool NeedRepoInfo bool + RepoIDList []int64 } type TaskPod struct { @@ -546,13 +567,21 @@ type PoolInfo struct { PoolType string `json:"pool_type"` } -type CommitImageParams struct { +type CommitImageCloudBrainParams struct { Ip string `json:"ip"` TaskContainerId string `json:"taskContainerId"` ImageTag string `json:"imageTag"` ImageDescription string `json:"imageDescription"` } +type CommitImageParams struct { + CommitImageCloudBrainParams + IsPrivate bool + Topics []string + CloudBrainType int + UID int64 +} + type CommitImageResult struct { Code string `json:"code"` Msg string `json:"msg"` @@ -1178,6 +1207,12 @@ func Cloudbrains(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { ) } } + if len(opts.RepoIDList) > 0 { + cond = cond.And( + builder.In("cloudbrain.repo_id", opts.RepoIDList), + ) + + } var count int64 var err error @@ -1464,7 +1499,7 @@ func UpdateTrainJobVersion(job *Cloudbrain) error { func updateJobTrainVersion(e Engine, job *Cloudbrain) error { var sess *xorm.Session sess = e.Where("job_id = ? 
AND version_name=?", job.JobID, job.VersionName) - _, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time").Update(job) + _, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time", "created_unix").Update(job) return err } @@ -1553,7 +1588,7 @@ func UpdateInferenceJob(job *Cloudbrain) error { func updateInferenceJob(e Engine, job *Cloudbrain) error { var sess *xorm.Session sess = e.Where("job_id = ?", job.JobID) - _, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time").Update(job) + _, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time", "created_unix").Update(job) return err } func RestartCloudbrain(old *Cloudbrain, new *Cloudbrain) (err error) { @@ -1580,3 +1615,64 @@ func RestartCloudbrain(old *Cloudbrain, new *Cloudbrain) (err error) { return nil } +func CloudbrainAll(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { + sess := x.NewSession() + defer sess.Close() + var cond = builder.NewCond() + if (opts.Type) >= 0 { + cond = cond.And( + builder.Eq{"cloudbrain.type": opts.Type}, + ) + } + + var count int64 + var err error + condition := "cloudbrain.user_id = `user`.id" + if len(opts.Keyword) == 0 { + count, err = sess.Where(cond).Count(new(Cloudbrain)) + } else { + lowerKeyWord := strings.ToLower(opts.Keyword) + + cond = cond.And(builder.Or(builder.Like{"LOWER(cloudbrain.job_name)", lowerKeyWord}, builder.Like{"LOWER(cloudbrain.display_job_name)", lowerKeyWord}, builder.Like{"`user`.lower_name", lowerKeyWord})) + count, err = sess.Table(&Cloudbrain{}).Where(cond). + Join("left", "`user`", condition).Count(new(CloudbrainInfo)) + + } + + if err != nil { + return nil, 0, fmt.Errorf("Count: %v", err) + } + + if opts.Page >= 0 && opts.PageSize > 0 { + var start int + if opts.Page == 0 { + start = 0 + } else { + start = (opts.Page - 1) * opts.PageSize + } + sess.Limit(opts.PageSize, start) + } + + sess.OrderBy("cloudbrain.created_unix DESC") + cloudbrains := make([]*CloudbrainInfo, 0, setting.UI.IssuePagingNum) + if err := sess.Table(&Cloudbrain{}).Unscoped().Where(cond). + Join("left", "`user`", condition). 
+		Find(&cloudbrains); err != nil {
+		return nil, 0, fmt.Errorf("Find: %v", err)
+	}
+	if opts.NeedRepoInfo {
+		var ids []int64
+		for _, task := range cloudbrains {
+			ids = append(ids, task.RepoID)
+		}
+		repositoryMap, err := GetRepositoriesMapByIDs(ids)
+		if err == nil {
+			for _, task := range cloudbrains {
+				task.Repo = repositoryMap[task.RepoID]
+			}
+		}
+
+	}
+
+	return cloudbrains, count, nil
+}
diff --git a/models/cloudbrain_image.go b/models/cloudbrain_image.go
new file mode 100644
index 000000000..c88db0f67
--- /dev/null
+++ b/models/cloudbrain_image.go
@@ -0,0 +1,583 @@
+package models
+
+import (
+	"fmt"
+	"strings"
+	"unicode/utf8"
+
+	"xorm.io/builder"
+
+	"code.gitea.io/gitea/modules/timeutil"
+)
+
+const RECOMMOND_TYPE = 5
+const NORMAL_TYPE = 0
+const IMAGE_STATUS_COMMIT = 0
+const IMAGE_STATUS_SUCCESS = 1
+const IMAGE_STATUS_Failed = 2
+
+type Image struct {
+	ID             int64              `xorm:"pk autoincr" json:"id"`
+	Type           int                `xorm:"INDEX NOT NULL" json:"type"` //0 normal, 5 officially recommended; intermediate values reserved for future use
+	CloudbrainType int                `xorm:"INDEX NOT NULL" json:"cloudbrainType"` //0 CloudBrain One, 1 CloudBrain Two
+	UID            int64              `xorm:"INDEX NOT NULL" json:"uid"`
+	IsPrivate      bool               `xorm:"INDEX NOT NULL" json:"isPrivate"`
+	Tag            string             `xorm:"varchar(100) UNIQUE" json:"tag"`
+	Description    string             `xorm:"varchar(765)" json:"description"`
+	Topics         []string           `xorm:"TEXT JSON" json:"topics"`
+	Place          string             `xorm:"varchar(300)" json:"place"`
+	NumStars       int                `xorm:"NOT NULL DEFAULT 0" json:"numStars"`
+	IsStar         bool               `xorm:"-" json:"isStar"`
+	UserName       string             `xorm:"-" json:"userName"`
+	RelAvatarLink  string             `xorm:"-" json:"relAvatarLink"`
+	Status         int                `xorm:"INDEX NOT NULL DEFAULT 0" json:"status"` //0 committing, 1 committed, 2 commit failed
+	CreatedUnix    timeutil.TimeStamp `xorm:"INDEX created" json:"createdUnix"`
+	UpdatedUnix    timeutil.TimeStamp `xorm:"INDEX updated" json:"updatedUnix"`
+}
+
+type ImageList []*Image
+
+type ImageStar struct {
+	ID          int64              `xorm:"pk autoincr"`
+	UID         int64              `xorm:"UNIQUE(s)"`
+	ImageID     int64              `xorm:"UNIQUE(s)"`
+	CreatedUnix timeutil.TimeStamp `xorm:"created"`
+}
+
+type ImageTopic struct {
+	ID          int64
+	Name        string `xorm:"UNIQUE VARCHAR(105)"`
+	ImageCount  int
+	CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
+	UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
+}
+
+type ImageTopicRelation struct {
+	ImageID int64 `xorm:"UNIQUE(s)"`
+	TopicID int64 `xorm:"UNIQUE(s)"`
+}
+
+type SearchImageOptions struct {
+	Keyword             string
+	UID                 int64
+	Status              int
+	IncludePublicOnly   bool
+	IncludeOfficialOnly bool
+	IncludePrivateOnly  bool
+	IncludeStarByMe     bool
+	IncludeCustom       bool
+	IncludeOwnerOnly    bool
+	Topics              string
+	ListOptions
+	SearchOrderBy
+}
+type ErrorImageTagExist struct {
+	Tag string
+}
+
+type ErrorImageCommitting struct {
+	Tag string
+}
+
+type ImagesPageResult struct {
+	Count  int64    `json:"count"`
+	Images []*Image `json:"images"`
+}
+
+func (err ErrorImageTagExist) Error() string {
+	return fmt.Sprintf("Image already exists [tag: %s]", err.Tag)
+}
+
+func (err ErrorImageCommitting) Error() string {
+	return fmt.Sprintf("Image is being committed [tag: %s]", err.Tag)
+}
+
+type ErrImageNotExist struct {
+	ID  int64
+	Tag string
+}
+
+func (err ErrImageNotExist) Error() string {
+	return fmt.Sprintf("Image does not exist [id: %d] [tag: %s]", err.ID, err.Tag)
+}
+
+func IsErrorImageCommitting(err error) bool {
+	_, ok := err.(ErrorImageCommitting)
+	return ok
+}
+
+func IsErrImageNotExist(err error) bool {
+	_, ok := err.(ErrImageNotExist)
+	return ok
+}
+
+func IsErrImageTagExist(err error) bool {
+	_, ok := err.(ErrorImageTagExist)
+
return ok +} + +func IsImageExist(tag string) (bool, error) { + return x.Exist(&Image{ + Tag: tag, + }) +} + +func IsImageExistByUser(tag string, uid int64) (bool, error) { + return x.Exist(&Image{ + Tag: tag, + UID: uid, + Status: IMAGE_STATUS_SUCCESS, + }) +} + +type FindImageTopicOptions struct { + ListOptions + ImageID int64 + Keyword string +} + +func (opts *FindImageTopicOptions) toConds() builder.Cond { + var cond = builder.NewCond() + if opts.ImageID > 0 { + cond = cond.And(builder.Eq{"image_topic_relation.image_id": opts.ImageID}) + } + + if opts.Keyword != "" { + cond = cond.And(builder.Like{"image_topic.name", strings.ToLower(opts.Keyword)}) + } + + return cond +} + +func GetImageByID(id int64) (*Image, error) { + rel := new(Image) + has, err := x. + ID(id). + Get(rel) + if err != nil { + return nil, err + } else if !has { + return nil, ErrImageNotExist{ID: id} + } + + return rel, nil +} + +func GetImageByTag(tag string) (*Image, error) { + + image := &Image{Tag: tag} + has, err := x. + Get(image) + if err != nil { + return nil, err + } else if !has { + return nil, ErrImageNotExist{Tag: tag} + } + + return image, nil +} + +func SanitizeAndValidateImageTopics(topics []string) (validTopics []string, invalidTopics []string) { + validTopics = make([]string, 0) + mValidTopics := make(map[string]struct{}) + invalidTopics = make([]string, 0) + + for _, topic := range topics { + topic = strings.TrimSpace(strings.ToLower(topic)) + // ignore empty string + if len(topic) == 0 { + continue + } + // ignore same topic twice + if _, ok := mValidTopics[topic]; ok { + continue + } + if utf8.RuneCountInString(topic) <= 35 { + validTopics = append(validTopics, topic) + mValidTopics[topic] = struct{}{} + } else { + invalidTopics = append(invalidTopics, topic) + } + } + + return validTopics, invalidTopics +} +func FindImageTopics(opts *FindImageTopicOptions) (topics []*ImageTopic, err error) { + sess := x.Select("image_topic.*").Where(opts.toConds()) + if opts.ImageID > 0 { + sess.Join("INNER", "image_topic_relation", "image_topic_relation.topic_id = image_topic.id") + } + if opts.PageSize != 0 && opts.Page != 0 { + sess = opts.setSessionPagination(sess) + } + return topics, sess.Desc("image_topic.image_count").Find(&topics) +} + +func SaveImageTopics(imageID int64, topicNames ...string) error { + topics, err := FindImageTopics(&FindImageTopicOptions{ + ImageID: imageID, + }) + if err != nil { + return err + } + + sess := x.NewSession() + defer sess.Close() + + if err := sess.Begin(); err != nil { + return err + } + + var addedTopicNames []string + for _, topicName := range topicNames { + if strings.TrimSpace(topicName) == "" { + continue + } + + var found bool + for _, t := range topics { + if strings.EqualFold(topicName, t.Name) { + found = true + break + } + } + if !found { + addedTopicNames = append(addedTopicNames, topicName) + } + } + + var removeTopics []*ImageTopic + for _, t := range topics { + var found bool + for _, topicName := range topicNames { + if strings.EqualFold(topicName, t.Name) { + found = true + break + } + } + if !found { + removeTopics = append(removeTopics, t) + } + } + + for _, topicName := range addedTopicNames { + _, err := addTopicByNameToImage(sess, imageID, topicName) + if err != nil { + return err + } + } + + for _, topic := range removeTopics { + err := removeTopicFromImage(sess, imageID, topic) + if err != nil { + return err + } + } + + topicNames = make([]string, 0, 25) + if err := sess.Table("image_topic").Cols("name"). 
+ Join("INNER", "image_topic_relation", "image_topic_relation.topic_id = image_topic.id"). + Where("image_topic_relation.image_id = ?", imageID).Desc("image_topic.image_count").Find(&topicNames); err != nil { + return err + } + + if _, err := sess.ID(imageID).Cols("topics").Update(&Image{ + Topics: topicNames, + }); err != nil { + return err + } + + return sess.Commit() +} + +func addTopicByNameToImage(e Engine, imageID int64, topicName string) (*ImageTopic, error) { + var topic ImageTopic + has, err := e.Where("name = ?", topicName).Get(&topic) + if err != nil { + return nil, err + } + if !has { + topic.Name = topicName + topic.ImageCount = 1 + if _, err := e.Insert(&topic); err != nil { + return nil, err + } + } else { + topic.ImageCount++ + if _, err := e.ID(topic.ID).Cols("image_count").Update(&topic); err != nil { + return nil, err + } + } + + if _, err := e.Insert(&ImageTopicRelation{ + ImageID: imageID, + TopicID: topic.ID, + }); err != nil { + return nil, err + } + + return &topic, nil +} + +func removeTopicFromImage(e Engine, imageId int64, topic *ImageTopic) error { + topic.ImageCount-- + if _, err := e.ID(topic.ID).Cols("image_count").Update(topic); err != nil { + return err + } + + if _, err := e.Delete(&ImageTopicRelation{ + ImageID: imageId, + TopicID: topic.ID, + }); err != nil { + return err + } + + return nil +} + +func SearchImage(opts *SearchImageOptions) (ImageList, int64, error) { + cond := SearchImageCondition(opts) + return SearchImageByCondition(opts, cond) +} + +func SearchImageCondition(opts *SearchImageOptions) builder.Cond { + var cond = builder.NewCond() + + if len(opts.Keyword) > 0 { + + var subQueryCond = builder.NewCond() + for _, v := range strings.Split(opts.Keyword, ",") { + + subQueryCond = subQueryCond.Or(builder.Like{"LOWER(image_topic.name)", strings.ToLower(v)}) + + } + subQuery := builder.Select("image_topic_relation.image_id").From("image_topic_relation"). + Join("INNER", "image_topic", "image_topic.id = image_topic_relation.topic_id"). + Where(subQueryCond). + GroupBy("image_topic_relation.image_id") + var keywordCond = builder.In("id", subQuery) + + var likes = builder.NewCond() + for _, v := range strings.Split(opts.Keyword, ",") { + likes = likes.Or(builder.Like{"LOWER(tag)", strings.ToLower(v)}) + + likes = likes.Or(builder.Like{"LOWER(description)", strings.ToLower(v)}) + + } + keywordCond = keywordCond.Or(likes) + + cond = cond.And(keywordCond) + + } + if len(opts.Topics) > 0 { //标签精确匹配 + var subQueryCond = builder.NewCond() + for _, v := range strings.Split(opts.Keyword, ",") { + + subQueryCond = subQueryCond.Or(builder.Eq{"LOWER(image_topic.name)": strings.ToLower(v)}) + subQuery := builder.Select("image_topic_relation.image_id").From("image_topic_relation"). + Join("INNER", "image_topic", "image_topic.id = image_topic_relation.topic_id"). + Where(subQueryCond). + GroupBy("image_topic_relation.image_id") + var topicCond = builder.In("id", subQuery) + cond = cond.And(topicCond) + } + } + + if opts.IncludePublicOnly { + cond = cond.And(builder.Eq{"is_private": false}) + } + + if opts.IncludePrivateOnly { + cond = cond.And(builder.Eq{"is_private": true}) + } + + if opts.IncludeOwnerOnly { + + cond = cond.And(builder.Eq{"uid": opts.UID}) + } + if opts.IncludeOfficialOnly { + cond = cond.And(builder.Eq{"type": RECOMMOND_TYPE}) + } + if opts.Status >= 0 { + cond = cond.And(builder.Eq{"status": opts.Status}) + } + + if opts.IncludeStarByMe { + + subQuery := builder.Select("image_id").From("image_star"). 
+ Where(builder.Eq{"uid": opts.UID}) + var starCond = builder.In("id", subQuery) + cond = cond.And(starCond) + + } + + return cond +} + +func SearchImageByCondition(opts *SearchImageOptions, cond builder.Cond) (ImageList, int64, error) { + if opts.Page <= 0 { + opts.Page = 1 + } + + var err error + sess := x.NewSession() + defer sess.Close() + + images := make(ImageList, 0, opts.PageSize) + count, err := sess.Where(cond).Count(new(Image)) + + if err != nil { + return nil, 0, fmt.Errorf("Count: %v", err) + } + + sess.Where(cond).OrderBy(opts.SearchOrderBy.String()) + + if opts.PageSize > 0 { + sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) + } + if err = sess.Find(&images); err != nil { + return nil, 0, fmt.Errorf("Images: %v", err) + } + + if err = images.loadAttributes(sess, opts.UID); err != nil { + return nil, 0, fmt.Errorf("LoadAttributes: %v", err) + } + + return images, count, nil +} + +func (images ImageList) loadAttributes(e Engine, uid int64) error { + if len(images) == 0 { + return nil + } + + set := make(map[int64]struct{}) + + for i := range images { + set[images[i].UID] = struct{}{} + } + + // Load creators. + users := make(map[int64]*User, len(set)) + if err := e.Table("\"user\""). + Cols("name", "lower_name", "avatar", "email"). + Where("id > 0"). + In("id", keysInt64(set)). + Find(&users); err != nil { + return fmt.Errorf("find users: %v", err) + } + + for i := range images { + images[i].UserName = users[images[i].UID].Name + images[i].RelAvatarLink = users[images[i].UID].RelAvatarLink() + if uid == -1 { + images[i].IsStar = false + } else { + images[i].IsStar = isImageStaring(e, uid, images[i].ID) + } + } + + return nil +} + +func GetCommittingImageCount() int { + + total, err := x.Where("status =?", 0).Count(new(Image)) + + if err != nil { + return 0 + } + return int(total) +} + +func CreateLocalImage(image *Image) error { + + _, err := x.Insert(image) + return err +} + +func UpdateLocalImage(image *Image) error { + + _, err := x.ID(image.ID).Cols("description", "is_private", "status").Update(image) + return err +} + +func UpdateLocalImageStatus(image *Image) error { + + _, err := x.ID(image.ID).Cols("status").Update(image) + return err +} + +func DeleteLocalImage(id int64) error { + image := new(Image) + _, err := x.ID(id).Delete(image) + return err +} + +//star or unstar Image +func StarImage(userID, imageID int64, star bool) error { + sess := x.NewSession() + defer sess.Close() + + if err := sess.Begin(); err != nil { + return err + } + + if star { + if isImageStaring(sess, userID, imageID) { + return nil + } + + if _, err := sess.Insert(&ImageStar{UID: userID, ImageID: imageID}); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `image` SET num_stars = num_stars + 1 WHERE id = ?", imageID); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `user` SET num_image_stars = num_image_stars + 1 WHERE id = ?", userID); err != nil { + return err + } + } else { + if !isImageStaring(sess, userID, imageID) { + return nil + } + + if _, err := sess.Delete(&ImageStar{0, userID, imageID, 0}); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `image` SET num_stars = num_stars - 1 WHERE id = ?", imageID); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `user` SET num_image_stars = num_image_stars - 1 WHERE id = ?", userID); err != nil { + return err + } + } + + return sess.Commit() +} + +func IsImageStaring(userID, datasetID int64) bool { + return isImageStaring(x, userID, datasetID) + +} + +func isImageStaring(e Engine, 
userID, imageID int64) bool { + has, _ := e.Get(&ImageStar{0, userID, imageID, 0}) + return has +} +func RecommendImage(imageId int64, recommond bool) error { + + image := Image{Type: getRecommondType(recommond)} + _, err := x.ID(imageId).Cols("type").Update(image) + return err +} + +func getRecommondType(recommond bool) int { + if recommond { + + return RECOMMOND_TYPE + } else { + return NORMAL_TYPE + } + +} diff --git a/models/models.go b/models/models.go index 2ec61941d..2a2e119fb 100755 --- a/models/models.go +++ b/models/models.go @@ -131,6 +131,10 @@ func init() { new(Dataset), new(DatasetStar), new(Cloudbrain), + new(Image), + new(ImageStar), + new(ImageTopic), + new(ImageTopicRelation), new(FileChunk), new(BlockChain), new(RecommendOrg), diff --git a/models/org.go b/models/org.go index 28a6701c5..2a6528023 100755 --- a/models/org.go +++ b/models/org.go @@ -193,22 +193,22 @@ func (org *User) getOrgStatistics() (int, error) { } func FindTopNStarsOrgs(n int) ([]*OrgScore, error) { - sql := "select a.id,sum(b.num_stars) score from \"user\" a ,repository b where a.id=b.owner_id and a.type=1 group by a.id order by score desc limit " + strconv.Itoa(n) + sql := "select a.id,sum(b.num_stars) score from \"user\" a ,repository b where a.id=b.owner_id and a.type=1 and a.visibility=0 group by a.id order by score desc limit " + strconv.Itoa(n) return findTopNOrgs(sql) } func FindTopNMembersOrgs(n int) ([]*OrgScore, error) { sql := "select id, count(user_id) score from" + - " (select org_id as id, uid as user_id from org_user " + + " (select org_id as id, uid as user_id from org_user o, \"user\" u where o.org_id=u.id and u.visibility=0 " + "union select a.id,b.user_id from \"user\" a,collaboration b,repository c " + - "where a.type=1 and a.id=c.owner_id and b.repo_id=c.id) d " + + "where a.type=1 and a.visibility=0 and a.id=c.owner_id and b.repo_id=c.id) d " + "group by id order by score desc limit " + strconv.Itoa(n) return findTopNOrgs(sql) } func FindTopNOpenIOrgs(n int) ([]*OrgScore, error) { - sql := "select org_id id,num_score score from org_statistic order by num_score desc limit " + strconv.Itoa(n) + sql := "select org_id id,num_score score from org_statistic a, \"user\" b where a.org_id=b.id and b.visibility=0 order by num_score desc limit " + strconv.Itoa(n) return findTopNOrgs(sql) } diff --git a/models/user.go b/models/user.go index 71885aeb1..7d4c8ce34 100755 --- a/models/user.go +++ b/models/user.go @@ -157,6 +157,7 @@ type User struct { NumFollowing int `xorm:"NOT NULL DEFAULT 0"` NumStars int NumDatasetStars int `xorm:"NOT NULL DEFAULT 0"` + NumImageStars int `xorm:"NOT NULL DEFAULT 0"` NumRepos int // For organization diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index 65ce642d5..ca8c5071f 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -246,7 +246,7 @@ func QueryUserStaticDataByTableName(start int, pageSize int, tableName string, q } log.Info("query return total:" + fmt.Sprint(allCount)) userBusinessAnalysisAllList := make([]*UserBusinessAnalysisAll, 0) - if err := statictisSess.Table(tableName).Where(cond).OrderBy("commit_count desc,id desc").Limit(pageSize, start). + if err := statictisSess.Table(tableName).Where(cond).OrderBy("user_index desc,id desc").Limit(pageSize, start). 
Find(&userBusinessAnalysisAllList); err != nil { return nil, 0 } @@ -448,6 +448,9 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS var indexTotal int64 indexTotal = 0 insertCount := 0 + userIndexMap := make(map[int64]float64, 0) + maxUserIndex := 0.0 + minUserIndex := 100000000.0 dateRecordBatch := make([]UserBusinessAnalysisAll, 0) for { sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) @@ -494,7 +497,13 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap) dateRecordAll.UserIndex = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight) - + userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndex + if maxUserIndex < dateRecordAll.UserIndex { + maxUserIndex = dateRecordAll.UserIndex + } + if minUserIndex > dateRecordAll.UserIndex { + minUserIndex = dateRecordAll.UserIndex + } dateRecordBatch = append(dateRecordBatch, dateRecordAll) if len(dateRecordBatch) >= BATCH_INSERT_SIZE { insertTable(dateRecordBatch, tableName, statictisSess) @@ -523,9 +532,22 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS } } + //normalization + for k, v := range userIndexMap { + tmpResult := (v - minUserIndex) / (maxUserIndex - minUserIndex) + if tmpResult > 0.99 { + tmpResult = 0.99 + } + updateUserIndex(tableName, statictisSess, k, tmpResult) + } log.Info("refresh data finished.tableName=" + tableName + " total record:" + fmt.Sprint(insertCount)) } +func updateUserIndex(tableName string, statictisSess *xorm.Session, userId int64, userIndex float64) { + updateSql := "UPDATE public." + tableName + " set user_index=" + fmt.Sprint(userIndex*100) + " where id=" + fmt.Sprint(userId) + statictisSess.Exec(updateSql) +} + func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, statictisSess *xorm.Session) { insertBatchSql := "INSERT INTO public." 
+ tableName + diff --git a/modules/auth/cloudbrain.go b/modules/auth/cloudbrain.go index 9d3d6290f..85f3a2127 100755 --- a/modules/auth/cloudbrain.go +++ b/modules/auth/cloudbrain.go @@ -27,9 +27,27 @@ type CreateCloudBrainForm struct { type CommitImageCloudBrainForm struct { Description string `form:"description" binding:"Required"` - Tag string `form:"tag" binding:"Required"` + Type int `form:"type" binding:"Required"` + Tag string `form:"tag" binding:"Required;MaxSize(100)" ` + IsPrivate bool `form:"isPrivate" binding:"Required"` + Topics string `form:"topics"` +} + +type EditImageCloudBrainForm struct { + ID int64 `form:"id" binding:"Required"` + Description string `form:"description" binding:"Required"` + IsPrivate bool `form:"isPrivate" binding:"Required"` + Topics string `form:"topics"` } func (f *CreateCloudBrainForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { return validate(errs, ctx.Data, f, ctx.Locale) } + +func (f *CommitImageCloudBrainForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { + return validate(errs, ctx.Data, f, ctx.Locale) +} + +func (f *EditImageCloudBrainForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { + return validate(errs, ctx.Data, f, ctx.Locale) +} diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go index dab2941d0..eaf680c65 100755 --- a/modules/cloudbrain/cloudbrain.go +++ b/modules/cloudbrain/cloudbrain.go @@ -86,6 +86,18 @@ func isAdminOrJobCreater(ctx *context.Context, job *models.Cloudbrain, err error } +func isAdminOrImageCreater(ctx *context.Context, image *models.Image, err error) bool { + if !ctx.IsSigned { + return false + } + if err != nil { + return ctx.IsUserSiteAdmin() + } else { + return ctx.IsUserSiteAdmin() || ctx.User.ID == image.UID + } + +} + func AdminOrOwnerOrJobCreaterRight(ctx *context.Context) { var ID = ctx.Params(":id") @@ -150,7 +162,31 @@ func AdminOrJobCreaterRightForTrain(ctx *context.Context) { } +func AdminOrImageCreaterRight(ctx *context.Context) { + + id, err := strconv.ParseInt(ctx.Params(":id"), 10, 64) + var image *models.Image + if err != nil { + log.Error("Get Image by ID failed:%v", err.Error()) + + } else { + image, err = models.GetImageByID(id) + if err != nil { + log.Error("Get Image by ID failed:%v", err.Error()) + return + } + } + + if !isAdminOrImageCreater(ctx, image, err) { + log.Error("!isAdminOrImageCreater error:%v", err.Error()) + ctx.NotFound(ctx.Req.URL.RequestURI(), nil) + } + +} + + func GenerateTask(ctx *context.Context, displayJobName, jobName, image, command, uuid, codePath, modelPath, benchmarkPath, snn4imagenetPath, brainScorePath, jobType, gpuQueue, description, branchName, bootFile, params string, benchmarkTypeID, benchmarkChildTypeID, resourceSpecId int) error { + dataActualPath := setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.Attachment.Minio.BasePath + diff --git a/modules/cloudbrain/resty.go b/modules/cloudbrain/resty.go index 46b7c991b..f1f213bea 100755 --- a/modules/cloudbrain/resty.go +++ b/modules/cloudbrain/resty.go @@ -4,9 +4,11 @@ import ( "encoding/json" "errors" "fmt" + "math" "net/http" "strconv" "strings" + "time" "code.gitea.io/gitea/modules/log" @@ -28,6 +30,7 @@ const ( Custom = "custom" LogPageSize = 500 LogPageTokenExpired = "5m" + pageSize = 15 ) func getRestyClient() *resty.Client { @@ -210,6 +213,42 @@ func getQueryString(page int, size int, name string) string { } func CommitImage(jobID string, params models.CommitImageParams) 
error { + + dbImage, err := models.GetImageByTag(params.ImageTag) + + if err != nil && !models.IsErrImageNotExist(err) { + return fmt.Errorf("resty CommitImage: %v", err) + } + var createTime time.Time + var isSetCreatedUnix = false + if dbImage != nil { + if dbImage.UID != params.UID { + return models.ErrorImageTagExist{ + Tag: params.ImageTag, + } + } else { + if dbImage.Status == models.IMAGE_STATUS_COMMIT { + return models.ErrorImageCommitting{ + Tag: params.ImageTag, + } + + } else { //覆盖提交 + + result, err := GetImagesPageable(1, pageSize, Custom, "") + if err == nil && result.Code == "S000" { + for _, v := range result.Payload.ImageInfo { + if v.Place == dbImage.Place { + isSetCreatedUnix = true + createTime, _ = time.Parse(time.RFC3339, v.Createtime) + break + } + } + } + + } + } + } + checkSetting() client := getRestyClient() var result models.CommitImageResult @@ -220,7 +259,7 @@ sendjob: res, err := client.R(). SetHeader("Content-Type", "application/json"). SetAuthToken(TOKEN). - SetBody(params). + SetBody(params.CommitImageCloudBrainParams). SetResult(&result). Post(HOST + "/rest-server/api/v1/jobs/" + jobID + "/commitImage") @@ -238,7 +277,89 @@ sendjob: return fmt.Errorf("CommitImage err: %s", res.String()) } - return nil + image := models.Image{ + Type: models.NORMAL_TYPE, + CloudbrainType: params.CloudBrainType, + UID: params.UID, + IsPrivate: params.IsPrivate, + Tag: params.ImageTag, + Description: params.ImageDescription, + Place: setting.Cloudbrain.ImageURLPrefix + params.ImageTag, + Status: models.IMAGE_STATUS_COMMIT, + } + + err = models.WithTx(func(ctx models.DBContext) error { + if dbImage != nil { + dbImage.IsPrivate = params.IsPrivate + dbImage.Description = params.ImageDescription + dbImage.Status = models.IMAGE_STATUS_COMMIT + image = *dbImage + if err := models.UpdateLocalImage(dbImage); err != nil { + log.Error("Failed to update image record.", err) + return fmt.Errorf("CommitImage err: %s", res.String()) + } + + } else { + if err := models.CreateLocalImage(&image); err != nil { + log.Error("Failed to insert image record.", err) + return fmt.Errorf("CommitImage err: %s", res.String()) + } + } + if err := models.SaveImageTopics(image.ID, params.Topics...); err != nil { + log.Error("Failed to insert image record.", err) + return fmt.Errorf("CommitImage err: %s", res.String()) + } + return nil + }) + if err == nil { + + go updateImageStatus(image, isSetCreatedUnix, createTime) + } + return err +} + +func updateImageStatus(image models.Image, isSetCreatedUnix bool, createTime time.Time) { + attemps := 5 + commitSuccess := false + time.Sleep(5 * time.Second) + for i := 0; i < attemps; i++ { + + if commitSuccess { + break + } + + result, err := GetImagesPageable(1, pageSize, Custom, "") + if err == nil && result.Code == "S000" { + for _, v := range result.Payload.ImageInfo { + if v.Place == image.Place && (!isSetCreatedUnix || (isSetCreatedUnix && createTimeUpdated(v, createTime))) { + image.Status = models.IMAGE_STATUS_SUCCESS + models.UpdateLocalImageStatus(&image) + commitSuccess = true + break + } + + } + + } + //第一次循环等待4秒,第二次等待4的2次方16秒,...,第5次。。。 ,总共大概是20多分钟内进行5次重试 + var sleepTime = time.Duration(int(math.Pow(4, (float64(i + 1))))) + + time.Sleep(sleepTime * time.Second) + + } + if !commitSuccess { + image.Status = models.IMAGE_STATUS_Failed + models.UpdateLocalImageStatus(&image) + } + +} + +func createTimeUpdated(v *models.ImageInfo, createTime time.Time) bool { + newTime, err := time.Parse(time.RFC3339, v.Createtime) + if err != nil { + return false + } + 
return newTime.After(createTime) } func StopJob(jobID string) error { diff --git a/modules/convert/convert.go b/modules/convert/convert.go index fa2e8f2e7..a542fe78b 100755 --- a/modules/convert/convert.go +++ b/modules/convert/convert.go @@ -403,6 +403,16 @@ func ToTopicResponse(topic *models.Topic) *api.TopicResponse { } } +func ToImageTopicResponse(topic *models.ImageTopic) *api.ImageTopicResponse { + return &api.ImageTopicResponse{ + ID: topic.ID, + Name: topic.Name, + ImageCount: topic.ImageCount, + Created: topic.CreatedUnix.AsTime(), + Updated: topic.UpdatedUnix.AsTime(), + } +} + // ToOAuth2Application convert from models.OAuth2Application to api.OAuth2Application func ToOAuth2Application(app *models.OAuth2Application) *api.OAuth2Application { return &api.OAuth2Application{ diff --git a/modules/labelmsg/redismsgsender.go b/modules/labelmsg/redismsgsender.go index 8b2eae772..c06407588 100644 --- a/modules/labelmsg/redismsgsender.go +++ b/modules/labelmsg/redismsgsender.go @@ -50,6 +50,7 @@ func SendDecompressAttachToLabelOBS(attach string) error { _, err := redisclient.Do("Publish", setting.DecompressOBSTaskName, attach) if err != nil { log.Critical("redis Publish failed.") + return err } log.Info("LabelDecompressOBSQueue(%s) success", attach) diff --git a/modules/setting/cloudbrain.go b/modules/setting/cloudbrain.go index c0ab3b275..2d80eea25 100755 --- a/modules/setting/cloudbrain.go +++ b/modules/setting/cloudbrain.go @@ -1,9 +1,10 @@ package setting type CloudbrainLoginConfig struct { - Username string - Password string - Host string + Username string + Password string + Host string + ImageURLPrefix string } var ( @@ -15,5 +16,6 @@ func GetCloudbrainConfig() CloudbrainLoginConfig { Cloudbrain.Username = cloudbrainSec.Key("USERNAME").MustString("") Cloudbrain.Password = cloudbrainSec.Key("PASSWORD").MustString("") Cloudbrain.Host = cloudbrainSec.Key("REST_SERVER_HOST").MustString("") + Cloudbrain.ImageURLPrefix = cloudbrainSec.Key("IMAGE_URL_PREFIX").MustString("") return Cloudbrain } diff --git a/modules/setting/setting.go b/modules/setting/setting.go index 26f068193..eee539d0c 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -438,6 +438,7 @@ var ( //home page RecommentRepoAddr string ESSearchURL string + INDEXPOSTFIX string //notice config UserNameOfNoticeRepo string RepoNameOfNoticeRepo string @@ -1268,6 +1269,7 @@ func NewContext() { sec = Cfg.Section("homepage") RecommentRepoAddr = sec.Key("Address").MustString("https://git.openi.org.cn/OpenIOSSG/promote/raw/branch/master/") ESSearchURL = sec.Key("ESSearchURL").MustString("http://192.168.207.94:9200") + INDEXPOSTFIX = sec.Key("INDEXPOSTFIX").MustString("") sec = Cfg.Section("notice") UserNameOfNoticeRepo = sec.Key("USER_NAME").MustString("OpenIOSSG") diff --git a/modules/storage/obs.go b/modules/storage/obs.go index f733eef6c..08a354359 100755 --- a/modules/storage/obs.go +++ b/modules/storage/obs.go @@ -30,6 +30,8 @@ type FileInfo struct { } type FileInfoList []FileInfo +const MAX_LIST_PARTS = 1000 + func (ulist FileInfoList) Swap(i, j int) { ulist[i], ulist[j] = ulist[j], ulist[i] } func (ulist FileInfoList) Len() int { return len(ulist) } func (ulist FileInfoList) Less(i, j int) bool { @@ -97,29 +99,48 @@ func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error { input.Bucket = setting.Bucket input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") input.UploadId = uploadID - output, err := 
ObsCli.ListParts(&obs.ListPartsInput{ - Bucket: setting.Bucket, - Key: input.Key, - UploadId: uploadID, - }) - if err != nil { - log.Error("ListParts failed:", err.Error()) - return err - } - for _, partInfo := range output.Parts { - input.Parts = append(input.Parts, obs.Part{ - PartNumber: partInfo.PartNumber, - ETag: partInfo.ETag, + partNumberMarker := 0 + for { + output, err := ObsCli.ListParts(&obs.ListPartsInput{ + Bucket: setting.Bucket, + Key: input.Key, + UploadId: uploadID, + MaxParts: MAX_LIST_PARTS, + PartNumberMarker: partNumberMarker, }) + if err != nil { + log.Error("ListParts failed:", err.Error()) + return err + } + + partNumberMarker = output.NextPartNumberMarker + log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, output.MaxParts, output.PartNumberMarker, output.NextPartNumberMarker, len(output.Parts)) + + for _, partInfo := range output.Parts { + input.Parts = append(input.Parts, obs.Part{ + PartNumber: partInfo.PartNumber, + ETag: partInfo.ETag, + }) + } + + if len(output.Parts) < output.MaxParts { + break + } else { + continue + } + + break } - _, err = ObsCli.CompleteMultipartUpload(input) + output, err := ObsCli.CompleteMultipartUpload(input) if err != nil { log.Error("CompleteMultipartUpload failed:", err.Error()) return err } + log.Info("uuid:%s, RequestId:%s", uuid, output.RequestId) + return nil } diff --git a/modules/structs/repo_topic.go b/modules/structs/repo_topic.go index 294d56a95..6fb6a92b4 100644 --- a/modules/structs/repo_topic.go +++ b/modules/structs/repo_topic.go @@ -17,6 +17,14 @@ type TopicResponse struct { Updated time.Time `json:"updated"` } +type ImageTopicResponse struct { + ID int64 `json:"id"` + Name string `json:"topic_name"` + ImageCount int `json:"image_count"` + Created time.Time `json:"created"` + Updated time.Time `json:"updated"` +} + // TopicName a list of repo topic names type TopicName struct { TopicNames []string `json:"topics"` diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 77c6fca8d..006a1e046 100755 --- a/modules/templates/helper.go +++ b/modules/templates/helper.go @@ -92,6 +92,7 @@ func NewFuncMap() []template.FuncMap { "Safe": Safe, "SafeJS": SafeJS, "Str2html": Str2html, + "subOne": subOne, "TimeSince": timeutil.TimeSince, "TimeSinceUnix": timeutil.TimeSinceUnix, "TimeSinceUnix1": timeutil.TimeSinceUnix1, @@ -443,7 +444,10 @@ func SafeJS(raw string) template.JS { func Str2html(raw string) template.HTML { return template.HTML(markup.Sanitize(raw)) } - +// +func subOne(length int)int{ + return length-1 +} // Escape escapes a HTML string func Escape(raw string) string { return html.EscapeString(raw) diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 394b9b7df..14d4e19d1 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -266,6 +266,16 @@ search_related=related search_maybe=maybe search_ge= +wecome_AI_plt = Welcome to OpenI AI Collaboration Platform! +explore_AI = Explore better AI, come here to find more interesting +datasets = Datasets +repositories = Repositories +use_plt__fuction = To use the AI collaboration functions provided by this platform, such as: hosting code, sharing data, debugging algorithms or training models, start with +provide_resoure = Computing resources of CPU/GPU/NPU are provided freely for various types of AI tasks. 
+activity = Activity
+no_events = There are no events related
+or_t = or
+
 [explore]
 repos = Repositories
 select_repos = Select the project
@@ -742,7 +752,7 @@ dataset_setting= Dataset Setting
 title = Name
 title_format_err=Name can only contain number,letter,'-','_' or '.', and can be up to 100 characters long.
 description = Description
-description_format_err=Description's length can be up to 1024 characters long.
+description_format_err=Description's length can be up to %s characters long.
 create_dataset = Create Dataset
 create_dataset_fail=Failed to create dataset.
 query_dataset_fail=Failed to query dataset.
@@ -895,7 +905,7 @@ readme_helper = Select a README file template.
 auto_init = Initialize Repository (Adds .gitignore, License and README)
 create_repo = Create Repository
 create_course = Publish Course
-failed_to_create_course=Fail to publish course, please try again later.
+failed_to_create_course=Failed to publish course, please try again later.
 default_branch = Default Branch
 mirror_prune = Prune
 mirror_prune_desc = Remove obsolete remote-tracking references
@@ -935,10 +945,28 @@ more=More
 gpu_type_all=All
 model_download=Model Download
 submit_image=Submit Image
+modify_image=Modify Image
+image_exist=Image tag has already been used, please use a new one.
+image_committing=Image is being submitted, please try again later.
+image_commit_fail=Failed to submit image, please try again later.
+image_not_exist=Image does not exist.
+image_edit_fail=Failed to edit image, please try again later.
+image_delete_fail=Failed to delete image, please try again later.
+image_overwrite=You have already submitted an image with the same name. Are you sure you want to overwrite the original image?
 download=Download
 score=Score
+images.name = Image Tag
+images.name_placerholder = Please enter the image tag
+image.label_tooltips = e.g. Python 3.7, Tensorflow 2.0, cuda 10, pytorch 1.6
+images.public_tooltips = After the image is set to public, it can be seen by other users.
+images.name_rule = Please enter letters, numbers, _ and -, up to 100 characters, and do not end with a dash (-).
+images.delete_task = Delete Image
+images.task_delete_confirm = Are you sure you want to delete this image? Once this image is deleted, it cannot be recovered.
+

 cloudbrain=Cloudbrain
+cloudbrain.task = Cloudbrain Task
+cloudbrain.search = Search Task Name
 cloudbrain.new=New cloudbrain
 cloudbrain.desc=Cloudbrain
 cloudbrain.cancel=Cancel
@@ -971,7 +999,7 @@ total_count_get_error=Can not get the total page.
 last_update_time_error=Can not get the last updated time.
 get_repo_stat_error=Can not get the statistics of the repository.
 get_repo_info_error=Can not get the information of the repository.
-generate_statistic_file_error=Fail to generate file.
+generate_statistic_file_error=Failed to generate file.
 repo_stat_inspect=ProjectAnalysis
 all=All
@@ -1121,7 +1149,7 @@ form.name_reserved = The repository name '%s' is reserved.
 form.course_name_reserved=The course name '%s' is reserved.
 form.name_pattern_not_allowed = The pattern '%s' is not allowed in a repository name.
 form.course_name_pattern_not_allowed=The pattern '%s' is not allowed in a course name.
-add_course_org_fail=Fail to add organization, please try again later.
+add_course_org_fail=Failed to add organization, please try again later.
 need_auth = Clone Authorization
 migrate_type = Migration Type
@@ -2165,6 +2193,7 @@ topic.manage_topics = Manage Topics
 topic.done = Done
 topic.count_prompt = You can not select more than 25 topics
 topic.format_prompt = Topics must start with a chinese character,a letter or number, can include dashes ('-') and can be up to 35 characters long.
+imagetopic.format_prompt = Topics can be up to 35 characters long.

 [org]
 org_name_holder = Organization Name
@@ -2955,3 +2984,11 @@ gpu_num = GPU
 cpu_num = CPU
 memory = Memory
 shared_memory = Shared Memory
+
+
+DEBUG = DEBUG
+SNN4IMAGENET = SNN4IMAGENET
+BRAINSCORE = BRAINSCORE
+TRAIN = TRAIN
+INFERENCE = INFERENCE
+BENCHMARK = BENCHMARK
\ No newline at end of file
diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini
index 8982d59d7..de9f79319 100755
--- a/options/locale/locale_zh-CN.ini
+++ b/options/locale/locale_zh-CN.ini
@@ -268,6 +268,18 @@ search_related=相关
 search_maybe=约为
 search_ge=个
+wecome_AI_plt=欢迎来到启智AI协作平台!
+explore_AI = 探索更好的AI,来这里发现更有意思的
+datasets = 数据集
+repositories = 项目
+use_plt__fuction = 使用本平台提供的AI协作功能,如:托管代码、共享数据、调试算法或训练模型,请先
+provide_resoure = 平台目前免费提供CPU、GPU、NPU的算力资源,可进行多种类型的AI任务。
+create_pro = 创建项目
+activity = 活动
+no_events = 还没有与您相关的活动
+or_t = 或
+
+
 [explore]
 repos=项目
 select_repos=精选项目
@@ -745,7 +757,7 @@ dataset_setting=数据集设置
 title=名称
 title_format_err=名称最多允许输入100个字符,只允许字母,数字,中划线 (‘-’),下划线 (‘_’) 和点 (‘.’) 。
 description=描述
-description_format_err=描述最多允许输入1024个字符。
+description_format_err=描述最多允许输入%s个字符。
 create_dataset=创建数据集
 create_dataset_fail=创建数据集失败。
 query_dataset_fail=查询数据集失败。
@@ -934,10 +946,29 @@ more=更多
 gpu_type_all=全部
 model_download=结果下载
 submit_image=提交镜像
+modify_image=修改镜像
+image_exist=镜像Tag已被使用,请修改镜像Tag。
+image_committing=镜像正在提交中,请稍后再试。
+image_commit_fail=提交镜像失败,请稍后再试。
+image_not_exist=镜像不存在。
+image_edit_fail=编辑镜像失败,请稍后再试。
+image_delete_fail=删除镜像失败,请稍后再试。
+image_overwrite=您已经提交过相同名称的镜像,您确定要覆盖原来提交的镜像吗?
download=模型下载 score=评分 + +images.name = 镜像Tag +images.name_placerholder = 请输入镜像Tag +image.label_tooltips = 如Python 3.7, Tensorflow 2.0, cuda 10, pytorch 1.6 +images.public_tooltips = 镜像设置为公开后,可被其他用户看到。 +images.name_rule = 请输入字母、数字、_和-,最长100个字符,且不能以中划线(-)结尾。 +images.delete_task = 删除镜像 +images.task_delete_confirm = 你确认删除该镜像么?此镜像一旦删除不可恢复。 + cloudbrain=云脑 +cloudbrain.task = 云脑任务 +cloudbrain.search = 搜索任务名称 cloudbrain.new=新建任务 cloudbrain.desc=云脑功能 cloudbrain.cancel=取消 @@ -1015,7 +1046,9 @@ modelarts.train_job.basic_info=基本信息 modelarts.train_job.job_status=任务状态 modelarts.train_job.job_name=任务名称 modelarts.train_job.version=任务版本 -modelarts.train_job.start_time=开始时间 +modelarts.train_job.start_time=开始运行时间 +modelarts.train_job.end_time=运行结束时间 +modelarts.train_job.wait_time=等待时间 modelarts.train_job.dura_time=运行时长 modelarts.train_job.description=任务描述 modelarts.train_job.parameter_setting=参数设置 @@ -2168,8 +2201,9 @@ branch.included=已包含 topic.manage_topics=管理主题 topic.done=保存 -topic.count_prompt=您最多选择25个主题 -topic.format_prompt=主题必须以中文、字母或数字开头,可以包含连字符 (-),并且长度不得超过35个字符 +topic.count_prompt=您最多选择25个标签 +topic.format_prompt=标签必须以中文、字母或数字开头,可以包含连字符 (-),并且长度不得超过35个字符 +imagetopic.format_prompt=标签长度不得超过35个字符 [org] org_name_holder=组织名称 @@ -2956,8 +2990,16 @@ task_delete_confirm = 你确认删除该任务么?此任务一旦删除不可 operate_confirm = 确定操作 operate_cancel = 取消操作 + gpu_num = GPU数 cpu_num = CPU数 memory = 内存 shared_memory = 共享内存 +DEBUG = 调试任务 +SNN4IMAGENET = 调试任务-脉冲神经网络图片分类测评 +BRAINSCORE = 调试任务-神经相似性测评 +TRAIN = 训练任务 +INFERENCE = 推理任务 +BENCHMARK = 评测任务 + diff --git a/public/img/jian.svg b/public/img/jian.svg new file mode 100644 index 000000000..0fc47c1b7 --- /dev/null +++ b/public/img/jian.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/routers/admin/cloudbrains.go b/routers/admin/cloudbrains.go index 884ed6b9b..91d866093 100755 --- a/routers/admin/cloudbrains.go +++ b/routers/admin/cloudbrains.go @@ -20,6 +20,7 @@ import ( const ( tplCloudBrains base.TplName = "admin/cloudbrain/list" + tplImages base.TplName = "admin/cloudbrain/images" EXCEL_DATE_FORMAT = "20060102150405" CREATE_TIME_FORMAT = "2006/01/02 15:04:05" ) @@ -107,6 +108,12 @@ func CloudBrains(ctx *context.Context) { } +func Images(ctx *context.Context) { + ctx.Data["PageIsAdminImages"] = true + ctx.HTML(200, tplImages) + +} + func DownloadCloudBrains(ctx *context.Context) { page := 1 diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index 9de65662f..2b070a4b8 100755 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -557,6 +557,10 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/query_user_last_month", operationReq, repo_ext.QueryUserStaticLastMonth) m.Get("/query_user_yesterday", operationReq, repo_ext.QueryUserStaticYesterday) m.Get("/query_user_all", operationReq, repo_ext.QueryUserStaticAll) + //cloudbrain board + m.Group("/cloudbrainboard", func() { + m.Get("/downloadAll", repo.DownloadCloudBrainBoard) + }, operationReq) // Users m.Group("/users", func() { m.Get("/search", user.Search) @@ -1007,6 +1011,9 @@ func RegisterRoutes(m *macaron.Macaron) { m.Group("/topics", func() { m.Get("/search", repo.TopicSearch) }) + m.Group("/image/topics", func() { + m.Get("/search", repo.ImageTopicSearch) + }) m.Group("/from_wechat", func() { m.Get("/event", authentication.ValidEventSource) m.Post("/event", authentication.AcceptWechatEvent) diff --git a/routers/api/v1/repo/cloudbrain_dashboard.go b/routers/api/v1/repo/cloudbrain_dashboard.go new file mode 100644 index 000000000..b979729a8 --- /dev/null +++ b/routers/api/v1/repo/cloudbrain_dashboard.go 
@@ -0,0 +1,135 @@ +package repo + +import ( + "net/http" + "net/url" + "time" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + "github.com/360EntSecGroup-Skylar/excelize/v2" +) + +func DownloadCloudBrainBoard(ctx *context.Context) { + + page := 1 + + pageSize := 300 + + var cloudBrain = ctx.Tr("repo.cloudbrain") + fileName := getCloudbrainFileName(cloudBrain) + + _, total, err := models.CloudbrainAll(&models.CloudbrainsOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: 1, + }, + Type: models.TypeCloudBrainAll, + NeedRepoInfo: false, + }) + + if err != nil { + log.Warn("Can not get cloud brain info", err) + ctx.Error(http.StatusBadRequest, ctx.Tr("repo.cloudbrain_query_fail")) + return + } + + totalPage := getTotalPage(total, pageSize) + + f := excelize.NewFile() + + index := f.NewSheet(cloudBrain) + f.DeleteSheet("Sheet1") + + for k, v := range allCloudbrainHeader(ctx) { + f.SetCellValue(cloudBrain, k, v) + } + + var row = 2 + for i := 0; i < totalPage; i++ { + + pageRecords, _, err := models.CloudbrainAll(&models.CloudbrainsOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: pageSize, + }, + Type: models.TypeCloudBrainAll, + NeedRepoInfo: true, + }) + if err != nil { + log.Warn("Can not get cloud brain info", err) + continue + } + for _, record := range pageRecords { + + for k, v := range allCloudbrainValues(row, record, ctx) { + f.SetCellValue(cloudBrain, k, v) + } + row++ + + } + + page++ + } + f.SetActiveSheet(index) + + ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(fileName)) + ctx.Resp.Header().Set("Content-Type", "application/octet-stream") + + f.WriteTo(ctx.Resp) +} +func getCloudbrainFileName(baseName string) string { + return baseName + "_" + time.Now().Format(EXCEL_DATE_FORMAT) + ".xlsx" + +} +func allCloudbrainHeader(ctx *context.Context) map[string]string { + + return map[string]string{"A1": ctx.Tr("repo.cloudbrain_task"), "B1": ctx.Tr("repo.cloudbrain_task_type"), "C1": ctx.Tr("repo.modelarts.status"), + "D1": ctx.Tr("repo.modelarts.createtime"), "E1": ctx.Tr("repo.modelarts.train_job.wait_time"), "F1": ctx.Tr("repo.modelarts.train_job.dura_time"), + "G1": ctx.Tr("repo.modelarts.train_job.start_time"), + "H1": ctx.Tr("repo.modelarts.train_job.end_time"), "I1": ctx.Tr("repo.modelarts.computing_resources"), + "J1": ctx.Tr("repo.cloudbrain_creator"), "K1": ctx.Tr("repo.repo_name"), "L1": ctx.Tr("repo.cloudbrain_task_name")} + +} +func allCloudbrainValues(row int, rs *models.CloudbrainInfo, ctx *context.Context) map[string]string { + return map[string]string{getCellName("A", row): rs.DisplayJobName, getCellName("B", row): rs.JobType, getCellName("C", row): rs.Status, + getCellName("D", row): time.Unix(int64(rs.Cloudbrain.CreatedUnix), 0).Format(CREATE_TIME_FORMAT), getCellName("E", row): getBrainWaitTime(rs), + getCellName("F", row): rs.TrainJobDuration, getCellName("G", row): getBrainStartTime(rs), + getCellName("H", row): getBrainEndTime(rs), + getCellName("I", row): rs.ComputeResource, getCellName("J", row): rs.Name, getCellName("K", row): getBrainRepo(rs), + getCellName("L", row): rs.JobName, + } +} +func getBrainRepo(rs *models.CloudbrainInfo) string { + if rs.Repo != nil { + return rs.Repo.OwnerName + "/" + rs.Repo.Alias + } + return "" +} +func getBrainStartTime(rs *models.CloudbrainInfo) string { + timeString := time.Unix(int64(rs.Cloudbrain.StartTime), 0).Format(CREATE_TIME_FORMAT) + if timeString != "1970/01/01 08:00:00" { + return 
timeString + } else { + return "0" + } + +} +func getBrainEndTime(rs *models.CloudbrainInfo) string { + timeString := time.Unix(int64(rs.Cloudbrain.EndTime), 0).Format(CREATE_TIME_FORMAT) + if timeString != "1970/01/01 08:00:00" { + return timeString + } else { + return "0" + } + +} +func getBrainWaitTime(rs *models.CloudbrainInfo) string { + waitTime := rs.Cloudbrain.StartTime - rs.Cloudbrain.CreatedUnix + if waitTime <= 0 { + return "0" + } else { + return models.ConvertDurationToStr(int64(waitTime)) + } +} diff --git a/routers/api/v1/repo/modelarts.go b/routers/api/v1/repo/modelarts.go index e24ac95fb..9e4edea03 100755 --- a/routers/api/v1/repo/modelarts.go +++ b/routers/api/v1/repo/modelarts.go @@ -74,6 +74,7 @@ func GetModelArtsNotebook2(ctx *context.APIContext) { if job.EndTime == 0 && models.IsModelArtsDebugJobTerminal(job.Status) { job.EndTime = timeutil.TimeStampNow() } + job.CorrectCreateUnix() job.ComputeAndSetDuration() err = models.UpdateJob(job) if err != nil { @@ -160,6 +161,7 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) { } if result.JobStatus.State != string(models.JobWaiting) { + models.ParseAndSetDurationFromCloudBrainOne(result, job) err = models.UpdateJob(job) if err != nil { log.Error("UpdateJob failed:", err) @@ -177,14 +179,12 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) { } job.Status = modelarts.TransTrainJobStatus(result.IntStatus) job.Duration = result.Duration / 1000 - job.TrainJobDuration = result.TrainJobDuration - job.TrainJobDuration = models.ConvertDurationToStr(job.Duration) if job.EndTime == 0 && models.IsTrainJobTerminal(job.Status) && job.StartTime > 0 { job.EndTime = job.StartTime.Add(job.Duration) } - + job.CorrectCreateUnix() err = models.UpdateTrainJobVersion(job) if err != nil { log.Error("UpdateJob failed:", err) @@ -417,7 +417,7 @@ func GetModelArtsInferenceJob(ctx *context.APIContext) { if job.EndTime == 0 && models.IsTrainJobTerminal(job.Status) && job.StartTime > 0 { job.EndTime = job.StartTime.Add(job.Duration) } - + job.CorrectCreateUnix() err = models.UpdateInferenceJob(job) if err != nil { log.Error("UpdateJob failed:", err) diff --git a/routers/api/v1/repo/topic.go b/routers/api/v1/repo/topic.go index 530b92a10..f4ff7a329 100644 --- a/routers/api/v1/repo/topic.go +++ b/routers/api/v1/repo/topic.go @@ -300,3 +300,63 @@ func TopicSearch(ctx *context.APIContext) { "topics": topicResponses, }) } + +func ImageTopicSearch(ctx *context.APIContext) { + // swagger:operation GET /image/topics/search image topicSearch + // --- + // summary: search topics via keyword + // produces: + // - application/json + // parameters: + // - name: q + // in: query + // description: keywords to search + // required: true + // type: string + // - name: page + // in: query + // description: page number of results to return (1-based) + // type: integer + // - name: limit + // in: query + // description: page size of results, maximum page size is 50 + // type: integer + // responses: + // "200": + // "$ref": "#/responses/TopicListResponse" + // "403": + // "$ref": "#/responses/forbidden" + + if ctx.User == nil { + ctx.Error(http.StatusForbidden, "UserIsNil", "Only owners could change the topics.") + return + } + + kw := ctx.Query("q") + + listOptions := utils.GetListOptions(ctx) + if listOptions.Page < 1 { + listOptions.Page = 1 + } + if listOptions.PageSize < 1 { + listOptions.PageSize = 10 + } + + topics, err := models.FindImageTopics(&models.FindImageTopicOptions{ + Keyword: kw, + ListOptions: listOptions, + }) + if err != nil { + 
log.Error("SearchImageTopics failed: %v", err) + ctx.InternalServerError(err) + return + } + + topicResponses := make([]*api.ImageTopicResponse, len(topics)) + for i, topic := range topics { + topicResponses[i] = convert.ToImageTopicResponse(topic) + } + ctx.JSON(http.StatusOK, map[string]interface{}{ + "topics": topicResponses, + }) +} diff --git a/routers/image/image.go b/routers/image/image.go new file mode 100644 index 000000000..ae9912e3d --- /dev/null +++ b/routers/image/image.go @@ -0,0 +1,30 @@ +package image + +import ( + "net/http" + "strconv" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/context" +) + +func Action(ctx *context.Context) { + var err error + imageId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64) + switch ctx.Params(":action") { + + case "star": + err = models.StarImage(ctx.User.ID, imageId, true) + case "unstar": + err = models.StarImage(ctx.User.ID, imageId, false) + case "recommend": + err = models.RecommendImage(imageId, true) + case "unrecommend": + err = models.RecommendImage(imageId, false) + } + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action")))) + } else { + ctx.JSON(http.StatusOK, models.BaseOKMessage) + } +} diff --git a/routers/repo/attachment.go b/routers/repo/attachment.go index 96f17b74b..3c66a3537 100755 --- a/routers/repo/attachment.go +++ b/routers/repo/attachment.go @@ -78,7 +78,7 @@ func UploadAttachmentUI(ctx *context.Context) { } func EditAttachmentUI(ctx *context.Context) { - + id, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64) ctx.Data["PageIsDataset"] = true attachment, _ := models.GetAttachmentByID(id) @@ -986,23 +986,29 @@ func HandleUnDecompressAttachment() { if attach.Type == models.TypeCloudBrainOne { err = worker.SendDecompressTask(contexExt.Background(), attach.UUID, attach.Name) if err != nil { - log.Error("SendDecompressTask(%s) failed:%s", attach.UUID, err.Error()) + log.Error("SendDecompressTask(%s) failed:%s", attach.UUID, err.Error()) } else { - attach.DecompressState = models.DecompressStateIng - err = models.UpdateAttachment(attach) - if err != nil { - log.Error("UpdateAttachment state(%s) failed:%s", attach.UUID, err.Error()) - } + updateAttachmentDecompressStateIng(attach) } } else if attach.Type == models.TypeCloudBrainTwo { attachjson, _ := json.Marshal(attach) - labelmsg.SendDecompressAttachToLabelOBS(string(attachjson)) + err = labelmsg.SendDecompressAttachToLabelOBS(string(attachjson)) + if err != nil { + log.Error("SendDecompressTask to labelsystem (%s) failed:%s", attach.UUID, err.Error()) + } else { + updateAttachmentDecompressStateIng(attach) + } } - } - return } +func updateAttachmentDecompressStateIng(attach *models.Attachment) { + attach.DecompressState = models.DecompressStateIng + err := models.UpdateAttachment(attach) + if err != nil { + log.Error("UpdateAttachment state(%s) failed:%s", attach.UUID, err.Error()) + } +} func QueryAllPublicDataset(ctx *context.Context) { attachs, err := models.GetAllPublicAttachments() diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index fecd34faf..6b3452656 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -13,6 +13,7 @@ import ( "strconv" "strings" "time" + "unicode/utf8" "code.gitea.io/gitea/modules/timeutil" "github.com/unknwon/i18n" @@ -39,8 +40,13 @@ const ( tplCloudBrainBenchmarkNew base.TplName = "repo/cloudbrain/benchmark/new" tplCloudBrainBenchmarkShow base.TplName = "repo/cloudbrain/benchmark/show" + tplCloudBrainImageSubmit 
base.TplName = "repo/cloudbrain/image/submit" + tplCloudBrainImageEdit base.TplName = "repo/cloudbrain/image/edit" + + tplCloudBrainTrainJobNew base.TplName = "repo/cloudbrain/trainjob/new" tplCloudBrainTrainJobShow base.TplName = "repo/cloudbrain/trainjob/show" + ) var ( @@ -436,15 +442,29 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo return } - if cloudbrain.ResourceSpecs == nil { - json.Unmarshal([]byte(setting.ResourceSpecs), &cloudbrain.ResourceSpecs) - } - for _, tmp := range cloudbrain.ResourceSpecs.ResourceSpec { - if tmp.Id == task.ResourceSpecId { - ctx.Data["GpuNum"] = tmp.GpuNum - ctx.Data["CpuNum"] = tmp.CpuNum - ctx.Data["MemMiB"] = tmp.MemMiB - ctx.Data["ShareMemMiB"] = tmp.ShareMemMiB + if task.JobType == string(models.JobTypeTrain) { + if cloudbrain.TrainResourceSpecs == nil { + json.Unmarshal([]byte(setting.TrainResourceSpecs), &cloudbrain.TrainResourceSpecs) + } + for _, tmp := range cloudbrain.TrainResourceSpecs.ResourceSpec { + if tmp.Id == task.ResourceSpecId { + ctx.Data["GpuNum"] = tmp.GpuNum + ctx.Data["CpuNum"] = tmp.CpuNum + ctx.Data["MemMiB"] = tmp.MemMiB + ctx.Data["ShareMemMiB"] = tmp.ShareMemMiB + } + } + } else { + if cloudbrain.ResourceSpecs == nil { + json.Unmarshal([]byte(setting.ResourceSpecs), &cloudbrain.ResourceSpecs) + } + for _, tmp := range cloudbrain.ResourceSpecs.ResourceSpec { + if tmp.Id == task.ResourceSpecId { + ctx.Data["GpuNum"] = tmp.GpuNum + ctx.Data["CpuNum"] = tmp.CpuNum + ctx.Data["MemMiB"] = tmp.MemMiB + ctx.Data["ShareMemMiB"] = tmp.ShareMemMiB + } } } @@ -590,26 +610,165 @@ func CloudBrainDebug(ctx *context.Context) { ctx.Redirect(debugUrl) } +func CloudBrainCommitImageShow(ctx *context.Context) { + ctx.Data["PageIsCloudBrain"] = true + ctx.Data["Type"] = ctx.Cloudbrain.Type + ctx.HTML(200, tplCloudBrainImageSubmit) +} + +func CloudBrainImageEdit(ctx *context.Context) { + ctx.Data["PageIsImageEdit"] = true + ctx.Data["PageFrom"] = ctx.Params(":from") + var ID = ctx.Params(":id") + id, err := strconv.ParseInt(ID, 10, 64) + if err != nil { + log.Error("GetImageByID failed:%v", err.Error()) + ctx.NotFound(ctx.Req.URL.RequestURI(), nil) + } + image, err := models.GetImageByID(id) + if err != nil { + log.Error("GetImageByID failed:%v", err.Error()) + ctx.NotFound(ctx.Req.URL.RequestURI(), nil) + } + ctx.Data["Image"] = image + ctx.HTML(http.StatusOK, tplCloudBrainImageEdit) + +} + +func CloudBrainImageEditPost(ctx *context.Context, form auth.EditImageCloudBrainForm) { + + if utf8.RuneCountInString(form.Description) > 255 { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 255))) + return + } + + validTopics, errMessage := checkTopics(form.Topics) + if errMessage != "" { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr(errMessage))) + return + } + image, err := models.GetImageByID(form.ID) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.image_not_exist"))) + + } + + image.IsPrivate = form.IsPrivate + image.Description = form.Description + + err = models.WithTx(func(ctx models.DBContext) error { + if err := models.UpdateLocalImage(image); err != nil { + return err + } + if err := models.SaveImageTopics(image.ID, validTopics...); err != nil { + return err + } + return nil + + }) + + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.image_not_exist"))) + + } else { + ctx.JSON(http.StatusOK, models.BaseOKMessage) + } + +} + +func CloudBrainImageDelete(ctx *context.Context) { + var ID = 
ctx.Params(":id") + id, err := strconv.ParseInt(ID, 10, 64) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.image_not_exist"))) + return + } + + err = models.DeleteLocalImage(id) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.image_delete_fail"))) + } else { + ctx.JSON(http.StatusOK, models.BaseOKMessage) + } + +} + +func CloudBrainCommitImageCheck(ctx *context.Context, form auth.CommitImageCloudBrainForm) { + isExist, _ := models.IsImageExistByUser(form.Tag, ctx.User.ID) + if isExist { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.image_overwrite"))) + } else { + ctx.JSON(http.StatusOK, models.BaseOKMessage) + } + +} + func CloudBrainCommitImage(ctx *context.Context, form auth.CommitImageCloudBrainForm) { + + if !NamePattern.MatchString(form.Tag) { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err"))) + return + } + + if utf8.RuneCountInString(form.Description) > 255 { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 255))) + return + } + + validTopics, errMessage := checkTopics(form.Topics) + if errMessage != "" { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr(errMessage))) + return + } + err := cloudbrain.CommitImage(ctx.Cloudbrain.JobID, models.CommitImageParams{ - Ip: ctx.Cloudbrain.ContainerIp, - TaskContainerId: ctx.Cloudbrain.ContainerID, - ImageDescription: form.Description, - ImageTag: form.Tag, + CommitImageCloudBrainParams: models.CommitImageCloudBrainParams{ + Ip: ctx.Cloudbrain.ContainerIp, + TaskContainerId: ctx.Cloudbrain.ContainerID, + ImageDescription: form.Description, + ImageTag: form.Tag, + }, + IsPrivate: form.IsPrivate, + CloudBrainType: form.Type, + Topics: validTopics, + UID: ctx.User.ID, }) if err != nil { log.Error("CommitImage(%s) failed:%v", ctx.Cloudbrain.JobName, err.Error(), ctx.Data["msgID"]) - ctx.JSON(200, map[string]string{ - "result_code": "-1", - "error_msg": "CommitImage failed", - }) + if models.IsErrImageTagExist(err) { + ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_exist"))) + + } else if models.IsErrorImageCommitting(err) { + ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_committing"))) + } else { + ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_commit_fail"))) + } + return } - ctx.JSON(200, map[string]string{ - "result_code": "0", - "error_msg": "", - }) + ctx.JSON(200, models.BaseOKMessage) +} + +func checkTopics(Topics string) ([]string, string) { + var topics = make([]string, 0) + var topicsStr = strings.TrimSpace(Topics) + if len(topicsStr) > 0 { + topics = strings.Split(topicsStr, ",") + } + + validTopics, invalidTopics := models.SanitizeAndValidateImageTopics(topics) + + if len(validTopics) > 25 { + return nil, "repo.topic.count_prompt" + + } + + if len(invalidTopics) > 0 { + return nil, "repo.imagetopic.format_prompt" + + } + return validTopics, "" } func CloudBrainStop(ctx *context.Context) { @@ -754,8 +913,11 @@ func CloudBrainDel(ctx *context.Context) { } var isAdminPage = ctx.Query("isadminpage") + var isHomePage = ctx.Query("ishomepage") if ctx.IsUserSiteAdmin() && isAdminPage == "true" { ctx.Redirect(setting.AppSubURL + "/admin" + "/cloudbrains") + } else if isHomePage == "true" { + ctx.Redirect(setting.AppSubURL + "/cloudbrains") } else { ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/debugjob?debugListType=" + listType) } @@ -826,35 +988,106 @@ func CloudBrainShowModels(ctx *context.Context) { } func GetPublicImages(ctx 
*context.Context) { + uid := getUID(ctx) + opts := models.SearchImageOptions{ + IncludePublicOnly: true, + UID: uid, + Keyword: ctx.Query("q"), + Topics: ctx.Query("topic"), + IncludeOfficialOnly: ctx.QueryBool("recommend"), + SearchOrderBy: "type desc, num_stars desc,id desc", + Status: models.IMAGE_STATUS_SUCCESS, + } - getImages(ctx, cloudbrain.Public) + getImages(ctx, &opts) } func GetCustomImages(ctx *context.Context) { + uid := getUID(ctx) + opts := models.SearchImageOptions{ + UID: uid, + IncludeOwnerOnly: true, + Keyword: ctx.Query("q"), + Topics: ctx.Query("topic"), + Status: -1, + SearchOrderBy: "id desc", + } + getImages(ctx, &opts) - getImages(ctx, cloudbrain.Custom) +} +func GetStarImages(ctx *context.Context) { + + uid := getUID(ctx) + opts := models.SearchImageOptions{ + UID: uid, + IncludeStarByMe: true, + Keyword: ctx.Query("q"), + Topics: ctx.Query("topic"), + Status: models.IMAGE_STATUS_SUCCESS, + SearchOrderBy: "id desc", + } + getImages(ctx, &opts) } -func getImages(ctx *context.Context, imageType string) { - log.Info("Get images begin") +func getUID(ctx *context.Context) int64 { + var uid int64 = -1 + if ctx.IsSigned { + uid = ctx.User.ID + } + return uid +} + +func GetAllImages(ctx *context.Context) { + uid := getUID(ctx) + opts := models.SearchImageOptions{ + UID: uid, + Keyword: ctx.Query("q"), + Topics: ctx.Query("topic"), + IncludeOfficialOnly: ctx.QueryBool("recommend"), + SearchOrderBy: "id desc", + Status: -1, + } + + if ctx.Query("private") != "" { + if ctx.QueryBool("private") { + opts.IncludePrivateOnly = true + } else { + opts.IncludePublicOnly = true + } + } + getImages(ctx, &opts) +} + +func getImages(ctx *context.Context, opts *models.SearchImageOptions) { page := ctx.QueryInt("page") - size := ctx.QueryInt("size") - name := ctx.Query("name") - getImagesResult, err := cloudbrain.GetImagesPageable(page, size, imageType, name) + if page <= 0 { + page = 1 + } + + pageSize := ctx.QueryInt("pageSize") + if pageSize <= 0 { + pageSize = 15 + } + opts.ListOptions = models.ListOptions{ + Page: page, + PageSize: pageSize, + } + imageList, total, err := models.SearchImage(opts) if err != nil { log.Error("Can not get images:%v", err) - ctx.JSON(http.StatusOK, models.GetImagesPayload{ - Count: 0, - TotalPages: 0, - ImageInfo: []*models.ImageInfo{}, + ctx.JSON(http.StatusOK, models.ImagesPageResult{ + Count: 0, + Images: []*models.Image{}, }) } else { - ctx.JSON(http.StatusOK, getImagesResult.Payload) + ctx.JSON(http.StatusOK, models.ImagesPageResult{ + Count: total, + Images: imageList, + }) } - log.Info("Get images end") } func GetModelDirs(jobName string, parentDir string) (string, error) { @@ -1158,6 +1391,7 @@ func SyncCloudbrainStatus() { if task.EndTime == 0 && models.IsModelArtsDebugJobTerminal(task.Status) { task.EndTime = timeutil.TimeStampNow() } + task.CorrectCreateUnix() task.ComputeAndSetDuration() err = models.UpdateJob(task) if err != nil { @@ -1184,7 +1418,7 @@ func SyncCloudbrainStatus() { if task.EndTime == 0 && models.IsTrainJobTerminal(task.Status) && task.StartTime > 0 { task.EndTime = task.StartTime.Add(task.Duration) } - + task.CorrectCreateUnix() err = models.UpdateJob(task) if err != nil { log.Error("UpdateJob(%s) failed:%v", task.JobName, err) @@ -1306,6 +1540,7 @@ func handleNoDurationTask(cloudBrains []*models.Cloudbrain) { task.StartTime = timeutil.TimeStamp(startTime / 1000) task.EndTime = task.StartTime.Add(duration) } + task.CorrectCreateUnix() task.ComputeAndSetDuration() err = models.UpdateJob(task) if err != nil { @@ -1695,8 
+1930,11 @@ func BenchmarkDel(ctx *context.Context) { } var isAdminPage = ctx.Query("isadminpage") + var isHomePage = ctx.Query("ishomepage") if ctx.IsUserSiteAdmin() && isAdminPage == "true" { ctx.Redirect(setting.AppSubURL + "/admin" + "/cloudbrains") + } else if isHomePage == "true" { + ctx.Redirect(setting.AppSubURL + "/cloudbrains") } else { ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/cloudbrain/benchmark") } @@ -1749,8 +1987,11 @@ func CloudBrainTrainJobDel(ctx *context.Context) { } var isAdminPage = ctx.Query("isadminpage") + var isHomePage = ctx.Query("ishomepage") if ctx.IsUserSiteAdmin() && isAdminPage == "true" { ctx.Redirect(setting.AppSubURL + "/admin" + "/cloudbrains") + } else if isHomePage == "true" { + ctx.Redirect(setting.AppSubURL + "/cloudbrains") } else { ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job?listType=" + listType) } diff --git a/routers/repo/dataset.go b/routers/repo/dataset.go index d23722372..1a3762be3 100755 --- a/routers/repo/dataset.go +++ b/routers/repo/dataset.go @@ -4,7 +4,6 @@ import ( "encoding/json" "fmt" "net/http" - "regexp" "sort" "strconv" "strings" @@ -25,8 +24,6 @@ const ( taskstplIndex base.TplName = "repo/datasets/tasks/index" ) -var titlePattern = regexp.MustCompile(`^[A-Za-z0-9-_\\.]{1,100}$`) - // MustEnableDataset check if repository enable internal dataset func MustEnableDataset(ctx *context.Context) { if !ctx.Repo.CanRead(models.UnitTypeDatasets) { @@ -211,12 +208,12 @@ func CreateDatasetPost(ctx *context.Context, form auth.CreateDatasetForm) { dataset := &models.Dataset{} - if !titlePattern.MatchString(form.Title) { + if !NamePattern.MatchString(form.Title) { ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err"))) return } if utf8.RuneCountInString(form.Description) > 1024 { - ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err"))) + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 1024))) return } @@ -248,12 +245,12 @@ func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) { ctx.Data["Title"] = ctx.Tr("dataset.edit_dataset") - if !titlePattern.MatchString(form.Title) { + if !NamePattern.MatchString(form.Title) { ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err"))) return } if utf8.RuneCountInString(form.Description) > 1024 { - ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err"))) + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 1024))) return } diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index 29387133d..19108c69f 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -267,6 +267,7 @@ func NotebookShow(ctx *context.Context) { if task.DeletedAt.IsZero() { //normal record if task.Status != result.Status { task.Status = result.Status + models.ParseAndSetDurationFromModelArtsNotebook(result, task) err = models.UpdateJob(task) if err != nil { ctx.Data["error"] = err.Error() @@ -533,8 +534,11 @@ func NotebookDel(ctx *context.Context) { } var isAdminPage = ctx.Query("isadminpage") + var isHomePage = ctx.Query("ishomepage") if ctx.IsUserSiteAdmin() && isAdminPage == "true" { ctx.Redirect(setting.AppSubURL + "/admin" + "/cloudbrains") + } else if isHomePage == "true" { + ctx.Redirect(setting.AppSubURL + "/cloudbrains") } else { ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/debugjob?debugListType=" + listType) } @@ -1684,8 
+1688,11 @@ func TrainJobDel(ctx *context.Context) { } var isAdminPage = ctx.Query("isadminpage") + var isHomePage = ctx.Query("ishomepage") if ctx.IsUserSiteAdmin() && isAdminPage == "true" { ctx.Redirect(setting.AppSubURL + "/admin" + "/cloudbrains") + } else if isHomePage == "true" { + ctx.Redirect(setting.AppSubURL + "/cloudbrains") } else { ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job?listType=" + listType) } @@ -2034,6 +2041,7 @@ func InferenceJobNew(ctx *context.Context) { } func inferenceJobNewDataPrepare(ctx *context.Context) error { ctx.Data["PageIsCloudBrain"] = true + ctx.Data["newInference"] = true t := time.Now() var displayJobName = cutString(ctx.User.Name, 5) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 9d906270f..8bc9dc458 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -41,24 +41,24 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac "A1": ctx.Tr("user.static.id"), "B1": ctx.Tr("user.static.name"), "C1": ctx.Tr("user.static.codemergecount"), - "D1": ctx.Tr("user.static.commitcount"), - "E1": ctx.Tr("user.static.issuecount"), - "F1": ctx.Tr("user.static.commentcount"), - "G1": ctx.Tr("user.static.focusrepocount"), - "H1": ctx.Tr("user.static.starrepocount"), - "I1": ctx.Tr("user.static.logincount"), - "J1": ctx.Tr("user.static.watchedcount"), - "K1": ctx.Tr("user.static.commitcodesize"), - "L1": ctx.Tr("user.static.solveissuecount"), - "M1": ctx.Tr("user.static.encyclopediascount"), - "N1": ctx.Tr("user.static.createrepocount"), - "O1": ctx.Tr("user.static.openiindex"), - "P1": ctx.Tr("user.static.registdate"), + "D1": ctx.Tr("user.static.UserIndex"), + "E1": ctx.Tr("user.static.commitcount"), + "F1": ctx.Tr("user.static.issuecount"), + "G1": ctx.Tr("user.static.commentcount"), + "H1": ctx.Tr("user.static.focusrepocount"), + "I1": ctx.Tr("user.static.starrepocount"), + "J1": ctx.Tr("user.static.logincount"), + "K1": ctx.Tr("user.static.watchedcount"), + "L1": ctx.Tr("user.static.commitcodesize"), + "M1": ctx.Tr("user.static.solveissuecount"), + "N1": ctx.Tr("user.static.encyclopediascount"), + "O1": ctx.Tr("user.static.createrepocount"), + "P1": ctx.Tr("user.static.openiindex"), "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), "R1": ctx.Tr("user.static.CloudBrainRunTime"), "S1": ctx.Tr("user.static.CommitDatasetNum"), "T1": ctx.Tr("user.static.CommitModelCount"), - "U1": ctx.Tr("user.static.UserIndex"), + "U1": ctx.Tr("user.static.registdate"), "V1": ctx.Tr("user.static.countdate"), } for k, v := range dataHeader { @@ -78,28 +78,25 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) - xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CommitCount) - xlsx.SetCellValue(sheetName, "E"+rows, userRecord.IssueCount) - xlsx.SetCellValue(sheetName, "F"+rows, userRecord.CommentCount) - xlsx.SetCellValue(sheetName, "G"+rows, userRecord.FocusRepoCount) - xlsx.SetCellValue(sheetName, "H"+rows, userRecord.StarRepoCount) - xlsx.SetCellValue(sheetName, "I"+rows, userRecord.LoginCount) - xlsx.SetCellValue(sheetName, "J"+rows, userRecord.WatchedCount) - xlsx.SetCellValue(sheetName, "K"+rows, userRecord.CommitCodeSize) - xlsx.SetCellValue(sheetName, "L"+rows, 
userRecord.SolveIssueCount) - xlsx.SetCellValue(sheetName, "M"+rows, userRecord.EncyclopediasCount) - xlsx.SetCellValue(sheetName, "N"+rows, userRecord.CreateRepoCount) - xlsx.SetCellValue(sheetName, "O"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - - formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - xlsx.SetCellValue(sheetName, "P"+rows, formatTime[0:len(formatTime)-3]) - + xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) + xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) + xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) + xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) + xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) + xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) + xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) + xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) + xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) + xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) + xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) + xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) - xlsx.SetCellValue(sheetName, "U"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) - + formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") + xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) formatTime = userRecord.DataDate xlsx.SetCellValue(sheetName, "V"+rows, formatTime) } @@ -243,24 +240,24 @@ func QueryUserStaticDataPage(ctx *context.Context) { "A1": ctx.Tr("user.static.id"), "B1": ctx.Tr("user.static.name"), "C1": ctx.Tr("user.static.codemergecount"), - "D1": ctx.Tr("user.static.commitcount"), - "E1": ctx.Tr("user.static.issuecount"), - "F1": ctx.Tr("user.static.commentcount"), - "G1": ctx.Tr("user.static.focusrepocount"), - "H1": ctx.Tr("user.static.starrepocount"), - "I1": ctx.Tr("user.static.logincount"), - "J1": ctx.Tr("user.static.watchedcount"), - "K1": ctx.Tr("user.static.commitcodesize"), - "L1": ctx.Tr("user.static.solveissuecount"), - "M1": ctx.Tr("user.static.encyclopediascount"), - "N1": ctx.Tr("user.static.createrepocount"), - "O1": ctx.Tr("user.static.openiindex"), - "P1": ctx.Tr("user.static.registdate"), + "D1": ctx.Tr("user.static.UserIndex"), + "E1": ctx.Tr("user.static.commitcount"), + "F1": ctx.Tr("user.static.issuecount"), + "G1": ctx.Tr("user.static.commentcount"), + "H1": ctx.Tr("user.static.focusrepocount"), + "I1": ctx.Tr("user.static.starrepocount"), + "J1": ctx.Tr("user.static.logincount"), + "K1": ctx.Tr("user.static.watchedcount"), + "L1": ctx.Tr("user.static.commitcodesize"), + "M1": ctx.Tr("user.static.solveissuecount"), + "N1": ctx.Tr("user.static.encyclopediascount"), + "O1": ctx.Tr("user.static.createrepocount"), + "P1": ctx.Tr("user.static.openiindex"), "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), "R1": ctx.Tr("user.static.CloudBrainRunTime"), "S1": ctx.Tr("user.static.CommitDatasetNum"), "T1": ctx.Tr("user.static.CommitModelCount"), - "U1": ctx.Tr("user.static.UserIndex"), + "U1": ctx.Tr("user.static.registdate"), "V1": 
ctx.Tr("user.static.countdate"), } for k, v := range dataHeader { @@ -274,26 +271,25 @@ func QueryUserStaticDataPage(ctx *context.Context) { xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) - xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CommitCount) - xlsx.SetCellValue(sheetName, "E"+rows, userRecord.IssueCount) - xlsx.SetCellValue(sheetName, "F"+rows, userRecord.CommentCount) - xlsx.SetCellValue(sheetName, "G"+rows, userRecord.FocusRepoCount) - xlsx.SetCellValue(sheetName, "H"+rows, userRecord.StarRepoCount) - xlsx.SetCellValue(sheetName, "I"+rows, userRecord.LoginCount) - xlsx.SetCellValue(sheetName, "J"+rows, userRecord.WatchedCount) - xlsx.SetCellValue(sheetName, "K"+rows, userRecord.CommitCodeSize) - xlsx.SetCellValue(sheetName, "L"+rows, userRecord.SolveIssueCount) - xlsx.SetCellValue(sheetName, "M"+rows, userRecord.EncyclopediasCount) - xlsx.SetCellValue(sheetName, "N"+rows, userRecord.CreateRepoCount) - xlsx.SetCellValue(sheetName, "O"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - - formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - xlsx.SetCellValue(sheetName, "P"+rows, formatTime[0:len(formatTime)-3]) + xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) + xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) + xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) + xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) + xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) + xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) + xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) + xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) + xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) + xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) + xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) + xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) - xlsx.SetCellValue(sheetName, "U"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") + xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) formatTime = userRecord.DataDate xlsx.SetCellValue(sheetName, "V"+rows, formatTime) } diff --git a/routers/repo/util.go b/routers/repo/util.go new file mode 100644 index 000000000..f148fc52e --- /dev/null +++ b/routers/repo/util.go @@ -0,0 +1,5 @@ +package repo + +import "regexp" + +var NamePattern = regexp.MustCompile(`^[A-Za-z0-9-_\\.]{1,100}$`) diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 4cffcd10b..9df429e8b 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -12,6 +12,8 @@ import ( "text/template" "time" + "code.gitea.io/gitea/routers/image" + "code.gitea.io/gitea/routers/authentication" "code.gitea.io/gitea/modules/cloudbrain" @@ -333,6 +335,8 @@ func RegisterRoutes(m *macaron.Macaron) { }) m.Get("/images/public", repo.GetPublicImages) m.Get("/images/custom", 
repo.GetCustomImages) + m.Get("/images/star", repo.GetStarImages) + m.Get("/repos", routers.ExploreRepos) m.Get("/datasets", routers.ExploreDatasets) m.Get("/users", routers.ExploreUsers) @@ -345,6 +349,7 @@ func RegisterRoutes(m *macaron.Macaron) { Post(bindIgnErr(auth.InstallForm{}), routers.InstallPost) m.Get("/^:type(issues|pulls)$", reqSignIn, user.Issues) m.Get("/milestones", reqSignIn, reqMilestonesDashboardPageEnabled, user.Milestones) + m.Get("/cloudbrains", reqSignIn, user.Cloudbrains) // ***** START: User ***** m.Group("/user", func() { @@ -526,6 +531,11 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("", admin.CloudBrains) m.Get("/download", admin.DownloadCloudBrains) }) + m.Group("/images", func() { + m.Get("", admin.Images) + m.Get("/data", repo.GetAllImages) + }) + m.Put("/image/:id/action/:action", image.Action) m.Group("/^:configType(hooks|system-hooks)$", func() { m.Get("", admin.DefaultOrSystemWebhooks) @@ -974,6 +984,12 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/topics", repo.TopicsPost) }, context.RepoAssignment(), context.RepoMustNotBeArchived(), reqRepoAdmin) + m.Group("/image/:id", func() { + m.Get("/:from", cloudbrain.AdminOrImageCreaterRight, repo.CloudBrainImageEdit) + m.Post("", cloudbrain.AdminOrImageCreaterRight, bindIgnErr(auth.EditImageCloudBrainForm{}), repo.CloudBrainImageEditPost) + m.Delete("", cloudbrain.AdminOrImageCreaterRight, repo.CloudBrainImageDelete) + m.Put("/action/:action", reqSignIn, image.Action) + }) m.Group("/:username/:reponame", func() { m.Group("", func() { m.Get("/^:type(issues|pulls)$", repo.Issues) @@ -1015,6 +1031,8 @@ func RegisterRoutes(m *macaron.Macaron) { m.Group("/:id", func() { m.Get("", reqRepoCloudBrainReader, repo.CloudBrainShow) m.Get("/debug", cloudbrain.AdminOrJobCreaterRight, repo.CloudBrainDebug) + m.Get("/commit_image", cloudbrain.AdminOrJobCreaterRight, repo.CloudBrainCommitImageShow) + m.Post("/commit_image/check", cloudbrain.AdminOrJobCreaterRight, bindIgnErr(auth.CommitImageCloudBrainForm{}), repo.CloudBrainCommitImageCheck) m.Post("/commit_image", cloudbrain.AdminOrJobCreaterRight, bindIgnErr(auth.CommitImageCloudBrainForm{}), repo.CloudBrainCommitImage) m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.CloudBrainStop) m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.CloudBrainDel) diff --git a/routers/search.go b/routers/search.go index c5655b9e1..1cf78666e 100644 --- a/routers/search.go +++ b/routers/search.go @@ -68,23 +68,23 @@ func SearchApi(ctx *context.Context) { if OnlySearchLabel { searchRepoByLabel(ctx, Key, Page, PageSize) } else { - searchRepo(ctx, "repository-es-index", Key, Page, PageSize, OnlyReturnNum) + searchRepo(ctx, "repository-es-index"+setting.INDEXPOSTFIX, Key, Page, PageSize, OnlyReturnNum) } return } else if TableName == "issue" { - searchIssueOrPr(ctx, "issue-es-index", Key, Page, PageSize, OnlyReturnNum, "f") + searchIssueOrPr(ctx, "issue-es-index"+setting.INDEXPOSTFIX, Key, Page, PageSize, OnlyReturnNum, "f") return } else if TableName == "user" { - searchUserOrOrg(ctx, "user-es-index", Key, Page, PageSize, true, OnlyReturnNum) + searchUserOrOrg(ctx, "user-es-index"+setting.INDEXPOSTFIX, Key, Page, PageSize, true, OnlyReturnNum) return } else if TableName == "org" { - searchUserOrOrg(ctx, "user-es-index", Key, Page, PageSize, false, OnlyReturnNum) + searchUserOrOrg(ctx, "user-es-index"+setting.INDEXPOSTFIX, Key, Page, PageSize, false, OnlyReturnNum) return } else if TableName == "dataset" { - searchDataSet(ctx, "dataset-es-index", Key, Page, 
PageSize, OnlyReturnNum) + searchDataSet(ctx, "dataset-es-index"+setting.INDEXPOSTFIX, Key, Page, PageSize, OnlyReturnNum) return } else if TableName == "pr" { - searchIssueOrPr(ctx, "issue-es-index", Key, Page, PageSize, OnlyReturnNum, "t") + searchIssueOrPr(ctx, "issue-es-index"+setting.INDEXPOSTFIX, Key, Page, PageSize, OnlyReturnNum, "t") //searchPR(ctx, "issue-es-index", Key, Page, PageSize, OnlyReturnNum) return } diff --git a/routers/user/home.go b/routers/user/home.go index 2fc0c60aa..9c7bed2df 100755 --- a/routers/user/home.go +++ b/routers/user/home.go @@ -20,6 +20,7 @@ import ( issue_indexer "code.gitea.io/gitea/modules/indexer/issues" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup/markdown" + "code.gitea.io/gitea/modules/modelarts" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" issue_service "code.gitea.io/gitea/services/issue" @@ -31,10 +32,11 @@ import ( ) const ( - tplDashboard base.TplName = "user/dashboard/dashboard" - tplIssues base.TplName = "user/dashboard/issues" - tplMilestones base.TplName = "user/dashboard/milestones" - tplProfile base.TplName = "user/profile" + tplDashboard base.TplName = "user/dashboard/dashboard" + tplIssues base.TplName = "user/dashboard/issues" + tplMilestones base.TplName = "user/dashboard/milestones" + tplProfile base.TplName = "user/profile" + tplCloudbrains base.TplName = "user/dashboard/cloudbrains" ) // getDashboardContextUser finds out dashboard is viewing as which context user. @@ -751,3 +753,111 @@ func Email2User(ctx *context.Context) { } ctx.Redirect(setting.AppSubURL + "/user/" + u.Name) } + +func Cloudbrains(ctx *context.Context) { + ctx.Data["Title"] = ctx.Tr("user.cloudbrains") + + listType := ctx.Query("listType") + jobType := ctx.Query("jobType") + jobStatus := ctx.Query("jobStatus") + + ctx.Data["ListType"] = listType + ctx.Data["JobType"] = jobType + ctx.Data["JobStatus"] = jobStatus + + page := ctx.QueryInt("page") + if page <= 0 { + page = 1 + } + debugType := models.TypeCloudBrainAll + if listType == models.GPUResource { + debugType = models.TypeCloudBrainOne + } else if listType == models.NPUResource { + debugType = models.TypeCloudBrainTwo + } + + var jobTypes []string + jobTypeNot := false + if jobType == string(models.JobTypeDebug) { + jobTypes = append(jobTypes, string(models.JobTypeSnn4imagenet), string(models.JobTypeBrainScore), string(models.JobTypeDebug)) + } else if jobType != "all" && jobType != "" { + jobTypes = append(jobTypes, jobType) + } + + var jobStatuses []string + jobStatusNot := false + if jobStatus == "other" { + jobStatusNot = true + jobStatuses = append(jobStatuses, string(models.ModelArtsTrainJobWaiting), string(models.ModelArtsTrainJobFailed), string(models.ModelArtsRunning), string(models.ModelArtsTrainJobCompleted), + string(models.ModelArtsStarting), string(models.ModelArtsRestarting), string(models.ModelArtsStartFailed), + string(models.ModelArtsStopping), string(models.ModelArtsStopped), string(models.JobSucceeded)) + } else if jobStatus != "all" && jobStatus != "" { + jobStatuses = append(jobStatuses, jobStatus) + } + + keyword := strings.Trim(ctx.Query("q"), " ") + + ctxUser := getDashboardContextUser(ctx) + if ctx.Written() { + return + } + repos, _, err := models.SearchRepository(&models.SearchRepoOptions{ + Actor: ctx.User, + OwnerID: ctxUser.ID, + }) + if err != nil { + ctx.ServerError("SearchRepository", err) + return + } + var repoIDList []int64 + for i, _ := range repos { + repoIDList = append(repoIDList, repos[i].ID) + } + ciTasks, 
count, err := models.Cloudbrains(&models.CloudbrainsOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: setting.UI.IssuePagingNum, + }, + Keyword: keyword, + UserID: ctxUser.ID, + Type: debugType, + JobTypeNot: jobTypeNot, + JobStatusNot: jobStatusNot, + JobStatus: jobStatuses, + JobTypes: jobTypes, + NeedRepoInfo: true, + IsLatestVersion: modelarts.IsLatestVersion, + RepoIDList: repoIDList, + }) + if err != nil { + ctx.ServerError("Get job failed:", err) + return + } + + for i, task := range ciTasks { + ciTasks[i].CanDebug = true + ciTasks[i].CanDel = true + ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource + } + + pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, getTotalPage(count, setting.UI.IssuePagingNum)) + pager.SetDefaultParams(ctx) + pager.AddParam(ctx, "listType", "ListType") + ctx.Data["Page"] = pager + ctx.Data["PageIsUserCloudBrain"] = true + ctx.Data["Tasks"] = ciTasks + ctx.Data["CanCreate"] = true + ctx.Data["Keyword"] = keyword + + ctx.HTML(200, tplCloudbrains) + +} +func getTotalPage(total int64, pageSize int) int { + + another := 0 + if int(total)%pageSize != 0 { + another = 1 + } + return int(total)/pageSize + another + +} diff --git a/semantic.json b/semantic.json index fee52af53..bc9750cef 100644 --- a/semantic.json +++ b/semantic.json @@ -56,6 +56,7 @@ "tab", "table", "text", - "transition" + "transition", + "toast" ] } diff --git a/templates/admin/cloudbrain/images.html b/templates/admin/cloudbrain/images.html new file mode 100644 index 000000000..7d800eb64 --- /dev/null +++ b/templates/admin/cloudbrain/images.html @@ -0,0 +1,42 @@ +{{template "base/head" .}} + +
{{.i18n.Tr "repo.images.task_delete_confirm"}}
+{{.i18n.Tr "repo.images.task_delete_confirm"}}
+
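The user-level cloudbrain list added in routers/user/home.go sizes its pager with getTotalPage, a plain ceiling division of the record count by the page size. A standalone sketch of that arithmetic (illustration only, not part of the patch):

package main

import "fmt"

// totalPages mirrors getTotalPage: integer division rounded up.
func totalPages(total int64, pageSize int) int {
	pages := int(total) / pageSize
	if int(total)%pageSize != 0 {
		pages++
	}
	return pages
}

func main() {
	for _, c := range []struct {
		total    int64
		pageSize int
	}{{0, 10}, {10, 10}, {11, 10}, {25, 10}} {
		fmt.Printf("total=%d pageSize=%d -> %d pages\n", c.total, c.pageSize, totalPages(c.total, c.pageSize))
	}
}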