diff --git a/README.md b/README.md index 061ece70c..99f6a6e8c 100644 --- a/README.md +++ b/README.md @@ -41,6 +41,7 @@ ## 授权许可 本项目采用 MIT 开源授权许可证,完整的授权说明已放置在 [LICENSE](https://git.openi.org.cn/OpenI/aiforge/src/branch/develop/LICENSE) 文件中。 + ## 需要帮助? 如果您在使用或者开发过程中遇到问题,可以在以下渠道咨询: - 点击[这里](https://git.openi.org.cn/OpenI/aiforge/issues)在线提交问题(点击页面右上角绿色按钮**创建任务**) @@ -49,3 +50,8 @@ ## 启智社区小白训练营: - 结合案例给大家详细讲解如何使用社区平台,帮助无技术背景的小白成长为启智社区达人 (https://git.openi.org.cn/zeizei/OpenI_Learning) + +## 平台引用 +如果本平台对您的科研工作提供了帮助,可在论文致谢中加入: +英文版:```Thanks for the support provided by OpenI Community (https://git.openi.org.cn).``` +中文版:```感谢启智社区提供的技术支持(https://git.openi.org.cn)。``` \ No newline at end of file diff --git a/models/attachment.go b/models/attachment.go index c322d391b..a3fc6fa01 100755 --- a/models/attachment.go +++ b/models/attachment.go @@ -9,6 +9,7 @@ import ( "fmt" "io" "path" + "strings" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/obs" @@ -18,6 +19,7 @@ import ( "code.gitea.io/gitea/modules/timeutil" gouuid "github.com/satori/go.uuid" + "xorm.io/builder" "xorm.io/xorm" ) @@ -38,6 +40,7 @@ type Attachment struct { UploaderID int64 `xorm:"INDEX DEFAULT 0"` // Notice: will be zero before this column added CommentID int64 Name string + Description string `xorm:"TEXT"` DownloadCount int64 `xorm:"DEFAULT 0"` Size int64 `xorm:"DEFAULT 0"` IsPrivate bool `xorm:"DEFAULT false"` @@ -47,6 +50,7 @@ type Attachment struct { FileChunk *FileChunk `xorm:"-"` CanDel bool `xorm:"-"` + Uploader *User `xorm:"-"` } type AttachmentUsername struct { @@ -54,6 +58,27 @@ type AttachmentUsername struct { Name string } +type AttachmentInfo struct { + Attachment `xorm:"extends"` + Repo *Repository `xorm:"extends"` + RelAvatarLink string `xorm:"extends"` + UserName string `xorm:"extends"` +} + +type AttachmentsOptions struct { + ListOptions + DatasetIDs []int64 + DecompressState int + Type int + UploaderID int64 + NeedDatasetIDs bool + NeedIsPrivate bool + IsPrivate bool + JustNeedZipFile bool + NeedRepoInfo bool + Keyword string +} + func (a *Attachment) AfterUpdate() { if a.DatasetID > 0 { datasetIsPublicCount, err := x.Where("dataset_id = ? 
AND is_private = ?", a.DatasetID, false).Count(new(Attachment)) @@ -326,6 +351,18 @@ func DeleteAttachmentsByComment(commentID int64, remove bool) (int, error) { func UpdateAttachment(atta *Attachment) error { return updateAttachment(x, atta) } +func UpdateAttachmentDescription(atta *Attachment) error { + return updateAttachmentDescription(x, atta) +} + +func updateAttachmentDescription(e Engine, atta *Attachment) error { + var sess *xorm.Session + + sess = e.ID(atta.ID) + + _, err := sess.Cols("description").Update(atta) + return err +} func updateAttachment(e Engine, atta *Attachment) error { var sess *xorm.Session @@ -503,3 +540,98 @@ func GetAttachmentSizeByDatasetID(datasetID int64) (int64, error) { func GetAllAttachmentSize() (int64, error) { return x.SumInt(&Attachment{}, "size") } + +func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) { + sess := x.NewSession() + defer sess.Close() + + var cond = builder.NewCond() + if opts.NeedDatasetIDs { + cond = cond.And( + builder.In("attachment.dataset_id", opts.DatasetIDs), + ) + } + + if opts.UploaderID > 0 { + cond = cond.And( + builder.Eq{"attachment.uploader_id": opts.UploaderID}, + ) + } + + if (opts.Type) >= 0 { + cond = cond.And( + builder.Eq{"attachment.type": opts.Type}, + ) + } + + if opts.NeedIsPrivate { + cond = cond.And( + builder.Eq{"attachment.is_private": opts.IsPrivate}, + ) + } + + if opts.JustNeedZipFile { + var DecompressState []int32 + DecompressState = append(DecompressState, DecompressStateDone, DecompressStateIng, DecompressStateFailed) + cond = cond.And( + builder.In("attachment.decompress_state", DecompressState), + ) + } + + var count int64 + var err error + if len(opts.Keyword) == 0 { + count, err = sess.Where(cond).Count(new(Attachment)) + } else { + lowerKeyWord := strings.ToLower(opts.Keyword) + + cond = cond.And(builder.Or(builder.Like{"LOWER(attachment.name)", lowerKeyWord}, builder.Like{"LOWER(attachment.description)", lowerKeyWord})) + count, err = sess.Table(&Attachment{}).Where(cond).Count(new(AttachmentInfo)) + + } + + if err != nil { + return nil, 0, fmt.Errorf("Count: %v", err) + } + + if opts.Page >= 0 && opts.PageSize > 0 { + var start int + if opts.Page == 0 { + start = 0 + } else { + start = (opts.Page - 1) * opts.PageSize + } + sess.Limit(opts.PageSize, start) + } + + sess.OrderBy("attachment.created_unix DESC") + attachments := make([]*AttachmentInfo, 0, setting.UI.DatasetPagingNum) + if err := sess.Table(&Attachment{}).Where(cond). 
+ Find(&attachments); err != nil { + return nil, 0, fmt.Errorf("Find: %v", err) + } + + if opts.NeedRepoInfo { + for _, attachment := range attachments { + dataset, err := GetDatasetByID(attachment.DatasetID) + if err != nil { + return nil, 0, fmt.Errorf("GetDatasetByID failed error: %v", err) + } + repo, err := GetRepositoryByID(dataset.RepoID) + if err == nil { + attachment.Repo = repo + } else { + return nil, 0, fmt.Errorf("GetRepositoryByID failed error: %v", err) + } + user, err := GetUserByID(attachment.UploaderID) + if err == nil { + attachment.RelAvatarLink = user.RelAvatarLink() + attachment.UserName = user.Name + } else { + return nil, 0, fmt.Errorf("GetUserByID failed error: %v", err) + } + } + } + + return attachments, count, nil +} diff --git a/models/base_message.go b/models/base_message.go new file mode 100644 index 000000000..37f7668ad --- /dev/null +++ b/models/base_message.go @@ -0,0 +1,16 @@ +package models + +type BaseMessage struct { + Code int + Message string +} + +var BaseOKMessage = BaseMessage{ + 0, "", +} + +func BaseErrorMessage(message string) BaseMessage { + return BaseMessage{ + 1, message, + } +} diff --git a/models/cloudbrain.go b/models/cloudbrain.go index 06c2e98b4..ea6d0338e 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -1,6 +1,7 @@ package models import ( + "code.gitea.io/gitea/modules/util" "encoding/json" "fmt" "strconv" @@ -102,15 +103,15 @@ type Cloudbrain struct { ContainerIp string CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` - Duration int64 - TrainJobDuration string - Image string //镜像名称 - GpuQueue string //GPU类型即GPU队列 - ResourceSpecId int //GPU规格id - DeletedAt time.Time `xorm:"deleted"` - CanDebug bool `xorm:"-"` - CanDel bool `xorm:"-"` - CanModify bool `xorm:"-"` + Duration int64 `xorm:"DEFAULT 0"` //运行时长 单位秒 + TrainJobDuration string `xorm:"DEFAULT '00:00:00'"` + Image string //镜像名称 + GpuQueue string //GPU类型即GPU队列 + ResourceSpecId int //GPU规格id + DeletedAt time.Time `xorm:"deleted"` + CanDebug bool `xorm:"-"` + CanDel bool `xorm:"-"` + CanModify bool `xorm:"-"` Type int BenchmarkTypeID int BenchmarkChildTypeID int @@ -150,6 +151,44 @@ type Cloudbrain struct { Repo *Repository `xorm:"-"` BenchmarkTypeName string `xorm:"-"` BenchmarkTypeRankLink string `xorm:"-"` + StartTime timeutil.TimeStamp + EndTime timeutil.TimeStamp +} + +func (task *Cloudbrain) ComputeAndSetDuration() { + var d int64 + if task.StartTime == 0 { + d = 0 + } else if task.EndTime == 0 { + d = time.Now().Unix() - task.StartTime.AsTime().Unix() + } else { + d = task.EndTime.AsTime().Unix() - task.StartTime.AsTime().Unix() + } + + if d < 0 { + d = 0 + } + task.Duration = d + task.TrainJobDuration = ConvertDurationToStr(d) +} + +func ConvertDurationToStr(duration int64) string { + if duration == 0 { + return "00:00:00" + } + return util.AddZero(duration/3600) + ":" + util.AddZero(duration%3600/60) + ":" + util.AddZero(duration%60) +} + +func IsTrainJobTerminal(status string) bool { + return status == string(ModelArtsTrainJobCompleted) || status == string(ModelArtsTrainJobFailed) || status == string(ModelArtsTrainJobKilled) +} + +func IsModelArtsDebugJobTerminal(status string) bool { + return status == string(ModelArtsStopped) +} + +func IsCloudBrainOneDebugJobTerminal(status string) bool { + return status == string(JobStopped) || status == string(JobFailed) || status == string(JobSucceeded) } type CloudbrainInfo struct { @@ -1019,6 +1058,7 @@ type GetTrainJobResult struct { NasShareAddr string 
`json:"nas_share_addr"` DatasetName string ModelMetricList string `json:"model_metric_list"` //列表里包含f1_score,recall,precision,accuracy,若有的话 + StartTime int64 `json:"start_time"` //训练作业开始时间。 } type GetTrainJobLogResult struct { @@ -1117,7 +1157,7 @@ func Cloudbrains(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { } else { lowerKeyWord := strings.ToLower(opts.Keyword) - cond = cond.And(builder.Or(builder.Like{"LOWER(cloudbrain.job_name)", lowerKeyWord}, builder.Like{"`user`.lower_name", lowerKeyWord})) + cond = cond.And(builder.Or(builder.Like{"LOWER(cloudbrain.job_name)", lowerKeyWord}, builder.Like{"LOWER(cloudbrain.display_job_name)", lowerKeyWord}, builder.Like{"`user`.lower_name", lowerKeyWord})) count, err = sess.Table(&Cloudbrain{}).Where(cond). Join("left", "`user`", condition).Count(new(CloudbrainInfo)) @@ -1327,13 +1367,13 @@ func GetCloudbrainByJobIDAndIsLatestVersion(jobID string, isLatestVersion string func GetCloudbrainsNeededStopByUserID(userID int64) ([]*Cloudbrain, error) { cloudBrains := make([]*Cloudbrain, 0) - err := x.Cols("job_id", "status", "type", "job_type", "version_id").Where("user_id=? AND status !=?", userID, string(JobStopped)).Find(&cloudBrains) + err := x.Cols("job_id", "status", "type", "job_type", "version_id", "start_time").Where("user_id=? AND status !=?", userID, string(JobStopped)).Find(&cloudBrains) return cloudBrains, err } func GetCloudbrainsNeededStopByRepoID(repoID int64) ([]*Cloudbrain, error) { cloudBrains := make([]*Cloudbrain, 0) - err := x.Cols("job_id", "status", "type", "job_type", "version_id").Where("repo_id=? AND status !=?", repoID, string(JobStopped)).Find(&cloudBrains) + err := x.Cols("job_id", "status", "type", "job_type", "version_id", "start_time").Where("repo_id=? AND status !=?", repoID, string(JobStopped)).Find(&cloudBrains) return cloudBrains, err } @@ -1377,7 +1417,7 @@ func UpdateTrainJobVersion(job *Cloudbrain) error { func updateJobTrainVersion(e Engine, job *Cloudbrain) error { var sess *xorm.Session sess = e.Where("job_id = ? 
AND version_name=?", job.JobID, job.VersionName) - _, err := sess.Cols("status", "train_job_duration").Update(job) + _, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time").Update(job) return err } @@ -1457,7 +1497,7 @@ func UpdateInferenceJob(job *Cloudbrain) error { func updateInferenceJob(e Engine, job *Cloudbrain) error { var sess *xorm.Session sess = e.Where("job_id = ?", job.JobID) - _, err := sess.Cols("status", "train_job_duration").Update(job) + _, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time").Update(job) return err } func RestartCloudbrain(old *Cloudbrain, new *Cloudbrain) (err error) { diff --git a/models/dataset.go b/models/dataset.go index 2b3de752b..af47c53fe 100755 --- a/models/dataset.go +++ b/models/dataset.go @@ -22,6 +22,7 @@ type Dataset struct { Category string Description string `xorm:"TEXT"` DownloadTimes int64 + NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"` License string Task string ReleaseID int64 `xorm:"INDEX"` @@ -35,6 +36,11 @@ type Dataset struct { Attachments []*Attachment `xorm:"-"` } +type DatasetWithStar struct { + Dataset + IsStaring bool +} + func (d *Dataset) IsPrivate() bool { switch d.Status { case DatasetStatusPrivate: @@ -91,33 +97,37 @@ type SearchDatasetOptions struct { OwnerID int64 RepoID int64 IncludePublic bool + Category string + Task string + License string ListOptions SearchOrderBy IsOwner bool } func CreateDataset(dataset *Dataset) (err error) { - if _, err = x.Insert(dataset); err != nil { + + sess := x.NewSession() + defer sess.Close() + + if err := sess.Begin(); err != nil { return err } - return nil -} - -func CreateDefaultDatasetToRepo(repo *Repository) (err error) { - dataset := &Dataset{RepoID: repo.ID} - has, err := x.Get(dataset) + datasetByRepoId := &Dataset{RepoID: dataset.RepoID} + has, err := sess.Get(datasetByRepoId) if err != nil { return err } - if !has { - dataset.Status = DatasetStatusPrivate - dataset.Title = repo.Name - if err = CreateDataset(dataset); err != nil { - return err - } + if has { + return fmt.Errorf("The dataset already exists.") } - return nil + + if _, err = sess.Insert(dataset); err != nil { + return err + } + return sess.Commit() + } func SearchDataset(opts *SearchDatasetOptions) (DatasetList, int64, error) { @@ -130,7 +140,18 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { cond = cond.And(builder.Neq{"dataset.status": DatasetStatusDeleted}) if len(opts.Keyword) > 0 { - cond = cond.And(builder.Like{"dataset.title", opts.Keyword}) + cond = cond.And(builder.Or(builder.Like{"dataset.title", opts.Keyword}, builder.Like{"dataset.description", opts.Keyword})) + } + + if len(opts.Category) > 0 { + cond = cond.And(builder.Eq{"dataset.category": opts.Category}) + } + + if len(opts.Task) > 0 { + cond = cond.And(builder.Eq{"dataset.task": opts.Task}) + } + if len(opts.License) > 0 { + cond = cond.And(builder.Eq{"dataset.license": opts.License}) } if opts.RepoID > 0 { @@ -139,12 +160,13 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { if opts.IncludePublic { cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) + cond = cond.And(builder.Eq{"attachment.is_private": false}) if opts.OwnerID > 0 { if len(opts.Keyword) == 0 { cond = cond.Or(builder.Eq{"repository.owner_id": opts.OwnerID}) } else { subCon := builder.NewCond() - subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID}, builder.Like{"dataset.title", opts.Keyword}) + subCon = 
subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID}, builder.Or(builder.Like{"dataset.title", opts.Keyword}, builder.Like{"dataset.description", opts.Keyword})) cond = cond.Or(subCon) } @@ -153,6 +175,7 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { cond = cond.And(builder.Eq{"repository.owner_id": opts.OwnerID}) if !opts.IsOwner { cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) + cond = cond.And(builder.Eq{"attachment.is_private": false}) } } @@ -169,14 +192,20 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da defer sess.Close() datasets := make(DatasetList, 0, opts.PageSize) + selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars" - count, err := sess.Join("INNER", "repository", "repository.id = dataset.repo_id").Where(cond).Count(new(Dataset)) + count, err := sess.Distinct("dataset.id").Join("INNER", "repository", "repository.id = dataset.repo_id"). + Join("INNER", "attachment", "attachment.dataset_id=dataset.id"). + Where(cond).Count(new(Dataset)) if err != nil { return nil, 0, fmt.Errorf("Count: %v", err) } - sess.Select("dataset.*").Join("INNER", "repository", "repository.id = dataset.repo_id").Where(cond).OrderBy(opts.SearchOrderBy.String()) + sess.Select(selectColumnsSql).Join("INNER", "repository", "repository.id = dataset.repo_id"). + Join("INNER", "attachment", "attachment.dataset_id=dataset.id"). + Where(cond).OrderBy(opts.SearchOrderBy.String()) + if opts.PageSize > 0 { sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) } @@ -231,13 +260,23 @@ func getDatasetAttachments(e Engine, typeCloudBrain int, isSigned bool, user *Us sort.Sort(sortedRels) // Select attachments - err = e. - Asc("dataset_id"). - In("dataset_id", sortedRels.ID). - And("type = ?", typeCloudBrain). - Find(&attachments, Attachment{}) - if err != nil { - return err + if typeCloudBrain == -1 { + err = e. + Asc("dataset_id"). + In("dataset_id", sortedRels.ID). + Find(&attachments, Attachment{}) + if err != nil { + return err + } + } else { + err = e. + Asc("dataset_id"). + In("dataset_id", sortedRels.ID). + And("type = ?", typeCloudBrain). 
+ Find(&attachments, Attachment{}) + if err != nil { + return err + } } // merge join @@ -301,9 +340,6 @@ func GetDatasetByID(id int64) (*Dataset, error) { } func GetDatasetByRepo(repo *Repository) (*Dataset, error) { - if err := CreateDefaultDatasetToRepo(repo); err != nil { - return nil, err - } dataset := &Dataset{RepoID: repo.ID} has, err := x.Get(dataset) if err != nil { @@ -316,6 +352,12 @@ } } +func GetDatasetStarByUser(user *User) ([]*DatasetStar, error) { + datasetStars := make([]*DatasetStar, 0) + err := x.Cols("id", "uid", "dataset_id", "created_unix").Where("uid=?", user.ID).Find(&datasetStars) + return datasetStars, err +} + func DeleteDataset(datasetID int64, uid int64) error { var err error sess := x.NewSession() diff --git a/models/dataset_star.go b/models/dataset_star.go new file mode 100644 index 000000000..4b22c2855 --- /dev/null +++ b/models/dataset_star.go @@ -0,0 +1,70 @@ +package models + +import "code.gitea.io/gitea/modules/timeutil" + +type DatasetStar struct { + ID int64 `xorm:"pk autoincr"` + UID int64 `xorm:"UNIQUE(s)"` + DatasetID int64 `xorm:"UNIQUE(s)"` + CreatedUnix timeutil.TimeStamp `xorm:"created"` +} + +// StarDataset stars or unstars a dataset. +func StarDataset(userID, datasetID int64, star bool) error { + sess := x.NewSession() + defer sess.Close() + + if err := sess.Begin(); err != nil { + return err + } + + if star { + if isDatasetStaring(sess, userID, datasetID) { + return nil + } + + if _, err := sess.Insert(&DatasetStar{UID: userID, DatasetID: datasetID}); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `dataset` SET num_stars = num_stars + 1 WHERE id = ?", datasetID); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `user` SET num_dataset_stars = num_dataset_stars + 1 WHERE id = ?", userID); err != nil { + return err + } + } else { + if !isDatasetStaring(sess, userID, datasetID) { + return nil + } + + if _, err := sess.Delete(&DatasetStar{0, userID, datasetID, 0}); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `dataset` SET num_stars = num_stars - 1 WHERE id = ?", datasetID); err != nil { + return err + } + if _, err := sess.Exec("UPDATE `user` SET num_dataset_stars = num_dataset_stars - 1 WHERE id = ?", userID); err != nil { + return err + } + } + + return sess.Commit() +} + +func IsDatasetStaringByRepoId(userID, repoID int64) bool { + dataset, _ := GetDatasetByRepo(&Repository{ID: repoID}) + if dataset == nil { + return false + } + return isDatasetStaring(x, userID, dataset.ID) +} + +func IsDatasetStaring(userID, datasetID int64) bool { + return isDatasetStaring(x, userID, datasetID) + +} + +func isDatasetStaring(e Engine, userID, datasetID int64) bool { + has, _ := e.Get(&DatasetStar{0, userID, datasetID, 0}) + return has +} diff --git a/models/models.go b/models/models.go index 2d8c0fd05..362d46618 100755 --- a/models/models.go +++ b/models/models.go @@ -129,6 +129,7 @@ func init() { new(LanguageStat), new(EmailHash), new(Dataset), + new(DatasetStar), new(Cloudbrain), new(FileChunk), new(BlockChain), diff --git a/models/repo.go b/models/repo.go index 3c03c6ea3..b5d4921e4 100755 --- a/models/repo.go +++ b/models/repo.go @@ -1281,10 +1281,6 @@ func CreateRepository(ctx DBContext, doer, u *User, repo *Repository, opts ...Cr return fmt.Errorf("copyDefaultWebhooksToRepo: %v", err) } - if err = CreateDefaultDatasetToRepo(repo); err != nil { - return fmt.Errorf("models.CreateDefaultDatasetToRepo: %v", err) - } - return nil } @@ -1602,6 +1598,34 @@ func
updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err e if err != nil { return err } + //If repo has become private, we need to set the dataset and dataset_file to private + _, err = e.Where("repo_id = ? and status <> 2", repo.ID).Cols("status").Update(&Dataset{ + Status: 0, + }) + if err != nil { + return err + } + + dataset, err := GetDatasetByRepo(repo) + if err != nil { + return err + } + _, err = e.Where("dataset_id = ?", dataset.ID).Cols("is_private").Update(&Attachment{ + IsPrivate: true, + }) + if err != nil { + return err + } + + } else { + //If repo has become public, we need to set the dataset to public + _, err = e.Where("repo_id = ? and status <> 2", repo.ID).Cols("status").Update(&Dataset{ + Status: 1, + }) + if err != nil { + return err + } + } // Create/Remove git-daemon-export-ok for git-daemon... @@ -2692,7 +2716,7 @@ func ReadLatestFileInRepo(userName, repoName, refName, treePath string) (*RepoFi log.Error("ReadLatestFileInRepo error when OpenRepository,error=%v", err) return nil, err } - commitID, err := gitRepo.GetBranchCommitID(refName) + _, err = gitRepo.GetBranchCommitID(refName) if err != nil { log.Error("ReadLatestFileInRepo error when GetBranchCommitID,error=%v", err) return nil, err } @@ -2724,5 +2748,9 @@ func ReadLatestFileInRepo(userName, repoName, refName, treePath string) (*RepoFi if n >= 0 { buf = buf[:n] } - return &RepoFile{CommitId: commitID, Content: buf}, nil + commitId := "" + if blob != nil { + commitId = fmt.Sprint(blob.ID) + } + return &RepoFile{CommitId: commitId, Content: buf}, nil } diff --git a/models/user.go b/models/user.go index f7857248b..f72462051 100755 --- a/models/user.go +++ b/models/user.go @@ -153,10 +153,11 @@ type User struct { UseCustomAvatar bool // Counters - NumFollowers int - NumFollowing int `xorm:"NOT NULL DEFAULT 0"` - NumStars int - NumRepos int + NumFollowers int + NumFollowing int `xorm:"NOT NULL DEFAULT 0"` + NumStars int + NumDatasetStars int `xorm:"NOT NULL DEFAULT 0"` + NumRepos int // For organization NumTeams int diff --git a/modules/auth/dataset.go b/modules/auth/dataset.go index 577637273..71b5ac938 100755 --- a/modules/auth/dataset.go +++ b/modules/auth/dataset.go @@ -9,11 +9,10 @@ import ( type CreateDatasetForm struct { Title string `binding:"Required"` Category string `binding:"Required"` - Description string `binding:"Required;MaxSize(254)"` + Description string `binding:"Required"` License string `binding:"Required;MaxSize(64)"` Task string `binding:"Required;MaxSize(64)"` ReleaseID int64 `xorm:"INDEX"` - Private bool Files []string } @@ -25,11 +24,23 @@ type EditDatasetForm struct { ID int64 `binding:"Required"` Title string `binding:"Required"` Category string `binding:"Required"` - Description string `binding:"Required;MaxSize(254)"` + Description string `binding:"Required"` License string `binding:"Required;MaxSize(64)"` Task string `binding:"Required;MaxSize(64)"` - Private bool - ReleaseID int64 `xorm:"INDEX"` + ReleaseID int64 `xorm:"INDEX"` Files []string - Type string `binding:"Required"` + Type string `binding:"Required"` +} + +func (f *EditDatasetForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { + return validate(errs, ctx.Data, f, ctx.Locale) +} + +type EditAttachmentForm struct { + ID int64 `binding:"Required"` + Description string +} + +func (f *EditAttachmentForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { + return validate(errs, ctx.Data, f, ctx.Locale) } diff --git a/modules/context/repo.go b/modules/context/repo.go index
64f02c921..7c425c8c0 100755 --- a/modules/context/repo.go +++ b/modules/context/repo.go @@ -475,6 +475,8 @@ func RepoAssignment() macaron.Handler { if ctx.IsSigned { ctx.Data["IsWatchingRepo"] = models.IsWatching(ctx.User.ID, repo.ID) ctx.Data["IsStaringRepo"] = models.IsStaring(ctx.User.ID, repo.ID) + + ctx.Data["IsStaringDataset"] = models.IsDatasetStaringByRepoId(ctx.User.ID, repo.ID) } if repo.IsFork { diff --git a/modules/dataset/dataset.go b/modules/dataset/dataset.go new file mode 100644 index 000000000..a180af184 --- /dev/null +++ b/modules/dataset/dataset.go @@ -0,0 +1,17 @@ +package dataset + +func GetResourceType(cloudbrainType int) string { + if cloudbrainType == 0 { + return "CPU/GPU" + } else { + return "NPU" + } +} + +func GetStatusText(isPrivate bool) string { + if isPrivate { + return "dataset.private" + } else { + return "dataset.public" + } +} diff --git a/modules/modelarts/modelarts.go b/modules/modelarts/modelarts.go index b740b1167..e30d0100c 100755 --- a/modules/modelarts/modelarts.go +++ b/modules/modelarts/modelarts.go @@ -51,6 +51,8 @@ const ( DataUrl = "data_url" ResultUrl = "result_url" CkptUrl = "ckpt_url" + DeviceTarget = "device_target" + Ascend = "Ascend" PerPage = 10 IsLatestVersion = "1" NotLatestVersion = "0" diff --git a/modules/setting/setting.go b/modules/setting/setting.go index ae6604ec9..7dc8167bd 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -165,6 +165,7 @@ var ( ExplorePagingNum int ContributorPagingNum int IssuePagingNum int + DatasetPagingNum int RepoSearchPagingNum int MembersPagingNum int FeedMaxCommitNum int @@ -207,6 +208,7 @@ var ( ExplorePagingNum: 20, ContributorPagingNum: 50, IssuePagingNum: 10, + DatasetPagingNum: 5, RepoSearchPagingNum: 10, MembersPagingNum: 20, FeedMaxCommitNum: 5, @@ -512,9 +514,9 @@ var ( ProfileID string PoolInfos string Flavor string - DebugHost string - ImageInfos string - Capacity int + DebugHost string + ImageInfos string + Capacity int //train-job ResourcePools string Engines string diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 3d31b611c..77c6fca8d 100755 --- a/modules/templates/helper.go +++ b/modules/templates/helper.go @@ -23,6 +23,8 @@ import ( "time" "unicode" + "code.gitea.io/gitea/modules/dataset" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/emoji" @@ -86,20 +88,22 @@ func NewFuncMap() []template.FuncMap { "AllowedReactions": func() []string { return setting.UI.Reactions }, - "AvatarLink": models.AvatarLink, - "Safe": Safe, - "SafeJS": SafeJS, - "Str2html": Str2html, - "TimeSince": timeutil.TimeSince, - "TimeSinceUnix": timeutil.TimeSinceUnix, - "TimeSinceUnix1": timeutil.TimeSinceUnix1, - "TimeSinceUnixShort": timeutil.TimeSinceUnixShort, - "RawTimeSince": timeutil.RawTimeSince, - "FileSize": base.FileSize, - "PrettyNumber": base.PrettyNumber, - "Subtract": base.Subtract, - "EntryIcon": base.EntryIcon, - "MigrationIcon": MigrationIcon, + "AvatarLink": models.AvatarLink, + "Safe": Safe, + "SafeJS": SafeJS, + "Str2html": Str2html, + "TimeSince": timeutil.TimeSince, + "TimeSinceUnix": timeutil.TimeSinceUnix, + "TimeSinceUnix1": timeutil.TimeSinceUnix1, + "AttachmentResourceType": dataset.GetResourceType, + "AttachmentStatus": dataset.GetStatusText, + "TimeSinceUnixShort": timeutil.TimeSinceUnixShort, + "RawTimeSince": timeutil.RawTimeSince, + "FileSize": base.FileSize, + "PrettyNumber": base.PrettyNumber, + "Subtract": base.Subtract, + "EntryIcon": base.EntryIcon, + "MigrationIcon": 
MigrationIcon, "Add": func(a, b int) int { return a + b }, @@ -340,11 +344,13 @@ func NewTextFuncMap() []texttmpl.FuncMap { "AppDomain": func() string { return setting.Domain }, - "TimeSince": timeutil.TimeSince, - "TimeSinceUnix": timeutil.TimeSinceUnix, - "TimeSinceUnix1": timeutil.TimeSinceUnix1, - "TimeSinceUnixShort": timeutil.TimeSinceUnixShort, - "RawTimeSince": timeutil.RawTimeSince, + "TimeSince": timeutil.TimeSince, + "TimeSinceUnix": timeutil.TimeSinceUnix, + "TimeSinceUnix1": timeutil.TimeSinceUnix1, + "TimeSinceUnixShort": timeutil.TimeSinceUnixShort, + "RawTimeSince": timeutil.RawTimeSince, + "AttachmentResourceType": dataset.GetResourceType, + "AttachmentStatus": dataset.GetStatusText, "DateFmtLong": func(t time.Time) string { return t.Format(time.RFC1123Z) }, @@ -746,5 +752,5 @@ func licenses() []string { // Dataset tasks func tasks() []string { - return []string{"machine_translation", "question_answering_system", "information_retrieval", "knowledge_graph", "text_annotation", "text_categorization", "emotion_analysis", "language_modeling", "speech_recognition", "automatic_digest", "information_extraction", "description_generation", "image_classification", "face_recognition", "image_search", "target_detection", "image_description_generation", "vehicle_license_plate_recognition", "medical_image_analysis", "unmanned", "unmanned_security", "drone", "vr_ar", "2_d_vision", "2.5_d_vision", "3_d_reconstruction", "image_processing", "video_processing", "visual_input_system", "speech_coding", "speech_enhancement", "speech_recognition", "speech_synthesis"} + return []string{"machine_translation", "question_answering_system", "information_retrieval", "knowledge_graph", "text_annotation", "text_categorization", "emotion_analysis", "language_modeling", "speech_recognition", "automatic_digest", "information_extraction", "description_generation", "image_classification", "face_recognition", "image_search", "target_detection", "image_description_generation", "vehicle_license_plate_recognition", "medical_image_analysis", "unmanned", "unmanned_security", "drone", "vr_ar", "2_d_vision", "2.5_d_vision", "3_d_reconstruction", "image_processing", "video_processing", "visual_input_system", "speech_coding", "speech_enhancement", "speech_synthesis"} } diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 75f12cf97..b1a25494a 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -285,6 +285,20 @@ code_search_results = Search results for '%s' code_last_indexed_at = Last indexed %s save=Save cancel=Cancel +hot_repo=Hot Repositories +active_repo=Active Repositories +all_fields = All fields +large_model = Large model +ai_development_tools = AI tools +computer_version = Computer vision +natural_language_processing = NLP +machine_learning = Machine learning +neural_networks = Neural networks +autopilot = Autopilot +robot = Robot +federated_learning = Federated learning +data_mining = Data mining +RISC-V_development = RISC-V development [auth] create_new_account = Register Account @@ -721,8 +735,13 @@ alert = To initiate a cloud brain task, please upload the dataset in zip format. dataset = Dataset dataset_setting= Dataset Setting title = Name +title_format_err=Name can only contain numbers, letters, '-', '_' or '.', and can be up to 100 characters long. description = Description +description_format_err=Description can be up to 1024 characters long. create_dataset = Create Dataset +create_dataset_fail=Failed to create dataset.
+query_dataset_fail=Failed to query dataset. +edit_attachment_fail=Failed to update description. show_dataset= Dataset edit_dataset= Edit Dataset update_dataset= Update Dataset @@ -741,7 +760,8 @@ private = private public = public dir = directory back = back -copy_url=copy download url +copy_url=Copy Download URL +copy_md5 = Copy MD5 directory=preview of the datasets create_label_task=create label task visibility = visibility @@ -792,12 +812,49 @@ category.computer_vision= computer vision category.natural_language_processing= natural language processing category.speech_processing= speech processing category.computer_vision_natural_language_processing= computer vision and natural language processing -attachment.delete= delete this version of dataset +attachment.delete= Delete this version of dataset attachment.delete_desc= Are you sure you will delete this version of dataset, once deleted can not be recovery public= public private= private -delete= delete - +delete= Delete +select_dataset=Select Dataset +current_project=Current Project +owner_dataset=Owner Dataset +public_dataset=Public Dataset +I_liked = I Liked +use = Use +create_new_dataset = Create New Dataset +dataset_name = Dataset Name +dataset_description = Dataset Description +select_category = Select Category +select_task = Select Research Direction/Application Area +dataset_name_tooltips = Please enter letters, numbers, _ and - up to 100 characters. +dataset_no_create = No dataset has been created yet +dataset_explain = Dataset: CloudBrain I provides CPU/GPU resources, CloudBrain II provides Ascend NPU resources, and the dataset used for debugging also needs to be uploaded to the corresponding environment; +dataset_instructions_for_use = Instructions for use: You can refer to Qizhi AI Collaboration Platform +dataset_camp_course = Newcomer Training Camp Course; +dataset_upload = Upload +dataset_file_name = File Name +dataset_available_clusters = Available Clusters +dataset_upload_time = Upload Time +download = Download +modify_description = Modify Description +set_public = Set Public +set_private = Set Private +annotation = Annotation +upload_dataset_file = Upload Dataset File +file_description = File Description +data_upload = Dataset Upload +illustrate = Illustrate +illustrate.only = Only Datasets In +illustrate.zip = zip/tar.gz Format +illustrate.fisrt_end = Can Initiate Cloudbrain Tasks +modify_dataset = Modify Dataset +modify_dataset_description = Modify Dataset Description +search_dataset = Search Dataset Files +unzip_tooltips = If it has not been decompressed for a long time, please check whether the compressed package has encrypted files or file errors +zip_failed = Decompression failed, please check whether the compressed package is encrypted or contact technical support +dataset_desc = The description should not exceed 1024 characters [repo] owner = Owner repo_name = Repository Name @@ -827,7 +884,7 @@ repo_label_helpe = Press Enter to complete issue_labels = Issue Labels issue_labels_helper = Select an issue label set. license = License -license_helper = Select a license file. +license_helper = Select a license file readme = README readme_helper = Select a README file template. auto_init = Initialize Repository (Adds .gitignore, License and README) @@ -860,6 +917,7 @@ model_noright=No right model_rename=Duplicate model name, please modify model name.
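The `private`/`public` entries above back the `AttachmentStatus` and `AttachmentResourceType` template helpers this patch registers in modules/templates/helper.go: `GetStatusText` returns the locale keys `dataset.private`/`dataset.public`, and `GetResourceType` maps the attachment's cloudbrain type to a cluster label. A minimal sketch of what the helpers in modules/dataset/dataset.go return, assuming that package is importable exactly as in the patch:

```go
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/dataset"
)

func main() {
	// Type 0 is CloudBrain I (CPU/GPU); any other value is treated as NPU.
	fmt.Println(dataset.GetResourceType(0)) // CPU/GPU
	fmt.Println(dataset.GetResourceType(1)) // NPU

	// The returned keys are resolved against the [dataset] section of the
	// locale files, e.g. "dataset.private" -> "private" / "私有".
	fmt.Println(dataset.GetStatusText(true))  // dataset.private
	fmt.Println(dataset.GetStatusText(false)) // dataset.public
}
```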
debug=Debug +debug_again=Restart stop=Stop delete=Delete more=More @@ -867,7 +925,7 @@ gpu_type_all=All model_download=Model Download submit_image=Submit Image download=Download - +score=Score cloudbrain=Cloudbrain cloudbrain.new=New cloudbrain @@ -882,7 +940,7 @@ cloudbrain1 = cloudbrain1 cloudbrain2 = cloudbrain2 cloudbrain_selection = select cloudbrain cloudbrain_platform_selection = Select the cloudbrain platform you want to use: -confirm_choice = confirm +confirm_choice = Confirm cloudbran1_tips = Only data in zip format can create cloudbrain tasks cloudbrain_creator=Creator cloudbrain_task = Task Name @@ -989,13 +1047,28 @@ cloudbrain.benchmark.evaluate_child_type=Child Type cloudbrain.benchmark.evaluate_mirror=Mirror cloudbrain.benchmark.evaluate_train=Train Script cloudbrain.benchmark.evaluate_test=Test Script +cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"Target detection","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"Target re-identification","second":[{"id":1,"value":"Vehicle re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"Image-based person re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"Multi-target tracking","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} + modelarts.infer_job_model = Model modelarts.infer_job_model_file = Model File modelarts.infer_job = Inference Job modelarts.infer_job.model_version = Model/Version modelarts.infer_job.select_model = Select Model +modelarts.infer_job.boot_file_helper=The startup file is the entry file for your program execution and must end in .py, such as inference.py, main.py, example/inference.py, case/main.py. modelarts.infer_job.tooltip = The model has been deleted and cannot be viewed. + +debug_task_not_created = Debug task has not been created +train_task_not_created = Train task has not been created +inference_job_not_created = Inference job has not been created +model_Evaluation_not_created = Model evaluation has not been created +repo_not_initialized = Code version: You have not initialized the code repository, please initialize it first; +debug_task_running_limit = Running time: no more than 4 hours, it will automatically stop if it exceeds 4 hours; +dataset_desc = Dataset: Cloud Brain 1 provides CPU/GPU, Cloud Brain 2 provides Ascend NPU, and the dataset also needs to be uploaded to the corresponding environment; +platform_instructions = Instructions for use: You can refer to the Xiaobai training camp course of Qizhi AI collaboration platform. +model_not_exist = Model file: You do not have a model file yet, please generate and export the model through the training task first; +benchmark_leaderboards = Benchmark leaderboards + model.manage.import_new_model=Import New Model model.manage.create_error=Equal Name and Version has existed.
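The `cloudbrain.benchmark.types` entry above stores an entire JSON document in the locale file. A hedged sketch of how such a value could be decoded in Go; the struct names here are illustrative assumptions, not types taken from this patch:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical types mirroring the cloudbrain.benchmark.types JSON above.
type benchmarkTypes struct {
	Type []benchmarkType `json:"type"`
}

type benchmarkType struct {
	ID       int              `json:"id"`
	RankLink string           `json:"rank_link"`
	First    string           `json:"first"`
	Second   []benchmarkChild `json:"second"`
}

type benchmarkChild struct {
	ID         int    `json:"id"`
	Value      string `json:"value"`
	Attachment string `json:"attachment"`
	Owner      string `json:"owner"`
	RepoName   string `json:"repo_name"`
}

func main() {
	// Shortened sample of the locale value above.
	raw := `{"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"Target detection","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]}]}`

	var bt benchmarkTypes
	if err := json.Unmarshal([]byte(raw), &bt); err != nil {
		panic(err)
	}
	fmt.Println(bt.Type[0].First, bt.Type[0].Second[0].RepoName)
}
```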
model.manage.model_name = Model Name @@ -1077,6 +1150,7 @@ unstar = Unstar star = Star fork = Fork download_archive = Download Repository +star_fail=Failed to %s the dataset. no_desc = No Description no_label = No labels @@ -2831,4 +2905,24 @@ benchmark_path = Benchmark script path snn4imagenet_path = Snn4imagenet script path brainscore_path = Brainscore script path start_command = Start command -choose_mirror = select mirror +choose_mirror = select mirror or enter mirror path +select_dataset = select dataset +specification = specification +select_specification = select specification +description = description + +job_name_rule = Please enter letters, numbers, _ and - up to 64 characters and cannot end with a dash (-). +dataset_path_rule = The dataset location is stored in the environment variable data_url, and the training output path is stored in the environment variable train_url. +view_sample = View sample +inference_output_path_rule = The inference output path is stored in the environment variable result_url. +model_file_path_rule=The model file location is stored in the environment variable ckpt_url + +delete_task = Delete task +task_delete_confirm = Are you sure you want to delete this task? Once this task is deleted, it cannot be recovered. +operate_confirm = confirm +operate_cancel = cancel + +gpu_num = GPU +cpu_num = CPU +memory = Memory +shared_memory = Shared Memory diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index afb7a45f5..ece7f7bdf 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -287,6 +287,20 @@ code_search_results=“%s” 的搜索结果是 code_last_indexed_at=最后索引于 %s save=保存 cancel=取消 +hot_repo=热门项目 +active_repo=活跃项目 +all_fields = 全部领域 +large_model = 大模型 +ai_development_tools = AI开发工具 +computer_version = 计算机视觉 +natural_language_processing = 自然语言处理 +machine_learning = 机器学习 +neural_networks = 神经网络 +autopilot = 自动驾驶 +robot = 机器人 +federated_learning = 联邦学习 +data_mining = 数据挖掘 +RISC-V_development = RISC-V开发 [auth] create_new_account=注册帐号 @@ -724,8 +738,14 @@ alert=如果要发起云脑任务,请上传zip格式的数据集 dataset=数据集 dataset_setting=数据集设置 title=名称 +title_format_err=名称最多允许输入100个字符,只允许字母,数字,中划线 (‘-’),下划线 (‘_’) 和点 (‘.’) 。 description=描述 +description_format_err=描述最多允许输入1024个字符。 create_dataset=创建数据集 +create_dataset_fail=创建数据集失败。 +query_dataset_fail=查询数据集失败。 +edit_attachment_fail=修改描述失败。 + show_dataset=数据集 edit_dataset=编辑数据集 update_dataset=更新数据集 @@ -801,6 +821,44 @@ attachment.delete_desc= 你确定要删除该版本的数据集么?一旦删 public=公有 private=私有 delete=删除 +select_dataset=选择数据集 +current_project=当前项目 +owner_dataset=我的数据集 +public_dataset=公开数据集 +I_liked=我收藏的 +use=使用 +create_new_dataset = 新建数据集 +dataset_name=数据集名称 +dataset_description = 数据集描述 +select_category = 选择分类 +select_task = 选择研究方向/应用领域 +dataset_name_tooltips = 请输入字母、数字、_和-,最长100个字符。 +dataset_no_create = 还未创建过数据集 +dataset_explain = 数据集:云脑1提供 CPU / GPU 资源,云脑2提供 Ascend NPU 资源,调试使用的数据集也需要上传到对应的环境; +dataset_instructions_for_use = 使用说明:可以参考启智AI协作平台 +dataset_camp_course = 小白训练营课程 +dataset_upload = 上传 +dataset_file_name = 文件名称 +dataset_available_clusters = 可用集群 +dataset_upload_time = 上传时间 +download = 下载 +modify_description = 修改描述 +set_public = 设为公开 +set_private = 设为私有 +annotation = 标注 +upload_dataset_file = 上传数据集文件 +file_description = 文件描述 +data_upload = 数据上传 +illustrate = 说明 +illustrate.only = 只有 +illustrate.zip = zip/tar.gz格式 +illustrate.fisrt_end = 的数据集才能发起云脑任务 +modify_dataset = 修改数据集 +modify_dataset_description = 修改数据集文件描述 +search_dataset = 搜索数据集文件 +unzip_tooltips = 如果长时间未解压,请检查压缩包是否有加密文件或者文件错误 +zip_failed = 
解压失败,请检查压缩包是否有加密或者联系技术支持人员。 +dataset_desc = 描述字数不超过1024个字符 [repo] owner=拥有者 @@ -831,7 +889,7 @@ repo_label_helpe=输入完成后回车键完成标签确定。 issue_labels=任务标签 issue_labels_helper=选择一个任务标签集 license=授权许可 -license_helper=选择授权许可文件。 +license_helper=选择授权许可文件 readme=自述 readme_helper=选择自述文件模板。 auto_init=初始化存储库 (添加. gitignore、许可证和自述文件) @@ -872,6 +930,7 @@ gpu_type_all=全部 model_download=结果下载 submit_image=提交镜像 download=模型下载 +score=评分 cloudbrain=云脑 cloudbrain.new=新建任务 @@ -995,7 +1054,7 @@ cloudbrain.benchmark.evaluate_child_type=子类型 cloudbrain.benchmark.evaluate_mirror=镜像 cloudbrain.benchmark.evaluate_train=训练程序 cloudbrain.benchmark.evaluate_test=测试程序 - +cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} modelarts.infer_job_model = 模型名称 modelarts.infer_job_model_file = 模型文件 @@ -1005,6 +1064,18 @@ modelarts.infer_job.select_model = 选择模型 modelarts.infer_job.boot_file_helper=启动文件是您程序执行的入口文件,必须是以.py结尾的文件。比如inference.py、main.py、example/inference.py、case/main.py。 modelarts.infer_job.tooltip = 该模型已删除,无法查看。 + +debug_task_not_created = 未创建过调试任务 +train_task_not_created = 未创建过训练任务 +inference_job_not_created = 未创建过推理任务 +model_Evaluation_not_created = 未创建过评测任务 +repo_not_initialized = 代码版本:您还没有初始化代码仓库,请先创建代码版本; +debug_task_running_limit = 运行时长:最长不超过4个小时,超过4个小时将自动停止; +dataset_desc = 数据集:云脑1提供 CPU / GPU 资源,云脑2提供 Ascend NPU 资源,调试使用的数据集也需要上传到对应的环境; +platform_instructions = 使用说明:可以参考启智AI协作平台小白训练营课程。 +model_not_exist = 模型文件:您还没有模型文件,请先通过训练任务产生并 导出模型 ; +benchmark_leaderboards = 基准测试排行榜 + model.manage.import_new_model=导入新模型 model.manage.create_error=相同的名称和版本的模型已经存在。 model.manage.model_name = 模型名称 @@ -1086,6 +1157,8 @@ unstar=取消点赞 star=点赞 fork=派生 download_archive=下载此项目 +star_fail=%s失败。 + no_desc=暂无描述 no_label = 暂无标签 @@ -2840,3 +2913,24 @@ snn4imagenet_path = snn4imagenet脚本存放路径 brainscore_path = brainscore脚本存放路径 start_command = 启动命令 choose_mirror = 选择镜像或输入镜像地址 +select_dataset = 选择数据集 +specification = 规格 +select_specification = 选择资源规格 +description = 描述 + +job_name_rule = 请输入字母、数字、_和-,最长64个字符,且不能以中划线(-)结尾。 +dataset_path_rule = 数据集位置存储在环境变量data_url中,训练输出路径存储在环境变量train_url中。 +view_sample = 查看样例 +inference_output_path_rule = 推理输出路径存储在环境变量result_url中。 +model_file_path_rule = 模型文件位置存储在环境变量ckpt_url中。 + +delete_task = 删除任务 +task_delete_confirm = 你确认删除该任务么?此任务一旦删除不可恢复。 +operate_confirm = 确定操作 +operate_cancel = 取消操作 + +gpu_num = GPU数 +cpu_num = CPU数 +memory = 内存 +shared_memory = 共享内存 + diff --git a/routers/api/v1/repo/cloudbrain.go b/routers/api/v1/repo/cloudbrain.go index f92259c3d..b2f529dfb 100755 --- a/routers/api/v1/repo/cloudbrain.go +++ b/routers/api/v1/repo/cloudbrain.go @@ -6,6 +6,7 @@ package repo import ( + "code.gitea.io/gitea/modules/timeutil" "net/http" "sort" "time" @@ -77,9 +78,17 @@ 
func GetCloudbrainTask(ctx *context.APIContext) { job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP job.ContainerID = taskRes.TaskStatuses[0].ContainerID job.Status = taskRes.TaskStatuses[0].State + + if job.StartTime == 0 && !taskRes.TaskStatuses[0].StartAt.IsZero() { + job.StartTime = timeutil.TimeStamp(taskRes.TaskStatuses[0].StartAt.Unix()) + } } if result.JobStatus.State != string(models.JobWaiting) { + if job.EndTime == 0 && models.IsCloudBrainOneDebugJobTerminal(job.Status) { + job.EndTime = timeutil.TimeStampNow() + } + job.ComputeAndSetDuration() err = models.UpdateJob(job) if err != nil { log.Error("UpdateJob failed:", err) diff --git a/routers/api/v1/repo/modelarts.go b/routers/api/v1/repo/modelarts.go index 893f2a32c..d7d011e07 100755 --- a/routers/api/v1/repo/modelarts.go +++ b/routers/api/v1/repo/modelarts.go @@ -6,12 +6,11 @@ package repo import ( + "code.gitea.io/gitea/modules/timeutil" "net/http" "strconv" "strings" - "code.gitea.io/gitea/modules/util" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" @@ -67,8 +66,14 @@ func GetModelArtsNotebook2(ctx *context.APIContext) { ctx.NotFound(err) return } - + if job.StartTime == 0 && result.Lease.CreateTime > 0 { + job.StartTime = timeutil.TimeStamp(result.Lease.CreateTime / 1000) + } job.Status = result.Status + if job.EndTime == 0 && models.IsModelArtsDebugJobTerminal(job.Status) { + job.EndTime = timeutil.TimeStampNow() + } + job.ComputeAndSetDuration() err = models.UpdateJob(job) if err != nil { log.Error("UpdateJob failed:", err) @@ -133,16 +138,17 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) { ctx.NotFound(err) return } - + if job.StartTime == 0 && result.StartTime > 0 { + job.StartTime = timeutil.TimeStamp(result.StartTime / 1000) + } job.Status = modelarts.TransTrainJobStatus(result.IntStatus) - job.Duration = result.Duration + job.Duration = result.Duration / 1000 job.TrainJobDuration = result.TrainJobDuration - if result.Duration != 0 { - job.TrainJobDuration = util.AddZero(result.Duration/3600000) + ":" + util.AddZero(result.Duration%3600000/60000) + ":" + util.AddZero(result.Duration%60000/1000) + job.TrainJobDuration = models.ConvertDurationToStr(job.Duration) - } else { - job.TrainJobDuration = "00:00:00" + if job.EndTime == 0 && models.IsTrainJobTerminal(job.Status) && job.StartTime > 0 { + job.EndTime = job.StartTime.Add(job.Duration) } err = models.UpdateTrainJobVersion(job) @@ -366,16 +372,17 @@ func GetModelArtsInferenceJob(ctx *context.APIContext) { ctx.NotFound(err) return } - + if job.StartTime == 0 && result.StartTime > 0 { + job.StartTime = timeutil.TimeStamp(result.StartTime / 1000) + } job.Status = modelarts.TransTrainJobStatus(result.IntStatus) - job.Duration = result.Duration + job.Duration = result.Duration / 1000 job.TrainJobDuration = result.TrainJobDuration - if result.Duration != 0 { - job.TrainJobDuration = util.AddZero(result.Duration/3600000) + ":" + util.AddZero(result.Duration%3600000/60000) + ":" + util.AddZero(result.Duration%60000/1000) + job.TrainJobDuration = models.ConvertDurationToStr(job.Duration) - } else { - job.TrainJobDuration = "00:00:00" + if job.EndTime == 0 && models.IsTrainJobTerminal(job.Status) && job.StartTime > 0 { + job.EndTime = job.StartTime.Add(job.Duration) } err = models.UpdateInferenceJob(job) diff --git a/routers/api/v1/repo/repo_dashbord.go b/routers/api/v1/repo/repo_dashbord.go index b1f344d55..2c42f8a16 100644 --- a/routers/api/v1/repo/repo_dashbord.go +++
b/routers/api/v1/repo/repo_dashbord.go @@ -292,41 +292,41 @@ func getFileName(ctx *context.Context, beginTime time.Time, endTime time.Time, p func allProjectsPeroidHeader(ctx *context.Context) map[string]string { - return map[string]string{"A1": ctx.Tr("admin.repos.id"), "B1": ctx.Tr("admin.repos.projectName"), "C1": ctx.Tr("repo.owner"), "D1": ctx.Tr("admin.repos.isPrivate"), "E1": ctx.Tr("admin.repos.isFork"), "F1": ctx.Tr("admin.repos.isMirror"), "G1": ctx.Tr("admin.repos.openi"), "H1": ctx.Tr("admin.repos.visit"), "I1": ctx.Tr("admin.repos.download"), "J1": ctx.Tr("admin.repos.pr"), "K1": ctx.Tr("admin.repos.commit"), - "L1": ctx.Tr("admin.repos.watches"), "M1": ctx.Tr("admin.repos.stars"), "N1": ctx.Tr("admin.repos.forks"), "O1": ctx.Tr("admin.repos.issues"), "P1": ctx.Tr("admin.repos.closedIssues"), "Q1": ctx.Tr("admin.repos.contributor"), "R1": ctx.Tr("admin.repos.create")} + return map[string]string{"A1": ctx.Tr("admin.repos.id"), "B1": ctx.Tr("admin.repos.projectName"), "C1": ctx.Tr("repo.owner"), "D1": ctx.Tr("admin.repos.isPrivate"), "E1": ctx.Tr("admin.repos.openi"), "F1": ctx.Tr("admin.repos.visit"), "G1": ctx.Tr("admin.repos.download"), "H1": ctx.Tr("admin.repos.pr"), "I1": ctx.Tr("admin.repos.commit"), + "J1": ctx.Tr("admin.repos.watches"), "K1": ctx.Tr("admin.repos.stars"), "L1": ctx.Tr("admin.repos.forks"), "M1": ctx.Tr("admin.repos.issues"), "N1": ctx.Tr("admin.repos.closedIssues"), "O1": ctx.Tr("admin.repos.contributor"), "P1": ctx.Tr("admin.repos.isFork"), "Q1": ctx.Tr("admin.repos.isMirror"), "R1": ctx.Tr("admin.repos.create")} } func allProjectsPeroidValues(row int, rs *models.RepoStatistic, ctx *context.Context) map[string]string { - return map[string]string{getCellName("A", row): strconv.FormatInt(rs.RepoID, 10), getCellName("B", row): rs.DisplayName(), getCellName("C", row): rs.OwnerName, getCellName("D", row): getBoolDisplay(rs.IsPrivate, ctx), getCellName("E", row): getBoolDisplay(rs.IsFork, ctx), getCellName("F", row): getBoolDisplay(rs.IsMirror, ctx), getCellName("G", row): strconv.FormatFloat(rs.RadarTotal, 'f', 2, 64), - getCellName("H", row): strconv.FormatInt(rs.NumVisits, 10), getCellName("I", row): strconv.FormatInt(rs.NumDownloads, 10), getCellName("J", row): strconv.FormatInt(rs.NumPulls, 10), getCellName("K", row): strconv.FormatInt(rs.NumCommits, 10), - getCellName("L", row): strconv.FormatInt(rs.NumWatches, 10), getCellName("M", row): strconv.FormatInt(rs.NumStars, 10), getCellName("N", row): strconv.FormatInt(rs.NumForks, 10), getCellName("O", row): strconv.FormatInt(rs.NumIssues, 10), - getCellName("P", row): strconv.FormatInt(rs.NumClosedIssues, 10), getCellName("Q", row): strconv.FormatInt(rs.NumContributor, 10), getCellName("R", row): time.Unix(int64(rs.RepoCreatedUnix), 0).Format(CREATE_TIME_FORMAT), + return map[string]string{getCellName("A", row): strconv.FormatInt(rs.RepoID, 10), getCellName("B", row): rs.DisplayName(), getCellName("C", row): rs.OwnerName, getCellName("D", row): getBoolDisplay(rs.IsPrivate, ctx), getCellName("E", row): strconv.FormatFloat(rs.RadarTotal, 'f', 2, 64), + getCellName("F", row): strconv.FormatInt(rs.NumVisits, 10), getCellName("G", row): strconv.FormatInt(rs.NumDownloads, 10), getCellName("H", row): strconv.FormatInt(rs.NumPulls, 10), getCellName("I", row): strconv.FormatInt(rs.NumCommits, 10), + getCellName("J", row): strconv.FormatInt(rs.NumWatches, 10), getCellName("K", row): strconv.FormatInt(rs.NumStars, 10), getCellName("L", row): strconv.FormatInt(rs.NumForks, 10), getCellName("M", row): 
strconv.FormatInt(rs.NumIssues, 10), + getCellName("N", row): strconv.FormatInt(rs.NumClosedIssues, 10), getCellName("O", row): strconv.FormatInt(rs.NumContributor, 10), getCellName("P", row): getBoolDisplay(rs.IsFork, ctx), getCellName("Q", row): getBoolDisplay(rs.IsMirror, ctx), getCellName("R", row): time.Unix(int64(rs.RepoCreatedUnix), 0).Format(CREATE_TIME_FORMAT), } } func allProjectsOpenIHeader() map[string]string { - return map[string]string{"A1": "ID", "B1": "项目名称", "C1": "拥有者", "D1": "私有", "E1": "迁移", "F1": "镜像", "G1": "OpenI指数", - "H1": "影响力", "I1": "成熟度", "J1": "活跃度", "K1": "项目健康度", "L1": "团队健康度", "M1": "项目发展趋势", - "N1": "关注数", "O1": "点赞数", "P1": "派生数", "Q1": "代码下载量", "R1": "评论数", "S1": "浏览量", "T1": "已解决任务数", "U1": "版本发布数量", "V1": "有效开发年龄", - "W1": "数据集", "X1": "模型数", "Y1": "百科页面数量", "Z1": "提交数", "AA1": "任务数", "AB1": "PR数", "AC1": "版本发布数量", "AD1": "任务完成比例", "AE1": "贡献者数", "AF1": "关键贡献者数", - "AG1": "新人增长量", "AH1": "代码规模增长量", "AI1": "任务增长量", "AJ1": "新人增长量", "AK1": "提交增长量", "AL1": "评论增长量", "AM1": "项目创建时间", + return map[string]string{"A1": "ID", "B1": "项目名称", "C1": "拥有者", "D1": "私有", "E1": "OpenI指数", + "F1": "影响力", "G1": "成熟度", "H1": "活跃度", "I1": "项目健康度", "J1": "团队健康度", "K1": "项目发展趋势", + "L1": "关注数", "M1": "点赞数", "N1": "派生数", "O1": "代码下载量", "P1": "评论数", "Q1": "浏览量", "R1": "已解决任务数", "S1": "版本发布数量", "T1": "有效开发年龄", + "U1": "数据集", "V1": "模型数", "W1": "百科页面数量", "X1": "提交数", "Y1": "任务数", "Z1": "PR数", "AA1": "版本发布数量", "AB1": "任务完成比例", "AC1": "贡献者数", "AD1": "关键贡献者数", + "AE1": "新人增长量", "AF1": "代码规模增长量", "AG1": "任务增长量", "AH1": "新人增长量", "AI1": "提交增长量", "AJ1": "评论增长量", "AK1": "迁移", "AL1": "镜像", "AM1": "项目创建时间", } } func allProjectsOpenIValues(row int, rs *models.RepoStatistic, ctx *context.Context) map[string]string { - return map[string]string{getCellName("A", row): strconv.FormatInt(rs.RepoID, 10), getCellName("B", row): rs.DisplayName(), getCellName("C", row): rs.OwnerName, getCellName("D", row): getBoolDisplay(rs.IsPrivate, ctx), getCellName("E", row): getBoolDisplay(rs.IsFork, ctx), getCellName("F", row): getBoolDisplay(rs.IsMirror, ctx), getCellName("G", row): strconv.FormatFloat(rs.RadarTotal, 'f', 2, 64), - getCellName("H", row): strconv.FormatFloat(rs.Impact, 'f', 2, 64), getCellName("I", row): strconv.FormatFloat(rs.Completeness, 'f', 2, 64), getCellName("J", row): strconv.FormatFloat(rs.Liveness, 'f', 2, 64), getCellName("K", row): strconv.FormatFloat(rs.ProjectHealth, 'f', 2, 64), getCellName("L", row): strconv.FormatFloat(rs.TeamHealth, 'f', 2, 64), getCellName("M", row): strconv.FormatFloat(rs.Growth, 'f', 2, 64), - getCellName("N", row): strconv.FormatInt(rs.NumWatches, 10), getCellName("O", row): strconv.FormatInt(rs.NumStars, 10), getCellName("P", row): strconv.FormatInt(rs.NumForks, 10), getCellName("Q", row): strconv.FormatInt(rs.NumDownloads, 10), + return map[string]string{getCellName("A", row): strconv.FormatInt(rs.RepoID, 10), getCellName("B", row): rs.DisplayName(), getCellName("C", row): rs.OwnerName, getCellName("D", row): getBoolDisplay(rs.IsPrivate, ctx), getCellName("E", row): strconv.FormatFloat(rs.RadarTotal, 'f', 2, 64), + getCellName("F", row): strconv.FormatFloat(rs.Impact, 'f', 2, 64), getCellName("G", row): strconv.FormatFloat(rs.Completeness, 'f', 2, 64), getCellName("H", row): strconv.FormatFloat(rs.Liveness, 'f', 2, 64), getCellName("I", row): strconv.FormatFloat(rs.ProjectHealth, 'f', 2, 64), getCellName("J", row): strconv.FormatFloat(rs.TeamHealth, 'f', 2, 64), getCellName("K", row): strconv.FormatFloat(rs.Growth, 'f', 2, 64), + getCellName("L", row): 
strconv.FormatInt(rs.NumWatches, 10), getCellName("M", row): strconv.FormatInt(rs.NumStars, 10), getCellName("N", row): strconv.FormatInt(rs.NumForks, 10), getCellName("O", row): strconv.FormatInt(rs.NumDownloads, 10), - getCellName("R", row): strconv.FormatInt(rs.NumComments, 10), getCellName("S", row): strconv.FormatInt(rs.NumVisits, 10), getCellName("T", row): strconv.FormatInt(rs.NumClosedIssues, 10), getCellName("U", row): strconv.FormatInt(rs.NumVersions, 10), - getCellName("V", row): strconv.FormatInt(rs.NumDevMonths, 10), getCellName("W", row): strconv.FormatInt(rs.DatasetSize, 10), getCellName("X", row): strconv.FormatInt(rs.NumModels, 10), getCellName("Y", row): strconv.FormatInt(rs.NumWikiViews, 10), - getCellName("Z", row): strconv.FormatInt(rs.NumCommits, 10), getCellName("AA", row): strconv.FormatInt(rs.NumIssues, 10), getCellName("AB", row): strconv.FormatInt(rs.NumPulls, 10), getCellName("AC", row): strconv.FormatInt(rs.NumVersions, 10), - getCellName("AD", row): strconv.FormatFloat(float64(rs.IssueFixedRate), 'f', 2, 64), getCellName("AE", row): strconv.FormatInt(rs.NumContributor, 10), getCellName("AF", row): strconv.FormatInt(rs.NumKeyContributor, 10), getCellName("AG", row): strconv.FormatInt(rs.NumContributorsGrowth, 10), - getCellName("AH", row): strconv.FormatInt(rs.NumCommitLinesGrowth, 10), getCellName("AI", row): strconv.FormatInt(rs.NumIssuesGrowth, 10), getCellName("AJ", row): strconv.FormatInt(rs.NumContributorsGrowth, 10), getCellName("AK", row): strconv.FormatInt(rs.NumCommitsGrowth, 10), getCellName("AL", row): strconv.FormatInt(rs.NumCommentsGrowth, 10), getCellName("AM", row): time.Unix(int64(rs.RepoCreatedUnix), 0).Format(CREATE_TIME_FORMAT), + getCellName("P", row): strconv.FormatInt(rs.NumComments, 10), getCellName("Q", row): strconv.FormatInt(rs.NumVisits, 10), getCellName("R", row): strconv.FormatInt(rs.NumClosedIssues, 10), getCellName("S", row): strconv.FormatInt(rs.NumVersions, 10), + getCellName("T", row): strconv.FormatInt(rs.NumDevMonths, 10), getCellName("U", row): strconv.FormatInt(rs.DatasetSize, 10), getCellName("V", row): strconv.FormatInt(rs.NumModels, 10), getCellName("W", row): strconv.FormatInt(rs.NumWikiViews, 10), + getCellName("X", row): strconv.FormatInt(rs.NumCommits, 10), getCellName("Y", row): strconv.FormatInt(rs.NumIssues, 10), getCellName("Z", row): strconv.FormatInt(rs.NumPulls, 10), getCellName("AA", row): strconv.FormatInt(rs.NumVersions, 10), + getCellName("AB", row): strconv.FormatFloat(float64(rs.IssueFixedRate), 'f', 2, 64), getCellName("AC", row): strconv.FormatInt(rs.NumContributor, 10), getCellName("AD", row): strconv.FormatInt(rs.NumKeyContributor, 10), getCellName("AE", row): strconv.FormatInt(rs.NumContributorsGrowth, 10), + getCellName("AF", row): strconv.FormatInt(rs.NumCommitLinesGrowth, 10), getCellName("AG", row): strconv.FormatInt(rs.NumIssuesGrowth, 10), getCellName("AH", row): strconv.FormatInt(rs.NumContributorsGrowth, 10), getCellName("AI", row): strconv.FormatInt(rs.NumCommitsGrowth, 10), getCellName("AJ", row): strconv.FormatInt(rs.NumCommentsGrowth, 10), getCellName("AK", row): getBoolDisplay(rs.IsFork, ctx), getCellName("AL", row): getBoolDisplay(rs.IsMirror, ctx), getCellName("AM", row): time.Unix(int64(rs.RepoCreatedUnix), 0).Format(CREATE_TIME_FORMAT), } } diff --git a/routers/home.go b/routers/home.go index 2db8d2112..c33d7a049 100755 --- a/routers/home.go +++ b/routers/home.go @@ -274,10 +274,11 @@ func ExploreDatasets(ctx *context.Context) { // ctx.Data["IsRepoIndexerEnabled"] = 
setting.Indexer.RepoIndexerEnabled var ( - datasets []*models.Dataset - count int64 - err error - orderBy models.SearchOrderBy + datasets []*models.Dataset + datasetsWithStar []*models.DatasetWithStar + count int64 + err error + orderBy models.SearchOrderBy ) page := ctx.QueryInt("page") if page <= 0 { @@ -301,6 +302,10 @@ func ExploreDatasets(ctx *context.Context) { orderBy = models.SearchOrderBySizeReverse case "downloadtimes": orderBy = models.SearchOrderByDownloadTimes + case "moststars": + orderBy = models.SearchOrderByStarsReverse + case "feweststars": + orderBy = models.SearchOrderByStars default: ctx.Data["SortType"] = "recentupdate" orderBy = models.SearchOrderByRecentUpdated @@ -308,6 +313,9 @@ func ExploreDatasets(ctx *context.Context) { keyword := strings.Trim(ctx.Query("q"), " ") + category := ctx.Query("category") + task := ctx.Query("task") + license := ctx.Query("license") var ownerID int64 if ctx.User != nil && !ctx.User.IsAdmin { ownerID = ctx.User.ID @@ -316,25 +324,40 @@ func ExploreDatasets(ctx *context.Context) { Keyword: keyword, IncludePublic: true, SearchOrderBy: orderBy, + Category: category, + Task: task, + License: license, OwnerID: ownerID, ListOptions: models.ListOptions{ Page: page, - PageSize: setting.UI.ExplorePagingNum, + PageSize: 30, }, } datasets, count, err = models.SearchDataset(opts) + if err != nil { ctx.ServerError("SearchDatasets", err) return } + for _, dataset := range datasets { + if !ctx.IsSigned { + datasetsWithStar = append(datasetsWithStar, &models.DatasetWithStar{Dataset: *dataset, IsStaring: false}) + } else { + datasetsWithStar = append(datasetsWithStar, &models.DatasetWithStar{Dataset: *dataset, IsStaring: models.IsDatasetStaring(ctx.User.ID, dataset.ID)}) + } + + } pager := context.NewPagination(int(count), opts.PageSize, page, 5) ctx.Data["Keyword"] = opts.Keyword + ctx.Data["Category"] = category + ctx.Data["Task"] = task + ctx.Data["License"] = license pager.SetDefaultParams(ctx) ctx.Data["Page"] = pager - ctx.Data["Datasets"] = datasets + ctx.Data["Datasets"] = datasetsWithStar ctx.Data["Total"] = count ctx.Data["PageIsDatasets"] = true ctx.HTML(200, tplExploreDataset) diff --git a/routers/repo/attachment.go b/routers/repo/attachment.go index 668169110..96f17b74b 100755 --- a/routers/repo/attachment.go +++ b/routers/repo/attachment.go @@ -15,6 +15,10 @@ import ( "strconv" "strings" + "code.gitea.io/gitea/modules/auth" + + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/labelmsg" @@ -30,8 +34,10 @@ import ( const ( //result of decompress - DecompressSuccess = "0" - DecompressFailed = "1" + DecompressSuccess = "0" + DecompressFailed = "1" + tplAttachmentUpload base.TplName = "repo/attachment/upload" + tplAttachmentEdit base.TplName = "repo/attachment/edit" ) type CloudBrainDataset struct { @@ -63,6 +69,40 @@ func renderAttachmentSettings(ctx *context.Context) { ctx.Data["AttachmentMaxFiles"] = setting.Attachment.MaxFiles } +func UploadAttachmentUI(ctx *context.Context) { + ctx.Data["datasetId"] = ctx.Query("datasetId") + ctx.Data["PageIsDataset"] = true + + ctx.HTML(200, tplAttachmentUpload) + +} + +func EditAttachmentUI(ctx *context.Context) { + + id, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64) + ctx.Data["PageIsDataset"] = true + attachment, _ := models.GetAttachmentByID(id) + if attachment == nil { + ctx.Error(404, "The attachment does not exist.") + return + } + ctx.Data["Attachment"] = attachment + ctx.HTML(200, tplAttachmentEdit) + +} + +func
EditAttachment(ctx *context.Context, form auth.EditAttachmentForm) { + + err := models.UpdateAttachmentDescription(&models.Attachment{ + ID: form.ID, + Description: form.Description, + }) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.edit_attachment_fail"))) + return + } + ctx.JSON(http.StatusOK, models.BaseOKMessage) + +} + // UploadAttachment response for uploading issue's attachment func UploadAttachment(ctx *context.Context) { if !setting.Attachment.Enabled { @@ -241,14 +281,20 @@ func GetAttachment(ctx *context.Context) { } if dataSet != nil { - isPermit, err := models.GetUserDataSetPermission(dataSet, ctx.User) - if err != nil { - ctx.Error(http.StatusInternalServerError, "GetUserDataSetPermission", err.Error()) - return - } - if !isPermit { - ctx.Error(http.StatusNotFound) + if !ctx.IsSigned { + ctx.SetCookie("redirect_to", setting.AppSubURL+ctx.Req.URL.RequestURI(), 0, setting.AppSubURL) + ctx.Redirect(setting.AppSubURL + "/user/login") return + } else { + isPermit, err := models.GetUserDataSetPermission(dataSet, ctx.User) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetUserDataSetPermission", err.Error()) + return + } + if !isPermit { + ctx.Error(http.StatusNotFound) + return + } } } @@ -387,11 +433,17 @@ func AddAttachment(ctx *context.Context) { ctx.Error(404, "attachment has not been uploaded") return } + datasetId := ctx.QueryInt64("dataset_id") + dataset, err := models.GetDatasetByID(datasetId) + if err != nil { + ctx.Error(404, "dataset does not exist.") + return + } attachment, err := models.InsertAttachment(&models.Attachment{ UUID: uuid, UploaderID: ctx.User.ID, - IsPrivate: true, + IsPrivate: dataset.IsPrivate(), Name: fileName, Size: ctx.QueryInt64("size"), DatasetID: ctx.QueryInt64("dataset_id"), @@ -798,6 +850,9 @@ func CompleteMultipart(ctx *context.Context) { typeCloudBrain := ctx.QueryInt("type") fileName := ctx.Query("file_name") + log.Warn("uuid:" + uuid) + log.Warn("typeCloudBrain:" + strconv.Itoa(typeCloudBrain)) + err := checkTypeCloudBrain(typeCloudBrain) if err != nil { ctx.ServerError("checkTypeCloudBrain failed", err) @@ -835,22 +890,24 @@ func CompleteMultipart(ctx *context.Context) { ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err)) return } - + dataset, _ := models.GetDatasetByID(ctx.QueryInt64("dataset_id")) + log.Warn("insert attachment to datasetId:" + strconv.FormatInt(dataset.ID, 10)) attachment, err := models.InsertAttachment(&models.Attachment{ - UUID: uuid, - UploaderID: ctx.User.ID, - IsPrivate: true, - Name: fileName, - Size: ctx.QueryInt64("size"), - DatasetID: ctx.QueryInt64("dataset_id"), - Type: typeCloudBrain, + UUID: uuid, + UploaderID: ctx.User.ID, + IsPrivate: dataset.IsPrivate(), + Name: fileName, + Size: ctx.QueryInt64("size"), + DatasetID: ctx.QueryInt64("dataset_id"), + Description: ctx.Query("description"), + Type: typeCloudBrain, }) if err != nil { ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err)) return } - dataset, _ := models.GetDatasetByID(attachment.DatasetID) + repository, _ := models.GetRepositoryByID(dataset.RepoID) notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(attachment.Type), attachment.Name, models.ActionUploadAttachment) diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 6e88b266d..0905efd54 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -2,9 +2,11 @@ package repo import ( "bufio" + "code.gitea.io/gitea/modules/timeutil" "encoding/json" "errors" "fmt" + "github.com/unknwon/i18n" "io"
"net/http" "os" @@ -45,6 +47,10 @@ var ( benchmarkResourceSpecs *models.ResourceSpecs ) +const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types" + +var benchmarkTypesMap = make(map[string]*models.BenchmarkTypes, 0) + var jobNamePattern = regexp.MustCompile(`^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$`) // MustEnableDataset check if repository enable internal cb @@ -130,12 +136,7 @@ func cloudBrainNewDataPrepare(ctx *context.Context) error { } ctx.Data["benchmark_categories"] = categories.Category - if benchmarkTypes == nil { - if err := json.Unmarshal([]byte(setting.BenchmarkTypes), &benchmarkTypes); err != nil { - log.Error("json.Unmarshal BenchmarkTypes(%s) failed:%v", setting.BenchmarkTypes, err, ctx.Data["MsgID"]) - } - } - ctx.Data["benchmark_types"] = benchmarkTypes.BenchmarkType + ctx.Data["benchmark_types"] = GetBenchmarkTypes(ctx).BenchmarkType if gpuInfos == nil { json.Unmarshal([]byte(setting.GpuTypes), &gpuInfos) @@ -162,6 +163,8 @@ func cloudBrainNewDataPrepare(ctx *context.Context) error { ctx.Data["brainscore_path"] = cloudbrain.BrainScoreMountPath ctx.Data["is_brainscore_enabled"] = setting.IsBrainScoreEnabled + ctx.Data["cloudbraintype"] = models.TypeCloudBrainOne + return nil } @@ -339,13 +342,6 @@ func CloudBrainRestart(ctx *context.Context) { } func CloudBrainBenchMarkShow(ctx *context.Context) { - if benchmarkTypes == nil { - if err := json.Unmarshal([]byte(setting.BenchmarkTypes), &benchmarkTypes); err != nil { - log.Error("json.Unmarshal BenchmarkTypes(%s) failed:%v", setting.BenchmarkTypes, err, ctx.Data["MsgID"]) - ctx.ServerError(err.Error(), err) - return - } - } cloudBrainShow(ctx, tplCloudBrainBenchmarkShow) } @@ -380,6 +376,9 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName) { task.Status = taskRes.TaskStatuses[0].State task.ContainerID = taskRes.TaskStatuses[0].ContainerID task.ContainerIp = taskRes.TaskStatuses[0].ContainerIP + if task.StartTime == 0 && !taskRes.TaskStatuses[0].StartAt.IsZero() { + task.StartTime = timeutil.TimeStamp(taskRes.TaskStatuses[0].StartAt.Unix()) + } err = models.UpdateJob(task) if err != nil { ctx.Data["error"] = err.Error() @@ -405,14 +404,8 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName) { task.User = user } - var duration int64 - if task.Status == string(models.JobRunning) { - duration = time.Now().Unix() - int64(task.CreatedUnix) - } else { - duration = int64(task.UpdatedUnix) - int64(task.CreatedUnix) - } if task.BenchmarkTypeID > 0 { - for _, benchmarkType := range benchmarkTypes.BenchmarkType { + for _, benchmarkType := range GetBenchmarkTypes(ctx).BenchmarkType { if task.BenchmarkTypeID == benchmarkType.Id { ctx.Data["BenchmarkTypeName"] = benchmarkType.First for _, benchmarkChildType := range benchmarkType.Second { @@ -425,8 +418,16 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName) { } } } - - ctx.Data["duration"] = util.AddZero(duration/3600000) + ":" + util.AddZero(duration%3600000/60000) + ":" + util.AddZero(duration%60000/1000) + if task.TrainJobDuration == "" { + var duration int64 + if task.Status == string(models.JobRunning) { + duration = time.Now().Unix() - int64(task.CreatedUnix) + } else { + duration = int64(task.UpdatedUnix) - int64(task.CreatedUnix) + } + task.TrainJobDuration = models.ConvertDurationToStr(duration) + } + ctx.Data["duration"] = task.TrainJobDuration ctx.Data["task"] = task ctx.Data["jobName"] = task.JobName ctx.Data["displayJobName"] = task.DisplayJobName @@ -489,6 +490,10 @@ func CloudBrainStop(ctx *context.Context) { } task.Status = 
string(models.JobStopped) + if task.EndTime == 0 { + task.EndTime = timeutil.TimeStampNow() + } + task.ComputeAndSetDuration() err = models.UpdateJob(task) if err != nil { log.Error("UpdateJob(%s) failed:%v", task.JobName, err, ctx.Data["msgID"]) @@ -582,6 +587,10 @@ func logErrorAndUpdateJobStatus(err error, taskInfo *models.Cloudbrain) { log.Warn("Failed to stop cloudBrain job:"+taskInfo.JobID, err) } else { taskInfo.Status = string(models.JobStopped) + if taskInfo.EndTime == 0 { + taskInfo.EndTime = timeutil.TimeStampNow() + } + taskInfo.ComputeAndSetDuration() err = models.UpdateJob(taskInfo) if err != nil { log.Warn("UpdateJob failed", err) @@ -953,6 +962,13 @@ func SyncCloudbrainStatus() { task.Status = taskRes.TaskStatuses[0].State if task.Status != string(models.JobWaiting) { task.Duration = time.Now().Unix() - taskRes.TaskStatuses[0].StartAt.Unix() + if task.StartTime == 0 && !taskRes.TaskStatuses[0].StartAt.IsZero() { + task.StartTime = timeutil.TimeStamp(taskRes.TaskStatuses[0].StartAt.Unix()) + } + if task.EndTime == 0 && models.IsCloudBrainOneDebugJobTerminal(task.Status) { + task.EndTime = timeutil.TimeStampNow() + } + task.ComputeAndSetDuration() err = models.UpdateJob(task) if err != nil { log.Error("UpdateJob(%s) failed:%v", task.JobName, err) @@ -973,6 +989,10 @@ func SyncCloudbrainStatus() { continue } task.Status = string(models.JobStopped) + if task.EndTime == 0 { + task.EndTime = timeutil.TimeStampNow() + } + task.ComputeAndSetDuration() err = models.UpdateJob(task) if err != nil { log.Error("UpdateJob(%s) failed:%v", task.JobName, err) @@ -991,7 +1011,13 @@ func SyncCloudbrainStatus() { if result != nil { task.Status = result.Status - + if task.StartTime == 0 && result.Lease.CreateTime > 0 { + task.StartTime = timeutil.TimeStamp(result.Lease.CreateTime / 1000) + } + if task.EndTime == 0 && models.IsModelArtsDebugJobTerminal(task.Status) { + task.EndTime = timeutil.TimeStampNow() + } + task.ComputeAndSetDuration() err = models.UpdateJob(task) if err != nil { log.Error("UpdateJob(%s) failed:%v", task.JobName, err) @@ -1007,14 +1033,15 @@ func SyncCloudbrainStatus() { if result != nil { task.Status = modelarts.TransTrainJobStatus(result.IntStatus) - task.Duration = result.Duration + task.Duration = result.Duration / 1000 task.TrainJobDuration = result.TrainJobDuration - if result.Duration != 0 { - task.TrainJobDuration = util.AddZero(result.Duration/3600000) + ":" + util.AddZero(result.Duration%3600000/60000) + ":" + util.AddZero(result.Duration%60000/1000) - - } else { - task.TrainJobDuration = "00:00:00" + if task.StartTime == 0 && result.StartTime > 0 { + task.StartTime = timeutil.TimeStamp(result.StartTime / 1000) + } + task.TrainJobDuration = models.ConvertDurationToStr(task.Duration) + if task.EndTime == 0 && models.IsTrainJobTerminal(task.Status) && task.StartTime > 0 { + task.EndTime = task.StartTime.Add(task.Duration) } err = models.UpdateJob(task) @@ -1059,26 +1086,22 @@ func CloudBrainBenchmarkIndex(ctx *context.Context) { return } - if benchmarkTypes == nil { - if err := json.Unmarshal([]byte(setting.BenchmarkTypes), &benchmarkTypes); err != nil { - ctx.ServerError("Get BenchmarkTypes faild:", err) - return - } - } - for i, task := range ciTasks { ciTasks[i].CanDel = cloudbrain.CanDeleteJob(ctx, &task.Cloudbrain) ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource - var duration int64 - if task.Status == string(models.JobRunning) { - duration = time.Now().Unix() - int64(task.Cloudbrain.CreatedUnix) - } else { - duration = 
int64(task.Cloudbrain.UpdatedUnix) - int64(task.Cloudbrain.CreatedUnix) + if ciTasks[i].TrainJobDuration == "" { + var duration int64 + if task.Status == string(models.JobRunning) { + duration = time.Now().Unix() - int64(task.Cloudbrain.CreatedUnix) + } else { + duration = int64(task.Cloudbrain.UpdatedUnix) - int64(task.Cloudbrain.CreatedUnix) + } + ciTasks[i].TrainJobDuration = models.ConvertDurationToStr(duration) } - ciTasks[i].TrainJobDuration = util.AddZero(duration/3600000) + ":" + util.AddZero(duration%3600000/60000) + ":" + util.AddZero(duration%60000/1000) + ciTasks[i].BenchmarkTypeName = "" if task.BenchmarkTypeID > 0 { - for _, benchmarkType := range benchmarkTypes.BenchmarkType { + for _, benchmarkType := range GetBenchmarkTypes(ctx).BenchmarkType { if task.BenchmarkTypeID == benchmarkType.Id { ciTasks[i].BenchmarkTypeRankLink = benchmarkType.RankLink ciTasks[i].BenchmarkTypeName = benchmarkType.First @@ -1102,15 +1125,8 @@ func GetChildTypes(ctx *context.Context) { benchmarkTypeID := ctx.QueryInt("benchmark_type_id") re := make(map[string]interface{}) for { - if benchmarkTypes == nil { - if err := json.Unmarshal([]byte(setting.BenchmarkTypes), &benchmarkTypes); err != nil { - log.Error("json.Unmarshal BenchmarkTypes(%s) failed:%v", setting.BenchmarkTypes, err, ctx.Data["MsgID"]) - re["errMsg"] = "system error" - break - } - } var isExist bool - for _, benchmarkType := range benchmarkTypes.BenchmarkType { + for _, benchmarkType := range GetBenchmarkTypes(ctx).BenchmarkType { if benchmarkTypeID == benchmarkType.Id { isExist = true re["child_types"] = benchmarkType.Second @@ -1141,17 +1157,11 @@ func CloudBrainBenchmarkNew(ctx *context.Context) { ctx.HTML(200, tplCloudBrainBenchmarkNew) } -func getBenchmarkAttachment(benchmarkTypeID, benchmarkChildTypeID int) (*models.BenchmarkDataset, error) { +func getBenchmarkAttachment(benchmarkTypeID, benchmarkChildTypeID int, ctx *context.Context) (*models.BenchmarkDataset, error) { var childInfo *models.BenchmarkDataset - if benchmarkTypes == nil { - if err := json.Unmarshal([]byte(setting.BenchmarkTypes), &benchmarkTypes); err != nil { - log.Error("json.Unmarshal BenchmarkTypes(%s) failed:%v", setting.BenchmarkTypes, err) - return childInfo, err - } - } var isExist bool - for _, benchmarkType := range benchmarkTypes.BenchmarkType { + for _, benchmarkType := range GetBenchmarkTypes(ctx).BenchmarkType { if benchmarkType.Id == benchmarkTypeID { for _, childType := range benchmarkType.Second { if childType.Id == benchmarkChildTypeID { @@ -1265,7 +1275,7 @@ func CloudBrainBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainF return } - childInfo, err := getBenchmarkAttachment(benchmarkTypeID, benchmarkChildTypeID) + childInfo, err := getBenchmarkAttachment(benchmarkTypeID, benchmarkChildTypeID, ctx) if err != nil { log.Error("getBenchmarkAttachment failed:%v", err, ctx.Data["MsgID"]) cloudBrainNewDataPrepare(ctx) @@ -1395,3 +1405,17 @@ func BenchmarkDel(ctx *context.Context) { ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/cloudbrain/benchmark") } } + +func GetBenchmarkTypes(ctx *context.Context) *models.BenchmarkTypes { + var lang = ctx.Locale.Language() + if benchmarkTypesMap[lang] == nil { + var val = i18n.Tr(lang, BENCHMARK_TYPE_CODE) + var tempType *models.BenchmarkTypes + if err := json.Unmarshal([]byte(val), &tempType); err != nil { + log.Error("json.Unmarshal BenchmarkTypes(%s) failed:%v", val, err, ctx.Data["MsgID"]) + return &models.BenchmarkTypes{} + } + benchmarkTypesMap[lang] = tempType + } + return 
benchmarkTypesMap[lang] +} diff --git a/routers/repo/dataset.go b/routers/repo/dataset.go index 7d59ab486..d23722372 100755 --- a/routers/repo/dataset.go +++ b/routers/repo/dataset.go @@ -1,7 +1,14 @@ package repo import ( + "encoding/json" + "fmt" + "net/http" + "regexp" "sort" + "strconv" + "strings" + "unicode/utf8" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/auth" @@ -12,9 +19,14 @@ import ( ) const ( - tplIndex base.TplName = "repo/datasets/index" + tplIndex base.TplName = "repo/datasets/index" + tplDatasetCreate base.TplName = "repo/datasets/create" + tplDatasetEdit base.TplName = "repo/datasets/edit" + taskstplIndex base.TplName = "repo/datasets/tasks/index" ) +var titlePattern = regexp.MustCompile(`^[A-Za-z0-9-_\\.]{1,100}$`) + // MustEnableDataset check if repository enable internal dataset func MustEnableDataset(ctx *context.Context) { if !ctx.Repo.CanRead(models.UnitTypeDatasets) { @@ -84,43 +96,34 @@ func QueryDataSet(ctx *context.Context) []*models.Attachment { attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo) ctx.Data["SortType"] = ctx.Query("sort") - switch ctx.Query("sort") { - case "newest": - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix > attachments[j].CreatedUnix - }) - case "oldest": - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix < attachments[j].CreatedUnix - }) - default: - ctx.Data["SortType"] = "newest" - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix > attachments[j].CreatedUnix - }) - } + + sort.Slice(attachments, func(i, j int) bool { + return attachments[i].CreatedUnix > attachments[j].CreatedUnix + }) + return attachments } func DatasetIndex(ctx *context.Context) { log.Info("dataset index 1") MustEnableDataset(ctx) + ctx.Data["PageIsDataset"] = true repo := ctx.Repo.Repository dataset, err := models.GetDatasetByRepo(repo) + ctx.Data["CanWrite"] = ctx.Repo.CanWrite(models.UnitTypeDatasets) if err != nil { - log.Error("query dataset, not found repo.") - ctx.NotFound("GetDatasetByRepo", err) + log.Warn("query dataset, not found.") + ctx.HTML(200, tplIndex) return } + cloudbrainType := -1 + if ctx.Query("type") != "" { - if ctx.Query("type") == "" { - log.Error("query dataset, not found param type") - ctx.NotFound("type error", nil) - return + cloudbrainType = ctx.QueryInt("type") } - err = models.GetDatasetAttachments(ctx.QueryInt("type"), ctx.IsSigned, ctx.User, dataset) + err = models.GetDatasetAttachments(cloudbrainType, ctx.IsSigned, ctx.User, dataset) if err != nil { ctx.ServerError("GetDatasetAttachments", err) return @@ -128,53 +131,138 @@ func DatasetIndex(ctx *context.Context) { attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo) - ctx.Data["SortType"] = ctx.Query("sort") - switch ctx.Query("sort") { - case "newest": - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix > attachments[j].CreatedUnix - }) - case "oldest": - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix < attachments[j].CreatedUnix - }) - default: - ctx.Data["SortType"] = "newest" - sort.Slice(attachments, func(i, j int) bool { - return attachments[i].CreatedUnix > attachments[j].CreatedUnix - }) + sort.Slice(attachments, func(i, j int) bool { + return attachments[i].CreatedUnix > attachments[j].CreatedUnix + }) + + page := ctx.QueryInt("page") + if page <= 0 { + page = 1 } + pagesize := ctx.QueryInt("pagesize") + if pagesize <= 0 { + pagesize = 10 + } + 
pager := context.NewPagination(len(attachments), pagesize, page, 5) + + pageAttachments := getPageAttachments(attachments, page, pagesize) + + //load attachment creator + for _, attachment := range pageAttachments { + uploader, _ := models.GetUserByID(attachment.UploaderID) + attachment.Uploader = uploader + } + + ctx.Data["Page"] = pager - ctx.Data["PageIsDataset"] = true ctx.Data["Title"] = ctx.Tr("dataset.show_dataset") ctx.Data["Link"] = ctx.Repo.RepoLink + "/datasets" ctx.Data["dataset"] = dataset - ctx.Data["Attachments"] = attachments + ctx.Data["Attachments"] = pageAttachments ctx.Data["IsOwner"] = true ctx.Data["StoreType"] = setting.Attachment.StoreType - ctx.Data["Type"] = ctx.QueryInt("type") + ctx.Data["Type"] = cloudbrainType renderAttachmentSettings(ctx) ctx.HTML(200, tplIndex) } +func getPageAttachments(attachments []*models.Attachment, page int, pagesize int) []*models.Attachment { + begin := (page - 1) * pagesize + end := (page) * pagesize + + if begin > len(attachments)-1 { + return nil + } + if end > len(attachments)-1 { + return attachments[begin:] + } else { + return attachments[begin:end] + } + +} + +func CreateDataset(ctx *context.Context) { + + MustEnableDataset(ctx) + ctx.Data["PageIsDataset"] = true + + ctx.HTML(200, tplDatasetCreate) +} + +func EditDataset(ctx *context.Context) { + + MustEnableDataset(ctx) + ctx.Data["PageIsDataset"] = true + datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64) + + dataset, _ := models.GetDatasetByID(datasetId) + if dataset == nil { + ctx.Error(http.StatusNotFound, "") + return + } + ctx.Data["Dataset"] = dataset + + ctx.HTML(200, tplDatasetEdit) +} + +func CreateDatasetPost(ctx *context.Context, form auth.CreateDatasetForm) { + + dataset := &models.Dataset{} + + if !titlePattern.MatchString(form.Title) { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err"))) + return + } + if utf8.RuneCountInString(form.Description) > 1024 { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err"))) + return + } + + dataset.RepoID = ctx.Repo.Repository.ID + dataset.UserID = ctx.User.ID + dataset.Category = form.Category + dataset.Task = form.Task + dataset.Title = form.Title + dataset.License = form.License + dataset.Description = form.Description + dataset.DownloadTimes = 0 + if ctx.Repo.Repository.IsPrivate { + dataset.Status = 0 + } else { + dataset.Status = 1 + } + err := models.CreateDataset(dataset) + if err != nil { + log.Error("fail to create dataset", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.create_dataset_fail"))) + } else { + ctx.JSON(http.StatusOK, models.BaseOKMessage) + } + +} + func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) { ctx.Data["PageIsDataset"] = true ctx.Data["Title"] = ctx.Tr("dataset.edit_dataset") + if !titlePattern.MatchString(form.Title) { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err"))) + return + } + if utf8.RuneCountInString(form.Description) > 1024 { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err"))) + return + } + rel, err := models.GetDatasetByID(form.ID) ctx.Data["dataset"] = rel if err != nil { - ctx.ServerError("GetDataset", err) - return - } - - if ctx.HasError() { - ctx.Data["Error"] = true - ctx.HTML(200, tplIndex) + log.Error("failed to query dataset", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail"))) return } @@ -184,9 +272,236 @@ func EditDatasetPost(ctx 
*context.Context, form auth.EditDatasetForm) { rel.Task = form.Task rel.License = form.License if err = models.UpdateDataset(models.DefaultDBContext(), rel); err != nil { - ctx.Data["Error"] = true - ctx.HTML(200, tplIndex) - log.Error("%v", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail"))) + return } - ctx.Redirect(ctx.Repo.RepoLink + "/datasets?type=" + form.Type) + ctx.JSON(http.StatusOK, models.BaseOKMessage) +} + +func DatasetAction(ctx *context.Context) { + var err error + datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64) + switch ctx.Params(":action") { + case "star": + err = models.StarDataset(ctx.User.ID, datasetId, true) + case "unstar": + err = models.StarDataset(ctx.User.ID, datasetId, false) + + } + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action")))) + } else { + ctx.JSON(http.StatusOK, models.BaseOKMessage) + } + +} + +func CurrentRepoDataset(ctx *context.Context) { + page := ctx.QueryInt("page") + cloudbrainType := ctx.QueryInt("type") + keyword := strings.Trim(ctx.Query("q"), " ") + + repo := ctx.Repo.Repository + var datasetIDs []int64 + dataset, err := models.GetDatasetByRepo(repo) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err))) + return + } + datasetIDs = append(datasetIDs, dataset.ID) + datasets, count, err := models.Attachments(&models.AttachmentsOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: setting.UI.DatasetPagingNum, + }, + Keyword: keyword, + NeedDatasetIDs: true, + DatasetIDs: datasetIDs, + Type: cloudbrainType, + NeedIsPrivate: false, + JustNeedZipFile: true, + NeedRepoInfo: true, + }) + if err != nil { + ctx.ServerError("datasets", err) + return + } + + data, err := json.Marshal(datasets) + if err != nil { + log.Error("json.Marshal failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + ctx.JSON(200, map[string]string{ + "result_code": "0", + "data": string(data), + "count": strconv.FormatInt(count, 10), + }) +} + +func MyDatasets(ctx *context.Context) { + page := ctx.QueryInt("page") + cloudbrainType := ctx.QueryInt("type") + keyword := strings.Trim(ctx.Query("q"), " ") + + uploaderID := ctx.User.ID + datasets, count, err := models.Attachments(&models.AttachmentsOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: setting.UI.DatasetPagingNum, + }, + Keyword: keyword, + NeedDatasetIDs: false, + UploaderID: uploaderID, + Type: cloudbrainType, + NeedIsPrivate: false, + JustNeedZipFile: true, + NeedRepoInfo: true, + }) + if err != nil { + ctx.ServerError("datasets", err) + return + } + + data, err := json.Marshal(datasets) + if err != nil { + log.Error("json.Marshal failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + ctx.JSON(200, map[string]string{ + "result_code": "0", + "data": string(data), + "count": strconv.FormatInt(count, 10), + }) +} + +func PublicDataset(ctx *context.Context) { + page := ctx.QueryInt("page") + cloudbrainType := ctx.QueryInt("type") + keyword := strings.Trim(ctx.Query("q"), " ") + + datasets, count, err := models.Attachments(&models.AttachmentsOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: setting.UI.DatasetPagingNum, + }, + Keyword: keyword, + NeedDatasetIDs: false, + NeedIsPrivate: true, + IsPrivate: false, + Type: cloudbrainType, +
JustNeedZipFile: true, + NeedRepoInfo: true, + }) + if err != nil { + ctx.ServerError("datasets", err) + return + } + + data, err := json.Marshal(datasets) + if err != nil { + log.Error("json.Marshal failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + ctx.JSON(200, map[string]string{ + "result_code": "0", + "data": string(data), + "count": strconv.FormatInt(count, 10), + }) +} + +func MyFavoriteDataset(ctx *context.Context) { + page := ctx.QueryInt("page") + cloudbrainType := ctx.QueryInt("type") + keyword := strings.Trim(ctx.Query("q"), " ") + var datasetIDs []int64 + datasetStars, err := models.GetDatasetStarByUser(ctx.User) + if err != nil { + log.Error("GetDatasetStarByUser failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + for i := range datasetStars { + datasetIDs = append(datasetIDs, datasetStars[i].DatasetID) + } + + datasets, count, err := models.Attachments(&models.AttachmentsOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: setting.UI.DatasetPagingNum, + }, + Keyword: keyword, + NeedDatasetIDs: true, + DatasetIDs: datasetIDs, + NeedIsPrivate: true, + IsPrivate: false, + Type: cloudbrainType, + JustNeedZipFile: true, + NeedRepoInfo: true, + }) + if err != nil { + ctx.ServerError("datasets", err) + return + } + + data, err := json.Marshal(datasets) + if err != nil { + log.Error("json.Marshal failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + ctx.JSON(200, map[string]string{ + "result_code": "0", + "data": string(data), + "count": strconv.FormatInt(count, 10), + }) + +} + +func GetDatasetStatus(ctx *context.Context) { + + var ( + err error + ) + + UUID := ctx.Params(":uuid") + attachment, err := models.GetAttachmentByUUID(UUID) + if err != nil { + log.Error("GetAttachmentByUUID failed:", err.Error()) + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "error_msg": err.Error(), + "data": "", + }) + return + } + + ctx.JSON(200, map[string]string{ + "result_code": "0", + "UUID": UUID, + "AttachmentStatus": fmt.Sprint(attachment.DecompressState), + }) } diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index 9c670e203..7d4c203bc 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -2,6 +2,7 @@ package repo import ( "archive/zip" + "code.gitea.io/gitea/modules/timeutil" "encoding/json" "errors" "io" @@ -133,6 +134,8 @@ func notebookNewDataPrepare(ctx *context.Context) error { } ctx.Data["flavors"] = modelarts.FlavorInfos.FlavorInfo + ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo + return nil } @@ -408,6 +411,10 @@ func NotebookManage(ctx *context.Context) { } task.Status = res.Status + if task.EndTime == 0 && models.IsModelArtsDebugJobTerminal(task.Status) { + task.EndTime = timeutil.TimeStampNow() + } + task.ComputeAndSetDuration() err = models.UpdateJob(task) if err != nil { log.Error("UpdateJob(%s) failed:%v", task.JobName, err.Error(), ctx.Data["MsgID"]) @@ -580,6 +587,7 @@ func trainJobNewDataPrepare(ctx *context.Context) error { return err } ctx.Data["config_list"] = configList.ParaConfigs + ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo return nil } @@ -746,6 +754,7 @@ func trainJobNewVersionDataPrepare(ctx *context.Context) error {
ctx.Data["uuid"] = task.Uuid ctx.Data["flavor_code"] = task.FlavorCode ctx.Data["engine_id"] = task.EngineID + ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo configList, err := getConfigList(modelarts.PerPage, 1, modelarts.SortByCreateTime, "desc", "", modelarts.ConfigTypeCustom) if err != nil { @@ -953,17 +962,9 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) return } - //todo: del local code? - var parameters models.Parameters param := make([]models.Parameter, 0) - param = append(param, models.Parameter{ - Label: modelarts.TrainUrl, - Value: outputObsPath, - }, models.Parameter{ - Label: modelarts.DataUrl, - Value: dataPath, - }) + existDeviceTarget := false if len(params) != 0 { err := json.Unmarshal([]byte(params), ¶meters) if err != nil { @@ -974,6 +975,9 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) } for _, parameter := range parameters.Parameter { + if parameter.Label == modelarts.DeviceTarget { + existDeviceTarget = true + } if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl { param = append(param, models.Parameter{ Label: parameter.Label, @@ -982,9 +986,22 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) } } } + if !existDeviceTarget { + param = append(param, models.Parameter{ + Label: modelarts.DeviceTarget, + Value: modelarts.Ascend, + }) + } //save param config if isSaveParam == "on" { + saveparams := append(param, models.Parameter{ + Label: modelarts.TrainUrl, + Value: outputObsPath, + }, models.Parameter{ + Label: modelarts.DataUrl, + Value: dataPath, + }) if form.ParameterTemplateName == "" { log.Error("ParameterTemplateName is empty") trainJobNewDataPrepare(ctx) @@ -1006,7 +1023,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) EngineID: int64(engineID), LogUrl: logObsPath, PoolID: poolID, - Parameter: param, + Parameter: saveparams, }) if err != nil { @@ -1032,7 +1049,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) LogUrl: logObsPath, PoolID: poolID, Uuid: uuid, - Parameters: parameters.Parameter, + Parameters: param, CommitID: commitID, IsLatestVersion: isLatestVersion, BranchName: branch_name, @@ -1168,13 +1185,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ var parameters models.Parameters param := make([]models.Parameter, 0) - param = append(param, models.Parameter{ - Label: modelarts.TrainUrl, - Value: outputObsPath, - }, models.Parameter{ - Label: modelarts.DataUrl, - Value: dataPath, - }) + existDeviceTarget := true if len(params) != 0 { err := json.Unmarshal([]byte(params), ¶meters) if err != nil { @@ -1183,8 +1194,10 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ ctx.RenderWithErr("运行参数错误", tplModelArtsTrainJobVersionNew, &form) return } - for _, parameter := range parameters.Parameter { + if parameter.Label == modelarts.DeviceTarget { + existDeviceTarget = true + } if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl { param = append(param, models.Parameter{ Label: parameter.Label, @@ -1193,9 +1206,22 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ } } } + if !existDeviceTarget { + param = append(param, models.Parameter{ + Label: modelarts.DeviceTarget, + Value: modelarts.Ascend, + }) + } //save param config if isSaveParam == "on" { + saveparams := append(param, models.Parameter{ + Label: modelarts.TrainUrl, + 
Value: outputObsPath, + }, models.Parameter{ + Label: modelarts.DataUrl, + Value: dataPath, + }) if form.ParameterTemplateName == "" { log.Error("ParameterTemplateName is empty") versionErrorDataPrepare(ctx, form) @@ -1217,7 +1243,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ EngineID: int64(engineID), LogUrl: logObsPath, PoolID: poolID, - Parameter: parameters.Parameter, + Parameter: saveparams, }) if err != nil { @@ -1228,12 +1254,6 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ } } - if err != nil { - log.Error("getFlavorNameByEngineID(%s) failed:%v", engineID, err.Error()) - ctx.RenderWithErr(err.Error(), tplModelArtsTrainJobVersionNew, &form) - return - } - task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, PreVersionName) if err != nil { log.Error("GetCloudbrainByJobIDAndVersionName(%s) failed:%v", jobID, err.Error()) @@ -1257,7 +1277,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ PoolID: poolID, Uuid: uuid, Params: form.Params, - Parameters: parameters.Parameter, + Parameters: param, PreVersionId: task.VersionID, CommitID: commitID, BranchName: branch_name, @@ -1782,7 +1802,6 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference return } - //todo: del local code? var parameters models.Parameters param := make([]models.Parameter, 0) param = append(param, models.Parameter{ @@ -1792,6 +1811,7 @@ Label: modelarts.CkptUrl, + Value: "s3:/" + ckptUrl, }) + existDeviceTarget := false if len(params) != 0 { err := json.Unmarshal([]byte(params), &parameters) if err != nil { @@ -1802,6 +1822,9 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference } for _, parameter := range parameters.Parameter { + if parameter.Label == modelarts.DeviceTarget { + existDeviceTarget = true + } if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl { param = append(param, models.Parameter{ Label: parameter.Label, @@ -1810,6 +1833,12 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference } } } + if !existDeviceTarget { + param = append(param, models.Parameter{ + Label: modelarts.DeviceTarget, + Value: modelarts.Ascend, + }) + } req := &modelarts.GenerateInferenceJobReq{ JobName: jobName, @@ -1977,6 +2006,7 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error { New: MODEL_LATEST, }) ctx.Data["MODEL_COUNT"] = model_count + ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo return nil } diff --git a/routers/repo/setting.go b/routers/repo/setting.go index 5b057dbe5..af28f3290 100644 --- a/routers/repo/setting.go +++ b/routers/repo/setting.go @@ -245,10 +245,6 @@ func SettingsPost(ctx *context.Context, form auth.RepoSettingForm) { // This section doesn't require repo_name/RepoName to be set in the form, don't show it // as an error on the UI for this action ctx.Data["Err_RepoName"] = nil - if err := models.CreateDefaultDatasetToRepo(repo); err != nil { - ctx.ServerError("CreateDefaultDatasetToRepo", err) - return - } if form.EnableDataset && !models.UnitTypeDatasets.UnitGlobalDisabled() { units = append(units, models.RepoUnit{ diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 84467f03c..dc131a638 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -589,6 +589,8 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/delete", repo.DeleteAttachment)
m.Get("/get_pre_url", repo.GetPresignedPutObjectURL) m.Post("/add", repo.AddAttachment) + + m.Post("/edit", bindIgnErr(auth.EditAttachmentForm{}), repo.EditAttachment) m.Post("/private", repo.UpdatePublicAttachment) m.Get("/get_chunks", repo.GetSuccessChunks) m.Get("/new_multipart", repo.NewMultipart) @@ -981,7 +983,24 @@ func RegisterRoutes(m *macaron.Macaron) { m.Group("/datasets", func() { m.Get("", reqRepoDatasetReader, repo.DatasetIndex) - m.Post("", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost) + m.Put("/:id/:action", reqRepoDatasetReader, repo.DatasetAction) + m.Get("/create", reqRepoDatasetWriter, repo.CreateDataset) + m.Post("/create", reqRepoDatasetWriter, bindIgnErr(auth.CreateDatasetForm{}), repo.CreateDatasetPost) + m.Get("/edit/:id", reqRepoDatasetWriter, repo.EditDataset) + m.Post("/edit", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost) + m.Get("/current_repo", repo.CurrentRepoDataset) + m.Get("/my_datasets", repo.MyDatasets) + m.Get("/public_datasets", repo.PublicDataset) + m.Get("/my_favorite", repo.MyFavoriteDataset) + + m.Group("/status", func() { + m.Get("/:uuid", repo.GetDatasetStatus) + }) + + m.Group("/attachments", func() { + m.Get("/upload", repo.UploadAttachmentUI) + m.Get("/edit/:id", repo.EditAttachmentUI) + }, reqSignIn) m.Group("/dirs", func() { m.Get("/:uuid", reqRepoDatasetReader, repo.DirIndex) diff --git a/templates/admin/cloudbrain/list.tmpl b/templates/admin/cloudbrain/list.tmpl index 9aac97e70..39b2c21de 100644 --- a/templates/admin/cloudbrain/list.tmpl +++ b/templates/admin/cloudbrain/list.tmpl @@ -43,26 +43,26 @@ {{$.i18n.Tr "repo.cloudbrain_status_runtime"}}
-你确认删除该任务么?此任务一旦删除不可恢复。
+{{.i18n.Tr "cloudbrain.task_delete_confirm"}}
-你确认删除该任务么?此任务一旦删除不可恢复。
+{{.i18n.Tr "cloudbrain.task_delete_confirm"}}
| 训练程序 | - +
train.py
@@ -314,19 +314,19 @@ td, th {
测试程序
|
-
+
|
test.py
| ||||||||||||||||||||||||||||||||
| - {{$.i18n.Tr "repo.modelarts.train_job.description"}} + {{$.i18n.Tr "repo.modelarts.train_job.description"}} | - +
{{.Description}}
@@ -336,9 +336,9 @@ td, th {
- {{$.i18n.Tr "repo.modelarts.train_job.standard"}}
+ {{$.i18n.Tr "repo.modelarts.train_job.standard"}}
|
-
+
|
{{$.resource_spec}}
@@ -348,9 +348,9 @@ td, th {
- 创建者
+ 创建者
|
-
+
|
{{.User.Name}}
@@ -359,9 +359,9 @@ td, th {
- 子类型
+ 子类型
|
-
+
|
{{$.BenchmarkChildTypeName}}
@@ -373,7 +373,7 @@ td, th {
-
+
@@ -386,11 +386,11 @@ td, th {
-
+
-
+
-
+
@@ -400,24 +400,24 @@ td, th {
- 删除任务
+ {{.i18n.Tr "cloudbrain.delete_task"}}
-
+
-
-你确认删除该任务么?此任务一旦删除不可恢复。
+{{.i18n.Tr "cloudbrain.task_delete_confirm"}}
@@ -116,6 +117,7 @@
{{template "repo/header" .}}
+
{{template "base/alert" .}}
-
+
-
-
-
-
-
-
+
+ {{template "custom/select_dataset" .}}
-
@@ -354,4 +347,5 @@
$('#store_category').attr("value", selected_value)
})
+
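The cloudbrain pages above now resolve their benchmark categories through GetBenchmarkTypes, which parses the JSON payload stored under the locale key repo.cloudbrain.benchmark.types once per language and memoizes it in benchmarkTypesMap. A minimal, self-contained sketch of that caching pattern follows; the tr stub, its sample payload, and the JSON tags are invented for illustration and stand in for i18n.Tr and the real locale data.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// BenchmarkTypes mirrors the shape models.BenchmarkTypes appears to have in
// this diff; the JSON tags here are assumptions.
type BenchmarkTypes struct {
	BenchmarkType []struct {
		Id    int    `json:"id"`
		First string `json:"first"`
	} `json:"benchmark_type"`
}

// benchmarkTypesMap memoizes the parsed payload per locale, as in the PR.
var benchmarkTypesMap = make(map[string]*BenchmarkTypes)

// tr stands in for i18n.Tr(lang, "repo.cloudbrain.benchmark.types").
func tr(lang string) string {
	return `{"benchmark_type":[{"id":1,"first":"image classification"}]}`
}

func getBenchmarkTypes(lang string) *BenchmarkTypes {
	if benchmarkTypesMap[lang] == nil {
		var tmp *BenchmarkTypes
		if err := json.Unmarshal([]byte(tr(lang)), &tmp); err != nil {
			// Return an empty value without caching it, so a corrected
			// locale file can still be picked up on a later request.
			return &BenchmarkTypes{}
		}
		benchmarkTypesMap[lang] = tmp
	}
	return benchmarkTypesMap[lang]
}

func main() {
	fmt.Println(getBenchmarkTypes("zh-CN").BenchmarkType[0].First)
}
```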
diff --git a/templates/repo/datasets/create.tmpl b/templates/repo/datasets/create.tmpl
new file mode 100644
index 000000000..a65a5f351
--- /dev/null
+++ b/templates/repo/datasets/create.tmpl
@@ -0,0 +1,69 @@
+
+
+
+{{template "base/head" .}}
+
+
+
+
+
+
+
+
+ {{template "repo/header" .}}
+
+{{template "base/footer" .}}
diff --git a/templates/repo/datasets/edit.tmpl b/templates/repo/datasets/edit.tmpl
new file mode 100644
index 000000000..2c0577eb6
--- /dev/null
+++ b/templates/repo/datasets/edit.tmpl
@@ -0,0 +1,72 @@
+
+
+
+
+
+
+ + {{.i18n.Tr "dataset.create_new_dataset"}} ++
+
+
+
+
+
+{{template "base/head" .}}
+
+
+
+
+
+
+
+
+ {{template "repo/header" .}}
+
+{{template "base/footer" .}}
diff --git a/templates/repo/datasets/index.tmpl b/templates/repo/datasets/index.tmpl
index 65ba2bb6e..a38e86525 100755
--- a/templates/repo/datasets/index.tmpl
+++ b/templates/repo/datasets/index.tmpl
@@ -6,145 +6,341 @@
margin: -1px;
background: #FFF !important;
}
+
+.dataset_title{
+ font-size: 14px;
+ max-width: 80%;
+ display: inline-block !important;
+ margin-left: 6px !important;
+ padding-right: 0 !important;
+}
+.wrapper {
+ display: flex;
+ overflow: hidden;
+ padding: 0 1rem;
+ }
+ .exp{
+ display: none;
+ }
+ .exp:checked+.text{
+ max-height: none;
+ }
+ .exp:checked+.text::after{
+ visibility: hidden;
+ }
+ .exp:checked+.text .btn::before{
+ visibility: hidden;
+ }
+ .exp:checked+.text .btn::after{
+ content:'{{$.i18n.Tr "org.fold"}}'
+ }
+
+ .wrapper>.text {
+ font-family: SourceHanSansSC-regular;
+ font-size: 14px;
+ color: #101010;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ text-align: justify;
+ position: relative;
+ line-height: 1.5;
+ max-height: 3em;
+ transition: .3s max-height;
+ word-wrap: break-word;
+ word-break: break-all;
+ }
+ .wrapper>.text::before {
+ content: '';
+ height: calc(100% - 20px);
+ float: right;
+ }
+ .wrapper>.text::after {
+ content: '';
+ width: 999vw;
+ height: 999vw;
+ position: absolute;
+ box-shadow: inset calc(100px - 999vw) calc(30px - 999vw) 0 0 #fff;
+ margin-left: -100px;
+ }
+ .btn{
+ position: relative;
+ float: right;
+ clear: both;
+ margin-left: 20px;
+ font-size: 14px;
+ padding: 0 8px;
+ background: #3F51B5;
+ line-height: 20px;
+ border-radius: 4px;
+ color: #fff;
+ cursor: pointer;
+ /* margin-top: -30px; */
+ }
+ .btn::after{
+ content:'{{$.i18n.Tr "org.unfold"}}'
+ }
+ .btn::before{
+ content: '...';
+ position: absolute;
+ left: -5px;
+ color: #333;
+ transform: translateX(-100%)
+ }
+
+ .el-button--text{color:#0366d6 ;}
+ .heart-stroke{
+ stroke: #666;
+ stroke-width: 2;
+ fill: #fff
+ }
+ .stars_active{
+ fill: #FA8C16 !important;
+ stroke:#FA8C16 !important
+ }
+ .diy-popper{
+ max-width: 400px;
+ }
-
+
+
+
+
+ + {{.i18n.Tr "dataset.modify_dataset"}} ++ + +
+
+
+
+
+
{{template "repo/header" .}}
-
-
- {{$.i18n.Tr "repo.modelarts.train_job.start_time"}}
+ {{$.i18n.Tr "repo.modelarts.train_job.start_time"}}
|
-
+
|
{{TimeSinceUnix1 .CreatedUnix}}
@@ -238,9 +238,9 @@ td, th {
- {{$.i18n.Tr "repo.modelarts.train_job.dura_time"}}
+ {{$.i18n.Tr "repo.modelarts.train_job.dura_time"}}
|
-
+
|
{{.TrainJobDuration}}
@@ -248,23 +248,23 @@ td, th {
- {{$.i18n.Tr "repo.modelarts.train_job.AI_driver"}}
+ {{$.i18n.Tr "repo.modelarts.train_job.AI_driver"}}
|
|
-
+
{{.EngineName}}
{{$.i18n.Tr "repo.model.manage.description"}}
|
-
+
|
{{if .Description}}
@@ -279,7 +279,7 @@ td, th {
创建人
|
-
+
|
{{$.userName}}
@@ -288,7 +288,7 @@ td, th {
- {{$.i18n.Tr "repo.modelarts.train_job.compute_node"}}
+ {{$.i18n.Tr "repo.modelarts.train_job.compute_node"}}
|
|
@@ -304,19 +304,19 @@ td, th {
- {{$.i18n.Tr "repo.modelarts.infer_job_model"}}
+ {{$.i18n.Tr "repo.modelarts.infer_job_model"}}
|
|
{{.ModelName}}
{{$.i18n.Tr "repo.modelarts.version"}}:{{.ModelVersion}}
-
+
- {{$.i18n.Tr "repo.modelarts.infer_job_model_file"}}
+ {{$.i18n.Tr "repo.modelarts.infer_job_model_file"}}
|
|
@@ -328,10 +328,10 @@ td, th {
{{$.i18n.Tr "repo.modelarts.model_label"}}
|
-
+
|
-
+
{{if .LabelName}}
{{range $.labelName}}
{{.}}
@@ -342,7 +342,7 @@ td, th {
{{$.i18n.Tr "repo.modelarts.code_version"}}
@@ -358,7 +358,7 @@ td, th {
|
{{$.i18n.Tr "repo.modelarts.train_job.start_file"}}
|
-
+
|
{{.BootFile}}
@@ -367,9 +367,9 @@ td, th {
- {{$.i18n.Tr "repo.modelarts.infer_dataset"}}
+ {{$.i18n.Tr "repo.modelarts.infer_dataset"}}
|
-
+
|
{{.DatasetName}}
@@ -378,9 +378,9 @@ td, th {
- {{$.i18n.Tr "repo.modelarts.train_job.run_parameter"}}
+ {{$.i18n.Tr "repo.modelarts.train_job.run_parameter"}}
|
-
+
|
{{if .Parameters}}
@@ -393,9 +393,9 @@ td, th {
- {{$.i18n.Tr "repo.modelarts.train_job.standard"}}
+ {{$.i18n.Tr "repo.modelarts.train_job.standard"}}
|
-
+
| "
html += "
{{.FlavorName}}
@@ -407,10 +407,10 @@ td, th {
-
+
-
+
-
+
-
+
@@ -434,34 +434,34 @@ td, th {
-
+
-
+
{{end}}
-
-
-
-
-
+
+
+
+
+
- 删除任务
+ {{.i18n.Tr "cloudbrain.delete_task"}}
-
+
-
-你确认删除该任务么?此任务一旦删除不可恢复。
+{{.i18n.Tr "cloudbrain.task_delete_confirm"}} ' + data.Content)
+ $(`#log${version_name}`).append('"
html += "" + data.Dirs[i].ModTime + ""
html += " | "
html += " |
-你确认删除该任务么?此任务一旦删除不可恢复。
+{{.i18n.Tr "cloudbrain.task_delete_confirm"}}
-你确认删除该任务么?此任务一旦删除不可恢复。
+{{.i18n.Tr "cloudbrain.task_delete_confirm"}}
-说明:
+