diff --git a/models/cloudbrain.go b/models/cloudbrain.go index f32ad5870..5091a8762 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -137,6 +137,8 @@ type Cloudbrain struct { Type int `xorm:"INDEX"` BenchmarkTypeID int BenchmarkChildTypeID int + CardType string + Cluster string VersionID int64 //版本id VersionName string `xorm:"INDEX"` //当前版本 @@ -600,6 +602,7 @@ type SpecialPool struct { Pool []*GpuInfo `json:"pool"` JobType []string `json:"jobType"` ResourceSpec []*ResourceSpec `json:"resourceSpecs"` + Flavor []*FlavorInfo `json:"flavor"` } type ImageInfosModelArts struct { @@ -1677,6 +1680,37 @@ func GetCloudbrainsNeededStopByUserID(userID int64) ([]*Cloudbrain, error) { return cloudBrains, err } +func GetWaittingTop() ([]*CloudbrainInfo, error) { + sess := x.NewSession() + defer sess.Close() + var cond = builder.NewCond() + cond = cond.And( + builder.Eq{"cloudbrain.status": string(JobWaiting)}, + ) + sess.OrderBy("cloudbrain.created_unix ASC limit 1") + cloudbrains := make([]*CloudbrainInfo, 0, 1) + if err := sess.Table(&Cloudbrain{}).Where(cond). + Find(&cloudbrains); err != nil { + log.Info("find error.") + } + return cloudbrains, nil +} +func GetModelartsReDebugTaskByJobId(jobID string) ([]*Cloudbrain, error) { + sess := x.NewSession() + defer sess.Close() + var cond = builder.NewCond() + cond = cond.And( + builder.Eq{"cloudbrain.job_id": jobID}, + ) + sess.OrderBy("cloudbrain.created_unix ASC limit 1") + cloudbrains := make([]*Cloudbrain, 0, 10) + if err := sess.Table(&Cloudbrain{}).Unscoped().Where(cond). + Find(&cloudbrains); err != nil { + log.Info("find error.") + } + return cloudbrains, nil +} + func GetCloudbrainsNeededStopByRepoID(repoID int64) ([]*Cloudbrain, error) { cloudBrains := make([]*Cloudbrain, 0) err := x.Cols("job_id", "status", "type", "job_type", "version_id", "start_time").Where("repo_id=? 
AND status !=?", repoID, string(JobStopped)).Find(&cloudBrains) @@ -1926,7 +1960,8 @@ func CloudbrainAll(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { } if (opts.IsLatestVersion) != "" { - cond = cond.And(builder.Or(builder.And(builder.Eq{"cloudbrain.is_latest_version": opts.IsLatestVersion}, builder.Eq{"cloudbrain.job_type": "TRAIN"}), builder.Neq{"cloudbrain.job_type": "TRAIN"})) + cond = cond.And(builder.Or(builder.And(builder.Eq{"cloudbrain.is_latest_version": opts.IsLatestVersion}, + builder.Eq{"cloudbrain.job_type": "TRAIN"}), builder.Neq{"cloudbrain.job_type": "TRAIN"})) } if len(opts.CloudbrainIDs) > 0 { @@ -1964,7 +1999,8 @@ func CloudbrainAll(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { } else { lowerKeyWord := strings.ToLower(opts.Keyword) - cond = cond.And(builder.Or(builder.Like{"LOWER(cloudbrain.job_name)", lowerKeyWord}, builder.Like{"LOWER(cloudbrain.display_job_name)", lowerKeyWord}, builder.Like{"`user`.lower_name", lowerKeyWord})) + cond = cond.And(builder.Or(builder.Like{"LOWER(cloudbrain.job_name)", lowerKeyWord}, + builder.Like{"LOWER(cloudbrain.display_job_name)", lowerKeyWord}, builder.Like{"`user`.lower_name", lowerKeyWord})) count, err = sess.Table(&Cloudbrain{}).Unscoped().Where(cond). Join("left", "`user`", condition).Count(new(CloudbrainInfo)) @@ -2042,7 +2078,8 @@ func CloudbrainAllStatic(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, er } sess.OrderBy("cloudbrain.created_unix DESC") cloudbrains := make([]*CloudbrainInfo, 0, setting.UI.IssuePagingNum) - if err := sess.Cols("status", "type", "job_type", "train_job_duration", "duration", "compute_resource", "created_unix", "start_time", "end_time").Table(&Cloudbrain{}).Unscoped().Where(cond). + if err := sess.Cols("status", "type", "job_type", "train_job_duration", "duration", "compute_resource", + "created_unix", "start_time", "end_time").Table(&Cloudbrain{}).Unscoped().Where(cond). 
Find(&cloudbrains); err != nil { return nil, 0, fmt.Errorf("Find: %v", err) } @@ -2097,6 +2134,112 @@ func GetDatasetInfo(uuidStr string) (map[string]DatasetInfo, string, error) { return datasetInfos, datasetNames, nil } +var ( + SpecsMapInitFlag = false + CloudbrainDebugResourceSpecsMap map[int]*ResourceSpec + CloudbrainTrainResourceSpecsMap map[int]*ResourceSpec + CloudbrainInferenceResourceSpecsMap map[int]*ResourceSpec + CloudbrainBenchmarkResourceSpecsMap map[int]*ResourceSpec + CloudbrainSpecialResourceSpecsMap map[int]*ResourceSpec + GpuInfosMapInitFlag = false + CloudbrainDebugGpuInfosMap map[string]*GpuInfo + CloudbrainTrainGpuInfosMap map[string]*GpuInfo + CloudbrainInferenceGpuInfosMap map[string]*GpuInfo + CloudbrainBenchmarkGpuInfosMap map[string]*GpuInfo + CloudbrainSpecialGpuInfosMap map[string]*GpuInfo +) + +func InitCloudbrainOneResourceSpecMap() { + if CloudbrainDebugResourceSpecsMap == nil || len(CloudbrainDebugResourceSpecsMap) == 0 { + t := ResourceSpecs{} + json.Unmarshal([]byte(setting.ResourceSpecs), &t) + CloudbrainDebugResourceSpecsMap = make(map[int]*ResourceSpec, len(t.ResourceSpec)) + for _, spec := range t.ResourceSpec { + CloudbrainDebugResourceSpecsMap[spec.Id] = spec + } + } + if CloudbrainTrainResourceSpecsMap == nil || len(CloudbrainTrainResourceSpecsMap) == 0 { + t := ResourceSpecs{} + json.Unmarshal([]byte(setting.TrainResourceSpecs), &t) + CloudbrainTrainResourceSpecsMap = make(map[int]*ResourceSpec, len(t.ResourceSpec)) + for _, spec := range t.ResourceSpec { + CloudbrainTrainResourceSpecsMap[spec.Id] = spec + } + } + if CloudbrainInferenceResourceSpecsMap == nil || len(CloudbrainInferenceResourceSpecsMap) == 0 { + t := ResourceSpecs{} + json.Unmarshal([]byte(setting.InferenceResourceSpecs), &t) + CloudbrainInferenceResourceSpecsMap = make(map[int]*ResourceSpec, len(t.ResourceSpec)) + for _, spec := range t.ResourceSpec { + CloudbrainInferenceResourceSpecsMap[spec.Id] = spec + } + } + if CloudbrainBenchmarkResourceSpecsMap == 
nil || len(CloudbrainBenchmarkResourceSpecsMap) == 0 { + t := ResourceSpecs{} + json.Unmarshal([]byte(setting.BenchmarkResourceSpecs), &t) + CloudbrainBenchmarkResourceSpecsMap = make(map[int]*ResourceSpec, len(t.ResourceSpec)) + for _, spec := range t.ResourceSpec { + CloudbrainBenchmarkResourceSpecsMap[spec.Id] = spec + } + } + if CloudbrainSpecialResourceSpecsMap == nil || len(CloudbrainSpecialResourceSpecsMap) == 0 { + t := SpecialPools{} + json.Unmarshal([]byte(setting.SpecialPools), &t) + for _, pool := range t.Pools { + CloudbrainSpecialResourceSpecsMap = make(map[int]*ResourceSpec, len(pool.ResourceSpec)) + for _, spec := range pool.ResourceSpec { + CloudbrainSpecialResourceSpecsMap[spec.Id] = spec + } + } + } + SpecsMapInitFlag = true +} + +func InitCloudbrainOneGpuInfoMap() { + if CloudbrainDebugGpuInfosMap == nil || len(CloudbrainDebugGpuInfosMap) == 0 { + t := GpuInfos{} + json.Unmarshal([]byte(setting.GpuTypes), &t) + CloudbrainDebugGpuInfosMap = make(map[string]*GpuInfo, len(t.GpuInfo)) + for _, GpuInfo := range t.GpuInfo { + CloudbrainDebugGpuInfosMap[GpuInfo.Queue] = GpuInfo + } + } + if CloudbrainTrainGpuInfosMap == nil || len(CloudbrainTrainGpuInfosMap) == 0 { + t := GpuInfos{} + json.Unmarshal([]byte(setting.TrainGpuTypes), &t) + CloudbrainTrainGpuInfosMap = make(map[string]*GpuInfo, len(t.GpuInfo)) + for _, GpuInfo := range t.GpuInfo { + CloudbrainTrainGpuInfosMap[GpuInfo.Queue] = GpuInfo + } + } + if CloudbrainInferenceGpuInfosMap == nil || len(CloudbrainInferenceGpuInfosMap) == 0 { + t := GpuInfos{} + json.Unmarshal([]byte(setting.InferenceGpuTypes), &t) + CloudbrainInferenceGpuInfosMap = make(map[string]*GpuInfo, len(t.GpuInfo)) + for _, GpuInfo := range t.GpuInfo { + CloudbrainInferenceGpuInfosMap[GpuInfo.Queue] = GpuInfo + } + } + if CloudbrainBenchmarkGpuInfosMap == nil || len(CloudbrainBenchmarkGpuInfosMap) == 0 { + t := GpuInfos{} + json.Unmarshal([]byte(setting.BenchmarkGpuTypes), &t) + CloudbrainBenchmarkGpuInfosMap = 
make(map[string]*GpuInfo, len(t.GpuInfo)) + for _, GpuInfo := range t.GpuInfo { + CloudbrainBenchmarkGpuInfosMap[GpuInfo.Queue] = GpuInfo + } + } + if CloudbrainSpecialGpuInfosMap == nil || len(CloudbrainSpecialGpuInfosMap) == 0 { + t := SpecialPools{} + json.Unmarshal([]byte(setting.SpecialPools), &t) + for _, pool := range t.Pools { + CloudbrainSpecialGpuInfosMap = make(map[string]*GpuInfo, len(pool.Pool)) + for _, GpuInfo := range pool.Pool { + CloudbrainSpecialGpuInfosMap[GpuInfo.Queue] = GpuInfo + } + } + } + GpuInfosMapInitFlag = true +} func GetNewestJobsByAiCenter() ([]int64, error) { ids := make([]int64, 0) return ids, x. diff --git a/models/cloudbrain_static.go b/models/cloudbrain_static.go index 03cd7d2bc..e3ac5e963 100644 --- a/models/cloudbrain_static.go +++ b/models/cloudbrain_static.go @@ -29,6 +29,11 @@ type TaskDetail struct { RepoAlias string `json:"RepoAlias"` RepoID int64 `json:"RepoID"` IsDelete bool `json:"IsDelete"` + CardNum int `json:"CardNum"` + CardType string `json:"CardType"` + CardDuration string `json:"CardDuration"` + AiCenter string `json:"AiCenter"` + FlavorName string `json:"FlavorName"` } func GetDebugOnePeriodCount(beginTime time.Time, endTime time.Time) (int64, error) { @@ -206,21 +211,6 @@ func GetAllStatusCloudBrain() map[string]int { return cloudBrainStatusResult } -func GetWaittingTop() ([]*CloudbrainInfo, error) { - sess := x.NewSession() - defer sess.Close() - var cond = builder.NewCond() - cond = cond.And( - builder.Eq{"cloudbrain.status": string(JobWaiting)}, - ) - sess.OrderBy("cloudbrain.created_unix ASC limit 10") - cloudbrains := make([]*CloudbrainInfo, 0, 10) - if err := sess.Table(&Cloudbrain{}).Where(cond). 
- Find(&cloudbrains); err != nil { - log.Info("find error.") - } - return cloudbrains, nil -} func GetRunningTop() ([]*CloudbrainInfo, error) { sess := x.NewSession() defer sess.Close() diff --git a/models/dbsql/dataset_foreigntable_for_es.sql b/models/dbsql/dataset_foreigntable_for_es.sql index 02e5f0ddf..4a0cae70e 100644 --- a/models/dbsql/dataset_foreigntable_for_es.sql +++ b/models/dbsql/dataset_foreigntable_for_es.sql @@ -158,16 +158,20 @@ DROP TRIGGER IF EXISTS es_update_dataset on public.dataset; CREATE OR REPLACE FUNCTION public.update_dataset() RETURNS trigger AS $def$ BEGIN - UPDATE public.dataset_es - SET description=NEW.description, - title=NEW.title, - category=NEW.category, - task=NEW.task, - download_times=NEW.download_times, - updated_unix=NEW.updated_unix, - file_name=(select array_to_string(array_agg(name order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false), - file_desc=(select array_to_string(array_agg(description order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false) - where id=NEW.id; + if (NEW.status=0) then + delete from public.dataset_es where id=NEW.id; + elsif (NEW.status=1) then + UPDATE public.dataset_es + SET description=NEW.description, + title=NEW.title, + category=NEW.category, + task=NEW.task, + download_times=NEW.download_times, + updated_unix=NEW.updated_unix, + file_name=(select array_to_string(array_agg(name order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false), + file_desc=(select array_to_string(array_agg(description order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false) + where id=NEW.id; + end if; return new; END $def$ diff --git a/models/dbsql/repo_foreigntable_for_es.sql b/models/dbsql/repo_foreigntable_for_es.sql index e927eb7c2..82c8781af 100644 --- a/models/dbsql/repo_foreigntable_for_es.sql +++ 
b/models/dbsql/repo_foreigntable_for_es.sql @@ -461,7 +461,7 @@ $def$ if not OLD.is_private and NEW.is_private then delete from public.issue_es where repo_id=NEW.id; - delete from public.dataset_es where repo_id=NEW.id; + -- delete from public.dataset_es where repo_id=NEW.id; delete from public.repository_es where id=NEW.id; end if; diff --git a/models/org.go b/models/org.go index c956f1f89..45d13fc1b 100755 --- a/models/org.go +++ b/models/org.go @@ -470,6 +470,14 @@ func isOrganizationMember(e Engine, orgID, uid int64) (bool, error) { Exist() } +func IsOrganizationMemberByOrgName(orgName string, uid int64) (bool, error) { + org, _ := GetOrgByName(orgName) + if org != nil { + return IsOrganizationMember(org.ID, uid) + } + return false, nil +} + // IsPublicMembership returns true if given user public his/her membership. func IsPublicMembership(orgID, uid int64) (bool, error) { return x. diff --git a/models/repo.go b/models/repo.go index 4770e5415..feb6fd3ef 100755 --- a/models/repo.go +++ b/models/repo.go @@ -1603,13 +1603,6 @@ func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err e if err != nil { return err } - //If repo has become private, we need set dataset and dataset_file to private - _, err = e.Where("repo_id = ? and status <> 2", repo.ID).Cols("status").Update(&Dataset{ - Status: 0, - }) - if err != nil { - return err - } dataset, err := GetDatasetByRepo(repo) if err != nil && !IsErrNotExist(err) { @@ -1624,6 +1617,14 @@ func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err e } } + //If repo has become private, we need set dataset and dataset_file to private + _, err = e.Where("repo_id = ? and status <> 2", repo.ID).Cols("status").Update(&Dataset{ + Status: 0, + }) + if err != nil { + return err + } + } else { //If repo has become public, we need set dataset to public _, err = e.Where("repo_id = ? 
and status <> 2", repo.ID).Cols("status").Update(&Dataset{ diff --git a/models/repo_activity_custom.go b/models/repo_activity_custom.go index 6e7921d75..b6fffca0e 100644 --- a/models/repo_activity_custom.go +++ b/models/repo_activity_custom.go @@ -238,6 +238,9 @@ func GetAllUserPublicRepoKPIStats(startTime time.Time, endTime time.Time) (map[s CommitLines: 0, } } + if value.Email == "1250125907@qq.com" || value.Email == "peiyongyu-34@163.com" { + log.Info("repo path=" + repository.RepoPath()) + } authors[key].Commits += value.Commits authors[key].CommitLines += value.CommitLines diff --git a/models/user_analysis_for_activity.go b/models/user_analysis_for_activity.go index e69eecae0..2066697d2 100644 --- a/models/user_analysis_for_activity.go +++ b/models/user_analysis_for_activity.go @@ -6,6 +6,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/timeutil" + "xorm.io/builder" ) type UserBusinessAnalysisForActivity struct { @@ -195,7 +196,7 @@ func queryPullRequestPublic(start_unix int64, end_unix int64, publicAllRepo map[ sess := x.NewSession() defer sess.Close() resultMap := make(map[int64]int) - cond := "pull_request.merged_unix>=" + fmt.Sprint(start_unix) + " and pull_request.merged_unix<=" + fmt.Sprint(end_unix) + cond := "issue.created_unix>=" + fmt.Sprint(start_unix) + " and issue.created_unix<=" + fmt.Sprint(end_unix) count, err := sess.Table("issue").Join("inner", "pull_request", "issue.id=pull_request.issue_id").Where(cond).Count(new(Issue)) if err != nil { log.Info("query issue error. 
return.") @@ -435,3 +436,16 @@ func queryUserModelPublic(start_unix int64, end_unix int64, publicAllRepo map[in } return resultMap } + +func QueryUserLoginInfo(userIds []int64) []*UserLoginLog { + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + var cond = builder.NewCond() + cond = cond.And(builder.In("u_id", userIds)) + statictisSess.Select("*").Table(new(UserLoginLog)).Where(cond) + loginList := make([]*UserLoginLog, 0) + + statictisSess.Find(&loginList) + + return loginList +} diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index 47036e2e9..a36bd4736 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -110,9 +110,9 @@ type UserBusinessAnalysisAll struct { } type UserBusinessAnalysis struct { - ID int64 `xorm:"pk"` - - CountDate int64 `xorm:"pk"` + ID int64 `xorm:"pk"` + DataDate string `xorm:"pk"` + CountDate int64 `xorm:"NULL"` //action :ActionMergePullRequest // 11 CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"` @@ -171,8 +171,6 @@ type UserBusinessAnalysis struct { //user Name string `xorm:"NOT NULL"` - DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` @@ -411,6 +409,42 @@ func QueryUserStaticDataAll(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusi return userBusinessAnalysisReturnList, allCount } +func QueryDataForUserDefineFromDb(opts *UserBusinessAnalysisQueryOptions, key string) ([]*UserBusinessAnalysis, int64) { + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + cond := "data_date='" + key + "'" + allCount, err := statictisSess.Where(cond).Count(new(UserBusinessAnalysis)) + if err == nil { + if allCount > 0 { + userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0) + if err := statictisSess.Table("user_business_analysis").Where(cond).OrderBy("id desc").Limit(opts.PageSize, (opts.Page-1)*opts.PageSize). 
+ Find(&userBusinessAnalysisList); err != nil { + return nil, 0 + } + return userBusinessAnalysisList, allCount + } + } + return nil, 0 +} + +func WriteDataToDb(dataList []*UserBusinessAnalysis, key string) { + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + log.Info("write to db, size=" + fmt.Sprint(len(dataList))) + userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0) + for _, data := range dataList { + data.DataDate = key + userBusinessAnalysisList = append(userBusinessAnalysisList, data) + if len(userBusinessAnalysisList) > BATCH_INSERT_SIZE { + statictisSess.Insert(userBusinessAnalysisList) + userBusinessAnalysisList = make([]*UserBusinessAnalysis, 0) + } + } + if len(userBusinessAnalysisList) > 0 { + statictisSess.Insert(userBusinessAnalysisList) + } +} + func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wikiCountMap map[string]int) ([]*UserBusinessAnalysis, int64) { log.Info("start to count other user info data") sess := x.NewSession() @@ -954,6 +988,9 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, statictisSess := xStatistic.NewSession() defer statictisSess.Close() + log.Info("truncate all data from table:user_business_analysis ") + statictisSess.Exec("TRUNCATE TABLE user_business_analysis") + cond := "type != 1" count, err := sess.Where(cond).Count(new(User)) if err != nil { @@ -1103,6 +1140,7 @@ func updateNewUserAcitivity(currentUserActivity map[int64]map[int64]int64, userA ",activate_regist_user=" + fmt.Sprint(useMetrics.ActivateRegistUser) + ",not_activate_regist_user=" + fmt.Sprint(useMetrics.CurrentDayRegistUser-useMetrics.ActivateRegistUser) + ",current_day_regist_user=" + fmt.Sprint(useMetrics.CurrentDayRegistUser) + + ",activate_index=" + fmt.Sprint(float64(useMetrics.ActivateRegistUser)/float64(useMetrics.CurrentDayRegistUser)) + ",data_date='" + time.Unix(key, 0).Format("2006-01-02") + "'" + " where count_date=" + fmt.Sprint(key) diff --git 
a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go index 6cbb97999..1872375da 100755 --- a/modules/cloudbrain/cloudbrain.go +++ b/modules/cloudbrain/cloudbrain.go @@ -326,7 +326,7 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error { ReadOnly: true, }, }) - } else { + } else if len(req.DatasetInfos) > 1 { for _, dataset := range req.DatasetInfos { volumes = append(volumes, models.Volume{ HostPath: models.StHostPath{ @@ -466,11 +466,14 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e log.Error("no such resourceSpecId(%d)", task.ResourceSpecId, ctx.Data["MsgID"]) return errors.New("no such resourceSpec") } - - datasetInfos, _, err := models.GetDatasetInfo(task.Uuid) - if err != nil { - log.Error("GetDatasetInfo failed:%v", err, ctx.Data["MsgID"]) - return err + var datasetInfos map[string]models.DatasetInfo + if task.Uuid != "" { + var err error + datasetInfos, _, err = models.GetDatasetInfo(task.Uuid) + if err != nil { + log.Error("GetDatasetInfo failed:%v", err, ctx.Data["MsgID"]) + return err + } } volumes := []models.Volume{ @@ -510,24 +513,25 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e }, }, } - - if len(datasetInfos) == 1 { - volumes = append(volumes, models.Volume{ - HostPath: models.StHostPath{ - Path: datasetInfos[task.Uuid].DataLocalPath, - MountPath: DataSetMountPath, - ReadOnly: true, - }, - }) - } else { - for _, dataset := range datasetInfos { + if datasetInfos != nil { + if len(datasetInfos) == 1 { volumes = append(volumes, models.Volume{ HostPath: models.StHostPath{ - Path: dataset.DataLocalPath, - MountPath: DataSetMountPath + "/" + dataset.Name, + Path: datasetInfos[task.Uuid].DataLocalPath, + MountPath: DataSetMountPath, ReadOnly: true, }, }) + } else { + for _, dataset := range datasetInfos { + volumes = append(volumes, models.Volume{ + HostPath: models.StHostPath{ + Path: dataset.DataLocalPath, + MountPath: DataSetMountPath + "/" + dataset.Name, + 
ReadOnly: true, + }, + }) + } } } @@ -547,7 +551,7 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e GPUNumber: resourceSpec.GpuNum, MemoryMB: resourceSpec.MemMiB, ShmMB: resourceSpec.ShareMemMiB, - Command: GetCloudbrainDebugCommand(),//Command, + Command: GetCloudbrainDebugCommand(), //Command, NeedIBDevice: false, IsMainRole: false, UseNNI: false, diff --git a/modules/git/repo_stats_custom.go b/modules/git/repo_stats_custom.go index 1a7b657d5..52d91e77a 100644 --- a/modules/git/repo_stats_custom.go +++ b/modules/git/repo_stats_custom.go @@ -62,7 +62,7 @@ func GetUserKPIStats(repoPath string, startTime time.Time, endTime time.Time) (m after := startTime.Format(time.RFC3339) until := endTime.Format(time.RFC3339) - args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--after='%s'", after), fmt.Sprintf("--until=='%s'", until)} + args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--after='%s'", after), fmt.Sprintf("--until='%s'", until)} stdout, err := NewCommand(args...).RunInDirBytes(repoPath) if err != nil { return nil, err diff --git a/modules/modelarts/modelarts.go b/modules/modelarts/modelarts.go index 1f39d0fac..8dcf1b1a9 100755 --- a/modules/modelarts/modelarts.go +++ b/modules/modelarts/modelarts.go @@ -67,9 +67,11 @@ const ( ) var ( - poolInfos *models.PoolInfos - FlavorInfos *models.FlavorInfos - ImageInfos *models.ImageInfosModelArts + poolInfos *models.PoolInfos + FlavorInfos *models.FlavorInfos + ImageInfos *models.ImageInfosModelArts + TrainFlavorInfos *Flavor + SpecialPools *models.SpecialPools ) type GenerateTrainJobReq struct { @@ -799,3 +801,9 @@ func GetNotebookImageName(imageId string) (string, error) { return imageName, nil } + +func InitSpecialPool() { + if SpecialPools == nil && setting.ModelArtsSpecialPools != "" { + 
json.Unmarshal([]byte(setting.ModelArtsSpecialPools), &SpecialPools) + } +} diff --git a/modules/setting/setting.go b/modules/setting/setting.go index f63088091..8a8a4a052 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -540,11 +540,12 @@ var ( ImageInfos string Capacity int //train-job - ResourcePools string - Engines string - EngineVersions string - FlavorInfos string - TrainJobFLAVORINFOS string + ResourcePools string + Engines string + EngineVersions string + FlavorInfos string + TrainJobFLAVORINFOS string + ModelArtsSpecialPools string //grampus config Grampus = struct { @@ -1422,6 +1423,7 @@ func NewContext() { EngineVersions = sec.Key("Engine_Versions").MustString("") FlavorInfos = sec.Key("FLAVOR_INFOS").MustString("") TrainJobFLAVORINFOS = sec.Key("TrainJob_FLAVOR_INFOS").MustString("") + ModelArtsSpecialPools = sec.Key("SPECIAL_POOL").MustString("") sec = Cfg.Section("elk") ElkUrl = sec.Key("ELKURL").MustString("") diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 105abf006..cb0ef205f 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -1098,6 +1098,9 @@ modelarts.createtime=CreateTime modelarts.version_nums = Version Nums modelarts.version = Version modelarts.computing_resources=compute Resources +modelarts.ai_center=Ai Center +modelarts.card_type=Card Type +modelarts.cluster=Cluster modelarts.notebook=Debug Task modelarts.train_job=Train Task modelarts.train_job.new_debug= New Debug Task @@ -3105,6 +3108,8 @@ select_dataset = select dataset specification = specification select_specification = select specification description = description +wrong_specification=You cannot use this specification, please choose another item. + job_name_rule = Please enter letters, numbers, _ and - up to 64 characters and cannot end with a dash (-). 
dataset_path_rule = The dataset location is stored in the environment variable data_url, and the training output path is stored in the environment variable train_url. diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index 1717f5cfd..652371690 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -1108,6 +1108,9 @@ modelarts.deletetime=删除时间 modelarts.version_nums=版本数 modelarts.version=版本 modelarts.computing_resources=计算资源 +modelarts.ai_center=智算中心 +modelarts.card_type=卡类型 +modelarts.cluster=集群 modelarts.notebook=调试任务 modelarts.train_job=训练任务 modelarts.train_job.new_debug=新建调试任务 @@ -3119,6 +3122,9 @@ select_dataset = 选择数据集 specification = 规格 select_specification = 选择资源规格 description = 描述 +card_duration = 运行卡时 +card_type = 卡类型 +wrong_specification=您目前不能使用这个资源规格,请选择其他资源规格。 job_name_rule = 请输入字母、数字、_和-,最长64个字符,且不能以中划线(-)结尾。 dataset_path_rule = 数据集位置存储在环境变量data_url中,训练输出路径存储在环境变量train_url中。 diff --git a/public/self/css/notebook/notebook.css b/public/self/css/notebook/notebook.css index c75c33865..0e4979660 100644 --- a/public/self/css/notebook/notebook.css +++ b/public/self/css/notebook/notebook.css @@ -1,6 +1,7 @@ .nb-notebook { line-height: 1.5; - margin-left: 7em; + margin-left: 6em; + } .nb-stdout, .nb-stderr { @@ -15,6 +16,7 @@ .nb-cell + .nb-cell { margin-top: 0.5em; + max-width: 100%; } .nb-output table { @@ -40,6 +42,11 @@ padding-left: 1em; } +.nb-notebook img { + max-width: 80%; + padding: 3px; +} + .nb-cell { position: relative; } @@ -60,7 +67,8 @@ } .nb-output img { - max-width: 100%; + max-width: 80%; + padding: 3px; } .nb-output:before, .nb-input:before { diff --git a/routers/admin/cloudbrains.go b/routers/admin/cloudbrains.go index 8cfe10795..5876baf18 100755 --- a/routers/admin/cloudbrains.go +++ b/routers/admin/cloudbrains.go @@ -10,6 +10,7 @@ import ( "github.com/360EntSecGroup-Skylar/excelize/v2" "code.gitea.io/gitea/modules/modelarts" + "code.gitea.io/gitea/routers/repo" 
"code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/base" @@ -89,6 +90,10 @@ func CloudBrains(ctx *context.Context) { ciTasks[i].CanDebug = true ciTasks[i].CanDel = true ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource + ciTasks[i].Cloudbrain.AiCenter = repo.GetCloudbrainAiCenter(task.Cloudbrain, ctx) + _, cardType, _ := repo.GetCloudbrainCardNumAndType(task.Cloudbrain) + ciTasks[i].Cloudbrain.CardType = cardType + ciTasks[i].Cloudbrain.Cluster = repo.GetCloudbrainCluster(task.Cloudbrain, ctx) } pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, getTotalPage(count, setting.UI.IssuePagingNum)) @@ -188,11 +193,19 @@ func DownloadCloudBrains(ctx *context.Context) { } func allValues(row int, rs *models.CloudbrainInfo, ctx *context.Context) map[string]string { - return map[string]string{getCellName("A", row): rs.DisplayJobName, getCellName("B", row): rs.JobType, getCellName("C", row): rs.Status, getCellName("D", row): time.Unix(int64(rs.Cloudbrain.CreatedUnix), 0).Format(CREATE_TIME_FORMAT), getCellName("E", row): getDurationTime(rs), - getCellName("F", row): rs.ComputeResource, getCellName("G", row): rs.Name, getCellName("H", row): getRepoPathName(rs), getCellName("I", row): rs.JobName, + return map[string]string{getCellName("A", row): rs.DisplayJobName, getCellName("B", row): repo.GetCloudbrainCluster(rs.Cloudbrain, ctx), + getCellName("C", row): rs.JobType, getCellName("D", row): rs.Status, getCellName("E", row): time.Unix(int64(rs.Cloudbrain.CreatedUnix), 0).Format(CREATE_TIME_FORMAT), + getCellName("F", row): getDurationTime(rs), getCellName("G", row): rs.ComputeResource, + getCellName("H", row): repo.GetCloudbrainAiCenter(rs.Cloudbrain, ctx), getCellName("I", row): getCloudbrainCardType(rs), + getCellName("J", row): rs.Name, getCellName("K", row): getRepoPathName(rs), getCellName("L", row): rs.JobName, } } +func getCloudbrainCardType(rs *models.CloudbrainInfo) string { + _, cardType, _ := 
repo.GetCloudbrainCardNumAndType(rs.Cloudbrain) + return cardType +} + func getRepoPathName(rs *models.CloudbrainInfo) string { if rs.Repo != nil { return rs.Repo.OwnerName + "/" + rs.Repo.Alias @@ -225,7 +238,11 @@ func getTotalPage(total int64, pageSize int) int { func allHeader(ctx *context.Context) map[string]string { - return map[string]string{"A1": ctx.Tr("repo.cloudbrain_task"), "B1": ctx.Tr("repo.cloudbrain_task_type"), "C1": ctx.Tr("repo.modelarts.status"), "D1": ctx.Tr("repo.modelarts.createtime"), "E1": ctx.Tr("repo.modelarts.train_job.dura_time"), "F1": ctx.Tr("repo.modelarts.computing_resources"), "G1": ctx.Tr("repo.cloudbrain_creator"), "H1": ctx.Tr("repo.repo_name"), "I1": ctx.Tr("repo.cloudbrain_task_name")} + return map[string]string{"A1": ctx.Tr("repo.cloudbrain_task"), "B1": ctx.Tr("repo.modelarts.cluster"), + "C1": ctx.Tr("repo.cloudbrain_task_type"), "D1": ctx.Tr("repo.modelarts.status"), "E1": ctx.Tr("repo.modelarts.createtime"), + "F1": ctx.Tr("repo.modelarts.train_job.dura_time"), "G1": ctx.Tr("repo.modelarts.computing_resources"), + "H1": ctx.Tr("repo.modelarts.ai_center"), "I1": ctx.Tr("repo.modelarts.card_type"), "J1": ctx.Tr("repo.cloudbrain_creator"), + "K1": ctx.Tr("repo.repo_name"), "L1": ctx.Tr("repo.cloudbrain_task_name")} } diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index 3d9452f93..aa51c6e1a 100755 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -571,6 +571,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/query_user_yesterday", operationReq, repo_ext.QueryUserStaticYesterday) m.Get("/query_user_all", operationReq, repo_ext.QueryUserStaticAll) m.Get("/query_user_activity", operationReq, repo_ext.QueryUserActivity) + m.Get("/query_user_login", operationReq, repo_ext.QueryUserLoginInfo) //cloudbrain board m.Group("/cloudbrainboard", func() { m.Get("/downloadAll", repo.DownloadCloudBrainBoard) diff --git a/routers/api/v1/repo/cloudbrain_dashboard.go b/routers/api/v1/repo/cloudbrain_dashboard.go index 
eb86a8293..c632f3c8b 100755 --- a/routers/api/v1/repo/cloudbrain_dashboard.go +++ b/routers/api/v1/repo/cloudbrain_dashboard.go @@ -10,6 +10,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/routers/repo" "github.com/360EntSecGroup-Skylar/excelize/v2" ) @@ -735,6 +736,7 @@ func GetCloudbrainsDetailData(ctx *context.Context) { var taskDetail models.TaskDetail taskDetail.ID = ciTasks[i].Cloudbrain.ID + taskDetail.JobID = ciTasks[i].Cloudbrain.JobID taskDetail.JobName = ciTasks[i].JobName taskDetail.DisplayJobName = ciTasks[i].DisplayJobName taskDetail.Status = ciTasks[i].Status @@ -751,46 +753,12 @@ func GetCloudbrainsDetailData(ctx *context.Context) { taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name taskDetail.RepoAlias = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Alias } - if ciTasks[i].Cloudbrain.Status == string(models.JobWaiting) { - if ciTasks[i].Cloudbrain.DeletedAt != nilTime { - WaitTimeInt := ciTasks[i].Cloudbrain.UpdatedUnix.AsTime().Unix() - ciTasks[i].Cloudbrain.CreatedUnix.AsTime().Unix() - taskDetail.WaitTime = models.ConvertDurationToStr(WaitTimeInt) - if WaitTimeInt < 0 { - taskDetail.WaitTime = "00:00:00" - } - } else { - if ciTasks[i].Cloudbrain.StartTime.AsTime().Unix() == 0 { - WaitTimeInt := time.Now().Unix() - ciTasks[i].Cloudbrain.CreatedUnix.AsTime().Unix() - taskDetail.WaitTime = models.ConvertDurationToStr(WaitTimeInt) - if WaitTimeInt < 0 { - taskDetail.WaitTime = "00:00:00" - } - } else { - WaitTimeInt := ciTasks[i].Cloudbrain.StartTime.AsTime().Unix() - ciTasks[i].Cloudbrain.CreatedUnix.AsTime().Unix() - taskDetail.WaitTime = models.ConvertDurationToStr(WaitTimeInt) - if WaitTimeInt < 0 { - taskDetail.WaitTime = "00:00:00" - } - } - } - } else if ciTasks[i].Cloudbrain.Status == string(models.JobStopped) && ciTasks[i].Cloudbrain.StartTime.AsTime().Unix() == 0 { - WaitTimeInt := 
ciTasks[i].Cloudbrain.EndTime.AsTime().Unix() - ciTasks[i].Cloudbrain.CreatedUnix.AsTime().Unix() - taskDetail.WaitTime = models.ConvertDurationToStr(WaitTimeInt) - if WaitTimeInt < 0 { - taskDetail.WaitTime = "00:00:00" - - } - } else { - WaitTimeInt := ciTasks[i].Cloudbrain.StartTime.AsTime().Unix() - ciTasks[i].Cloudbrain.CreatedUnix.AsTime().Unix() - taskDetail.WaitTime = models.ConvertDurationToStr(WaitTimeInt) - if WaitTimeInt < 0 { - taskDetail.WaitTime = "00:00:00" - } - } + taskDetail.CardNum, taskDetail.CardType, _ = repo.GetCloudbrainCardNumAndType(ciTasks[i].Cloudbrain) + taskDetail.CardDuration = repo.GetCloudbrainCardDuration(ciTasks[i].Cloudbrain) + taskDetail.AiCenter = repo.GetCloudbrainAiCenter(ciTasks[i].Cloudbrain, ctx) + taskDetail.FlavorName, _ = repo.GetCloudbrainFlavorName(ciTasks[i].Cloudbrain) - if ciTasks[i].Cloudbrain.Type == models.TypeCloudBrainTwo || (ciTasks[i].Cloudbrain.Type == models.TypeCloudBrainOne && ciTasks[i].Cloudbrain.JobType == "TRAIN") { - taskDetail.JobID = ciTasks[i].Cloudbrain.JobID - } + taskDetail.WaitTime = repo.GetCloudbrainWaitTime(ciTasks[i].Cloudbrain) if ciTasks[i].Cloudbrain.DeletedAt != nilTime { taskDetail.IsDelete = true @@ -813,6 +781,17 @@ func GetCloudbrainsDetailData(ctx *context.Context) { }) } +func getCloudbrainAiCenter(task models.Cloudbrain, ctx *context.Context) string { + if task.Type == models.TypeCloudBrainOne { + return ctx.Tr("repo.cloudbrain1") + } else if task.Type == models.TypeCloudBrainTwo { + return ctx.Tr("repo.cloudbrain2") + } else if task.Type == models.TypeC2Net { + return task.AiCenter + } + return "" +} + func GetCloudbrainsCreateHoursData(ctx *context.Context) { recordCloudbrain, err := models.GetRecordBeginTime() if err != nil { @@ -1247,18 +1226,23 @@ func allCloudbrainHeader(ctx *context.Context) map[string]string { return map[string]string{"A1": ctx.Tr("repo.cloudbrain_task"), "B1": ctx.Tr("repo.cloudbrain_type"), "C1": ctx.Tr("repo.modelarts.status"), "D1": 
ctx.Tr("repo.cloudbrain_task_type"), "E1": ctx.Tr("repo.modelarts.createtime"), "F1": ctx.Tr("repo.modelarts.train_job.wait_time"), "G1": ctx.Tr("repo.modelarts.train_job.dura_time"), - "H1": ctx.Tr("repo.modelarts.train_job.start_time"), - "I1": ctx.Tr("repo.modelarts.train_job.end_time"), "J1": ctx.Tr("repo.modelarts.computing_resources"), - "K1": ctx.Tr("repo.cloudbrain_creator"), "L1": ctx.Tr("repo.repo_name"), "M1": ctx.Tr("repo.cloudbrain_task_name"), "N1": ctx.Tr("repo.modelarts.deletetime")} + "H1": ctx.Tr("cloudbrain.card_duration"), + "I1": ctx.Tr("repo.modelarts.train_job.start_time"), "J1": ctx.Tr("repo.modelarts.train_job.end_time"), + "K1": ctx.Tr("repo.modelarts.computing_resources"), "L1": ctx.Tr("cloudbrain.card_type"), + "M1": ctx.Tr("repo.grampus.train_job.ai_center"), "N1": ctx.Tr("cloudbrain.resource_specification"), + "O1": ctx.Tr("repo.cloudbrain_creator"), "P1": ctx.Tr("repo.repo_name"), "Q1": ctx.Tr("repo.cloudbrain_task_name"), + "R1": ctx.Tr("repo.modelarts.deletetime")} } func allCloudbrainValues(row int, rs *models.CloudbrainInfo, ctx *context.Context) map[string]string { return map[string]string{getCellName("A", row): rs.DisplayJobName, getCellName("B", row): getCloudbrainType(rs, ctx), getCellName("C", row): rs.Status, getCellName("D", row): rs.JobType, - getCellName("E", row): time.Unix(int64(rs.Cloudbrain.CreatedUnix), 0).Format(CREATE_TIME_FORMAT), getCellName("F", row): getBrainWaitTime(rs), - getCellName("G", row): rs.TrainJobDuration, getCellName("H", row): getBrainStartTime(rs), - getCellName("I", row): getBrainEndTime(rs), - getCellName("J", row): rs.ComputeResource, getCellName("K", row): rs.Name, getCellName("L", row): getBrainRepo(rs), - getCellName("M", row): rs.JobName, getCellName("N", row): getBrainDeleteTime(rs), + getCellName("E", row): time.Unix(int64(rs.Cloudbrain.CreatedUnix), 0).Format(CREATE_TIME_FORMAT), getCellName("F", row): repo.GetCloudbrainWaitTime(rs.Cloudbrain), + getCellName("G", row): 
rs.TrainJobDuration, getCellName("H", row): repo.GetCloudbrainCardDuration(rs.Cloudbrain), + getCellName("I", row): getBrainStartTime(rs), + getCellName("J", row): getBrainEndTime(rs), getCellName("K", row): rs.ComputeResource, getCellName("L", row): getCloudbrainCardType(rs), + getCellName("M", row): repo.GetCloudbrainAiCenter(rs.Cloudbrain, ctx), getCellName("N", row): getCloudbrainFlavorName(rs), + getCellName("O", row): rs.Name, getCellName("P", row): getBrainRepo(rs), + getCellName("Q", row): rs.JobName, getCellName("R", row): getBrainDeleteTime(rs), } } func getBrainRepo(rs *models.CloudbrainInfo) string { @@ -1285,19 +1269,6 @@ func getBrainEndTime(rs *models.CloudbrainInfo) string { } } -func getBrainWaitTime(rs *models.CloudbrainInfo) string { - var waitTime int64 - if rs.Cloudbrain.Status == string(models.JobWaiting) { - waitTime = time.Now().Unix() - rs.Cloudbrain.CreatedUnix.AsTime().Unix() - } else { - waitTime = int64(rs.Cloudbrain.StartTime - rs.Cloudbrain.CreatedUnix) - } - if waitTime <= 0 { - return "00:00:00" - } else { - return models.ConvertDurationToStr(waitTime) - } -} func getCloudbrainType(rs *models.CloudbrainInfo, ctx *context.Context) string { if rs.Cloudbrain.Type == models.TypeCloudBrainOne { return ctx.Tr("repo.cloudbrain1") @@ -1309,6 +1280,14 @@ func getCloudbrainType(rs *models.CloudbrainInfo, ctx *context.Context) string { return ctx.Tr("repo.cloudbrain_untype") } } +func getCloudbrainCardType(rs *models.CloudbrainInfo) string { + _, cardType, _ := repo.GetCloudbrainCardNumAndType(rs.Cloudbrain) + return cardType +} +func getCloudbrainFlavorName(rs *models.CloudbrainInfo) string { + flavorName, _ := repo.GetCloudbrainFlavorName(rs.Cloudbrain) + return flavorName +} func getBrainDeleteTime(rs *models.CloudbrainInfo) string { nilTime := time.Time{} diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 5f767086d..3e6f8e83c 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -172,25 +172,22 
@@ func cloudBrainNewDataPrepare(ctx *context.Context) error { var trainGpuTypes []*models.GpuInfo for _, pool := range cloudbrain.SpecialPools.Pools { - org, _ := models.GetOrgByName(pool.Org) - if org != nil { - isOrgMember, _ := models.IsOrganizationMember(org.ID, ctx.User.ID) - if isOrgMember { - for _, jobType := range pool.JobType { - if jobType == string(models.JobTypeDebug) { - debugGpuTypes = append(debugGpuTypes, pool.Pool...) - if pool.ResourceSpec != nil { - ctx.Data["resource_specs"] = pool.ResourceSpec - } - } else if jobType == string(models.JobTypeTrain) { - trainGpuTypes = append(trainGpuTypes, pool.Pool...) - if pool.ResourceSpec != nil { - ctx.Data["train_resource_specs"] = pool.ResourceSpec - } + isOrgMember, _ := models.IsOrganizationMemberByOrgName(pool.Org, ctx.User.ID) + if isOrgMember { + for _, jobType := range pool.JobType { + if jobType == string(models.JobTypeDebug) { + debugGpuTypes = append(debugGpuTypes, pool.Pool...) + if pool.ResourceSpec != nil { + ctx.Data["resource_specs"] = pool.ResourceSpec + } + } else if jobType == string(models.JobTypeTrain) { + trainGpuTypes = append(trainGpuTypes, pool.Pool...) 
+ if pool.ResourceSpec != nil { + ctx.Data["train_resource_specs"] = pool.ResourceSpec } } - break } + break } } @@ -227,6 +224,7 @@ func CloudBrainNew(ctx *context.Context) { ctx.ServerError("get new cloudbrain info failed", err) return } + ctx.Data["PageIsGPUDebug"] = true ctx.HTML(200, tplCloudBrainNew) } @@ -292,13 +290,17 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { return } } - - datasetInfos, datasetNames, err := models.GetDatasetInfo(uuids) - if err != nil { - log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"]) - cloudBrainNewDataPrepare(ctx) - ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form) - return + var datasetInfos map[string]models.DatasetInfo + var datasetNames string + //var + if uuids != "" { + datasetInfos, datasetNames, err = models.GetDatasetInfo(uuids) + if err != nil { + log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"]) + cloudBrainNewDataPrepare(ctx) + ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form) + return + } } command := cloudbrain.GetCloudbrainDebugCommand() @@ -2722,3 +2724,170 @@ func GetBenchmarkTypes(ctx *context.Context) *models.BenchmarkTypes { } return benchmarkTypesMap[lang] } + +func GetCloudbrainAiCenter(task models.Cloudbrain, ctx *context.Context) string { + if task.Type == models.TypeCloudBrainOne { + return ctx.Tr("repo.cloudbrain1") + } else if task.Type == models.TypeCloudBrainTwo { + return ctx.Tr("repo.cloudbrain2") + } else if task.Type == models.TypeC2Net { + return getCutStringAiCenterByAiCenter(task.AiCenter) + } + return "" +} +func getCutStringAiCenterByAiCenter(aiCenter string) string { + if aiCenter == "" { + return "" + } + index := strings.LastIndex(aiCenter, "+") + return aiCenter[index+1:] + +} +func GetCloudbrainCluster(task models.Cloudbrain, ctx *context.Context) string { + if task.Type == models.TypeCloudBrainOne || task.Type == models.TypeCloudBrainTwo { + return 
ctx.Tr("cloudbrain.resource_cluster_openi") + } else if task.Type == models.TypeC2Net { + return ctx.Tr("cloudbrain.resource_cluster_c2net") + } + return "" +} +func GetCloudbrainCardDuration(task models.Cloudbrain) string { + cardNum, _, _ := GetCloudbrainCardNumAndType(task) + cardDuration := models.ConvertDurationToStr(int64(cardNum) * task.Duration) + return cardDuration +} +func GetCloudbrainWaitTime(task models.Cloudbrain) string { + var waitTime string + if task.Status == string(models.JobWaiting) { + waitTimeInt := time.Now().Unix() - task.CreatedUnix.AsTime().Unix() + waitTime = models.ConvertDurationToStr(waitTimeInt) + if waitTimeInt < 0 { + waitTime = "00:00:00" + } + } else if task.Status == string(models.JobStopped) && task.StartTime.AsTime().Unix() == 0 { + waitTimeInt := task.EndTime.AsTime().Unix() - task.CreatedUnix.AsTime().Unix() + waitTime = models.ConvertDurationToStr(waitTimeInt) + if waitTimeInt < 0 { + waitTime = "00:00:00" + + } + } else { + waitTimeInt := task.StartTime.AsTime().Unix() - task.CreatedUnix.AsTime().Unix() + waitTime = models.ConvertDurationToStr(waitTimeInt) + if waitTimeInt < 0 { + waitTime = "00:00:00" + } + } + return waitTime +} + +func GetCloudbrainCardNumAndType(task models.Cloudbrain) (int, string, error) { + if !models.SpecsMapInitFlag { + models.InitCloudbrainOneResourceSpecMap() + } + if !models.GpuInfosMapInitFlag { + models.InitCloudbrainOneGpuInfoMap() + } + flavorName, err := GetCloudbrainFlavorName(task) + if err != nil { + return 0, "", err + } + return getCardNumAndTypeByFlavorname(flavorName) +} + +func getCardNumAndTypeByFlavorname(FlavorName string) (int, string, error) { + if FlavorName == "" { + return 0, "", nil + } else { + var beginIndex = strings.Index(FlavorName, ":") + var lastIndex = strings.LastIndex(FlavorName, ":") + var endIndex = strings.Index(FlavorName, "*") + if endIndex >= (beginIndex+1) && lastIndex >= (endIndex+1) { + cardNum, err := 
strconv.Atoi(strings.TrimSpace(FlavorName[beginIndex+1 : endIndex])) + if err != nil { + log.Error("strconv.Atoi failed: %v", err) + return 0, "", err + } + cardType := strings.TrimSpace(FlavorName[endIndex+1 : lastIndex]) + return cardNum, cardType, err + } + return 0, "", nil + } +} + +func GetCloudbrainFlavorName(task models.Cloudbrain) (string, error) { + if task.Type == models.TypeCloudBrainOne { + resourceSpec, gpuInfo, err := getCloudBrainOneResourceSpec(task) + if err != nil { + log.Info("getCloudBrainOneResourceSpec err:", err) + return "", err + } else { + if resourceSpec == nil || gpuInfo == nil { + err := errors.New("resourceSpec or gpuInfo is nil") + return "", err + } else { + CloudbrainOneFlavorName := "GPU:" + strconv.Itoa(resourceSpec.GpuNum) + "*Nvidia-" + gpuInfo.Value + + " | CPU:" + strconv.Itoa(resourceSpec.CpuNum) + "核" + strconv.Itoa(resourceSpec.MemMiB) + "MB" + return CloudbrainOneFlavorName, nil + } + } + } else if (task.Type == models.TypeCloudBrainTwo || task.Type == models.TypeC2Net) && task.FlavorName != "" { + replaceFlavorName := strings.ReplaceAll(task.FlavorName, ":", ":") + return replaceFlavorName, nil + } else if task.Type == models.TypeCloudBrainTwo && task.FlavorName == "" && task.FlavorCode != "" { + cloudbrainTwoFlavorName := getFlavorNameByFlavorCode(task.FlavorCode) + return cloudbrainTwoFlavorName, nil + } else if task.Type == models.TypeCloudBrainTwo && task.JobType == string(models.JobTypeDebug) && task.FlavorName == "" && task.FlavorCode == "" { + tasks, err := models.GetModelartsReDebugTaskByJobId(task.JobID) + if err != nil { + return "", err + } + if len(tasks) >= 1 { + return getFlavorNameByFlavorCode(tasks[0].FlavorCode), nil + } + return "", nil + } + return "", nil +} + +func getCloudBrainOneResourceSpec(task models.Cloudbrain) (*models.ResourceSpec, *models.GpuInfo, error) { + gpuQueueDefault := "openidebug" + if task.GpuQueue != "" { + gpuQueueDefault = task.GpuQueue + } + if task.ResourceSpecId >= 0 { + if 
task.JobType == string(models.JobTypeTrain) { + if models.CloudbrainTrainResourceSpecsMap[task.ResourceSpecId] != nil { + return models.CloudbrainTrainResourceSpecsMap[task.ResourceSpecId], models.CloudbrainTrainGpuInfosMap[gpuQueueDefault], nil + } else { + return models.CloudbrainSpecialResourceSpecsMap[task.ResourceSpecId], models.CloudbrainSpecialGpuInfosMap[gpuQueueDefault], nil + } + } else if task.JobType == string(models.JobTypeDebug) { + if models.CloudbrainDebugResourceSpecsMap[task.ResourceSpecId] != nil { + return models.CloudbrainDebugResourceSpecsMap[task.ResourceSpecId], models.CloudbrainDebugGpuInfosMap[gpuQueueDefault], nil + } else { + return models.CloudbrainSpecialResourceSpecsMap[task.ResourceSpecId], models.CloudbrainSpecialGpuInfosMap[gpuQueueDefault], nil + } + } else if task.JobType == string(models.JobTypeInference) { + return models.CloudbrainInferenceResourceSpecsMap[task.ResourceSpecId], models.CloudbrainInferenceGpuInfosMap[gpuQueueDefault], nil + } else if task.JobType == string(models.JobTypeBenchmark) || task.JobType == string(models.JobTypeSnn4imagenet) || task.JobType == string(models.JobTypeBrainScore) { + return models.CloudbrainBenchmarkResourceSpecsMap[task.ResourceSpecId], models.CloudbrainBenchmarkGpuInfosMap[gpuQueueDefault], nil + } + } else { + err := errors.New("ResourceSpecId is null") + return nil, nil, err + } + return nil, nil, nil +} +func getFlavorNameByFlavorCode(flavorCode string) string { + index := strings.LastIndex(flavorCode, ".") + cardNum, err := strconv.Atoi(strings.TrimSpace(flavorCode[index+1 : len(flavorCode)])) + if err != nil { + log.Error("strconv.Atoi failed: %v", err) + return "" + } + cloudbrainTwoFlavorName := "Ascend:" + strings.TrimSpace(flavorCode[index+1:len(flavorCode)]) + + "*Ascend-910(" + strconv.Itoa(cardNum*32) + "GB)|ARM:" + strconv.Itoa(cardNum*24) + + "核" + strconv.Itoa(cardNum*256) + "GB" + return cloudbrainTwoFlavorName +} diff --git a/routers/repo/grampus.go 
b/routers/repo/grampus.go index f048506b1..76f2bd98c 100755 --- a/routers/repo/grampus.go +++ b/routers/repo/grampus.go @@ -66,7 +66,7 @@ func grampusTrainJobNewDataPrepare(ctx *context.Context, processType string) err ctx.Data["PageIsCloudBrain"] = true t := time.Now() - var displayJobName = cutString(ctx.User.Name, 5) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] ctx.Data["display_job_name"] = displayJobName //get valid images diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index ef9fe41b2..43f4a6e73 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -146,6 +146,7 @@ func notebookNewDataPrepare(ctx *context.Context) error { json.Unmarshal([]byte(setting.FlavorInfos), &modelarts.FlavorInfos) } ctx.Data["flavors"] = modelarts.FlavorInfos.FlavorInfo + setSpecBySpecialPoolConfig(ctx, string(models.JobTypeDebug)) ctx.Data["datasetType"] = models.TypeCloudBrainTwo @@ -238,6 +239,13 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm } } + errStr := checkModelArtsSpecialPool(ctx, flavor, string(models.JobTypeDebug)) + if errStr != "" { + notebookNewDataPrepare(ctx) + ctx.RenderWithErr(ctx.Tr(errStr), tplModelArtsNotebookNew, &form) + return + } + err = modelarts.GenerateNotebook2(ctx, displayJobName, jobName, uuid, description, flavor, imageId) if err != nil { log.Error("GenerateNotebook2 failed, %v", err, ctx.Data["MsgID"]) @@ -286,6 +294,9 @@ func NotebookShow(ctx *context.Context) { } else { //deleted record } + if task.FlavorCode == "" { + task.FlavorCode = result.Flavor + } } datasetDownload := make([]models.DatasetDownload, 0) @@ -328,15 +339,21 @@ func NotebookShow(ctx *context.Context) { if modelarts.FlavorInfos == nil { json.Unmarshal([]byte(setting.FlavorInfos), &modelarts.FlavorInfos) } + + findSpec := false if modelarts.FlavorInfos != nil { 
ctx.Data["resource_spec"] = modelarts.FlavorInfos.FlavorInfo[0].Desc for _, f := range modelarts.FlavorInfos.FlavorInfo { if fmt.Sprint(f.Value) == task.FlavorCode { ctx.Data["resource_spec"] = f.Desc + findSpec = true break } } } + + setShowSpecBySpecialPoolConfig(ctx, findSpec, task) + if task.TrainJobDuration == "" { if task.Duration == 0 { var duration int64 @@ -358,6 +375,20 @@ func NotebookShow(ctx *context.Context) { ctx.HTML(200, tplModelArtsNotebookShow) } +func setShowSpecBySpecialPoolConfig(ctx *context.Context, findSpec bool, task *models.Cloudbrain) { + modelarts.InitSpecialPool() + if modelarts.SpecialPools != nil && !findSpec { + for _, pool := range modelarts.SpecialPools.Pools { + for _, flavor := range pool.Flavor { + if flavor.Value == task.FlavorCode { + ctx.Data["resource_spec"] = flavor.Desc + } + } + } + + } +} + func NotebookDebug(ctx *context.Context) { var jobID = ctx.Params(":jobid") @@ -500,6 +531,8 @@ func NotebookManage(ctx *context.Context) { Description: task.Description, CreatedUnix: createTime, UpdatedUnix: createTime, + FlavorCode: task.FlavorCode, + FlavorName: task.FlavorName, } err = models.RestartCloudbrain(task, newTask) @@ -545,7 +578,7 @@ func NotebookDel(ctx *context.Context) { var listType = ctx.Query("debugListType") task := ctx.Cloudbrain - if task.Status != string(models.ModelArtsCreateFailed) && task.Status != string(models.ModelArtsStartFailed) && task.Status != string(models.ModelArtsStopped) { + if task.Status != string(models.ModelArtsCreateFailed) && task.Status != string(models.ModelArtsStartFailed) && task.Status != string(models.ModelArtsStopped) && task.Status != string(models.ModelArtsDeleted) { log.Error("the job(%s) has not been stopped", task.JobName) ctx.RenderWithErr("the job has not been stopped", tplDebugJobIndex, nil) return @@ -658,7 +691,7 @@ func trainJobNewDataPrepare(ctx *context.Context) error { //} t := time.Now() - var displayJobName = cutString(ctx.User.Name, 5) + t.Format("2006010215") + 
strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] ctx.Data["display_job_name"] = displayJobName attachs, err := models.GetModelArtsTrainAttachments(ctx.User.ID) @@ -696,6 +729,8 @@ func trainJobNewDataPrepare(ctx *context.Context) error { } ctx.Data["flavor_infos"] = flavorInfos.Info + setSpecBySpecialPoolConfig(ctx, string(models.JobTypeTrain)) + ctx.Data["params"] = "" ctx.Data["branchName"] = ctx.Repo.BranchName @@ -710,6 +745,41 @@ func trainJobNewDataPrepare(ctx *context.Context) error { return nil } +func setSpecBySpecialPoolConfig(ctx *context.Context, jobType string) { + modelarts.InitSpecialPool() + + if modelarts.SpecialPools != nil { + for _, specialPool := range modelarts.SpecialPools.Pools { + if cloudbrain.IsElementExist(specialPool.JobType, jobType) { + + if isInOrg, _ := models.IsOrganizationMemberByOrgName(specialPool.Org, ctx.User.ID); isInOrg { + var specialFlavor []struct { + Code string + Value string + } + + if jobType == string(models.JobTypeDebug) { + ctx.Data["flavors"] = specialPool.Flavor + } else { + + for _, tempFlavor := range specialPool.Flavor { + specialFlavor = append(specialFlavor, struct { + Code string + Value string + }{Code: tempFlavor.Value, Value: tempFlavor.Desc}) + } + + ctx.Data["flavor_infos"] = specialFlavor + } + + } + + } + } + + } +} + func trainJobErrorNewDataPrepare(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) error { ctx.Data["PageIsCloudBrain"] = true @@ -726,7 +796,7 @@ func trainJobErrorNewDataPrepare(ctx *context.Context, form auth.CreateModelArts //} t := time.Now() - var displayJobName = cutString(ctx.User.Name, 5) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] ctx.Data["display_job_name"] = displayJobName attachs, err := 
models.GetModelArtsTrainAttachments(ctx.User.ID) @@ -763,6 +833,7 @@ func trainJobErrorNewDataPrepare(ctx *context.Context, form auth.CreateModelArts return err } ctx.Data["flavor_infos"] = flavorInfos.Info + setSpecBySpecialPoolConfig(ctx, string(models.JobTypeTrain)) configList, err := getConfigList(modelarts.PerPage, 1, modelarts.SortByCreateTime, "desc", "", modelarts.ConfigTypeCustom) if err != nil { @@ -857,6 +928,8 @@ func trainJobNewVersionDataPrepare(ctx *context.Context) error { } ctx.Data["flavor_infos"] = flavorInfos.Info + setSpecBySpecialPoolConfig(ctx, string(models.JobTypeTrain)) + var Parameters modelarts.Parameters if err = json.Unmarshal([]byte(task.Parameters), &Parameters); err != nil { ctx.ServerError("json.Unmarshal failed:", err) @@ -906,7 +979,7 @@ func versionErrorDataPrepare(ctx *context.Context, form auth.CreateModelArtsTrai } t := time.Now() - var jobName = cutString(ctx.User.Name, 5) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var jobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] ctx.Data["job_name"] = task.JobName attachs, err := models.GetModelArtsTrainAttachments(ctx.User.ID) @@ -943,6 +1016,7 @@ func versionErrorDataPrepare(ctx *context.Context, form auth.CreateModelArtsTrai return err } ctx.Data["flavor_infos"] = flavorInfos.Info + setSpecBySpecialPoolConfig(ctx, string(models.JobTypeTrain)) var Parameters modelarts.Parameters if err = json.Unmarshal([]byte(form.Params), &Parameters); err != nil { @@ -1030,6 +1104,13 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) ctx.RenderWithErr(err.Error(), tplModelArtsTrainJobNew, &form) return } + + errStr := checkModelArtsSpecialPool(ctx, flavorCode, string(models.JobTypeTrain)) + if errStr != "" { + trainJobErrorNewDataPrepare(ctx, form) + ctx.RenderWithErr(ctx.Tr(errStr), tplModelArtsTrainJobNew, &form) + return + } //Determine whether the task name of the task in the 
project is duplicated tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeTrain), displayJobName) if err == nil { @@ -1309,6 +1390,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ canNewJob, _ := canUserCreateTrainJobVersion(ctx, latestTask.UserID) if !canNewJob { + versionErrorDataPrepare(ctx, form) ctx.RenderWithErr("user cann't new trainjob", tplModelArtsTrainJobVersionNew, &form) return } @@ -1320,6 +1402,13 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ return } + errStr := checkModelArtsSpecialPool(ctx, flavorCode, string(models.JobTypeTrain)) + if errStr != "" { + versionErrorDataPrepare(ctx, form) + ctx.RenderWithErr(ctx.Tr(errStr), tplModelArtsTrainJobVersionNew, &form) + return + } + //todo: del the codeLocalPath _, err = ioutil.ReadDir(codeLocalPath) if err == nil { @@ -1976,6 +2065,13 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference } } + errStr := checkModelArtsSpecialPool(ctx, flavorCode, string(models.JobTypeInference)) + if errStr != "" { + inferenceJobErrorNewDataPrepare(ctx, form) + ctx.RenderWithErr(ctx.Tr(errStr), tplModelArtsInferenceJobNew, &form) + return + } + //todo: del the codeLocalPath _, err = ioutil.ReadDir(codeLocalPath) if err == nil { @@ -2092,6 +2188,65 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference } ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/inference-job") } + +func checkModelArtsSpecialPool(ctx *context.Context, flavorCode string, jobType string) string { + if modelarts.SpecialPools != nil { + isMatchPool := false + + for _, specialPool := range modelarts.SpecialPools.Pools { + if cloudbrain.IsElementExist(specialPool.JobType, jobType) { + if isInOrg, _ := models.IsOrganizationMemberByOrgName(specialPool.Org, ctx.User.ID); isInOrg { + isMatchPool = true + isMatchSpec := false + for _, flavor := range specialPool.Flavor { + if 
flavor.Value == flavorCode { + isMatchSpec = true + break + } + } + + if !isMatchSpec { + return "cloudbrain.wrong_specification" + + } + + } + + } + } + + if !isMatchPool { + isMatchSpec := false + if jobType == string(models.JobTypeDebug) { + for _, flavor := range modelarts.FlavorInfos.FlavorInfo { + if flavor.Value == flavorCode { + isMatchSpec = true + break + } + } + } else { + + var flavorInfos modelarts.Flavor + json.Unmarshal([]byte(setting.TrainJobFLAVORINFOS), &flavorInfos) + + for _, flavor := range flavorInfos.Info { + if flavor.Code == flavorCode { + isMatchSpec = true + break + } + } + } + + if !isMatchSpec { + + return "cloudbrain.wrong_specification" + } + + } + + } + return "" +} func InferenceJobIndex(ctx *context.Context) { MustEnableModelArts(ctx) @@ -2171,7 +2326,7 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error { ctx.Data["newInference"] = true t := time.Now() - var displayJobName = cutString(ctx.User.Name, 5) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] ctx.Data["display_job_name"] = displayJobName attachs, err := models.GetModelArtsTrainAttachments(ctx.User.ID) @@ -2209,6 +2364,8 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error { } ctx.Data["flavor_infos"] = flavorInfos.Info + setSpecBySpecialPoolConfig(ctx, string(models.JobTypeInference)) + ctx.Data["params"] = "" ctx.Data["branchName"] = ctx.Repo.BranchName @@ -2277,6 +2434,7 @@ func inferenceJobErrorNewDataPrepare(ctx *context.Context, form auth.CreateModel return err } ctx.Data["flavor_infos"] = flavorInfos.Info + setSpecBySpecialPoolConfig(ctx, string(models.JobTypeInference)) configList, err := getConfigList(modelarts.PerPage, 1, modelarts.SortByCreateTime, "desc", "", modelarts.ConfigTypeCustom) if err != nil { diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 
ac1265d04..508addf75 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -5,6 +5,8 @@ import ( "net/http" "net/url" "os" + "strconv" + "strings" "time" "code.gitea.io/gitea/models" @@ -404,7 +406,7 @@ func queryMetrics(ctx *context.Context, tableName string, startTime time.Time, e if tableName == "public.user_business_analysis_yesterday" { mapInterface["datarecordbegintime"] = setting.RadarMap.GrowthBeginTime if len(result) > 0 { - dateTime := time.Unix(result[0].CountDate, 0) + dateTime := time.Unix(result[0].CountDate, 0).AddDate(0, 0, 1) mapInterface["lastUpdatedTime"] = dateTime.Format("2006-01-02 15:04:05") } else { mapInterface["lastUpdatedTime"] = "" @@ -450,7 +452,7 @@ func DownloadUserDefineFile(ctx *context.Context) { func QueryUserMetricsCurrentMonth(ctx *context.Context) { currentTimeNow := time.Now() - pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, currentTimeNow.Location()) pageStartTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), 1, 0, 0, 0, 0, currentTimeNow.Location()) pageStartTime = getStartTime(pageStartTime) queryMetrics(ctx, "public.user_business_analysis_current_month", pageStartTime, pageEndTime) @@ -476,7 +478,7 @@ func QueryUserMetricsCurrentWeek(ctx *context.Context) { } pageStartTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, offset) pageStartTime = getStartTime(pageStartTime) - pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, currentTimeNow.Location()) queryMetrics(ctx, "public.user_business_analysis_current_week", 
pageStartTime, pageEndTime) } func QueryUserStaticCurrentWeek(ctx *context.Context) { @@ -490,7 +492,7 @@ func QueryUserMetricsCurrentYear(ctx *context.Context) { currentTimeNow := time.Now() pageStartTime := time.Date(currentTimeNow.Year(), 1, 1, 0, 0, 0, 0, currentTimeNow.Location()) pageStartTime = getStartTime(pageStartTime) - pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, currentTimeNow.Location()) queryMetrics(ctx, "public.user_business_analysis_current_year", pageStartTime, pageEndTime) } func QueryUserStaticCurrentYear(ctx *context.Context) { @@ -500,7 +502,7 @@ func QueryUserMetricsLast30Day(ctx *context.Context) { currentTimeNow := time.Now() pageStartTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -30) pageStartTime = getStartTime(pageStartTime) - pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, currentTimeNow.Location()) queryMetrics(ctx, "public.user_business_analysis_last30_day", pageStartTime, pageEndTime) } func QueryUserStaticLast30Day(ctx *context.Context) { @@ -518,7 +520,7 @@ func QueryUserStaticLastMonth(ctx *context.Context) { queryUserDataPage(ctx, "public.user_business_analysis_last_month", new(models.UserBusinessAnalysisLastMonth)) } func QueryUserMetricsYesterday(ctx *context.Context) { - currentTimeNow := time.Now() + currentTimeNow := time.Now().AddDate(0, 0, -1) pageStartTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local) pageStartTime = getStartTime(pageStartTime) pageEndTime := time.Date(currentTimeNow.Year(), 
currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) @@ -531,7 +533,7 @@ func QueryUserMetricsAll(ctx *context.Context) { currentTimeNow := time.Now() pageStartTime := time.Date(2022, 4, 5, 0, 0, 0, 0, currentTimeNow.Location()) pageStartTime = getStartTime(pageStartTime) - pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, currentTimeNow.Location()) queryMetrics(ctx, "public.user_business_analysis_all", pageStartTime, pageEndTime) } func QueryUserStaticAll(ctx *context.Context) { @@ -611,7 +613,15 @@ func QueryUserStaticDataPage(ctx *context.Context) { ctx.JSON(http.StatusOK, ctx.Tr("user.static.downloadinfo")+"/api/v1/download_user_define_file?filename="+filename) } else { mapInterface := make(map[string]interface{}) - re, count := models.QueryUserStaticDataPage(pageOpts) + key := startTime.Format("2006-01-02") + endTime.Format("2006-01-02") + log.Info("db key =" + key) + re, count := models.QueryDataForUserDefineFromDb(pageOpts, key) + if count == 0 { + wikiMap, _ := queryWikiCountMap(startTime, endTime) + re, count = models.QueryUserStaticDataForUserDefine(pageOpts, wikiMap) + models.WriteDataToDb(re, key) + } + re, count = models.QueryDataForUserDefineFromDb(pageOpts, key) mapInterface["data"] = re mapInterface["count"] = count ctx.JSON(http.StatusOK, mapInterface) @@ -839,3 +849,61 @@ func writeUserActivityToExcel(startTime time.Time, endTime time.Time, filePath s log.Info("write to file succeed, filepath=" + filePath) } } + +// URL: /api/v1/query_user_login?userId=1,2,3,4 +func QueryUserLoginInfo(ctx *context.Context) { + userId := ctx.Query("userId") + userIds := strings.Split(userId, ",") + userIdInt := make([]int64, 0) + for _, id := range userIds { + idInt, err := strconv.ParseInt(id, 10, 64) + if err == nil { + userIdInt = 
append(userIdInt, idInt) + } + } + result := models.QueryUserLoginInfo(userIdInt) + + xlsx := excelize.NewFile() + sheetName := ctx.Tr("用户登录信息") + index := xlsx.NewSheet(sheetName) + xlsx.DeleteSheet("Sheet1") + + excelHeader := make([]string, 0) + excelHeader = append(excelHeader, "用户ID") + excelHeader = append(excelHeader, "登录IP") + excelHeader = append(excelHeader, "登录时间") + + excelHeaderMap := make(map[string]string, 0) + var j byte + j = 0 + for _, value := range excelHeader { + excelColumn := getColumn(j) + fmt.Sprint(1) + log.Info("excelColumn=" + excelColumn) + excelHeaderMap[excelColumn] = value + j++ + } + for k, v := range excelHeaderMap { + //设置单元格的值 + xlsx.SetCellValue(sheetName, k, v) + } + for i, userLogin := range result { + row := i + 2 + rows := fmt.Sprint(row) + var tmp byte + tmp = 0 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userLogin.UId) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userLogin.IpAddr) + tmp = tmp + 1 + formatTime := userLogin.CreatedUnix.Format("2006-01-02 15:04:05") + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime) + } + //设置默认打开的表单 + xlsx.SetActiveSheet(index) + filename := sheetName + "_" + time.Now().Format("2006-01-02 15:04:05") + ".xlsx" + ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(filename)) + ctx.Resp.Header().Set("Content-Type", "application/octet-stream") + if _, err := xlsx.WriteTo(ctx.Resp); err != nil { + log.Info("writer exel error." + err.Error()) + } +} diff --git a/routers/repo/view.go b/routers/repo/view.go index 02004fa06..3a18e4ddf 100755 --- a/routers/repo/view.go +++ b/routers/repo/view.go @@ -484,6 +484,7 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st ) } else if isNoteBook { ctx.Data["FileContent"] = string(buf) + ctx.Data["FileParentURL"] = path.Dir(rawLink+"/"+ctx.Repo.TreePath) + "/" } else { // Building code view blocks with line number on server side. 
var fileContent string diff --git a/routers/routes/routes.go b/routers/routes/routes.go index f917aebf1..03b53c5cd 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -1136,7 +1136,7 @@ func RegisterRoutes(m *macaron.Macaron) { }) }, context.RepoRef()) m.Group("/modelmanage", func() { - m.Post("/create_model", reqRepoModelManageWriter, repo.SaveModel) + m.Post("/create_model", repo.SaveModel) m.Post("/create_model_convert", reqRepoModelManageWriter, repo.SaveModelConvert) m.Post("/create_new_model", repo.SaveNewNameModel) m.Delete("/delete_model", repo.DeleteModel) diff --git a/routers/search.go b/routers/search.go index 05074df55..628350424 100644 --- a/routers/search.go +++ b/routers/search.go @@ -313,9 +313,8 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context()) if err == nil { - searchJson, _ := json.Marshal(res) - log.Info("searchJson=" + string(searchJson)) esresult := makeRepoResult(res, Key, OnlyReturnNum, language) + setForkRepoOrder(esresult) resultObj.Total = resultObj.PrivateTotal + esresult.Total isNeedSort := false if len(resultObj.Result) > 0 { @@ -348,6 +347,30 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa } } +func setForkRepoOrder(esresult *SearchRes) { + forkidMap := make(map[string]int, 0) + for index, re := range esresult.Result { + if re["fork_id"] != nil { + fork_id := re["fork_id"].(string) + if _, ok := forkidMap[fork_id]; !ok { + forkidMap[fork_id] = index + } + } + } + for key, value := range forkidMap { + for index, re := range esresult.Result { + if re["id"].(string) == key { + if value < index { //swap + tmp := esresult.Result[index] + esresult.Result[index] = esresult.Result[value] + esresult.Result[value] = tmp + break + } + } + } + } +} + 
func sortRepo(Result []map[string]interface{}, SortBy string, ascending bool) { orderBy := "" switch SortBy { @@ -479,6 +502,7 @@ func makeRepoResult(sRes *elastic.SearchResult, Key string, OnlyReturnNum bool, record["num_stars"] = recordSource["num_stars"] record["num_forks"] = recordSource["num_forks"] record["lower_alias"] = recordSource["lower_alias"] + record["fork_id"] = recordSource["fork_id"] if recordSource["topics"] != nil { topicsStr := recordSource["topics"].(string) log.Info("topicsStr=" + topicsStr) diff --git a/routers/user/home.go b/routers/user/home.go index ab64e707f..25b1c518e 100755 --- a/routers/user/home.go +++ b/routers/user/home.go @@ -23,6 +23,7 @@ import ( "code.gitea.io/gitea/modules/modelarts" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/routers/repo" issue_service "code.gitea.io/gitea/services/issue" pull_service "code.gitea.io/gitea/services/pull" @@ -834,6 +835,11 @@ func Cloudbrains(ctx *context.Context) { ciTasks[i].CanDebug = true ciTasks[i].CanDel = true ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource + ciTasks[i].Cloudbrain.AiCenter = repo.GetCloudbrainAiCenter(task.Cloudbrain, ctx) + _, cardType, _ := repo.GetCloudbrainCardNumAndType(task.Cloudbrain) + ciTasks[i].Cloudbrain.CardType = cardType + ciTasks[i].Cloudbrain.Cluster = repo.GetCloudbrainCluster(task.Cloudbrain, ctx) + } pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, getTotalPage(count, setting.UI.IssuePagingNum)) diff --git a/routers/user/profile.go b/routers/user/profile.go index 30808f235..42cdfd1a8 100755 --- a/routers/user/profile.go +++ b/routers/user/profile.go @@ -116,8 +116,8 @@ func Profile(ctx *context.Context) { } var opts = models.FindOrgMembersOpts{ - OrgID: org.ID, - PublicOnly: true, + OrgID: org.ID, + PublicOnly: true, } if ctx.User != nil { @@ -261,7 +261,7 @@ func Profile(ctx *context.Context) { IsOwner: isOwner, ListOptions: models.ListOptions{ Page: page, - 
PageSize: setting.UI.ExplorePagingNum, + PageSize: setting.UI.User.RepoPagingNum, }, CloudBrainType: -1, } diff --git a/templates/admin/cloudbrain/list.tmpl b/templates/admin/cloudbrain/list.tmpl index e66f40e84..cd5913c40 100755 --- a/templates/admin/cloudbrain/list.tmpl +++ b/templates/admin/cloudbrain/list.tmpl @@ -18,7 +18,7 @@ data-all-compute="{{.i18n.Tr "admin.cloudbrain.all_computing_resources"}}" data-all-status="{{.i18n.Tr "admin.cloudbrain.all_status"}}"> {{template "admin/navbar" .}} -
+
{{template "base/alert" .}}
@@ -34,34 +34,46 @@
-
- {{$.i18n.Tr "repo.cloudbrain_task"}} +
+ {{$.i18n.Tr "repo.cloudbrain_task"}}
-
- {{$.i18n.Tr "repo.cloudbrain_task_type"}} + +
+ {{$.i18n.Tr "repo.modelarts.cluster"}}
-
+
+ {{$.i18n.Tr "repo.cloudbrain_task_type"}} +
+
{{$.i18n.Tr "repo.modelarts.status"}}
-
+
{{$.i18n.Tr "repo.modelarts.createtime"}}
-
+
{{$.i18n.Tr "repo.cloudbrain_status_runtime"}}
-
+
{{$.i18n.Tr "repo.modelarts.computing_resources"}}
-
+ +
+ {{$.i18n.Tr "repo.modelarts.ai_center"}} +
+ +
+ {{$.i18n.Tr "repo.modelarts.card_type"}} +
+
{{$.i18n.Tr "repo.cloudbrain_creator"}}
-
+
{{$.i18n.Tr "repository"}}
-
+
{{.i18n.Tr "admin.cloudbrain.cloudbrain_name"}}
-
+
{{$.i18n.Tr "repo.cloudbrain_operate"}}
@@ -78,24 +90,24 @@ {{$JobID = .JobID}} {{end}} -
+ - -
- {{.JobType}} + +
+ {{if .Cluster}}{{.Cluster}}{{else}}--{{end}}
+ style="width: 6% !important;"> @@ -131,23 +144,39 @@ style="margin-left: 0.4em;font-size: 12px;">{{.Status}}
+ +
+ {{.JobType}} +
+ -
+
{{TimeSinceUnix1 .Cloudbrain.CreatedUnix}}
-
+
{{if .TrainJobDuration}}{{.TrainJobDuration}}{{else}}--{{end}}
-
+
{{if .ComputeResource}}{{.ComputeResource}}{{else}}--{{end}}
+ +
+ {{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}} +
+ +
+ + {{if .CardType}}{{.CardType}}{{else}}--{{end}} + +
-
+
{{if .User.Name}} @@ -157,16 +186,24 @@ {{end}}
-
+
- {{.JobName}} + style="overflow: hidden;text-overflow:ellipsis;width:10% !important;"> + + {{.JobName}} +
-
+
{{if eq .JobType "DEBUG"}}
@@ -235,36 +272,37 @@
-
+
{{if eq .JobType "DEBUG"}} - + {{.DisplayJobName}} {{else if eq .JobType "INFERENCE"}} - + {{.DisplayJobName}} {{else if eq .JobType "TRAIN"}} - + {{.DisplayJobName}} {{else if eq .JobType "BENCHMARK"}} - + {{.DisplayJobName}} {{end}}
- -
- {{.JobType}} + +
+ {{if .Cluster}}{{.Cluster}}{{else}}--{{end}}
+ style="width: 6% !important;"> {{.Status}}
+ +
+ {{.JobType}} +
+ -
+
{{TimeSinceUnix1 .Cloudbrain.CreatedUnix}}
-
+
{{if .TrainJobDuration}}{{.TrainJobDuration}}{{else}}--{{end}}
-
+
{{if .ComputeResource}}{{.ComputeResource}}{{else}}--{{end}}
+ +
+ {{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}} +
+ +
+ + {{if .CardType}}{{.CardType}}{{else}}--{{end}} + +
-
+
{{if .User.Name}} @@ -298,15 +352,24 @@ {{end}}
-
+
--
- {{.JobName}} + style="overflow: hidden;text-overflow:ellipsis;width:10% !important;"> + + {{.JobName}} + +
-
+
{{if eq .JobType "DEBUG"}}
diff --git a/templates/admin/cloudbrain/search_dashboard.tmpl b/templates/admin/cloudbrain/search_dashboard.tmpl index 61f02f182..e4d74836d 100644 --- a/templates/admin/cloudbrain/search_dashboard.tmpl +++ b/templates/admin/cloudbrain/search_dashboard.tmpl @@ -15,7 +15,7 @@
-
+
diff --git a/templates/repo/cloudbrain/benchmark/new.tmpl b/templates/repo/cloudbrain/benchmark/new.tmpl index fb1296d27..e1b5c890e 100755 --- a/templates/repo/cloudbrain/benchmark/new.tmpl +++ b/templates/repo/cloudbrain/benchmark/new.tmpl @@ -69,8 +69,8 @@ - {{.i18n.Tr "cloudbrain.job_name_rule"}} + tabindex="3" autofocus required maxlength="36"> + {{.i18n.Tr "repo.cloudbrain_jobname_err"}}
- {{.i18n.Tr "cloudbrain.job_name_rule"}} + tabindex="3" autofocus required maxlength="36"> + {{.i18n.Tr "repo.cloudbrain_jobname_err"}}
- - {{.i18n.Tr "cloudbrain.job_name_rule"}} + + {{.i18n.Tr "repo.cloudbrain_jobname_err"}}
@@ -444,7 +444,7 @@ identifier : 'display_job_name', rules: [ { - type: 'regExp[/^[a-zA-Z0-9-_]{1,64}[a-zA-Z0-9_]$/]', + type: 'regExp[/^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$/]', } ] }, @@ -497,6 +497,7 @@ } validate(); $('.ui.create_train_job.green.button').click(function(e) { - send_run_para() + send_run_para(); + validate(); }) diff --git a/templates/repo/cloudbrain/inference/show.tmpl b/templates/repo/cloudbrain/inference/show.tmpl index ac11b16a9..055e403bd 100644 --- a/templates/repo/cloudbrain/inference/show.tmpl +++ b/templates/repo/cloudbrain/inference/show.tmpl @@ -381,14 +381,14 @@
- - {{.Image}} + {{.Image}}
diff --git a/templates/repo/cloudbrain/show.tmpl b/templates/repo/cloudbrain/show.tmpl index 012be8c10..0c53f7fce 100755 --- a/templates/repo/cloudbrain/show.tmpl +++ b/templates/repo/cloudbrain/show.tmpl @@ -392,14 +392,14 @@
- - {{.Image}} + {{.Image}}
diff --git a/templates/repo/cloudbrain/trainjob/new.tmpl b/templates/repo/cloudbrain/trainjob/new.tmpl index 0208398b5..3a6bc0e0e 100755 --- a/templates/repo/cloudbrain/trainjob/new.tmpl +++ b/templates/repo/cloudbrain/trainjob/new.tmpl @@ -132,8 +132,8 @@ - {{.i18n.Tr "cloudbrain.job_name_rule"}} + maxlength="36"> + {{.i18n.Tr "repo.cloudbrain_jobname_err"}}
@@ -441,7 +441,7 @@ identifier: 'display_job_name', rules: [ { - type: 'regExp[/^[a-zA-Z0-9-_]{1,64}[a-zA-Z0-9_]$/]', + type: 'regExp[/^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$/]', } ] }, @@ -499,6 +499,7 @@ } validate(); $('.ui.create_train_job.green.button').click(function (e) { - send_run_para() + send_run_para(); + validate(); }) \ No newline at end of file diff --git a/templates/repo/cloudbrain/trainjob/show.tmpl b/templates/repo/cloudbrain/trainjob/show.tmpl index 9a01ea6c8..e4d8ff346 100644 --- a/templates/repo/cloudbrain/trainjob/show.tmpl +++ b/templates/repo/cloudbrain/trainjob/show.tmpl @@ -384,14 +384,14 @@
- - {{.Image}} + {{.Image}}
diff --git a/templates/repo/grampus/trainjob/gpu/new.tmpl b/templates/repo/grampus/trainjob/gpu/new.tmpl index 88e8ccd9d..85fc1ef67 100755 --- a/templates/repo/grampus/trainjob/gpu/new.tmpl +++ b/templates/repo/grampus/trainjob/gpu/new.tmpl @@ -118,8 +118,8 @@
- - {{.i18n.Tr "cloudbrain.job_name_rule"}} + + {{.i18n.Tr "repo.cloudbrain_jobname_err"}}
@@ -387,7 +387,7 @@ identifier : 'display_job_name', rules: [ { - type: 'regExp[/^[a-zA-Z0-9-_]{1,64}[a-zA-Z0-9_]$/]', + type: 'regExp[/^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$/]', } ] }, @@ -446,6 +446,7 @@ } validate(); $('.ui.create_train_job.green.button').click(function(e) { - send_run_para() + send_run_para(); + validate(); }) \ No newline at end of file diff --git a/templates/repo/grampus/trainjob/npu/new.tmpl b/templates/repo/grampus/trainjob/npu/new.tmpl index 56d9a0f30..612c61833 100755 --- a/templates/repo/grampus/trainjob/npu/new.tmpl +++ b/templates/repo/grampus/trainjob/npu/new.tmpl @@ -115,8 +115,8 @@
- - {{.i18n.Tr "cloudbrain.job_name_rule"}} + + {{.i18n.Tr "repo.cloudbrain_jobname_err"}}
@@ -416,7 +416,7 @@ identifier : 'display_job_name', rules: [ { - type: 'regExp[/^[a-zA-Z0-9-_]{1,64}[a-zA-Z0-9_]$/]', + type: 'regExp[/^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$/]', } ] }, @@ -478,6 +478,7 @@ validate(); $('.ui.create_train_job.green.button').click(function(e) { get_name() - send_run_para() + send_run_para(); + validate(); }) diff --git a/templates/repo/header.tmpl b/templates/repo/header.tmpl index e5c2619e2..b36e89d4d 100755 --- a/templates/repo/header.tmpl +++ b/templates/repo/header.tmpl @@ -190,7 +190,6 @@ - diff --git a/templates/repo/modelarts/inferencejob/new.tmpl b/templates/repo/modelarts/inferencejob/new.tmpl index 358ec8cea..3ab25fba9 100644 --- a/templates/repo/modelarts/inferencejob/new.tmpl +++ b/templates/repo/modelarts/inferencejob/new.tmpl @@ -94,8 +94,8 @@
- - {{.i18n.Tr "cloudbrain.job_name_rule"}} + + {{.i18n.Tr "repo.cloudbrain_jobname_err"}}
@@ -468,7 +468,7 @@ identifier : 'display_job_name', rules: [ { - type: 'regExp[/^[a-zA-Z0-9-_]{1,64}[a-zA-Z0-9_]$/]', + type: 'regExp[/^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$/]', } ] }, @@ -522,6 +522,7 @@ validate(); $('.ui.create_train_job.green.button').click(function(e) { send_run_para() - get_name() + get_name(); + validate(); }) diff --git a/templates/repo/modelarts/notebook/new.tmpl b/templates/repo/modelarts/notebook/new.tmpl index b81bdfdec..3b9dcfafb 100755 --- a/templates/repo/modelarts/notebook/new.tmpl +++ b/templates/repo/modelarts/notebook/new.tmpl @@ -50,7 +50,7 @@
- +
@@ -118,7 +118,7 @@ identifier : 'display_job_name', rules: [ { - type: 'regExp[/^[a-z0-9][a-z0-9-_]{1,36}$/]', + type: 'regExp[/^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$/]', } ] }, @@ -136,11 +136,11 @@ let value_task = $("input[name='display_job_name']").val() - let re = /^[a-z0-9][a-z0-9-_]{1,36}$/ + let re = /^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$/ let flag = re.test(value_task) if(!flag){ $('#messageInfo').css('display','block') - let str = '只能以小写字母或数字开头且只包含小写字母、数字、_和-、最长36个字符。' + let str = '只能以小写字母或数字开头且只包含小写字母、数字、_和-,不能以_结尾,最长36个字符。' $('#messageInfo p').text(str) return false } diff --git a/templates/repo/modelarts/notebook/show.tmpl b/templates/repo/modelarts/notebook/show.tmpl index b9b75e544..2b2541900 100755 --- a/templates/repo/modelarts/notebook/show.tmpl +++ b/templates/repo/modelarts/notebook/show.tmpl @@ -359,14 +359,14 @@
- - {{.Image}} + {{.Image}}
diff --git a/templates/repo/modelarts/trainjob/new.tmpl b/templates/repo/modelarts/trainjob/new.tmpl index 0220d194e..081d39f70 100755 --- a/templates/repo/modelarts/trainjob/new.tmpl +++ b/templates/repo/modelarts/trainjob/new.tmpl @@ -122,8 +122,8 @@
- - {{.i18n.Tr "cloudbrain.job_name_rule"}} + + {{.i18n.Tr "repo.cloudbrain_jobname_err"}}
@@ -469,7 +469,7 @@ identifier: 'display_job_name', rules: [ { - type: 'regExp[/^[a-zA-Z0-9-_]{1,64}[a-zA-Z0-9_]$/]', + type: 'regExp[/^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$/]', } ] }, @@ -531,6 +531,7 @@ validate(); $('.ui.create_train_job.green.button').click(function (e) { get_name() - send_run_para() + send_run_para(); + validate(); }) \ No newline at end of file diff --git a/templates/repo/view_file.tmpl b/templates/repo/view_file.tmpl index 61d04570f..91ec9b0bb 100755 --- a/templates/repo/view_file.tmpl +++ b/templates/repo/view_file.tmpl @@ -103,20 +103,23 @@ {{end}}
{{else if .FileSize}} - + {{if .IsNoteBook}} +
+ + {{else}} +
{{if .IsFileTooLarge}} - {{else if .IsNoteBook}} - {{else}} {{end}} -
{{.i18n.Tr "repo.file_too_large"}}{{.LineNums}}
    {{.FileContent}}
+ + {{end}} {{end}}
@@ -134,6 +137,9 @@ function showNoteBook(){ var isNoteBook = {{.IsNoteBook}} if (isNoteBook) { var jsonStr = "{{.FileContent}}" + nb.markdown.setOptions({ + baseUrl: {{.FileParentURL}} + }); var notebook = nb.parse(JSON.parse(jsonStr)); var rendered = notebook.render(); $("#notebook").append(rendered); diff --git a/templates/user/dashboard/cloudbrains.tmpl b/templates/user/dashboard/cloudbrains.tmpl index dc0337316..8019bc51d 100755 --- a/templates/user/dashboard/cloudbrains.tmpl +++ b/templates/user/dashboard/cloudbrains.tmpl @@ -20,7 +20,7 @@ data-all-compute="{{.i18n.Tr "admin.cloudbrain.all_computing_resources"}}" data-all-status="{{.i18n.Tr "admin.cloudbrain.all_status"}}">
{{template "admin/cloudbrain/search_dashboard" .}} -
+
{{template "base/alert" .}}
@@ -30,30 +30,42 @@
-
- {{$.i18n.Tr "repo.cloudbrain_task"}} +
+ {{$.i18n.Tr "repo.cloudbrain_task"}}
-
+ +
+ {{$.i18n.Tr "repo.modelarts.cluster"}} +
+
{{$.i18n.Tr "repo.modelarts.status"}}
-
- {{$.i18n.Tr "repo.cloudbrain_task_type"}} +
+ {{$.i18n.Tr "repo.cloudbrain_task_type"}}
-
+
{{$.i18n.Tr "repo.modelarts.createtime"}}
-
+
{{$.i18n.Tr "repo.cloudbrain_status_runtime"}}
-
+
{{$.i18n.Tr "repo.modelarts.computing_resources"}}
- -
+ + +
+ {{$.i18n.Tr "repo.modelarts.ai_center"}} +
+ +
+ {{$.i18n.Tr "repo.modelarts.card_type"}} +
+
{{$.i18n.Tr "repository"}}
-
+
{{$.i18n.Tr "repo.cloudbrain_operate"}}
@@ -70,48 +82,52 @@ {{$JobID = .JobID}} {{end}} -
+ - + +
+ {{if .Cluster}}{{.Cluster}}{{else}}--{{end}} +
+ style="width: 8% !important;"> @@ -123,32 +139,44 @@ {{$JobType := $.i18n.Tr (printf "cloudbrain.%s" .JobType)}} -
+
{{$JobType}}
-
+
{{TimeSinceUnix1 .Cloudbrain.CreatedUnix}}
-
+
{{if .TrainJobDuration}}{{.TrainJobDuration}}{{else}}--{{end}}
-
+
{{if .ComputeResource}}{{.ComputeResource}}{{else}}--{{end}}
+ + +
+ {{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}} +
+ +
+ + {{if .CardType}}{{.CardType}}{{else}}--{{end}} + +
-
+ -
+
{{if eq .JobType "DEBUG"}}
@@ -226,33 +254,37 @@
-
+
{{if eq .JobType "DEBUG"}} - + {{.DisplayJobName}} {{else if eq .JobType "INFERENCE"}} - + {{.DisplayJobName}} {{else if eq .JobType "TRAIN"}} - + {{.DisplayJobName}} {{else if eq .JobType "BENCHMARK"}} - + {{.DisplayJobName}} {{end}}
- + +
+ {{if .Cluster}}{{.Cluster}}{{else}}--{{end}} +
+ style="padding-left: 2.2rem !important; width: 8% !important;"> {{$JobType := $.i18n.Tr (printf "cloudbrain.%s" .JobType)}} -
+
{{$JobType}}
-
+
{{TimeSinceUnix1 .Cloudbrain.CreatedUnix}}
-
+
{{if .TrainJobDuration}}{{.TrainJobDuration}}{{else}}--{{end}}
-
+
{{if .ComputeResource}}{{.ComputeResource}}{{else}}--{{end}}
- + + +
+ {{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}} +
+ +
+ + {{if .CardType}}{{.CardType}}{{else}}--{{end}} + +
-
+
--
-
+
{{if eq .JobType "DEBUG"}}
diff --git a/web_src/js/components/MinioUploader.vue b/web_src/js/components/MinioUploader.vue index 167bb8c5a..e657f6f9f 100755 --- a/web_src/js/components/MinioUploader.vue +++ b/web_src/js/components/MinioUploader.vue @@ -135,10 +135,10 @@ export default { allUploadLength(len){ if(len===this.uploadFiles.length){ setTimeout(() => { - this.dropzoneUploader.removeAllFiles(true) - this.btnFlag = false - this.$emit('setcluster',this.btnFlag) - }, 2000); + this.dropzoneUploader.removeAllFiles(true) + this.btnFlag = false + this.$emit('setcluster',this.btnFlag) + }, 2000); } } }, @@ -254,7 +254,7 @@ export default { (currentChunk / chunks) * 100 ).toFixed(2)}% (${currentChunk}/${chunks})`; - this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2)); + // this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2)); loadMd5Next(); return; } @@ -265,7 +265,7 @@ export default { file.size } 用时:${(new Date().getTime() - time) / 1000} s` ); - this.updateProgress(file,100) + // this.updateProgress(file,100) spark.destroy(); // 释放缓存 file.uniqueIdentifier = md5; // 将文件md5赋值给文件唯一标识 file.cmd5 = false; // 取消计算md5状态 @@ -297,11 +297,11 @@ export default { file.chunks = ''; this.multipartUpload(file); } else { - // 失败如何处理 - let info = "上传失败" - this.allUploadLength++ - this.uploadError(file,info) - this.allUploadFiles.push({name:file.name,status:2,info:info}) + // 失败如何处理 + let info = "上传失败" + this.allUploadLength++ + this.uploadError(file,info) + this.allUploadFiles.push({name:file.name,status:2,info:info}) return; } return; @@ -321,12 +321,16 @@ export default { this.uploadError(file,info) this.allUploadLength++ this.allUploadFiles.push({name:file.name,status:1,info:info}) + return; } - } + } console.log('文件已上传完成'); + this.allUploadLength++ + this.allUploadFiles.push({name:file.name,status:0,info:'上传成功'}) + this.updateProgress(file, 100); this.progress = 100; this.status = this.dropzoneParams.data('upload-complete'); - // this.finishUpload(file); + 
this.finishUpload(file); } else { // 断点续传 this.multipartUpload(file); @@ -334,6 +338,10 @@ export default { } catch (error) { this.emitDropzoneFailed(file); console.log(error); + let info = "上传失败" + this.allUploadLength++ + this.uploadError(file,info) + this.allUploadFiles.push({name:file.name,status:2,info:info}) } async function addAttachment(file) { @@ -503,6 +511,7 @@ export default { console.log(error); //this.emitDropzoneFailed(file); //console.log(error); + throw error; } } @@ -538,6 +547,11 @@ export default { await uploadChunk(e); }catch(err){ console.log(err) + let info = "上传失败" + this.allUploadLength++ + this.uploadError(file,info) + this.allUploadFiles.push({name:file.name,status:2,info:info}); + return; } fileReader.abort(); diff --git a/web_src/js/components/Model.vue b/web_src/js/components/Model.vue index cf1210d59..57c730d30 100644 --- a/web_src/js/components/Model.vue +++ b/web_src/js/components/Model.vue @@ -106,7 +106,7 @@ @@ -332,8 +332,8 @@ export default { } }, - deleteModel(id,name){ - let row={cName:name,ID:id} + deleteModel(id,name,rowKey){ + let row={cName:name,ID:id, rowKey: rowKey} let _this = this let flag=1 $('.ui.basic.modal.first') diff --git a/web_src/js/components/dataset/selectDataset.vue b/web_src/js/components/dataset/selectDataset.vue index 4c056094e..43b3701f4 100755 --- a/web_src/js/components/dataset/selectDataset.vue +++ b/web_src/js/components/dataset/selectDataset.vue @@ -75,8 +75,8 @@
@@ -727,7 +727,7 @@ export default { "currentTree", this.paramsCurrent.page ); - this.initCurrentTreeNode = [this.currentDatasetList[0].id]; + this.initCurrentTreeNode = this.currentDatasetList[0]?.id ? [this.currentDatasetList[0].id] : []; this.totalNumCurrent = parseInt(res.data.count); let setCheckedKeysList = this.currentDatasetList.reduce( (pre, cur) => { @@ -742,7 +742,7 @@ export default { ); this.$refs.currentTree.setCheckedKeys(setCheckedKeysList); }) - .catch(function (error) { + .catch((error) => { this.loadingCurrent = false; console.log(error); }); @@ -763,7 +763,7 @@ export default { "myTree", this.paramsMy.page ); - this.initMyTreeNode = [this.myDatasetList[0].id]; + this.initMyTreeNode = this.myDatasetList[0]?.id ? [this.myDatasetList[0].id] : []; this.totalNumMy = parseInt(res.data.count); let setCheckedKeysList = this.myDatasetList.reduce((pre, cur) => { cur.Attachments.forEach((item) => { @@ -775,7 +775,7 @@ export default { }, []); this.$refs.myTree.setCheckedKeys(setCheckedKeysList); }) - .catch(function (error) { + .catch((error) => { console.log(error); }); }, @@ -796,7 +796,7 @@ export default { "publicTree", this.paramsPublics.page ); - this.initPublicTreeNode = [this.publicDatasetList[0].id]; + this.initPublicTreeNode = this.publicDatasetList[0]?.id ? [this.publicDatasetList[0].id] : []; this.totalNumPublic = parseInt(res.data.count); let setCheckedKeysList = this.publicDatasetList.reduce((pre, cur) => { cur.Attachments.forEach((item) => { @@ -808,7 +808,7 @@ export default { }, []); this.$refs.publicTree.setCheckedKeys(setCheckedKeysList); }) - .catch(function (error) { + .catch((error) => { this.loadingPublic = false; console.log(error); }); @@ -830,7 +830,7 @@ export default { "favoriteTree", this.paramsFavorite.page ); - this.initFavoriteTreeNode = [this.MyFavoriteDatasetList[0].id]; + this.initFavoriteTreeNode = this.MyFavoriteDatasetList[0]?.id ? 
[this.MyFavoriteDatasetList[0].id] : []; this.totalNumFavorite = parseInt(res.data.count); let setCheckedKeysList = this.MyFavoriteDatasetList.reduce( (pre, cur) => { @@ -845,7 +845,7 @@ export default { ); this.$refs.favoriteTree.setCheckedKeys(setCheckedKeysList); }) - .catch(function (error) { + .catch((error) => { this.loadingFavorite = false; console.log(error); }); @@ -955,7 +955,7 @@ export default { ) { this.benchmarkNew = true; } - if (location.href.indexOf("modelarts/notebook/create") !== -1) { + if (location.href.indexOf("modelarts/notebook/create") !== -1 || location.href.indexOf("/cloudbrain/create") !== -1) { this.required = false; } window.onresize = () => {