| @@ -61,30 +61,6 @@ type AttachmentUsername struct { | |||
| Name string | |||
| } | |||
// AttachmentInfo is a query/display row joining an attachment with its
// repository and uploader information; filled by Attachments().
type AttachmentInfo struct {
	Attachment `xorm:"extends"`
	Repo       *Repository `xorm:"extends"`
	// NOTE(review): "extends" on these two scalar fields looks odd — in the
	// visible code (Attachments) they are populated manually from the user
	// record, not by the join; confirm the tags are intentional.
	RelAvatarLink string `xorm:"extends"`
	UserName      string `xorm:"extends"`
	Recommend     bool   `xorm:"-"` // copied from the owning dataset, not a column
}
// AttachmentsOptions are the filter and paging options accepted by
// Attachments. The Need* flags control whether the paired value field is
// applied to the query at all.
type AttachmentsOptions struct {
	ListOptions
	DatasetIDs      []int64 // applied only when NeedDatasetIDs is true
	DecompressState int
	Type            int   // filtered when >= 0
	UploaderID      int64 // filtered when > 0
	NeedDatasetIDs  bool
	NeedIsPrivate   bool
	IsPrivate       bool // applied only when NeedIsPrivate is true
	JustNeedZipFile bool // restrict to attachments with any decompress state (i.e. zip files)
	NeedRepoInfo    bool // enrich each row with dataset/repo/uploader details
	Keyword         string // case-insensitive substring match on name/description
	RecommendOnly   bool   // only attachments whose dataset is recommended
	UserId          int64
}
| func (a *Attachment) AfterUpdate() { | |||
| if a.DatasetID > 0 { | |||
| datasetIsPublicCount, err := x.Where("dataset_id = ? AND is_private = ?", a.DatasetID, false).Count(new(Attachment)) | |||
| @@ -494,19 +470,6 @@ func getPrivateAttachments(e Engine, userID int64) ([]*AttachmentUsername, error | |||
| return attachments, nil | |||
| } | |||
| func getAllUserAttachments(e Engine, userID int64) ([]*AttachmentUsername, error) { | |||
| attachments := make([]*AttachmentUsername, 0, 10) | |||
| if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id "+ | |||
| "= `user`.id").Where("decompress_state= ? and attachment.type = ? and (uploader_id= ? or is_private = ?)", DecompressStateDone, TypeCloudBrainOne, userID, false).Find(&attachments); err != nil { | |||
| return nil, err | |||
| } | |||
| return attachments, nil | |||
| } | |||
| func GetAllUserAttachments(userID int64) ([]*AttachmentUsername, error) { | |||
| return getAllUserAttachments(x, userID) | |||
| } | |||
| func getModelArtsUserAttachments(e Engine, userID int64) ([]*AttachmentUsername, error) { | |||
| attachments := make([]*AttachmentUsername, 0, 10) | |||
| if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id "+ | |||
| @@ -601,107 +564,6 @@ func GetAllAttachmentSize() (int64, error) { | |||
| return x.SumInt(&Attachment{}, "size") | |||
| } | |||
| func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| var cond = builder.NewCond() | |||
| if opts.NeedDatasetIDs { | |||
| cond = cond.And( | |||
| builder.In("attachment.dataset_id", opts.DatasetIDs), | |||
| ) | |||
| } | |||
| if opts.UploaderID > 0 { | |||
| cond = cond.And( | |||
| builder.Eq{"attachment.uploader_id": opts.UploaderID}, | |||
| ) | |||
| } | |||
| if (opts.Type) >= 0 { | |||
| cond = cond.And( | |||
| builder.Eq{"attachment.type": opts.Type}, | |||
| ) | |||
| } | |||
| if opts.NeedIsPrivate { | |||
| cond = cond.And( | |||
| builder.Eq{"attachment.is_private": opts.IsPrivate}, | |||
| ) | |||
| } | |||
| if opts.RecommendOnly { | |||
| cond = cond.And(builder.In("attachment.id", builder.Select("attachment.id"). | |||
| From("attachment"). | |||
| Join("INNER", "dataset", "attachment.dataset_id = dataset.id and dataset.recommend=true"))) | |||
| } | |||
| if opts.JustNeedZipFile { | |||
| var DecompressState []int32 | |||
| DecompressState = append(DecompressState, DecompressStateDone, DecompressStateIng, DecompressStateFailed) | |||
| cond = cond.And( | |||
| builder.In("attachment.decompress_state", DecompressState), | |||
| ) | |||
| } | |||
| var count int64 | |||
| var err error | |||
| if len(opts.Keyword) == 0 { | |||
| count, err = sess.Where(cond).Count(new(Attachment)) | |||
| } else { | |||
| lowerKeyWord := strings.ToLower(opts.Keyword) | |||
| cond = cond.And(builder.Or(builder.Like{"LOWER(attachment.name)", lowerKeyWord}, builder.Like{"LOWER(attachment.description)", lowerKeyWord})) | |||
| count, err = sess.Table(&Attachment{}).Where(cond).Count(new(AttachmentInfo)) | |||
| } | |||
| if err != nil { | |||
| return nil, 0, fmt.Errorf("Count: %v", err) | |||
| } | |||
| if opts.Page >= 0 && opts.PageSize > 0 { | |||
| var start int | |||
| if opts.Page == 0 { | |||
| start = 0 | |||
| } else { | |||
| start = (opts.Page - 1) * opts.PageSize | |||
| } | |||
| sess.Limit(opts.PageSize, start) | |||
| } | |||
| sess.OrderBy("attachment.created_unix DESC") | |||
| attachments := make([]*AttachmentInfo, 0, setting.UI.DatasetPagingNum) | |||
| if err := sess.Table(&Attachment{}).Where(cond). | |||
| Find(&attachments); err != nil { | |||
| return nil, 0, fmt.Errorf("Find: %v", err) | |||
| } | |||
| if opts.NeedRepoInfo { | |||
| for _, attachment := range attachments { | |||
| dataset, err := GetDatasetByID(attachment.DatasetID) | |||
| if err != nil { | |||
| return nil, 0, fmt.Errorf("GetDatasetByID failed error: %v", err) | |||
| } | |||
| attachment.Recommend = dataset.Recommend | |||
| repo, err := GetRepositoryByID(dataset.RepoID) | |||
| if err == nil { | |||
| attachment.Repo = repo | |||
| } else { | |||
| return nil, 0, fmt.Errorf("GetRepositoryByID failed error: %v", err) | |||
| } | |||
| user, err := GetUserByID(attachment.UploaderID) | |||
| if err == nil { | |||
| attachment.RelAvatarLink = user.RelAvatarLink() | |||
| attachment.UserName = user.Name | |||
| } else { | |||
| return nil, 0, fmt.Errorf("GetUserByID failed error: %v", err) | |||
| } | |||
| } | |||
| } | |||
| return attachments, count, nil | |||
| } | |||
| func GetAllDatasetContributorByDatasetId(datasetId int64) ([]*User, error) { | |||
| r := make([]*User, 0) | |||
| if err := x.Select("distinct(public.user.*)").Table("attachment").Join("LEFT", "user", "public.user.ID = attachment.uploader_id").Where("attachment.dataset_id = ?", datasetId).Find(&r); err != nil { | |||
| @@ -14,3 +14,24 @@ func BaseErrorMessage(message string) BaseMessage { | |||
| 1, message, | |||
| } | |||
| } | |||
// BaseMessageApi is the common code/message envelope returned by API
// endpoints: code 0 means success, any non-zero code means failure.
type BaseMessageApi struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
}

// BaseOKMessageApi is the canonical success response (code 0, empty message).
var BaseOKMessageApi = BaseMessageApi{Code: 0, Message: ""}

// BaseErrorMessageApi builds a failure envelope (code 1) carrying message.
func BaseErrorMessageApi(message string) BaseMessageApi {
	return BaseMessageApi{Code: 1, Message: message}
}

// BaseMessageWithDataApi is BaseMessageApi extended with an arbitrary
// data payload.
type BaseMessageWithDataApi struct {
	Code    int         `json:"code"`
	Message string      `json:"message"`
	Data    interface{} `json:"data"`
}
| @@ -291,6 +291,13 @@ func (task *Cloudbrain) IsRunning() bool { | |||
| status == string(JobRunning) || status == GrampusStatusRunning | |||
| } | |||
| func (task *Cloudbrain) IsUserHasRight(user *User) bool { | |||
| if user == nil { | |||
| return false | |||
| } | |||
| return user.IsAdmin || user.ID == task.UserID | |||
| } | |||
| func ConvertDurationToStr(duration int64) string { | |||
| if duration <= 0 { | |||
| return DURATION_STR_ZERO | |||
| @@ -2030,10 +2037,17 @@ func GetStoppedJobWithNoStartTimeEndTime() ([]*Cloudbrain, error) { | |||
| cloudbrains := make([]*Cloudbrain, 0) | |||
| return cloudbrains, x.SQL("select * from cloudbrain where status in (?,?,?,?,?,?,?) and (start_time is null or end_time is null) limit 100", ModelArtsTrainJobCompleted, ModelArtsTrainJobFailed, ModelArtsTrainJobKilled, ModelArtsStopped, JobStopped, JobFailed, JobSucceeded).Find(&cloudbrains) | |||
| } | |||
| func GetC2NetWithAiCenterWrongJob() ([]*Cloudbrain, error) { | |||
| cloudbrains := make([]*Cloudbrain, 0) | |||
| return cloudbrains, x. | |||
| In("status", ModelArtsTrainJobFailed, ModelArtsTrainJobKilled, ModelArtsStopped, JobStopped, JobFailed). | |||
| Where("type = ?", TypeC2Net). | |||
| Find(&cloudbrains) | |||
| } | |||
| func GetModelSafetyTestTask() ([]*Cloudbrain, error) { | |||
| cloudbrains := make([]*Cloudbrain, 0) | |||
| sess := x.Where("job_type = ?", string(JobTypeModelSafety)) | |||
| sess := x.Where("job_type=?", string(JobTypeModelSafety)) | |||
| err := sess.Find(&cloudbrains) | |||
| return cloudbrains, err | |||
| } | |||
| @@ -22,8 +22,8 @@ const ( | |||
| type Dataset struct { | |||
| ID int64 `xorm:"pk autoincr"` | |||
| Title string `xorm:"INDEX NOT NULL"` | |||
| Status int32 `xorm:"INDEX"` // normal_private: 0, pulbic: 1, is_delete: 2 | |||
| Title string `xorm:"INDEX NOT NULL"` | |||
| Status int32 `xorm:"INDEX"` // normal_private: 0, public: 1, is_delete: 2 | |||
| Category string | |||
| Description string `xorm:"TEXT"` | |||
| DownloadTimes int64 | |||
| @@ -223,10 +223,10 @@ type Repository struct { | |||
| BlockChainStatus RepoBlockChainStatus `xorm:"NOT NULL DEFAULT 0"` | |||
| // git clone and git pull total count | |||
| CloneCnt int64 `xorm:"NOT NULL DEFAULT 0"` | |||
| CloneCnt int64 `xorm:"NOT NULL DEFAULT 0" json:"clone_cnt"` | |||
| // only git clone total count | |||
| GitCloneCnt int64 `xorm:"NOT NULL DEFAULT 0"` | |||
| GitCloneCnt int64 `xorm:"NOT NULL DEFAULT 0" json:"git_clone_cnt"` | |||
| CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` | |||
| UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` | |||
| @@ -143,6 +143,9 @@ func InsertResourceQueue(queue ResourceQueue) (int64, error) { | |||
| func UpdateResourceQueueById(queueId int64, queue ResourceQueue) (int64, error) { | |||
| return x.ID(queueId).Update(&queue) | |||
| } | |||
// UpdateResourceCardsTotalNum updates only the cards_total_num and remark
// columns of the resource queue identified by queueId, leaving all other
// columns untouched. It returns the number of affected rows.
func UpdateResourceCardsTotalNum(queueId int64, queue ResourceQueue) (int64, error) {
	return x.ID(queueId).Cols("cards_total_num", "remark").Update(&queue)
}
| func SearchResourceQueue(opts SearchResourceQueueOptions) (int64, []ResourceQueue, error) { | |||
| var cond = builder.NewCond() | |||
| @@ -313,9 +316,6 @@ func SyncGrampusQueues(updateList []ResourceQueue, insertList []ResourceQueue, e | |||
| if _, err = sess.In("id", deleteSpcIds).Update(&ResourceSpecification{Status: SpecOffShelf}); err != nil { | |||
| return err | |||
| } | |||
| if _, err = sess.In("spec_id", deleteSpcIds).Delete(&ResourceSceneSpec{}); err != nil { | |||
| return err | |||
| } | |||
| } | |||
| } | |||
| @@ -116,7 +116,7 @@ func InsertResourceScene(r ResourceSceneReq) error { | |||
| //check | |||
| specs := make([]ResourceSpecification, 0) | |||
| cond := builder.In("id", r.SpecIds).And(builder.Eq{"status": SpecOnShelf}) | |||
| cond := builder.In("id", r.SpecIds) | |||
| if err := sess.Where(cond).Find(&specs); err != nil { | |||
| return err | |||
| } | |||
| @@ -175,7 +175,7 @@ func UpdateResourceScene(r ResourceSceneReq) error { | |||
| } | |||
| //check specification | |||
| specs := make([]ResourceSpecification, 0) | |||
| cond := builder.In("id", r.SpecIds).And(builder.Eq{"status": SpecOnShelf}) | |||
| cond := builder.In("id", r.SpecIds) | |||
| if err := sess.Where(cond).Find(&specs); err != nil { | |||
| return err | |||
| } | |||
| @@ -168,6 +168,7 @@ type FindSpecsOptions struct { | |||
| UseShareMemGiB bool | |||
| //if true,find specs no matter used or not used in scene. if false,only find specs used in scene | |||
| RequestAll bool | |||
| SpecStatus int | |||
| } | |||
| type Specification struct { | |||
| @@ -269,10 +270,6 @@ func ResourceSpecOffShelf(id int64) (int64, error) { | |||
| } | |||
| sess.Close() | |||
| }() | |||
| //delete scene spec relation | |||
| if _, err = sess.Where("spec_id = ?", id).Delete(&ResourceSceneSpec{}); err != nil { | |||
| return 0, err | |||
| } | |||
| param := ResourceSpecification{ | |||
| Status: SpecOffShelf, | |||
| @@ -317,9 +314,6 @@ func SyncGrampusSpecs(updateList []ResourceSpecification, insertList []ResourceS | |||
| if _, err = sess.Cols("status", "is_available").In("id", deleteIds).Update(&ResourceSpecification{Status: SpecOffShelf, IsAvailable: false}); err != nil { | |||
| return err | |||
| } | |||
| if _, err = sess.In("spec_id", deleteIds).Delete(&ResourceSceneSpec{}); err != nil { | |||
| return err | |||
| } | |||
| } | |||
| //update exists specs | |||
| @@ -384,6 +378,9 @@ func FindSpecs(opts FindSpecsOptions) ([]*Specification, error) { | |||
| if opts.UseShareMemGiB { | |||
| cond = cond.And(builder.Eq{"resource_specification.share_mem_gi_b": opts.ShareMemGiB}) | |||
| } | |||
| if opts.SpecStatus > 0 { | |||
| cond = cond.And(builder.Eq{"resource_specification.status": opts.SpecStatus}) | |||
| } | |||
| r := make([]*Specification, 0) | |||
| s := x.Where(cond). | |||
| Join("INNER", "resource_queue", "resource_queue.id = resource_specification.queue_id") | |||
| @@ -57,29 +57,26 @@ type CreateModelArtsTrainJobForm struct { | |||
| } | |||
// CreateModelArtsInferenceJobForm holds the form fields posted when creating
// a ModelArts batch inference job.
// (The diff residue carried both the old and new field sets inside one struct
// body, which does not compile; this keeps the new set, which drops
// IsSaveParam/ParameterTemplateName/PrameterDescription.)
type CreateModelArtsInferenceJobForm struct {
	DisplayJobName   string `form:"display_job_name" binding:"Required"`
	JobName          string `form:"job_name" binding:"Required"`
	Attachment       string `form:"attachment" binding:"Required"`
	BootFile         string `form:"boot_file" binding:"Required"`
	WorkServerNumber int    `form:"work_server_number" binding:"Required"`
	EngineID         int    `form:"engine_id" binding:"Required"`
	PoolID           string `form:"pool_id" binding:"Required"`
	Flavor           string `form:"flavor" binding:"Required"`
	Params           string `form:"run_para_list" binding:"Required"`
	Description      string `form:"description"`
	BranchName       string `form:"branch_name" binding:"Required"`
	VersionName      string `form:"version_name" binding:"Required"`
	// NOTE(review): form key "flaver_names" is misspelled but is the existing
	// wire contract with clients — do not "fix" without coordinating.
	FlavorName   string `form:"flaver_names" binding:"Required"`
	EngineName   string `form:"engine_names" binding:"Required"`
	LabelName    string `form:"label_names" binding:"Required"`
	TrainUrl     string `form:"train_url" binding:"Required"`
	ModelName    string `form:"model_name" binding:"Required"`
	ModelVersion string `form:"model_version" binding:"Required"`
	CkptName     string `form:"ckpt_name" binding:"Required"`
	SpecId       int64  `form:"spec_id" binding:"Required"`
}
| func (f *CreateModelArtsTrainJobForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { | |||
| @@ -228,7 +228,7 @@ func AdminOrImageCreaterRight(ctx *context.Context) { | |||
| } | |||
| func GenerateTask(req GenerateCloudBrainTaskReq) error { | |||
| func GenerateTask(req GenerateCloudBrainTaskReq) (string, error) { | |||
| var versionCount int | |||
| if req.JobType == string(models.JobTypeTrain) { | |||
| versionCount = 1 | |||
| @@ -335,11 +335,11 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error { | |||
| }) | |||
| if err != nil { | |||
| log.Error("CreateJob failed:", err.Error(), req.Ctx.Data["MsgID"]) | |||
| return err | |||
| return "", err | |||
| } | |||
| if jobResult.Code != Success { | |||
| log.Error("CreateJob(%s) failed:%s", req.JobName, jobResult.Msg, req.Ctx.Data["MsgID"]) | |||
| return errors.New(jobResult.Msg) | |||
| return "", errors.New(jobResult.Msg) | |||
| } | |||
| var jobID = jobResult.Payload["jobId"].(string) | |||
| @@ -380,13 +380,13 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error { | |||
| }) | |||
| if err != nil { | |||
| return err | |||
| return "", err | |||
| } | |||
| task, err := models.GetCloudbrainByJobID(jobID) | |||
| if err != nil { | |||
| log.Error("GetCloudbrainByJobID failed: %v", err.Error()) | |||
| return err | |||
| return "", err | |||
| } | |||
| stringId := strconv.FormatInt(task.ID, 10) | |||
| @@ -401,7 +401,7 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error { | |||
| notification.NotifyOtherTask(req.Ctx.User, req.Ctx.Repo.Repository, stringId, req.DisplayJobName, models.ActionCreateDebugGPUTask) | |||
| } | |||
| return nil | |||
| return jobID, nil | |||
| } | |||
| func IsBenchmarkJob(jobType string) bool { | |||
| @@ -0,0 +1,111 @@ | |||
| package convert | |||
| import ( | |||
| "code.gitea.io/gitea/models" | |||
| api "code.gitea.io/gitea/modules/structs" | |||
| ) | |||
// ToCloudBrain converts an internal Cloudbrain task into its API
// representation. It is a plain field-by-field mapping; timestamps are
// exposed as raw unix seconds.
// NOTE(review): ToSpecification dereferences its argument, so this panics
// when task.Spec is nil — confirm all callers guarantee a non-nil Spec.
func ToCloudBrain(task *models.Cloudbrain) *api.Cloudbrain {
	return &api.Cloudbrain{
		ID:              task.ID,
		JobID:           task.JobID,
		JobType:         task.JobType,
		Type:            task.Type,
		DisplayJobName:  task.DisplayJobName,
		Status:          task.Status,
		CreatedUnix:     int64(task.CreatedUnix),
		RepoID:          task.RepoID,
		Duration:        task.Duration,
		TrainJobDuration: task.TrainJobDuration,
		ImageID:         task.ImageID,
		Image:           task.Image,
		Uuid:            task.Uuid,
		DatasetName:     task.DatasetName,
		ComputeResource: task.ComputeResource,
		AiCenter:        task.AiCenter,
		BranchName:      task.BranchName,
		Parameters:      task.Parameters,
		BootFile:        task.BootFile,
		Description:     task.Description,
		ModelName:       task.ModelName,
		ModelVersion:    task.ModelVersion,
		CkptName:        task.CkptName,
		StartTime:       int64(task.StartTime),
		EndTime:         int64(task.EndTime),
		Spec:            ToSpecification(task.Spec),
	}
}
// ToAttachment converts an internal attachment record into the API
// AttachmentShow form; a plain field-by-field mapping with the creation
// timestamp exposed as raw unix seconds.
func ToAttachment(attachment *models.Attachment) *api.AttachmentShow {
	return &api.AttachmentShow{
		ID:              attachment.ID,
		UUID:            attachment.UUID,
		DatasetID:       attachment.DatasetID,
		ReleaseID:       attachment.ReleaseID,
		UploaderID:      attachment.UploaderID,
		CommentID:       attachment.CommentID,
		Name:            attachment.Name,
		Description:     attachment.Description,
		DownloadCount:   attachment.DownloadCount,
		UseNumber:       attachment.UseNumber,
		Size:            attachment.Size,
		IsPrivate:       attachment.IsPrivate,
		DecompressState: attachment.DecompressState,
		Type:            attachment.Type,
		CreatedUnix:     int64(attachment.CreatedUnix),
	}
}
| func ToDataset(dataset *models.Dataset) *api.Dataset { | |||
| var convertAttachments []*api.AttachmentShow | |||
| for _, attachment := range dataset.Attachments { | |||
| convertAttachments = append(convertAttachments, ToAttachment(attachment)) | |||
| } | |||
| return &api.Dataset{ | |||
| ID: dataset.ID, | |||
| Title: dataset.Title, | |||
| Status: dataset.Status, | |||
| Category: dataset.Category, | |||
| Description: dataset.Description, | |||
| DownloadTimes: dataset.DownloadTimes, | |||
| UseCount: dataset.UseCount, | |||
| NumStars: dataset.NumStars, | |||
| Recommend: dataset.Recommend, | |||
| License: dataset.License, | |||
| Task: dataset.Task, | |||
| ReleaseID: dataset.ReleaseID, | |||
| UserID: dataset.UserID, | |||
| RepoID: dataset.RepoID, | |||
| Repo: &api.RepositoryShow{ | |||
| OwnerName: dataset.Repo.OwnerName, | |||
| Name: dataset.Repo.Name, | |||
| }, | |||
| CreatedUnix: int64(dataset.CreatedUnix), | |||
| UpdatedUnix: int64(dataset.UpdatedUnix), | |||
| Attachments: convertAttachments, | |||
| } | |||
| } | |||
| func ToSpecification(s *models.Specification) *api.SpecificationShow { | |||
| return &api.SpecificationShow{ | |||
| ID: s.ID, | |||
| AccCardsNum: s.AccCardsNum, | |||
| AccCardType: s.AccCardType, | |||
| CpuCores: s.CpuCores, | |||
| MemGiB: s.MemGiB, | |||
| GPUMemGiB: s.GPUMemGiB, | |||
| ShareMemGiB: s.ShareMemGiB, | |||
| ComputeResource: s.ComputeResource, | |||
| UnitPrice: s.UnitPrice, | |||
| } | |||
| } | |||
| func ToTagger(user *models.User) *api.Tagger { | |||
| return &api.Tagger{ | |||
| Name: user.Name, | |||
| RelAvatarURL: user.RelAvatarLink(), | |||
| Email: user.Email, | |||
| } | |||
| } | |||
| @@ -102,7 +102,7 @@ func getDatasetGrampus(datasetInfos map[string]models.DatasetInfo) []models.Gram | |||
| return datasetGrampus | |||
| } | |||
| func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error) { | |||
| func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) { | |||
| createTime := timeutil.TimeStampNow() | |||
| centerID, centerName := getCentersParamter(ctx, req) | |||
| @@ -150,7 +150,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error | |||
| }) | |||
| if err != nil { | |||
| log.Error("createJob failed: %v", err.Error()) | |||
| return err | |||
| return "", err | |||
| } | |||
| jobID := jobResult.JobInfo.JobID | |||
| @@ -191,7 +191,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error | |||
| if err != nil { | |||
| log.Error("CreateCloudbrain(%s) failed:%v", req.DisplayJobName, err.Error()) | |||
| return err | |||
| return "", err | |||
| } | |||
| var actionType models.ActionType | |||
| @@ -202,7 +202,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error | |||
| } | |||
| notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, actionType) | |||
| return nil | |||
| return jobID, nil | |||
| } | |||
| func getCentersParamter(ctx *context.Context, req *GenerateTrainJobReq) ([]string, []string) { | |||
| @@ -245,6 +245,32 @@ func GetTrainJobLog(jobID string) (string, error) { | |||
| return logContent, nil | |||
| } | |||
| func GetGrampusMetrics(jobID string) (models.GetTrainJobMetricStatisticResult, error) { | |||
| checkSetting() | |||
| client := getRestyClient() | |||
| var result models.GetTrainJobMetricStatisticResult | |||
| res, err := client.R(). | |||
| SetAuthToken(TOKEN). | |||
| Get(HOST + urlTrainJob + "/" + jobID + "/task/0/replica/0/metrics") | |||
| if err != nil { | |||
| return result, fmt.Errorf("resty GetTrainJobLog: %v", err) | |||
| } | |||
| if err = json.Unmarshal([]byte(res.String()), &result); err != nil { | |||
| log.Error("GetGrampusMetrics json.Unmarshal failed(%s): %v", res.String(), err.Error()) | |||
| return result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) | |||
| } | |||
| if res.StatusCode() != http.StatusOK { | |||
| log.Error("Call GrampusMetrics failed(%d):%s(%s)", res.StatusCode(), result.ErrorCode, result.ErrorMsg) | |||
| return result, fmt.Errorf("Call GrampusMetrics failed(%d):%d(%s)", res.StatusCode(), result.ErrorCode, result.ErrorMsg) | |||
| } | |||
| if !result.IsSuccess { | |||
| log.Error("GetGrampusMetrics(%s) failed", jobID) | |||
| return result, fmt.Errorf("GetGrampusMetrics failed:%s", result.ErrorMsg) | |||
| } | |||
| return result, nil | |||
| } | |||
| func StopJob(jobID string) (*models.GrampusStopJobResponse, error) { | |||
| checkSetting() | |||
| client := getRestyClient() | |||
| @@ -350,7 +350,7 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc | |||
| return nil | |||
| } | |||
| func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error) { | |||
| func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) { | |||
| createTime := timeutil.TimeStampNow() | |||
| var jobResult *models.CreateTrainJobResult | |||
| var createErr error | |||
| @@ -410,17 +410,17 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error | |||
| }) | |||
| if errTemp != nil { | |||
| log.Error("InsertCloudbrainTemp failed: %v", errTemp.Error()) | |||
| return errTemp | |||
| return "", errTemp | |||
| } | |||
| } | |||
| return createErr | |||
| return "", createErr | |||
| } | |||
| jobId := strconv.FormatInt(jobResult.JobID, 10) | |||
| jobID := strconv.FormatInt(jobResult.JobID, 10) | |||
| createErr = models.CreateCloudbrain(&models.Cloudbrain{ | |||
| Status: TransTrainJobStatus(jobResult.Status), | |||
| UserID: ctx.User.ID, | |||
| RepoID: ctx.Repo.Repository.ID, | |||
| JobID: jobId, | |||
| JobID: jobID, | |||
| JobName: req.JobName, | |||
| DisplayJobName: req.DisplayJobName, | |||
| JobType: string(models.JobTypeTrain), | |||
| @@ -458,10 +458,10 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error | |||
| if createErr != nil { | |||
| log.Error("CreateCloudbrain(%s) failed:%v", req.DisplayJobName, createErr.Error()) | |||
| return createErr | |||
| return "", createErr | |||
| } | |||
| notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobId, req.DisplayJobName, models.ActionCreateTrainTask) | |||
| return nil | |||
| notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, models.ActionCreateTrainTask) | |||
| return jobID, nil | |||
| } | |||
| func GenerateModelConvertTrainJob(req *GenerateTrainJobReq) (*models.CreateTrainJobResult, error) { | |||
| @@ -682,7 +682,7 @@ func GetOutputPathByCount(TotalVersionCount int) (VersionOutputPath string) { | |||
| return VersionOutputPath | |||
| } | |||
| func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (err error) { | |||
| func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (jobId string, err error) { | |||
| createTime := timeutil.TimeStampNow() | |||
| var jobResult *models.CreateTrainJobResult | |||
| var createErr error | |||
| @@ -742,10 +742,10 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e | |||
| }) | |||
| if err != nil { | |||
| log.Error("InsertCloudbrainTemp failed: %v", err.Error()) | |||
| return err | |||
| return "", err | |||
| } | |||
| } | |||
| return err | |||
| return "", err | |||
| } | |||
| // attach, err := models.GetAttachmentByUUID(req.Uuid) | |||
| @@ -796,7 +796,7 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e | |||
| if err != nil { | |||
| log.Error("CreateCloudbrain(%s) failed:%v", req.JobName, err.Error()) | |||
| return err | |||
| return "", err | |||
| } | |||
| if req.JobType == string(models.JobTypeModelSafety) { | |||
| task, err := models.GetCloudbrainByJobID(jobID) | |||
| @@ -807,7 +807,7 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e | |||
| notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, models.ActionCreateInferenceTask) | |||
| } | |||
| return nil | |||
| return jobID, nil | |||
| } | |||
| func GetNotebookImageName(imageId string) (string, error) { | |||
| @@ -609,9 +609,9 @@ var ( | |||
| AiCenterInfo string | |||
| }{} | |||
| C2NetInfos *C2NetSqInfos | |||
| CenterInfos *AiCenterInfos | |||
| C2NetMapInfo map[string]*C2NetSequenceInfo | |||
| C2NetInfos *C2NetSqInfos | |||
| CenterInfos *AiCenterInfos | |||
| C2NetMapInfo map[string]*C2NetSequenceInfo | |||
| //elk config | |||
| ElkUrl string | |||
| @@ -1451,7 +1451,7 @@ func NewContext() { | |||
| MaxDuration = sec.Key("MAX_DURATION").MustInt64(14400) | |||
| TrainGpuTypes = sec.Key("TRAIN_GPU_TYPES").MustString("") | |||
| TrainResourceSpecs = sec.Key("TRAIN_RESOURCE_SPECS").MustString("") | |||
| MaxModelSize = sec.Key("MAX_MODEL_SIZE").MustFloat64(500) | |||
| MaxModelSize = sec.Key("MAX_MODEL_SIZE").MustFloat64(200) | |||
| InferenceGpuTypes = sec.Key("INFERENCE_GPU_TYPES").MustString("") | |||
| InferenceResourceSpecs = sec.Key("INFERENCE_RESOURCE_SPECS").MustString("") | |||
| SpecialPools = sec.Key("SPECIAL_POOL").MustString("") | |||
| @@ -1655,9 +1655,9 @@ func getGrampusConfig() { | |||
| if err := json.Unmarshal([]byte(Grampus.C2NetSequence), &C2NetInfos); err != nil { | |||
| log.Error("Unmarshal(C2NetSequence) failed:%v", err) | |||
| } | |||
| C2NetMapInfo=make(map[string]*C2NetSequenceInfo) | |||
| for _,value :=range C2NetInfos.C2NetSqInfo{ | |||
| C2NetMapInfo[value.Name]=value | |||
| C2NetMapInfo = make(map[string]*C2NetSequenceInfo) | |||
| for _, value := range C2NetInfos.C2NetSqInfo { | |||
| C2NetMapInfo[value.Name] = value | |||
| } | |||
| } | |||
| Grampus.SyncScriptProject = sec.Key("SYNC_SCRIPT_PROJECT").MustString("script_for_grampus") | |||
| @@ -27,3 +27,48 @@ type Attachment struct { | |||
// EditAttachmentOptions is the request body for editing (renaming) an
// attachment via the API.
type EditAttachmentOptions struct {
	Name string `json:"name"`
}
// Dataset is the API representation of a dataset, including its owning
// repository summary and attachment list. Timestamps are unix seconds.
type Dataset struct {
	ID            int64             `json:"id"`
	Title         string            `json:"title"`
	Status        int32             `json:"status"`
	Category      string            `json:"category"`
	Description   string            `json:"description"`
	DownloadTimes int64             `json:"downloadTimes"`
	UseCount      int64             `json:"useCount"`
	NumStars      int               `json:"numStars"`
	Recommend     bool              `json:"recommend"`
	License       string            `json:"license"`
	Task          string            `json:"task"`
	ReleaseID     int64             `json:"releaseId"`
	UserID        int64             `json:"userId"`
	RepoID        int64             `json:"repoId"`
	Repo          *RepositoryShow   `json:"repo"`
	CreatedUnix   int64             `json:"createdUnix"`
	UpdatedUnix   int64             `json:"updatedUnix"`
	Attachments   []*AttachmentShow `json:"attachments"`
}
// RepositoryShow is a minimal repository summary (owner + name) embedded in
// dataset API responses.
type RepositoryShow struct {
	OwnerName string `json:"ownerName"`
	Name      string `json:"name"`
}
// AttachmentShow is the API representation of a single dataset attachment.
// CreatedUnix is unix seconds.
type AttachmentShow struct {
	ID              int64  `json:"id"`
	UUID            string `json:"uuid"`
	DatasetID       int64  `json:"datasetId"`
	ReleaseID       int64  `json:"releaseId"`
	UploaderID      int64  `json:"uploaderId"`
	CommentID       int64  `json:"commentId"`
	Name            string `json:"name"`
	Description     string `json:"description"`
	DownloadCount   int64  `json:"downloadCount"`
	UseNumber       int64  `json:"useNumber"`
	Size            int64  `json:"size"`
	IsPrivate       bool   `json:"isPrivate"`
	DecompressState int32  `json:"decompressState"`
	Type            int    `json:"type"`
	CreatedUnix     int64  `json:"createdUnix"`
}
| @@ -0,0 +1,84 @@ | |||
| package structs | |||
// CreateGrampusTrainJobOption is the JSON request body for creating a
// grampus (C2Net) train job through the API.
type CreateGrampusTrainJobOption struct {
	DisplayJobName   string `json:"display_job_name" binding:"Required"`
	JobName          string `json:"job_name" binding:"Required" `
	Attachment       string `json:"attachment" binding:"Required"`
	BootFile         string `json:"boot_file" binding:"Required"`
	ImageID          string `json:"image_id" binding:"Required"`
	Params           string `json:"run_para_list" binding:"Required"`
	Description      string `json:"description"`
	BranchName       string `json:"branch_name" binding:"Required"`
	EngineName       string `json:"engine_name" binding:"Required"`
	WorkServerNumber int    `json:"work_server_number" binding:"Required"`
	Image            string `json:"image" binding:"Required"`
	DatasetName      string `json:"dataset_name" binding:"Required"`
	ModelName        string `json:"model_name"`
	ModelVersion     string `json:"model_version"`
	CkptName         string `json:"ckpt_name"`
	LabelName        string `json:"label_names"`
	PreTrainModelUrl string `json:"pre_train_model_url"`
	SpecId           int64  `json:"spec_id" binding:"Required"`
}
// CreateTrainJobOption is the JSON request body for creating a cloudbrain
// train job through the API.
type CreateTrainJobOption struct {
	Type             int    `json:"type"`
	DisplayJobName   string `json:"display_job_name" binding:"Required"`
	ImageID          string `json:"image_id"`
	Image            string `json:"image" binding:"Required"`
	Attachment       string `json:"attachment" binding:"Required"`
	DatasetName      string `json:"dataset_name" binding:"Required"`
	Description      string `json:"description" `
	BootFile         string `json:"boot_file" binding:"Required"`
	BranchName       string `json:"branch_name" binding:"Required"`
	Params           string `json:"run_para_list" binding:"Required"`
	WorkServerNumber int    `json:"work_server_number"`
	ModelName        string `json:"model_name"`
	ModelVersion     string `json:"model_version"`
	CkptName         string `json:"ckpt_name"`
	LabelName        string `json:"label_names"`
	PreTrainModelUrl string `json:"pre_train_model_url"`
	SpecId           int64  `json:"spec_id" binding:"Required"`
}
// Cloudbrain is the API representation of a cloudbrain task returned to
// clients (see convert.ToCloudBrain).
type Cloudbrain struct {
	ID               int64              `json:"id"`
	JobID            string             `json:"job_id"`
	JobType          string             `json:"job_type"`
	Type             int                `json:"type"`
	DisplayJobName   string             `json:"display_job_name"`
	Status           string             `json:"status"`
	CreatedUnix      int64              `json:"created_unix"`
	RepoID           int64              `json:"repo_id"`
	Duration         int64              `json:"duration"`           // running duration, in seconds
	TrainJobDuration string             `json:"train_job_duration"` // duration formatted for display
	ImageID          string             `json:"image_id"`           // grampus image_id
	Image            string             `json:"image"`
	Uuid             string             `json:"uuid"`             // dataset id
	DatasetName      string             `json:"dataset_name"`
	ComputeResource  string             `json:"compute_resource"` // compute resource, e.g. npu
	AiCenter         string             `json:"ai_center"`        // grampus ai center: center_id+center_name
	BranchName       string             `json:"branch_name"`      // git branch name
	Parameters       string             `json:"parameters"`       // param list passed to ModelArts
	BootFile         string             `json:"boot_file"`        // entry script
	Description      string             `json:"description"`
	ModelName        string             `json:"model_name"`    // model name
	ModelVersion     string             `json:"model_version"` // model version
	CkptName         string             `json:"ckpt_name"`     // checkpoint (weights) file name
	StartTime        int64              `json:"start_time"`
	EndTime          int64              `json:"end_time"`
	Spec             *SpecificationShow `json:"spec"` // resource specification used by the task
}
// SpecificationShow is the API representation of a resource specification
// (accelerator cards, CPU, memory and price) attached to a task.
type SpecificationShow struct {
	ID              int64   `json:"id"`
	AccCardsNum     int     `json:"acc_cards_num"` // number of accelerator cards
	AccCardType     string  `json:"acc_card_type"` // accelerator card type, e.g. GPU/NPU model
	CpuCores        int     `json:"cpu_cores"`
	MemGiB          float32 `json:"mem_gi_b"`
	GPUMemGiB       float32 `json:"gpu_mem_gi_b"`
	ShareMemGiB     float32 `json:"share_mem_gi_b"`
	ComputeResource string  `json:"compute_resource"`
	UnitPrice       int     `json:"unit_price"`
}
| @@ -0,0 +1,7 @@ | |||
| package structs | |||
// Tagger describes a user that can be assigned to an annotation (labeling)
// task; see repo.ListTagger.
type Tagger struct {
	Name         string `json:"name"`
	Email        string `json:"email"`
	RelAvatarURL string `json:"relAvatarURL"` // relative avatar URL for display
}
| @@ -617,6 +617,7 @@ organization = Organizations | |||
| uid = Uid | |||
| u2f = Security Keys | |||
| bind_wechat = Bind WeChat | |||
| no_wechat_bind = Cannot do this operation, please bind WeChat first. |||
| wechat_bind = WeChat Binding | |||
| bind_account_information = Bind account information | |||
| bind_time = Bind Time | |||
| @@ -1036,6 +1037,7 @@ cloudbrain.time.starttime=Start run time | |||
| cloudbrain.time.endtime=End run time | |||
| cloudbrain.datasetdownload=Dataset download url | |||
| model_manager = Model | |||
| model_experience = Model Experience | |||
| model_noright=You have no right to do the operation. | |||
| model_rename=Duplicate model name, please modify model name. | |||
| @@ -622,6 +622,7 @@ organization=组织 | |||
| uid=用户 ID | |||
| u2f=安全密钥 | |||
| wechat_bind = 微信绑定 | |||
| no_wechat_bind = 不能创建任务,请先绑定微信。 | |||
| bind_wechat = 绑定微信 | |||
| bind_account_information = 绑定账号信息 | |||
| bind_time = 绑定时间 | |||
| @@ -1036,6 +1037,7 @@ datasets.desc=数据集功能 | |||
| cloudbrain_helper=使用GPU/NPU资源,开启Notebook、模型训练任务等 | |||
| model_manager = 模型 | |||
| model_experience = 模型体验 | |||
| model_noright=您没有操作权限。 | |||
| model_rename=模型名称重复,请修改模型名称 | |||
| @@ -136,6 +136,26 @@ func GetResourceSpecificationList(ctx *context.Context) { | |||
| ctx.JSON(http.StatusOK, response.SuccessWithData(list)) | |||
| } | |||
| func GetAllResourceSpecificationList(ctx *context.Context) { | |||
| queue := ctx.QueryInt64("queue") | |||
| status := ctx.QueryInt("status") | |||
| cluster := ctx.Query("cluster") | |||
| available := ctx.QueryInt("available") | |||
| list, err := resource.GetAllDistinctResourceSpecification(models.SearchResourceSpecificationOptions{ | |||
| QueueId: queue, | |||
| Status: status, | |||
| Cluster: cluster, | |||
| AvailableCode: available, | |||
| }) | |||
| if err != nil { | |||
| log.Error("GetResourceSpecificationList error.%v", err) | |||
| ctx.JSON(http.StatusOK, response.ServerError(err.Error())) | |||
| return | |||
| } | |||
| ctx.JSON(http.StatusOK, response.SuccessWithData(list)) | |||
| } | |||
| func GetResourceSpecificationScenes(ctx *context.Context) { | |||
| specId := ctx.ParamsInt64(":id") | |||
| list, err := resource.GetResourceSpecificationScenes(specId) | |||
| @@ -182,7 +202,7 @@ func UpdateResourceSpecification(ctx *context.Context, req models.ResourceSpecif | |||
| if err != nil { | |||
| log.Error("UpdateResourceSpecification error. %v", err) | |||
| ctx.JSON(http.StatusOK, response.ResponseError(err)) | |||
| ctx.JSON(http.StatusOK, response.ResponseBizError(err)) | |||
| return | |||
| } | |||
| ctx.JSON(http.StatusOK, response.Success()) | |||
| @@ -242,6 +242,15 @@ func reqRepoWriter(unitTypes ...models.UnitType) macaron.Handler { | |||
| } | |||
| } | |||
| func reqWeChat() macaron.Handler { | |||
| return func(ctx *context.Context) { | |||
| if setting.WechatAuthSwitch && ctx.User.WechatOpenId == "" { | |||
| ctx.JSON(http.StatusForbidden, models.BaseErrorMessageApi("settings.no_wechat_bind")) | |||
| return | |||
| } | |||
| } | |||
| } | |||
| // reqRepoReader user should have specific read permission or be a repo admin or a site admin | |||
| func reqRepoReader(unitType models.UnitType) macaron.Handler { | |||
| return func(ctx *context.Context) { | |||
| @@ -517,6 +526,25 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Post("/markdown", bind(api.MarkdownOption{}), misc.Markdown) | |||
| m.Post("/markdown/raw", misc.MarkdownRaw) | |||
| m.Group("/images", func() { | |||
| m.Get("/public", repo.GetPublicImages) | |||
| m.Get("/custom", repo.GetCustomImages) | |||
| m.Get("/star", repo.GetStarImages) | |||
| m.Get("/npu", repo.GetNpuImages) | |||
| }, reqToken()) | |||
| m.Group("/attachments", func() { | |||
| m.Get("/:uuid", repo.GetAttachment) | |||
| m.Get("/get_chunks", repo.GetSuccessChunks) | |||
| m.Get("/new_multipart", repo.NewMultipart) | |||
| m.Get("/get_multipart_url", repo.GetMultipartUploadUrl) | |||
| m.Post("/complete_multipart", repo.CompleteMultipart) | |||
| }, reqToken()) | |||
| // Notifications | |||
| m.Group("/notifications", func() { | |||
| m.Combo(""). | |||
| @@ -701,6 +729,13 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Combo("/repositories/:id", reqToken()).Get(repo.GetByID) | |||
| m.Group("/datasets/:username/:reponame", func() { | |||
| m.Get("/current_repo", repo.CurrentRepoDatasetMultiple) | |||
| m.Get("/my_datasets", repo.MyDatasetsMultiple) | |||
| m.Get("/public_datasets", repo.PublicDatasetMultiple) | |||
| m.Get("/my_favorite", repo.MyFavoriteDatasetMultiple) | |||
| }, reqToken(), repoAssignment()) | |||
| m.Group("/repos", func() { | |||
| m.Get("/search", repo.Search) | |||
| @@ -709,7 +744,13 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Post("/migrate", reqToken(), bind(auth.MigrateRepoForm{}), repo.Migrate) | |||
| m.Post("/migrate/submit", reqToken(), bind(auth.MigrateRepoForm{}), repo.MigrateSubmit) | |||
| m.Group("/specification", func() { | |||
| m.Get("", repo.GetResourceSpec) | |||
| }, reqToken()) | |||
| m.Group("/:username/:reponame", func() { | |||
| m.Get("/right", reqToken(), repo.GetRight) | |||
| m.Get("/tagger", reqToken(), repo.ListTagger) | |||
| m.Combo("").Get(reqAnyRepoReader(), repo.Get). | |||
| Delete(reqToken(), reqOwner(), repo.Delete). | |||
| Patch(reqToken(), reqAdmin(), bind(api.EditRepoOption{}), context.RepoRef(), repo.Edit) | |||
| @@ -938,15 +979,23 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Get("/:id/log", repo.CloudbrainGetLog) | |||
| m.Get("/:id/download_log_file", repo.CloudbrainDownloadLogFile) | |||
| m.Group("/train-job", func() { | |||
| m.Post("/create", reqToken(), reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat(), context.ReferencesGitRepo(false), bind(api.CreateTrainJobOption{}), repo.CreateCloudBrain) | |||
| m.Group("/:jobid", func() { | |||
| m.Get("", repo.GetModelArtsTrainJobVersion) | |||
| m.Get("/detail", reqToken(), reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow) | |||
| m.Get("/model_list", repo.CloudBrainModelList) | |||
| m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.CloudBrainStop) | |||
| }) | |||
| }) | |||
| m.Group("/inference-job", func() { | |||
| m.Post("/create", reqToken(), reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat(), bind(api.CreateTrainJobOption{}), context.ReferencesGitRepo(false), repo.CreateCloudBrainInferenceTask) | |||
| m.Group("/:jobid", func() { | |||
| m.Get("", repo.GetCloudBrainInferenceJob) | |||
| m.Get("/detail", reqToken(), reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow) | |||
| m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.DelCloudBrainJob) | |||
| m.Get("/result_list", repo.InferencJobResultList) | |||
| }) | |||
| @@ -1001,6 +1050,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Get("", repo.GetModelArtsTrainJobVersion) | |||
| m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.GrampusStopJob) | |||
| m.Get("/log", repo_ext.GrampusGetLog) | |||
| m.Get("/metrics", repo_ext.GrampusMetrics) | |||
| m.Get("/download_log", cloudbrain.AdminOrJobCreaterRightForTrain, repo_ext.GrampusDownloadLog) | |||
| }) | |||
| }) | |||
| @@ -0,0 +1,25 @@ | |||
| package repo | |||
| import ( | |||
| "code.gitea.io/gitea/modules/context" | |||
| routeRepo "code.gitea.io/gitea/routers/repo" | |||
| ) | |||
// GetSuccessChunks is a thin API wrapper delegating to the web-route handler
// that reports which chunks of a multipart attachment upload already exist.
func GetSuccessChunks(ctx *context.APIContext) {
	routeRepo.GetSuccessChunks(ctx.Context)
}

// NewMultipart is a thin API wrapper delegating to the web-route handler
// that initiates a new multipart attachment upload.
func NewMultipart(ctx *context.APIContext) {
	routeRepo.NewMultipart(ctx.Context)
}

// GetMultipartUploadUrl is a thin API wrapper delegating to the web-route
// handler that returns a pre-signed URL for uploading one chunk.
func GetMultipartUploadUrl(ctx *context.APIContext) {
	routeRepo.GetMultipartUploadUrl(ctx.Context)
}

// CompleteMultipart is a thin API wrapper delegating to the web-route
// handler that finalizes a multipart attachment upload.
func CompleteMultipart(ctx *context.APIContext) {
	routeRepo.CompleteMultipart(ctx.Context)
}

// GetAttachment is a thin API wrapper delegating to the web-route handler
// that serves or describes an attachment by uuid.
func GetAttachment(ctx *context.APIContext) {
	routeRepo.GetAttachment(ctx.Context)
}
| @@ -16,6 +16,14 @@ import ( | |||
| "strings" | |||
| "time" | |||
| cloudbrainService "code.gitea.io/gitea/services/cloudbrain" | |||
| "code.gitea.io/gitea/modules/convert" | |||
| "code.gitea.io/gitea/services/cloudbrain/cloudbrainTask" | |||
| api "code.gitea.io/gitea/modules/structs" | |||
| "code.gitea.io/gitea/modules/notification" | |||
| "code.gitea.io/gitea/modules/setting" | |||
| @@ -29,6 +37,77 @@ import ( | |||
| routerRepo "code.gitea.io/gitea/routers/repo" | |||
| ) | |||
// CloudBrainShow returns the detail of a single cloudbrain task looked up by
// the ":jobid" URL parameter. For unfinished tasks it re-syncs the status
// from the backing platform, then normalizes duration and image fields
// before responding with {Code: 0, Data: <converted task>}.
func CloudBrainShow(ctx *context.APIContext) {
	task, err := models.GetCloudbrainByJobID(ctx.Params(":jobid"))
	if err != nil {
		log.Info("error:" + err.Error())
		ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("repo.cloudbrain_query_fail"))
		return
	}
	cloudbrainTask.PrepareSpec4Show(task)
	// Never expose the internal container IP to API consumers.
	task.ContainerIp = ""
	if cloudbrainTask.IsTaskNotStop(task) {
		// The task may have progressed since it was persisted; refresh first.
		cloudbrainTask.SyncTaskStatus(task)
	}
	if task.TrainJobDuration == "" {
		if task.Duration == 0 {
			// Derive a duration from timestamps when none was recorded:
			// waiting tasks have none, running tasks count up to "now",
			// finished tasks use their last update time.
			var duration int64
			if task.Status == string(models.JobWaiting) {
				duration = 0
			} else if task.Status == string(models.JobRunning) {
				duration = time.Now().Unix() - int64(task.CreatedUnix)
			} else {
				duration = int64(task.UpdatedUnix) - int64(task.CreatedUnix)
			}
			task.Duration = duration
		}
		task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
	}
	//to unify image output
	if task.Type == models.TypeCloudBrainTwo || task.Type == models.TypeCDCenter {
		task.ImageID = strconv.FormatInt(task.EngineID, 10)
		task.Image = task.EngineName
	} else if task.Type == models.TypeC2Net {
		task.Image = task.EngineName
	}
	task.AiCenter = cloudbrainService.GetAiCenterShow(task.AiCenter, ctx.Context)
	ctx.JSON(http.StatusOK, models.BaseMessageWithDataApi{Code: 0, Message: "", Data: convert.ToCloudBrain(task)})
}
| func CreateCloudBrain(ctx *context.APIContext, option api.CreateTrainJobOption) { | |||
| if option.Type == cloudbrainTask.TaskTypeCloudbrainOne { | |||
| cloudbrainTask.CloudbrainOneTrainJobCreate(ctx.Context, option) | |||
| } | |||
| if option.Type == cloudbrainTask.TaskTypeModelArts { | |||
| cloudbrainTask.ModelArtsTrainJobNpuCreate(ctx.Context, option) | |||
| } | |||
| if option.Type == cloudbrainTask.TaskTypeGrampusGPU { | |||
| cloudbrainTask.GrampusTrainJobGpuCreate(ctx.Context, option) | |||
| } | |||
| if option.Type == cloudbrainTask.TaskTypeGrampusNPU { | |||
| cloudbrainTask.GrampusTrainJobNpuCreate(ctx.Context, option) | |||
| } | |||
| } | |||
| func CreateCloudBrainInferenceTask(ctx *context.APIContext, option api.CreateTrainJobOption) { | |||
| if option.Type == 0 { | |||
| cloudbrainTask.CloudBrainInferenceJobCreate(ctx.Context, option) | |||
| } | |||
| if option.Type == 1 { | |||
| cloudbrainTask.ModelArtsInferenceJobCreate(ctx.Context, option) | |||
| } | |||
| } | |||
| // cloudbrain get job task by jobid | |||
| func GetCloudbrainTask(ctx *context.APIContext) { | |||
| // swagger:operation GET /repos/{owner}/{repo}/cloudbrain/{jobid} cloudbrain jobTask | |||
| @@ -81,47 +160,22 @@ func GetCloudbrainTask(ctx *context.APIContext) { | |||
| "JobDuration": job.TrainJobDuration, | |||
| }) | |||
| } else { | |||
| jobResult, err := cloudbrain.GetJob(job.JobID) | |||
| if err != nil { | |||
| ctx.NotFound(err) | |||
| log.Error("GetJob failed:", err) | |||
| return | |||
| } | |||
| result, _ := models.ConvertToJobResultPayload(jobResult.Payload) | |||
| jobAfter, err := cloudbrainTask.SyncCloudBrainOneStatus(job) | |||
| if err != nil { | |||
| ctx.NotFound(err) | |||
| log.Error("ConvertToJobResultPayload failed:", err) | |||
| log.Error("Sync cloud brain one status failed:", err) | |||
| return | |||
| } | |||
| oldStatus := job.Status | |||
| job.Status = result.JobStatus.State | |||
| taskRoles := result.TaskRoles | |||
| taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) | |||
| if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) { | |||
| job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP | |||
| job.ContainerID = taskRes.TaskStatuses[0].ContainerID | |||
| job.Status = taskRes.TaskStatuses[0].State | |||
| } | |||
| if result.JobStatus.State != string(models.JobWaiting) { | |||
| models.ParseAndSetDurationFromCloudBrainOne(result, job) | |||
| if oldStatus != job.Status { | |||
| notification.NotifyChangeCloudbrainStatus(job, oldStatus) | |||
| } | |||
| err = models.UpdateJob(job) | |||
| if err != nil { | |||
| log.Error("UpdateJob failed:", err) | |||
| } | |||
| } | |||
| ctx.JSON(http.StatusOK, map[string]interface{}{ | |||
| "ID": ID, | |||
| "JobName": result.Config.JobName, | |||
| "JobStatus": result.JobStatus.State, | |||
| "SubState": result.JobStatus.SubState, | |||
| "CreatedTime": time.Unix(result.JobStatus.CreatedTime/1000, 0).Format("2006-01-02 15:04:05"), | |||
| "CompletedTime": time.Unix(result.JobStatus.CompletedTime/1000, 0).Format("2006-01-02 15:04:05"), | |||
| "JobDuration": job.TrainJobDuration, | |||
| "JobName": jobAfter.JobName, | |||
| "JobStatus": jobAfter.Status, | |||
| "SubState": "", | |||
| "CreatedTime": jobAfter.CreatedUnix.Format("2006-01-02 15:04:05"), | |||
| "CompletedTime": jobAfter.UpdatedUnix.Format("2006-01-02 15:04:05"), | |||
| "JobDuration": jobAfter.TrainJobDuration, | |||
| }) | |||
| } | |||
| } | |||
| @@ -580,7 +634,7 @@ func CloudbrainGetLog(ctx *context.APIContext) { | |||
| endLine += 1 | |||
| } | |||
| } | |||
| result = getLogFromModelDir(job.JobName, startLine, endLine, resultPath) | |||
| if result == nil { | |||
| log.Error("GetJobLog failed: %v", err, ctx.Data["MsgID"]) | |||
| @@ -595,14 +649,20 @@ func CloudbrainGetLog(ctx *context.APIContext) { | |||
| if ctx.Data["existStr"] != nil && result["Lines"].(int) < 50 { | |||
| content = content + ctx.Data["existStr"].(string) | |||
| } | |||
| logFileName := result["FileName"] | |||
| //Logs can only be downloaded if the file exists | |||
| //and the current user is an administrator or the creator of the task | |||
| canLogDownload := logFileName != nil && logFileName != "" && job.IsUserHasRight(ctx.User) | |||
| re := map[string]interface{}{ | |||
| "JobID": ID, | |||
| "LogFileName": result["FileName"], | |||
| "LogFileName": logFileName, | |||
| "StartLine": result["StartLine"], | |||
| "EndLine": result["EndLine"], | |||
| "Content": content, | |||
| "Lines": result["Lines"], | |||
| "CanLogDownload": result["FileName"] != "", | |||
| "CanLogDownload": canLogDownload, | |||
| "StartTime": job.StartTime, | |||
| } | |||
| //result := CloudbrainGetLogByJobId(job.JobID, job.JobName) | |||
| @@ -0,0 +1,123 @@ | |||
| package repo | |||
| import ( | |||
| "fmt" | |||
| "strings" | |||
| "code.gitea.io/gitea/modules/convert" | |||
| api "code.gitea.io/gitea/modules/structs" | |||
| "code.gitea.io/gitea/models" | |||
| "code.gitea.io/gitea/modules/context" | |||
| "code.gitea.io/gitea/modules/log" | |||
| "code.gitea.io/gitea/modules/setting" | |||
| ) | |||
| func PublicDatasetMultiple(ctx *context.APIContext) { | |||
| opts := &models.SearchDatasetOptions{ | |||
| PublicOnly: true, | |||
| NeedAttachment: true, | |||
| CloudBrainType: ctx.QueryInt("type"), | |||
| } | |||
| datasetMultiple(ctx, opts) | |||
| } | |||
| func MyFavoriteDatasetMultiple(ctx *context.APIContext) { | |||
| opts := &models.SearchDatasetOptions{ | |||
| StarByMe: true, | |||
| DatasetIDs: models.GetDatasetIdsStarByUser(ctx.User.ID), | |||
| NeedAttachment: true, | |||
| CloudBrainType: ctx.QueryInt("type"), | |||
| } | |||
| datasetMultiple(ctx, opts) | |||
| } | |||
| func CurrentRepoDatasetMultiple(ctx *context.APIContext) { | |||
| datasetIds := models.GetDatasetIdsByRepoID(ctx.Repo.Repository.ID) | |||
| searchOrderBy := getSearchOrderByInValues(datasetIds) | |||
| opts := &models.SearchDatasetOptions{ | |||
| RepoID: ctx.Repo.Repository.ID, | |||
| NeedAttachment: true, | |||
| CloudBrainType: ctx.QueryInt("type"), | |||
| DatasetIDs: datasetIds, | |||
| SearchOrderBy: searchOrderBy, | |||
| } | |||
| datasetMultiple(ctx, opts) | |||
| } | |||
| func MyDatasetsMultiple(ctx *context.APIContext) { | |||
| opts := &models.SearchDatasetOptions{ | |||
| UploadAttachmentByMe: true, | |||
| NeedAttachment: true, | |||
| CloudBrainType: ctx.QueryInt("type"), | |||
| } | |||
| datasetMultiple(ctx, opts) | |||
| } | |||
| func datasetMultiple(ctx *context.APIContext, opts *models.SearchDatasetOptions) { | |||
| page := ctx.QueryInt("page") | |||
| if page < 1 { | |||
| page = 1 | |||
| } | |||
| pageSize := ctx.QueryInt("pageSize") | |||
| if pageSize < 1 { | |||
| pageSize = setting.UI.DatasetPagingNum | |||
| } | |||
| keyword := strings.Trim(ctx.Query("q"), " ") | |||
| opts.Keyword = keyword | |||
| if opts.SearchOrderBy.String() == "" { | |||
| opts.SearchOrderBy = models.SearchOrderByRecentUpdated | |||
| } | |||
| opts.RecommendOnly = ctx.QueryBool("recommend") | |||
| opts.ListOptions = models.ListOptions{ | |||
| Page: page, | |||
| PageSize: pageSize, | |||
| } | |||
| opts.JustNeedZipFile = true | |||
| opts.User = ctx.User | |||
| datasets, count, err := models.SearchDataset(opts) | |||
| if err != nil { | |||
| log.Error("json.Marshal failed:", err.Error()) | |||
| ctx.JSON(200, map[string]interface{}{ | |||
| "code": 1, | |||
| "message": err.Error(), | |||
| "data": []*api.Dataset{}, | |||
| "count": 0, | |||
| }) | |||
| return | |||
| } | |||
| var convertDatasets []*api.Dataset | |||
| for _, dataset := range datasets { | |||
| convertDatasets = append(convertDatasets, convert.ToDataset(dataset)) | |||
| } | |||
| ctx.JSON(200, map[string]interface{}{ | |||
| "code": 0, | |||
| "message": "", | |||
| "data": convertDatasets, | |||
| "count": count, | |||
| }) | |||
| } | |||
| func getSearchOrderByInValues(datasetIds []int64) models.SearchOrderBy { | |||
| if len(datasetIds) == 0 { | |||
| return "" | |||
| } | |||
| searchOrderBy := "CASE id " | |||
| for i, id := range datasetIds { | |||
| searchOrderBy += fmt.Sprintf(" WHEN %d THEN %d", id, i+1) | |||
| } | |||
| searchOrderBy += " ELSE 0 END" | |||
| return models.SearchOrderBy(searchOrderBy) | |||
| } | |||
| @@ -0,0 +1,141 @@ | |||
| package repo | |||
| import ( | |||
| "encoding/json" | |||
| "net/http" | |||
| "strconv" | |||
| "code.gitea.io/gitea/models" | |||
| "code.gitea.io/gitea/modules/context" | |||
| "code.gitea.io/gitea/modules/grampus" | |||
| "code.gitea.io/gitea/modules/log" | |||
| "code.gitea.io/gitea/modules/modelarts" | |||
| "code.gitea.io/gitea/modules/setting" | |||
| ) | |||
// NPUImageINFO is an id/display-name pair describing an NPU base image
// offered by ModelArts or the C2Net (grampus) platform.
type NPUImageINFO struct {
	ID    string `json:"id"`
	Value string `json:"value"` // human-readable image/engine name
}
| func GetPublicImages(ctx *context.APIContext) { | |||
| uid := getUID(ctx) | |||
| opts := models.SearchImageOptions{ | |||
| IncludePublicOnly: true, | |||
| UID: uid, | |||
| Keyword: ctx.Query("q"), | |||
| Topics: ctx.Query("topic"), | |||
| IncludeOfficialOnly: ctx.QueryBool("recommend"), | |||
| SearchOrderBy: "type desc, num_stars desc,id desc", | |||
| Status: models.IMAGE_STATUS_SUCCESS, | |||
| CloudbrainType: ctx.QueryInt("cloudbrainType"), | |||
| } | |||
| getImages(ctx, &opts) | |||
| } | |||
| func GetCustomImages(ctx *context.APIContext) { | |||
| uid := getUID(ctx) | |||
| opts := models.SearchImageOptions{ | |||
| UID: uid, | |||
| IncludeOwnerOnly: true, | |||
| Keyword: ctx.Query("q"), | |||
| Topics: ctx.Query("topic"), | |||
| Status: -1, | |||
| SearchOrderBy: "id desc", | |||
| } | |||
| getImages(ctx, &opts) | |||
| } | |||
| func GetStarImages(ctx *context.APIContext) { | |||
| uid := getUID(ctx) | |||
| opts := models.SearchImageOptions{ | |||
| UID: uid, | |||
| IncludeStarByMe: true, | |||
| Keyword: ctx.Query("q"), | |||
| Topics: ctx.Query("topic"), | |||
| Status: models.IMAGE_STATUS_SUCCESS, | |||
| SearchOrderBy: "id desc", | |||
| } | |||
| getImages(ctx, &opts) | |||
| } | |||
| func GetNpuImages(ctx *context.APIContext) { | |||
| cloudbrainType := ctx.QueryInt("type") | |||
| if cloudbrainType == 0 { //modelarts | |||
| getModelArtsImages(ctx) | |||
| } else { //c2net | |||
| getC2netNpuImages(ctx) | |||
| } | |||
| } | |||
| func getModelArtsImages(ctx *context.APIContext) { | |||
| var versionInfos modelarts.VersionInfo | |||
| _ = json.Unmarshal([]byte(setting.EngineVersions), &versionInfos) | |||
| var npuImageInfos []NPUImageINFO | |||
| for _, info := range versionInfos.Version { | |||
| npuImageInfos = append(npuImageInfos, NPUImageINFO{ | |||
| ID: strconv.Itoa(info.ID), | |||
| Value: info.Value, | |||
| }) | |||
| } | |||
| ctx.JSON(http.StatusOK, npuImageInfos) | |||
| } | |||
| func getC2netNpuImages(ctx *context.APIContext) { | |||
| images, err := grampus.GetImages(grampus.ProcessorTypeNPU) | |||
| var npuImageInfos []NPUImageINFO | |||
| if err != nil { | |||
| log.Error("GetImages failed:", err.Error()) | |||
| ctx.JSON(http.StatusOK, []NPUImageINFO{}) | |||
| } else { | |||
| for _, info := range images.Infos { | |||
| npuImageInfos = append(npuImageInfos, NPUImageINFO{ | |||
| ID: info.ID, | |||
| Value: info.Name, | |||
| }) | |||
| } | |||
| ctx.JSON(http.StatusOK, npuImageInfos) | |||
| } | |||
| } | |||
| func getImages(ctx *context.APIContext, opts *models.SearchImageOptions) { | |||
| page := ctx.QueryInt("page") | |||
| if page <= 0 { | |||
| page = 1 | |||
| } | |||
| pageSize := ctx.QueryInt("pageSize") | |||
| if pageSize <= 0 { | |||
| pageSize = 15 | |||
| } | |||
| opts.ListOptions = models.ListOptions{ | |||
| Page: page, | |||
| PageSize: pageSize, | |||
| } | |||
| imageList, total, err := models.SearchImage(opts) | |||
| if err != nil { | |||
| log.Error("Can not get images:%v", err) | |||
| ctx.JSON(http.StatusOK, models.ImagesPageResult{ | |||
| Count: 0, | |||
| Images: []*models.Image{}, | |||
| }) | |||
| } else { | |||
| ctx.JSON(http.StatusOK, models.ImagesPageResult{ | |||
| Count: total, | |||
| Images: imageList, | |||
| }) | |||
| } | |||
| } | |||
| func getUID(ctx *context.APIContext) int64 { | |||
| var uid int64 = -1 | |||
| if ctx.IsSigned { | |||
| uid = ctx.User.ID | |||
| } | |||
| return uid | |||
| } | |||
| @@ -0,0 +1,71 @@ | |||
| package repo | |||
| import ( | |||
| "net/http" | |||
| "code.gitea.io/gitea/models" | |||
| "code.gitea.io/gitea/modules/context" | |||
| "code.gitea.io/gitea/modules/convert" | |||
| "code.gitea.io/gitea/modules/log" | |||
| api "code.gitea.io/gitea/modules/structs" | |||
| "code.gitea.io/gitea/routers/api/v1/utils" | |||
| ) | |||
| //标注任务可分配人员 | |||
| func ListTagger(ctx *context.APIContext) { | |||
| taggers := make([]*api.Tagger, 0) | |||
| userRemember := make(map[string]string) | |||
| collaborators, err := ctx.Repo.Repository.GetCollaborators(utils.GetListOptions(ctx)) | |||
| if err != nil { | |||
| log.Warn("ListCollaborators", err) | |||
| ctx.JSON(http.StatusOK, taggers) | |||
| return | |||
| } | |||
| for _, collaborator := range collaborators { | |||
| taggers = append(taggers, convert.ToTagger(collaborator.User)) | |||
| userRemember[collaborator.User.Name] = "" | |||
| } | |||
| teams, err := ctx.Repo.Repository.GetRepoTeams() | |||
| if err != nil { | |||
| log.Warn("ListTeams", err) | |||
| ctx.JSON(http.StatusOK, taggers) | |||
| return | |||
| } | |||
| for _, team := range teams { | |||
| team.GetMembers(&models.SearchMembersOptions{}) | |||
| for _, user := range team.Members { | |||
| if _, ok := userRemember[user.Name]; !ok { | |||
| taggers = append(taggers, convert.ToTagger(user)) | |||
| userRemember[user.Name] = "" | |||
| } | |||
| } | |||
| } | |||
| if !ctx.Repo.Owner.IsOrganization() { | |||
| if _, ok := userRemember[ctx.Repo.Owner.Name]; !ok { | |||
| taggers = append(taggers, convert.ToTagger(ctx.Repo.Owner)) | |||
| } | |||
| } | |||
| ctx.JSON(http.StatusOK, taggers) | |||
| } | |||
| func GetRight(ctx *context.APIContext) { | |||
| right := "none" | |||
| if ctx.IsUserRepoReaderSpecific(models.UnitTypeCode) { | |||
| right = "read" | |||
| } | |||
| if ctx.IsUserRepoWriter([]models.UnitType{models.UnitTypeCode}) || ctx.IsUserRepoAdmin() { | |||
| right = "write" | |||
| } | |||
| ctx.JSON(http.StatusOK, map[string]string{ | |||
| "right": right, | |||
| }) | |||
| } | |||
| @@ -12,6 +12,8 @@ import ( | |||
| "strconv" | |||
| "strings" | |||
| "code.gitea.io/gitea/services/cloudbrain/cloudbrainTask" | |||
| "code.gitea.io/gitea/modules/urfs_client/urchin" | |||
| "code.gitea.io/gitea/modules/notification" | |||
| @@ -20,7 +22,6 @@ import ( | |||
| "code.gitea.io/gitea/modules/setting" | |||
| "code.gitea.io/gitea/models" | |||
| "code.gitea.io/gitea/modules/cloudbrain" | |||
| "code.gitea.io/gitea/modules/context" | |||
| "code.gitea.io/gitea/modules/log" | |||
| "code.gitea.io/gitea/modules/modelarts" | |||
| @@ -109,39 +110,11 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) { | |||
| } | |||
| if job.Type == models.TypeCloudBrainOne { | |||
| jobResult, err := cloudbrain.GetJob(job.JobID) | |||
| if err != nil { | |||
| ctx.NotFound(err) | |||
| log.Error("GetJob failed:", err) | |||
| return | |||
| } | |||
| result, err := models.ConvertToJobResultPayload(jobResult.Payload) | |||
| job, err = cloudbrainTask.SyncCloudBrainOneStatus(job) | |||
| if err != nil { | |||
| ctx.NotFound(err) | |||
| log.Error("ConvertToJobResultPayload failed:", err) | |||
| return | |||
| } | |||
| oldStatus := job.Status | |||
| job.Status = result.JobStatus.State | |||
| if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) { | |||
| taskRoles := result.TaskRoles | |||
| taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) | |||
| job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP | |||
| job.ContainerID = taskRes.TaskStatuses[0].ContainerID | |||
| job.Status = taskRes.TaskStatuses[0].State | |||
| } | |||
| if result.JobStatus.State != string(models.JobWaiting) { | |||
| models.ParseAndSetDurationFromCloudBrainOne(result, job) | |||
| if oldStatus != job.Status { | |||
| notification.NotifyChangeCloudbrainStatus(job, oldStatus) | |||
| } | |||
| err = models.UpdateJob(job) | |||
| if err != nil { | |||
| log.Error("UpdateJob failed:", err) | |||
| } | |||
| } | |||
| } else if job.Type == models.TypeCloudBrainTwo { | |||
| err := modelarts.HandleTrainJobInfo(job) | |||
| if err != nil { | |||
| @@ -308,15 +281,6 @@ func TrainJobGetLog(ctx *context.APIContext) { | |||
| return | |||
| } | |||
| prefix := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, task.JobName, modelarts.LogPath, versionName), "/") + "/job" | |||
| _, err = storage.GetObsLogFileName(prefix) | |||
| var canLogDownload bool | |||
| if err != nil { | |||
| canLogDownload = false | |||
| } else { | |||
| canLogDownload = true | |||
| } | |||
| ctx.Data["log_file_name"] = resultLogFile.LogFileList[0] | |||
| ctx.JSON(http.StatusOK, map[string]interface{}{ | |||
| @@ -326,11 +290,23 @@ func TrainJobGetLog(ctx *context.APIContext) { | |||
| "EndLine": result.EndLine, | |||
| "Content": result.Content, | |||
| "Lines": result.Lines, | |||
| "CanLogDownload": canLogDownload, | |||
| "CanLogDownload": canLogDownload(ctx.User, task), | |||
| "StartTime": task.StartTime, | |||
| }) | |||
| } | |||
| func canLogDownload(user *models.User, task *models.Cloudbrain) bool { | |||
| if task == nil || !task.IsUserHasRight(user) { | |||
| return false | |||
| } | |||
| prefix := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, task.JobName, modelarts.LogPath, task.VersionName), "/") + "/job" | |||
| _, err := storage.GetObsLogFileName(prefix) | |||
| if err != nil { | |||
| return false | |||
| } | |||
| return true | |||
| } | |||
| func trainJobGetLogContent(jobID string, versionID int64, baseLine string, order string, lines int) (*models.GetTrainJobLogFileNamesResult, *models.GetTrainJobLogResult, error) { | |||
| resultLogFile, err := modelarts.GetTrainJobLogFileNames(jobID, strconv.FormatInt(versionID, 10)) | |||
| @@ -0,0 +1,36 @@ | |||
| package repo | |||
| import ( | |||
| "code.gitea.io/gitea/models" | |||
| "code.gitea.io/gitea/modules/context" | |||
| "code.gitea.io/gitea/modules/log" | |||
| "code.gitea.io/gitea/routers/response" | |||
| "code.gitea.io/gitea/services/cloudbrain/resource" | |||
| ) | |||
| func GetResourceSpec(ctx *context.APIContext) { | |||
| jobType := ctx.Query("jobType") | |||
| computeResource := ctx.Query("compute") | |||
| cluster := ctx.Query("cluster") | |||
| aiCenterCode := ctx.Query("center") | |||
| if jobType == "" || computeResource == "" || cluster == "" { | |||
| log.Info("GetResourceSpec api.param error") | |||
| ctx.JSON(200, response.OuterBizError(response.PARAM_ERROR)) | |||
| return | |||
| } | |||
| specs, err := resource.FindAvailableSpecs4Show(ctx.User.ID, models.FindSpecsOptions{ | |||
| JobType: models.JobType(jobType), | |||
| ComputeResource: computeResource, | |||
| Cluster: cluster, | |||
| AiCenterCode: aiCenterCode, | |||
| }) | |||
| if err != nil { | |||
| log.Error("GetResourceSpec api error. %v", err) | |||
| ctx.JSON(200, response.OuterServerError(err.Error())) | |||
| return | |||
| } | |||
| specMap := make(map[string]interface{}, 0) | |||
| specMap["specs"] = specs | |||
| ctx.JSON(200, response.OuterSuccessWithData(specMap)) | |||
| } | |||
| @@ -6,9 +6,10 @@ | |||
| package private | |||
| import ( | |||
| "code.gitea.io/gitea/routers/admin" | |||
| "strings" | |||
| "code.gitea.io/gitea/routers/admin" | |||
| "code.gitea.io/gitea/routers/repo" | |||
| "code.gitea.io/gitea/modules/log" | |||
| @@ -52,6 +53,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Get("/tool/org_stat", OrgStatisticManually) | |||
| m.Post("/tool/update_repo_visit/:date", UpdateRepoVisit) | |||
| m.Post("/task/history_handle/duration", repo.HandleTaskWithNoDuration) | |||
| m.Post("/task/history_handle/aicenter", repo.HandleTaskWithAiCenter) | |||
| m.Post("/resources/specification/handle_historical_task", admin.RefreshHistorySpec) | |||
| }, CheckInternalToken) | |||
| @@ -804,7 +804,7 @@ func createForNPU(ctx *context.Context, jobName string) error { | |||
| JobType: string(models.JobTypeModelSafety), | |||
| } | |||
| err = modelarts.GenerateInferenceJob(ctx, req) | |||
| _, err = modelarts.GenerateInferenceJob(ctx, req) | |||
| if err != nil { | |||
| log.Error("GenerateTrainJob failed:%v", err.Error()) | |||
| return err | |||
| @@ -901,7 +901,7 @@ func createForGPU(ctx *context.Context, jobName string) error { | |||
| LabelName: evaluationIndex, | |||
| } | |||
| err = cloudbrain.GenerateTask(req) | |||
| _, err = cloudbrain.GenerateTask(req) | |||
| if err != nil { | |||
| return err | |||
| } | |||
| @@ -2,7 +2,6 @@ package repo | |||
| import ( | |||
| "bufio" | |||
| "code.gitea.io/gitea/modules/urfs_client/urchin" | |||
| "encoding/json" | |||
| "errors" | |||
| "fmt" | |||
| @@ -16,6 +15,8 @@ import ( | |||
| "time" | |||
| "unicode/utf8" | |||
| "code.gitea.io/gitea/modules/urfs_client/urchin" | |||
| "code.gitea.io/gitea/modules/dataset" | |||
| "code.gitea.io/gitea/services/cloudbrain/cloudbrainTask" | |||
| @@ -398,7 +399,7 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { | |||
| } | |||
| err = cloudbrain.GenerateTask(req) | |||
| _, err = cloudbrain.GenerateTask(req) | |||
| if err != nil { | |||
| cloudBrainNewDataPrepare(ctx, jobType) | |||
| ctx.RenderWithErr(err.Error(), tpl, &form) | |||
| @@ -584,7 +585,7 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra | |||
| Spec: spec, | |||
| } | |||
| err = cloudbrain.GenerateTask(req) | |||
| _, err = cloudbrain.GenerateTask(req) | |||
| if err != nil { | |||
| cloudBrainNewDataPrepare(ctx, jobType) | |||
| ctx.RenderWithErr(err.Error(), tpl, &form) | |||
| @@ -1845,59 +1846,37 @@ func SyncCloudbrainStatus() { | |||
| continue | |||
| } | |||
| if task.Type == models.TypeCloudBrainOne { | |||
| result, err := cloudbrain.GetJob(task.JobID) | |||
| task, err = cloudbrainTask.SyncCloudBrainOneStatus(task) | |||
| if err != nil { | |||
| log.Error("GetJob(%s) failed:%v", task.JobName, err) | |||
| log.Error("Sync cloud brain one (%s) failed:%v", task.JobName, err) | |||
| continue | |||
| } | |||
| if result != nil { | |||
| jobRes, _ := models.ConvertToJobResultPayload(result.Payload) | |||
| taskRoles := jobRes.TaskRoles | |||
| taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) | |||
| oldStatus := task.Status | |||
| task.Status = taskRes.TaskStatuses[0].State | |||
| if task.Status != string(models.JobWaiting) { | |||
| models.ParseAndSetDurationFromCloudBrainOne(jobRes, task) | |||
| if task.Status != string(models.JobWaiting) { | |||
| if task.Duration >= setting.MaxDuration && task.JobType == string(models.JobTypeDebug) { | |||
| log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName) | |||
| err = cloudbrain.StopJob(task.JobID) | |||
| if err != nil { | |||
| log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err) | |||
| continue | |||
| } | |||
| oldStatus := task.Status | |||
| task.Status = string(models.JobStopped) | |||
| if task.EndTime == 0 { | |||
| task.EndTime = timeutil.TimeStampNow() | |||
| } | |||
| task.ComputeAndSetDuration() | |||
| if oldStatus != task.Status { | |||
| notification.NotifyChangeCloudbrainStatus(task, oldStatus) | |||
| } | |||
| err = models.UpdateJob(task) | |||
| if err != nil { | |||
| log.Error("UpdateJob(%s) failed:%v", task.JobName, err) | |||
| } | |||
| var maxDuration int64 | |||
| if task.JobType == string(models.JobTypeBenchmark) { | |||
| maxDuration = setting.BenchmarkMaxDuration | |||
| } else if task.JobType == string(models.JobTypeSnn4imagenet) || task.JobType == string(models.JobTypeBrainScore) { | |||
| maxDuration = setting.ModelBenchmarkMaxDuration | |||
| } else { | |||
| maxDuration = setting.MaxDuration | |||
| } | |||
| if task.Duration >= maxDuration && task.JobType != string(models.JobTypeTrain) { | |||
| log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName) | |||
| err = cloudbrain.StopJob(task.JobID) | |||
| if err != nil { | |||
| log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err) | |||
| continue | |||
| } | |||
| task.Status = string(models.JobStopped) | |||
| if task.EndTime == 0 { | |||
| task.EndTime = timeutil.TimeStampNow() | |||
| } | |||
| task.ComputeAndSetDuration() | |||
| if oldStatus != task.Status { | |||
| notification.NotifyChangeCloudbrainStatus(task, oldStatus) | |||
| } | |||
| err = models.UpdateJob(task) | |||
| if err != nil { | |||
| log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err) | |||
| continue | |||
| } | |||
| log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err) | |||
| continue | |||
| } | |||
| } | |||
| } | |||
| } else if task.Type == models.TypeCloudBrainTwo { | |||
| if task.JobType == string(models.JobTypeDebug) { | |||
| @@ -2509,7 +2488,7 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo | |||
| Spec: spec, | |||
| } | |||
| err = cloudbrain.GenerateTask(req) | |||
| _, err = cloudbrain.GenerateTask(req) | |||
| if err != nil { | |||
| cloudBrainNewDataPrepare(ctx, jobType) | |||
| ctx.RenderWithErr(err.Error(), tplCloudBrainBenchmarkNew, &form) | |||
| @@ -2663,7 +2642,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm) | |||
| Spec: spec, | |||
| } | |||
| err = cloudbrain.GenerateTask(req) | |||
| _, err = cloudbrain.GenerateTask(req) | |||
| if err != nil { | |||
| cloudBrainNewDataPrepare(ctx, jobType) | |||
| ctx.RenderWithErr(err.Error(), tpl, &form) | |||
| @@ -47,8 +47,8 @@ func newFilterPrivateAttachments(ctx *context.Context, list []*models.Attachment | |||
| permission := false | |||
| if !permission && ctx.User != nil { | |||
| isCollaborator, _ := repo.IsCollaborator(ctx.User.ID) | |||
| isInRepoTeam,_:=repo.IsInRepoTeam(ctx.User.ID) | |||
| if isCollaborator ||isInRepoTeam { | |||
| isInRepoTeam, _ := repo.IsInRepoTeam(ctx.User.ID) | |||
| if isCollaborator || isInRepoTeam { | |||
| log.Info("Collaborator user may visit the attach.") | |||
| permission = true | |||
| } | |||
| @@ -349,96 +349,6 @@ func DatasetAction(ctx *context.Context) { | |||
| } | |||
// CurrentRepoDataset lists the zip-file attachments of the current
// repository's dataset, filtered by keyword and cloudbrain type, and writes a
// paginated JSON payload of the form {result_code, data, count}, where data
// is the JSON-encoded attachment list.
func CurrentRepoDataset(ctx *context.Context) {
	page := ctx.QueryInt("page")
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")

	repo := ctx.Repo.Repository
	var datasetIDs []int64
	dataset, err := models.GetDatasetByRepo(repo)
	if err != nil {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err)))
		return
	}
	datasetIDs = append(datasetIDs, dataset.ID)
	// Restrict the query to this repo's dataset; JustNeedZipFile limits the
	// results to zip attachments, NeedRepoInfo attaches repo metadata for display.
	datasets, count, err := models.Attachments(&models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     page,
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         keyword,
		NeedDatasetIDs:  true,
		DatasetIDs:      datasetIDs,
		Type:            cloudbrainType,
		NeedIsPrivate:   false,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	data, err := json.Marshal(datasets)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})
}
// MyDatasets lists the zip-file attachments uploaded by the current user,
// filtered by keyword, cloudbrain type and the optional "recommend" flag, and
// writes a paginated {result_code, data, count} JSON payload.
func MyDatasets(ctx *context.Context) {
	page := ctx.QueryInt("page")
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")

	// Scope to attachments uploaded by the signed-in user, across all datasets.
	uploaderID := ctx.User.ID
	datasets, count, err := models.Attachments(&models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     page,
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         keyword,
		NeedDatasetIDs:  false,
		UploaderID:      uploaderID,
		Type:            cloudbrainType,
		NeedIsPrivate:   false,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	data, err := json.Marshal(datasets)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})
}
| func datasetMultiple(ctx *context.Context, opts *models.SearchDatasetOptions) { | |||
| page := ctx.QueryInt("page") | |||
| keyword := strings.Trim(ctx.Query("q"), " ") | |||
| @@ -593,180 +503,6 @@ func ReferenceDatasetData(ctx *context.Context) { | |||
| } | |||
// PublicDataset lists publicly visible zip-file attachments (is_private =
// false), filtered by keyword, cloudbrain type and the optional "recommend"
// flag, and writes a paginated {result_code, data, count} JSON payload.
func PublicDataset(ctx *context.Context) {
	page := ctx.QueryInt("page")
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")

	// NeedIsPrivate + IsPrivate:false means "only public attachments".
	datasets, count, err := models.Attachments(&models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     page,
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         keyword,
		NeedDatasetIDs:  false,
		NeedIsPrivate:   true,
		IsPrivate:       false,
		Type:            cloudbrainType,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	data, err := json.Marshal(datasets)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})
}
// MyFavoriteDataset lists the attachments of the datasets the current user
// has starred. Starred datasets are split into two groups: those the user
// owns or collaborates on (visible regardless of privacy) and the rest
// (public only). The two result sets are merged, sorted by attachment
// creation time (newest first), paginated in memory and written as a
// {result_code, data, count} JSON payload.
func MyFavoriteDataset(ctx *context.Context) {
	UserId := ctx.User.ID
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")
	var NotColDatasetIDs []int64
	var IsColDatasetIDs []int64
	datasetStars, err := models.GetDatasetStarByUser(ctx.User)
	if err != nil {
		// NOTE(review): this branch writes TWO responses (BaseErrorMessage,
		// then the result_code map) on the same request — the second write is
		// likely unintended; confirm which payload clients expect.
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetStarByUser failed", err)))
		log.Error("GetDatasetStarByUser failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	//If the dataset has been deleted, it will not be counted
	for _, datasetStar := range datasetStars {
		IsExist, repo, dataset, err := IsDatasetStarExist(datasetStar)
		if err != nil {
			// Lookup failure is logged but the star is simply skipped below
			// (IsExist is false on error).
			log.Error("IsDatasetStarExist error:", err.Error())
		}
		if IsExist {
			// Owners and collaborators may see private attachments of the
			// starred dataset; everyone else only sees public ones.
			DatasetIsCollaborator := DatasetIsCollaborator(ctx, dataset)
			if repo.OwnerID == ctx.User.ID || DatasetIsCollaborator {
				IsColDatasetIDs = append(IsColDatasetIDs, datasetStar.DatasetID)
			} else {
				NotColDatasetIDs = append(NotColDatasetIDs, datasetStar.DatasetID)
			}
		}
	}

	// Non-collaborator datasets: restrict to public attachments.
	NotColDatasets, NotColcount, err := models.Attachments(&models.AttachmentsOptions{
		Keyword:         keyword,
		NeedDatasetIDs:  true,
		DatasetIDs:      NotColDatasetIDs,
		NeedIsPrivate:   true,
		IsPrivate:       false,
		Type:            cloudbrainType,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
		UserId:          UserId,
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}
	//If is collaborator, there is no need to determine whether the dataset is private or public
	IsColDatasets, IsColcount, err := models.Attachments(&models.AttachmentsOptions{
		Keyword:         keyword,
		NeedDatasetIDs:  true,
		DatasetIDs:      IsColDatasetIDs,
		NeedIsPrivate:   false,
		Type:            cloudbrainType,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
		UserId:          UserId,
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}
	// Merge both groups, then sort newest-first by attachment creation time.
	for _, NotColDataset := range NotColDatasets {
		IsColDatasets = append(IsColDatasets, NotColDataset)
	}
	datasets := IsColDatasets
	// count is the TOTAL across both queries; the response body below carries
	// only one in-memory page of it.
	count := NotColcount + IsColcount
	sort.Slice(datasets, func(i, j int) bool {
		return datasets[i].Attachment.CreatedUnix > datasets[j].Attachment.CreatedUnix
	})
	page := ctx.QueryInt("page")
	if page <= 0 {
		page = 1
	}
	pagesize := ctx.QueryInt("pagesize")
	if pagesize <= 0 {
		pagesize = 5
	}
	// In-memory pagination; nil means the requested page is past the end.
	pageDatasetsInfo := getPageDatasets(datasets, page, pagesize)
	if pageDatasetsInfo == nil {
		ctx.JSON(200, map[string]string{
			"result_code": "0",
			"data":        "[]",
			"count":       strconv.FormatInt(count, 10),
		})
		return
	}
	data, err := json.Marshal(pageDatasetsInfo)
	// NOTE(review): logs the raw marshalled bytes on every request; consider
	// removing or demoting this once debugging is done.
	log.Info("data:", data)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})
}
| func getPageDatasets(AttachmentInfos []*models.AttachmentInfo, page int, pagesize int) []*models.AttachmentInfo { | |||
| begin := (page - 1) * pagesize | |||
| end := (page) * pagesize | |||
| if begin > len(AttachmentInfos)-1 { | |||
| return nil | |||
| } | |||
| if end > len(AttachmentInfos)-1 { | |||
| return AttachmentInfos[begin:] | |||
| } else { | |||
| return AttachmentInfos[begin:end] | |||
| } | |||
| } | |||
// getTotalPage returns the number of pages needed to display total items at
// pageSize items per page (ceiling division).
//
// Fix over the original: a non-positive pageSize caused a division-by-zero
// panic; it now returns 0. Negative totals (not meaningful for a count)
// also yield 0.
func getTotalPage(total int64, pageSize int) int {
	if pageSize <= 0 {
		return 0
	}
	return (int(total) + pageSize - 1) / pageSize
}
| func GetDatasetStatus(ctx *context.Context) { | |||
| var ( | |||
| @@ -791,55 +527,3 @@ func GetDatasetStatus(ctx *context.Context) { | |||
| "AttachmentStatus": fmt.Sprint(attachment.DecompressState), | |||
| }) | |||
| } | |||
| func DatasetIsCollaborator(ctx *context.Context, dataset *models.Dataset) bool { | |||
| repo, err := models.GetRepositoryByID(dataset.RepoID) | |||
| if err != nil { | |||
| log.Error("query repo error:", err.Error()) | |||
| } else { | |||
| repo.GetOwner() | |||
| if ctx.User != nil { | |||
| if repo.Owner.IsOrganization() { | |||
| org := repo.Owner | |||
| org.Teams, err = org.GetUserTeams(ctx.User.ID) | |||
| if err != nil { | |||
| log.Error("GetUserTeams error:", err.Error()) | |||
| return false | |||
| } | |||
| if org.IsUserPartOfOrg(ctx.User.ID) { | |||
| for _, t := range org.Teams { | |||
| if t.IsMember(ctx.User.ID) && t.HasRepository(repo.ID) { | |||
| return true | |||
| } | |||
| } | |||
| isOwner, _ := models.IsOrganizationOwner(repo.OwnerID, ctx.User.ID) | |||
| if isOwner { | |||
| return isOwner | |||
| } | |||
| return false | |||
| } | |||
| } | |||
| isCollaborator, _ := repo.IsCollaborator(ctx.User.ID) | |||
| if isCollaborator { | |||
| return true | |||
| } | |||
| } | |||
| } | |||
| return false | |||
| } | |||
| func IsDatasetStarExist(datasetStar *models.DatasetStar) (bool, *models.Repository, *models.Dataset, error) { | |||
| dataset, err := models.GetDatasetByID(datasetStar.DatasetID) | |||
| if err != nil { | |||
| log.Error("query dataset error:", err.Error()) | |||
| return false, nil, nil, err | |||
| } else { | |||
| repo, err := models.GetRepositoryByID(dataset.RepoID) | |||
| if err != nil { | |||
| log.Error("GetRepositoryByID error:", err.Error()) | |||
| return false, nil, nil, err | |||
| } | |||
| return true, repo, dataset, nil | |||
| } | |||
| } | |||
| @@ -1,7 +1,6 @@ | |||
| package repo | |||
| import ( | |||
| "code.gitea.io/gitea/modules/urfs_client/urchin" | |||
| "encoding/json" | |||
| "errors" | |||
| "fmt" | |||
| @@ -13,6 +12,9 @@ import ( | |||
| "strings" | |||
| "time" | |||
| "code.gitea.io/gitea/modules/urfs_client/urchin" | |||
| "code.gitea.io/gitea/routers/response" | |||
| "code.gitea.io/gitea/services/cloudbrain/cloudbrainTask" | |||
| "code.gitea.io/gitea/modules/dataset" | |||
| @@ -474,7 +476,7 @@ func grampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain | |||
| } | |||
| err = grampus.GenerateTrainJob(ctx, req) | |||
| _, err = grampus.GenerateTrainJob(ctx, req) | |||
| if err != nil { | |||
| log.Error("GenerateTrainJob failed:%v", err.Error(), ctx.Data["MsgID"]) | |||
| grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) | |||
| @@ -509,28 +511,6 @@ func GrampusTrainJobVersionCreate(ctx *context.Context, form auth.CreateGrampusT | |||
| } | |||
| func checkSpecialPool(ctx *context.Context, resourceType string) string { | |||
| grampus.InitSpecialPool() | |||
| if grampus.SpecialPools != nil { | |||
| for _, pool := range grampus.SpecialPools.Pools { | |||
| if pool.IsExclusive && pool.Type == resourceType { | |||
| org, _ := models.GetOrgByName(pool.Org) | |||
| if org != nil { | |||
| isOrgMember, _ := models.IsOrganizationMember(org.ID, ctx.User.ID) | |||
| if !isOrgMember { | |||
| return ctx.Tr("repo.grampus.no_operate_right") | |||
| } | |||
| } | |||
| } | |||
| } | |||
| } | |||
| return "" | |||
| } | |||
| func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrainJobForm) { | |||
| ctx.Data["IsCreate"] = true | |||
| grampusTrainJobNpuCreate(ctx, form) | |||
| @@ -733,7 +713,7 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain | |||
| req.PreTrainModelPath = preTrainModelPath | |||
| } | |||
| err = grampus.GenerateTrainJob(ctx, req) | |||
| _, err = grampus.GenerateTrainJob(ctx, req) | |||
| if err != nil { | |||
| log.Error("GenerateTrainJob failed:%v", err.Error()) | |||
| grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU) | |||
| @@ -885,10 +865,10 @@ func GrampusTrainJobShow(ctx *context.Context) { | |||
| } | |||
| } | |||
| } | |||
| err = models.UpdateJob(task) | |||
| if err != nil { | |||
| log.Error("UpdateJob failed:" + err.Error()) | |||
| } | |||
| } | |||
| err = models.UpdateJob(task) | |||
| if err != nil { | |||
| log.Error("UpdateJob failed:" + err.Error()) | |||
| } | |||
| } | |||
| } | |||
| @@ -960,15 +940,14 @@ func GrampusGetLog(ctx *context.Context) { | |||
| content, err := grampus.GetTrainJobLog(job.JobID) | |||
| if err != nil { | |||
| log.Error("GetTrainJobLog failed: %v", err, ctx.Data["MsgID"]) | |||
| ctx.ServerError(err.Error(), err) | |||
| ctx.JSON(http.StatusOK, map[string]interface{}{ | |||
| "JobName": job.JobName, | |||
| "Content": "", | |||
| "CanLogDownload": false, | |||
| }) | |||
| return | |||
| } | |||
| var canLogDownload bool | |||
| if err != nil { | |||
| canLogDownload = false | |||
| } else { | |||
| canLogDownload = true | |||
| } | |||
| canLogDownload := err == nil && job.IsUserHasRight(ctx.User) | |||
| ctx.JSON(http.StatusOK, map[string]interface{}{ | |||
| "JobName": job.JobName, | |||
| "Content": content, | |||
| @@ -978,6 +957,28 @@ func GrampusGetLog(ctx *context.Context) { | |||
| return | |||
| } | |||
| func GrampusMetrics(ctx *context.Context) { | |||
| jobID := ctx.Params(":jobid") | |||
| job, err := models.GetCloudbrainByJobID(jobID) | |||
| if err != nil { | |||
| log.Error("GetCloudbrainByJobID failed: %v", err, ctx.Data["MsgID"]) | |||
| ctx.ServerError(err.Error(), err) | |||
| return | |||
| } | |||
| result, err := grampus.GetGrampusMetrics(job.JobID) | |||
| if err != nil { | |||
| log.Error("GetTrainJobLog failed: %v", err, ctx.Data["MsgID"]) | |||
| } | |||
| ctx.JSON(http.StatusOK, map[string]interface{}{ | |||
| "JobID": jobID, | |||
| "Interval": result.Interval, | |||
| "MetricsInfo": result.MetricsInfo, | |||
| }) | |||
| return | |||
| } | |||
| func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName, pretrainModelPath, pretrainModelFileName, modelRemoteObsUrl string) (string, error) { | |||
| var command string | |||
| @@ -1003,7 +1004,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo | |||
| if processorType == grampus.ProcessorTypeNPU { | |||
| //no need to process | |||
| } else if processorType == grampus.ProcessorTypeGPU { | |||
| unZipDatasetCommand := generateDatasetUnzipCommand(datasetName) | |||
| unZipDatasetCommand := cloudbrainTask.GenerateDatasetUnzipCommand(datasetName) | |||
| commandUnzip := "cd " + workDir + "code;unzip -q master.zip;rm -f master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand | |||
| command += commandUnzip | |||
| } | |||
| @@ -1077,31 +1078,6 @@ func processPretrainModelParameter(pretrainModelPath string, pretrainModelFileNa | |||
| return commandDownloadTemp | |||
| } | |||
// generateDatasetUnzipCommand builds the shell snippet that unpacks the
// uploaded dataset archive(s) and deletes each archive afterwards.
// datasetName may contain several archive names separated by ';'.
// A single tarball is flattened one directory level; in the multi-archive
// case each zip is extracted into a directory named after the archive.
func generateDatasetUnzipCommand(datasetName string) string {
	names := strings.Split(datasetName, ";")
	var cmd strings.Builder

	if len(names) == 1 { // single archive
		if strings.HasSuffix(names[0], ".tar.gz") {
			cmd.WriteString("tar --strip-components=1 -zxvf '" + datasetName + "';")
		} else {
			cmd.WriteString("unzip -q '" + datasetName + "';")
		}
		cmd.WriteString("rm -f '" + datasetName + "';")
		return cmd.String()
	}

	// multiple archives
	for _, name := range names {
		if strings.HasSuffix(name, ".tar.gz") {
			cmd.WriteString("tar -zxvf '" + name + "';")
		} else {
			cmd.WriteString("unzip -q '" + name + "' -d './" + strings.TrimSuffix(name, ".zip") + "';")
		}
		cmd.WriteString("rm -f '" + name + "';")
	}
	return cmd.String()
}
| func downloadZipCode(ctx *context.Context, codePath, branchName string) error { | |||
| archiveType := git.ZIP | |||
| archivePath := codePath | |||
| @@ -1149,3 +1125,38 @@ func downloadZipCode(ctx *context.Context, codePath, branchName string) error { | |||
| return nil | |||
| } | |||
| func HandleTaskWithAiCenter(ctx *context.Context) { | |||
| log.Info("HandleTaskWithAiCenter start") | |||
| updateCounts := 0 | |||
| cloudBrains, err := models.GetC2NetWithAiCenterWrongJob() | |||
| if err != nil { | |||
| log.Error("GetC2NetWithAiCenterWrongJob failed:" + err.Error()) | |||
| return | |||
| } | |||
| if len(cloudBrains) == 0 { | |||
| log.Info("HandleC2NetWithAiCenterWrongJob:no task need handle") | |||
| return | |||
| } | |||
| cloudBrainCounts := len(cloudBrains) | |||
| for _, task := range cloudBrains { | |||
| result, err := grampus.GetJob(task.JobID) | |||
| if err != nil { | |||
| log.Error("GetJob failed:" + err.Error()) | |||
| continue | |||
| } | |||
| if result != nil { | |||
| if len(result.JobInfo.Tasks[0].CenterID) == 1 && len(result.JobInfo.Tasks[0].CenterName) == 1 { | |||
| task.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0] | |||
| } | |||
| err = models.UpdateJob(task) | |||
| if err != nil { | |||
| log.Error("UpdateJob failed:" + err.Error()) | |||
| } | |||
| updateCounts++ | |||
| } | |||
| } | |||
| r := make(map[string]interface{}, 0) | |||
| r["cloudBrainCounts"] = cloudBrainCounts | |||
| r["updateCounts"] = updateCounts | |||
| ctx.JSON(http.StatusOK, response.SuccessWithData(r)) | |||
| } | |||
| @@ -1230,7 +1230,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) | |||
| return | |||
| } | |||
| err = modelarts.GenerateTrainJob(ctx, req) | |||
| _, err = modelarts.GenerateTrainJob(ctx, req) | |||
| if err != nil { | |||
| log.Error("GenerateTrainJob failed:%v", err.Error()) | |||
| trainJobNewDataPrepare(ctx) | |||
| @@ -2205,7 +2205,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference | |||
| req.UserCommand = userCommand | |||
| req.UserImageUrl = userImageUrl | |||
| err = modelarts.GenerateInferenceJob(ctx, req) | |||
| _, err = modelarts.GenerateInferenceJob(ctx, req) | |||
| if err != nil { | |||
| log.Error("GenerateTrainJob failed:%v", err.Error()) | |||
| inferenceJobErrorNewDataPrepare(ctx, form) | |||
| @@ -0,0 +1,30 @@ | |||
| package response | |||
// AiforgeOuterResponse is the envelope used by the outer (public) API:
// a numeric code, a human-readable message and an optional payload.
type AiforgeOuterResponse struct {
	Code int         `json:"code"`
	Msg  string      `json:"msg"`
	Data interface{} `json:"data"`
}

// OuterSuccess returns the standard success envelope with no payload.
func OuterSuccess() *AiforgeOuterResponse {
	return &AiforgeOuterResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS}
}

// OuterError returns an error envelope with the given code and message.
func OuterError(code int, msg string) *AiforgeOuterResponse {
	return &AiforgeOuterResponse{Code: code, Msg: msg}
}

// OuterServerError returns an error envelope using the default error code.
func OuterServerError(msg string) *AiforgeOuterResponse {
	return &AiforgeOuterResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: msg}
}

// OuterBizError converts a BizError into an outer-API error envelope.
func OuterBizError(err *BizError) *AiforgeOuterResponse {
	return &AiforgeOuterResponse{Code: err.Code, Msg: err.Err}
}

// OuterSuccessWithData returns the success envelope carrying data.
func OuterSuccessWithData(data interface{}) *AiforgeOuterResponse {
	return &AiforgeOuterResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS, Data: data}
}

// OuterErrorWithData returns an error envelope carrying data.
func OuterErrorWithData(code int, msg string, data interface{}) *AiforgeOuterResponse {
	return &AiforgeOuterResponse{Code: code, Msg: msg, Data: data}
}
| @@ -24,10 +24,14 @@ func ServerError(msg string) *AiforgeResponse { | |||
| return &AiforgeResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: msg} | |||
| } | |||
| func ResponseError(err *BizError) *AiforgeResponse { | |||
| func ResponseBizError(err *BizError) *AiforgeResponse { | |||
| return &AiforgeResponse{Code: err.Code, Msg: err.Err} | |||
| } | |||
| func ResponseError(err error) *AiforgeResponse { | |||
| return &AiforgeResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: err.Error()} | |||
| } | |||
| func SuccessWithData(data interface{}) *AiforgeResponse { | |||
| return &AiforgeResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS, Data: data} | |||
| } | |||
| @@ -1,6 +1,7 @@ | |||
| package response | |||
| //repo response | |||
| var PARAM_ERROR = &BizError{Code: 9001, Err: "param error"} | |||
| var RESOURCE_QUEUE_NOT_AVAILABLE = &BizError{Code: 1001, Err: "resource queue not available"} | |||
| var SPECIFICATION_NOT_EXIST = &BizError{Code: 1002, Err: "specification not exist"} | |||
| var SPECIFICATION_NOT_AVAILABLE = &BizError{Code: 1003, Err: "specification not available"} | |||
| @@ -11,4 +12,3 @@ var BADGES_STILL_HAS_USERS = &BizError{Code: 1005, Err: "Please delete users of | |||
| //common response | |||
| var SYSTEM_ERROR = &BizError{Code: 9009, Err: "System error.Please try again later"} | |||
| var INSUFFICIENT_PERMISSION = &BizError{Code: 9003, Err: "insufficient permissions"} | |||
| var PARAM_ERROR = &BizError{Code: 9001, Err: "param error permissions"} | |||
| @@ -645,6 +645,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Group("/specification", func() { | |||
| m.Get("", admin.GetSpecificationPage) | |||
| m.Get("/list", admin.GetResourceSpecificationList) | |||
| m.Get("/list/all", admin.GetAllResourceSpecificationList) | |||
| m.Get("/scenes/:id", admin.GetResourceSpecificationScenes) | |||
| m.Post("/grampus/sync", admin.SyncGrampusSpecs) | |||
| m.Post("/add", binding.Bind(models.ResourceSpecificationReq{}), admin.AddResourceSpecification) | |||
| @@ -1127,10 +1128,6 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Get("/edit/:id", reqRepoDatasetWriter, repo.EditDataset) | |||
| m.Post("/reference_datasets", reqRepoDatasetWriterJson, bindIgnErr(auth.ReferenceDatasetForm{}), repo.ReferenceDatasetPost) | |||
| m.Post("/edit", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost) | |||
| m.Get("/current_repo", repo.CurrentRepoDataset) | |||
| m.Get("/my_datasets", repo.MyDatasets) | |||
| m.Get("/public_datasets", repo.PublicDataset) | |||
| m.Get("/my_favorite", repo.MyFavoriteDataset) | |||
| m.Get("/current_repo_m", repo.CurrentRepoDatasetMultiple) | |||
| m.Get("/my_datasets_m", repo.MyDatasetsMultiple) | |||
| @@ -1497,6 +1494,12 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Get("/record/list", point.GetPointRecordList) | |||
| }, reqSignIn) | |||
| m.Group("/resources", func() { | |||
| m.Group("/queue", func() { | |||
| m.Get("/centers", admin.GetResourceAiCenters) | |||
| }) | |||
| }) | |||
| if setting.API.EnableSwagger { | |||
| m.Get("/swagger.v1.json", templates.JSONRenderer(), routers.SwaggerV1Json) | |||
| } | |||
| @@ -132,11 +132,11 @@ func getNotifications(c *context.Context) { | |||
| } | |||
| c.Data["Title"] = c.Tr("notifications") | |||
| //c.Data["Keyword"] = keyword | |||
| c.Data["Type"] = keyword | |||
| c.Data["Status"] = status | |||
| c.Data["Notifications"] = notifications | |||
| pager.SetDefaultParams(c) | |||
| pager.AddParam(c, "q", "Type") | |||
| c.Data["Page"] = pager | |||
| } | |||
| @@ -14,28 +14,28 @@ type StatusInfo struct { | |||
| ComputeResource string | |||
| } | |||
| var cloudbrainOneNotFinalStatuses = []string{string(models.JobWaiting), string(models.JobRunning)} | |||
| var cloudbrainTwoNotFinalStatuses = []string{string(models.ModelArtsTrainJobInit), string(models.ModelArtsTrainJobImageCreating), string(models.ModelArtsTrainJobSubmitTrying), string(models.ModelArtsTrainJobWaiting), string(models.ModelArtsTrainJobRunning), string(models.ModelArtsTrainJobScaling), string(models.ModelArtsTrainJobCheckInit), string(models.ModelArtsTrainJobCheckRunning), string(models.ModelArtsTrainJobCheckRunningCompleted)} | |||
| var grampusTwoNotFinalStatuses = []string{models.GrampusStatusWaiting, models.GrampusStatusRunning} | |||
| var CloudbrainOneNotFinalStatuses = []string{string(models.JobWaiting), string(models.JobRunning)} | |||
| var CloudbrainTwoNotFinalStatuses = []string{string(models.ModelArtsTrainJobInit), string(models.ModelArtsTrainJobImageCreating), string(models.ModelArtsTrainJobSubmitTrying), string(models.ModelArtsTrainJobWaiting), string(models.ModelArtsTrainJobRunning), string(models.ModelArtsTrainJobScaling), string(models.ModelArtsTrainJobCheckInit), string(models.ModelArtsTrainJobCheckRunning), string(models.ModelArtsTrainJobCheckRunningCompleted)} | |||
| var GrampusNotFinalStatuses = []string{models.GrampusStatusWaiting, models.GrampusStatusRunning} | |||
| var StatusInfoDict = map[string]StatusInfo{string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeCloudBrainOne): { | |||
| CloudBrainTypes: []int{models.TypeCloudBrainOne}, | |||
| JobType: []models.JobType{models.JobTypeDebug}, | |||
| NotFinalStatuses: cloudbrainOneNotFinalStatuses, | |||
| NotFinalStatuses: CloudbrainOneNotFinalStatuses, | |||
| ComputeResource: models.GPUResource, | |||
| }, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeCloudBrainOne): { | |||
| CloudBrainTypes: []int{models.TypeCloudBrainOne}, | |||
| JobType: []models.JobType{models.JobTypeTrain}, | |||
| NotFinalStatuses: cloudbrainOneNotFinalStatuses, | |||
| NotFinalStatuses: CloudbrainOneNotFinalStatuses, | |||
| ComputeResource: models.GPUResource, | |||
| }, string(models.JobTypeInference) + "-" + strconv.Itoa(models.TypeCloudBrainOne): { | |||
| CloudBrainTypes: []int{models.TypeCloudBrainOne}, | |||
| JobType: []models.JobType{models.JobTypeInference}, | |||
| NotFinalStatuses: cloudbrainOneNotFinalStatuses, | |||
| NotFinalStatuses: CloudbrainOneNotFinalStatuses, | |||
| ComputeResource: models.GPUResource, | |||
| }, string(models.JobTypeBenchmark) + "-" + strconv.Itoa(models.TypeCloudBrainOne): { | |||
| CloudBrainTypes: []int{models.TypeCloudBrainOne}, | |||
| JobType: []models.JobType{models.JobTypeBenchmark, models.JobTypeBrainScore, models.JobTypeSnn4imagenet}, | |||
| NotFinalStatuses: cloudbrainOneNotFinalStatuses, | |||
| NotFinalStatuses: CloudbrainOneNotFinalStatuses, | |||
| ComputeResource: models.GPUResource, | |||
| }, string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): { | |||
| CloudBrainTypes: []int{models.TypeCloudBrainTwo, models.TypeCDCenter}, | |||
| @@ -45,22 +45,22 @@ var StatusInfoDict = map[string]StatusInfo{string(models.JobTypeDebug) + "-" + s | |||
| }, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): { | |||
| CloudBrainTypes: []int{models.TypeCloudBrainTwo}, | |||
| JobType: []models.JobType{models.JobTypeTrain}, | |||
| NotFinalStatuses: cloudbrainTwoNotFinalStatuses, | |||
| NotFinalStatuses: CloudbrainTwoNotFinalStatuses, | |||
| ComputeResource: models.NPUResource, | |||
| }, string(models.JobTypeInference) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): { | |||
| CloudBrainTypes: []int{models.TypeCloudBrainTwo}, | |||
| JobType: []models.JobType{models.JobTypeInference}, | |||
| NotFinalStatuses: cloudbrainTwoNotFinalStatuses, | |||
| NotFinalStatuses: CloudbrainTwoNotFinalStatuses, | |||
| ComputeResource: models.NPUResource, | |||
| }, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.GPUResource: { | |||
| CloudBrainTypes: []int{models.TypeC2Net}, | |||
| JobType: []models.JobType{models.JobTypeTrain}, | |||
| NotFinalStatuses: grampusTwoNotFinalStatuses, | |||
| NotFinalStatuses: GrampusNotFinalStatuses, | |||
| ComputeResource: models.GPUResource, | |||
| }, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.NPUResource: { | |||
| CloudBrainTypes: []int{models.TypeC2Net}, | |||
| JobType: []models.JobType{models.JobTypeTrain}, | |||
| NotFinalStatuses: grampusTwoNotFinalStatuses, | |||
| NotFinalStatuses: GrampusNotFinalStatuses, | |||
| ComputeResource: models.NPUResource, | |||
| }} | |||
| @@ -71,7 +71,7 @@ func GetNotFinalStatusTaskCount(uid int64, cloudbrainType int, jobType string, c | |||
| } | |||
| key := jobNewType + "-" + strconv.Itoa(cloudbrainType) | |||
| if len(computeResource) > 0 { | |||
| if len(computeResource) > 0 && cloudbrainType == models.TypeC2Net { | |||
| key = key + "-" + computeResource[0] | |||
| } | |||
| @@ -0,0 +1,631 @@ | |||
| package cloudbrainTask | |||
| import ( | |||
| "bufio" | |||
| "encoding/json" | |||
| "errors" | |||
| "fmt" | |||
| "io" | |||
| "io/ioutil" | |||
| "net/http" | |||
| "os" | |||
| "path" | |||
| "strconv" | |||
| "strings" | |||
| "unicode/utf8" | |||
| "code.gitea.io/gitea/modules/modelarts" | |||
| "code.gitea.io/gitea/modules/git" | |||
| api "code.gitea.io/gitea/modules/structs" | |||
| "code.gitea.io/gitea/models" | |||
| "code.gitea.io/gitea/modules/cloudbrain" | |||
| "code.gitea.io/gitea/modules/context" | |||
| "code.gitea.io/gitea/modules/log" | |||
| "code.gitea.io/gitea/modules/redis/redis_key" | |||
| "code.gitea.io/gitea/modules/redis/redis_lock" | |||
| "code.gitea.io/gitea/modules/setting" | |||
| "code.gitea.io/gitea/modules/storage" | |||
| "code.gitea.io/gitea/modules/util" | |||
| "code.gitea.io/gitea/services/cloudbrain/resource" | |||
| "code.gitea.io/gitea/services/reward/point/account" | |||
| ) | |||
| const CLONE_FILE_PREFIX = "file:///" | |||
| func CloudBrainInferenceJobCreate(ctx *context.Context, option api.CreateTrainJobOption) { | |||
| displayJobName := option.DisplayJobName | |||
| jobName := util.ConvertDisplayJobNameToJobName(displayJobName) | |||
| image := strings.TrimSpace(option.Image) | |||
| uuid := option.Attachment | |||
| jobType := string(models.JobTypeInference) | |||
| codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath | |||
| branchName := option.BranchName | |||
| bootFile := strings.TrimSpace(option.BootFile) | |||
| labelName := option.LabelName | |||
| repo := ctx.Repo.Repository | |||
| lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), jobType, displayJobName)) | |||
| defer lock.UnLock() | |||
| isOk, err := lock.Lock(models.CloudbrainKeyDuration) | |||
| if !isOk { | |||
| log.Error("lock processed failed:%v", err, ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err"))) | |||
| return | |||
| } | |||
| ckptUrl := setting.Attachment.Minio.RealPath + option.PreTrainModelUrl + option.CkptName | |||
| log.Info("ckpt url:" + ckptUrl) | |||
| command, err := getInferenceJobCommand(option) | |||
| if err != nil { | |||
| log.Error("getTrainJobCommand failed: %v", err) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error())) | |||
| return | |||
| } | |||
| tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName) | |||
| if err == nil { | |||
| if len(tasks) != 0 { | |||
| log.Error("the job name did already exist", ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("the job name did already exist")) | |||
| return | |||
| } | |||
| } else { | |||
| if !models.IsErrJobNotExist(err) { | |||
| log.Error("system error, %v", err, ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error")) | |||
| return | |||
| } | |||
| } | |||
| if !jobNamePattern.MatchString(displayJobName) { | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_jobname_err"))) | |||
| return | |||
| } | |||
| bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName) | |||
| if err != nil || !bootFileExist { | |||
| log.Error("Get bootfile error:", err, ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_bootfile_err"))) | |||
| return | |||
| } | |||
| count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainOne, jobType) | |||
| if err != nil { | |||
| log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error")) | |||
| return | |||
| } else { | |||
| if count >= 1 { | |||
| log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain.morethanonejob"))) | |||
| return | |||
| } | |||
| } | |||
| if branchName == "" { | |||
| branchName = cloudbrain.DefaultBranchName | |||
| } | |||
| errStr := loadCodeAndMakeModelPath(repo, codePath, branchName, jobName, cloudbrain.ResultPath) | |||
| if errStr != "" { | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr(errStr))) | |||
| return | |||
| } | |||
| commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName) | |||
| datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid) | |||
| if err != nil { | |||
| log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.error.dataset_select"))) | |||
| return | |||
| } | |||
| spec, err := resource.GetAndCheckSpec(ctx.User.ID, option.SpecId, models.FindSpecsOptions{ | |||
| JobType: models.JobTypeInference, | |||
| ComputeResource: models.GPU, | |||
| Cluster: models.OpenICluster, | |||
| AiCenterCode: models.AICenterOfCloudBrainOne}) | |||
| if err != nil || spec == nil { | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Resource specification is not available")) | |||
| return | |||
| } | |||
| if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) { | |||
| log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance"))) | |||
| return | |||
| } | |||
| req := cloudbrain.GenerateCloudBrainTaskReq{ | |||
| Ctx: ctx, | |||
| DisplayJobName: displayJobName, | |||
| JobName: jobName, | |||
| Image: image, | |||
| Command: command, | |||
| Uuids: uuid, | |||
| DatasetNames: datasetNames, | |||
| DatasetInfos: datasetInfos, | |||
| CodePath: storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"), | |||
| ModelPath: setting.Attachment.Minio.RealPath + option.PreTrainModelUrl, | |||
| BenchmarkPath: storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"), | |||
| Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"), | |||
| BrainScorePath: storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"), | |||
| JobType: jobType, | |||
| Description: option.Description, | |||
| BranchName: branchName, | |||
| BootFile: option.BootFile, | |||
| Params: option.Params, | |||
| CommitID: commitID, | |||
| ResultPath: storage.GetMinioPath(jobName, cloudbrain.ResultPath+"/"), | |||
| ModelName: option.ModelName, | |||
| ModelVersion: option.ModelVersion, | |||
| CkptName: option.CkptName, | |||
| TrainUrl: option.PreTrainModelUrl, | |||
| LabelName: labelName, | |||
| Spec: spec, | |||
| } | |||
| jobId, err := cloudbrain.GenerateTask(req) | |||
| if err != nil { | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error())) | |||
| return | |||
| } | |||
| ctx.JSON(http.StatusOK, models.BaseMessageApi{Code: 0, Message: jobId}) | |||
| } | |||
| func ModelArtsInferenceJobCreate(ctx *context.Context, option api.CreateTrainJobOption) { | |||
| ctx.Data["PageIsTrainJob"] = true | |||
| VersionOutputPath := modelarts.GetOutputPathByCount(modelarts.TotalVersionCount) | |||
| displayJobName := option.DisplayJobName | |||
| jobName := util.ConvertDisplayJobNameToJobName(displayJobName) | |||
| uuid := option.Attachment | |||
| description := option.Description | |||
| workServerNumber := option.WorkServerNumber | |||
| engineID, _ := strconv.Atoi(option.ImageID) | |||
| bootFile := strings.TrimSpace(option.BootFile) | |||
| params := option.Params | |||
| repo := ctx.Repo.Repository | |||
| codeLocalPath := setting.JobPath + jobName + modelarts.CodePath | |||
| codeObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.CodePath | |||
| resultObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.ResultPath + VersionOutputPath + "/" | |||
| logObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.LogPath + VersionOutputPath + "/" | |||
| //dataPath := "/" + setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/" | |||
| branchName := option.BranchName | |||
| EngineName := option.Image | |||
| LabelName := option.LabelName | |||
| isLatestVersion := modelarts.IsLatestVersion | |||
| VersionCount := modelarts.VersionCountOne | |||
| trainUrl := option.PreTrainModelUrl | |||
| modelName := option.ModelName | |||
| modelVersion := option.ModelVersion | |||
| ckptName := option.CkptName | |||
| ckptUrl := "/" + option.PreTrainModelUrl + option.CkptName | |||
| errStr := checkInferenceJobMultiNode(ctx.User.ID, option.WorkServerNumber) | |||
| if errStr != "" { | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr(errStr))) | |||
| return | |||
| } | |||
| lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeInference), displayJobName)) | |||
| isOk, err := lock.Lock(models.CloudbrainKeyDuration) | |||
| if !isOk { | |||
| log.Error("lock processed failed:%v", err, ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err"))) | |||
| return | |||
| } | |||
| defer lock.UnLock() | |||
| count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainTwo, string(models.JobTypeInference)) | |||
| if err != nil { | |||
| log.Error("GetCloudbrainInferenceJobCountByUserID failed:%v", err, ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error")) | |||
| return | |||
| } else { | |||
| if count >= 1 { | |||
| log.Error("the user already has running or waiting inference task", ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("you have already a running or waiting inference task, can not create more")) | |||
| return | |||
| } | |||
| } | |||
| if err := paramCheckCreateInferenceJob(option); err != nil { | |||
| log.Error("paramCheckCreateInferenceJob failed:(%v)", err) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error())) | |||
| return | |||
| } | |||
| bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName) | |||
| if err != nil || !bootFileExist { | |||
| log.Error("Get bootfile error:", err) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_bootfile_err"))) | |||
| return | |||
| } | |||
| //Determine whether the task name of the task in the project is duplicated | |||
| tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeInference), displayJobName) | |||
| if err == nil { | |||
| if len(tasks) != 0 { | |||
| log.Error("the job name did already exist", ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("the job name did already exist")) | |||
| return | |||
| } | |||
| } else { | |||
| if !models.IsErrJobNotExist(err) { | |||
| log.Error("system error, %v", err, ctx.Data["MsgID"]) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error")) | |||
| return | |||
| } | |||
| } | |||
| spec, err := resource.GetAndCheckSpec(ctx.User.ID, option.SpecId, models.FindSpecsOptions{ | |||
| JobType: models.JobTypeInference, | |||
| ComputeResource: models.NPU, | |||
| Cluster: models.OpenICluster, | |||
| AiCenterCode: models.AICenterOfCloudBrainTwo}) | |||
| if err != nil || spec == nil { | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Resource specification not available")) | |||
| return | |||
| } | |||
| if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) { | |||
| log.Error("point balance is not enough,userId=%d specId=%d ", ctx.User.ID, spec.ID) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance"))) | |||
| return | |||
| } | |||
| //todo: del the codeLocalPath | |||
| _, err = ioutil.ReadDir(codeLocalPath) | |||
| if err == nil { | |||
| os.RemoveAll(codeLocalPath) | |||
| } | |||
| gitRepo, _ := git.OpenRepository(repo.RepoPath()) | |||
| commitID, _ := gitRepo.GetBranchCommitID(branchName) | |||
| if err := downloadCode(repo, codeLocalPath, branchName); err != nil { | |||
| log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed"))) | |||
| return | |||
| } | |||
| //todo: upload code (send to file_server todo this work?) | |||
| if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.ResultPath + VersionOutputPath + "/"); err != nil { | |||
| log.Error("Failed to obsMkdir_result: %s (%v)", repo.FullName(), err) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Failed to obsMkdir_result")) | |||
| return | |||
| } | |||
| if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.LogPath + VersionOutputPath + "/"); err != nil { | |||
| log.Error("Failed to obsMkdir_log: %s (%v)", repo.FullName(), err) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Failed to obsMkdir_log")) | |||
| return | |||
| } | |||
| if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil { | |||
| log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed"))) | |||
| return | |||
| } | |||
| var parameters models.Parameters | |||
| param := make([]models.Parameter, 0) | |||
| param = append(param, models.Parameter{ | |||
| Label: modelarts.ResultUrl, | |||
| Value: "s3:/" + resultObsPath, | |||
| }, models.Parameter{ | |||
| Label: modelarts.CkptUrl, | |||
| Value: "s3:/" + ckptUrl, | |||
| }) | |||
| datasUrlList, dataUrl, datasetNames, isMultiDataset, err := getDatasUrlListByUUIDS(uuid) | |||
| if err != nil { | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error())) | |||
| return | |||
| } | |||
| dataPath := dataUrl | |||
| jsondatas, err := json.Marshal(datasUrlList) | |||
| if err != nil { | |||
| log.Error("Failed to Marshal: %v", err) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("json error:"+err.Error())) | |||
| return | |||
| } | |||
| if isMultiDataset { | |||
| param = append(param, models.Parameter{ | |||
| Label: modelarts.MultiDataUrl, | |||
| Value: string(jsondatas), | |||
| }) | |||
| } | |||
| existDeviceTarget := false | |||
| if len(params) != 0 { | |||
| err := json.Unmarshal([]byte(params), ¶meters) | |||
| if err != nil { | |||
| log.Error("Failed to Unmarshal params: %s (%v)", params, err) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("运行参数错误")) | |||
| return | |||
| } | |||
| for _, parameter := range parameters.Parameter { | |||
| if parameter.Label == modelarts.DeviceTarget { | |||
| existDeviceTarget = true | |||
| } | |||
| if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl { | |||
| param = append(param, models.Parameter{ | |||
| Label: parameter.Label, | |||
| Value: parameter.Value, | |||
| }) | |||
| } | |||
| } | |||
| } | |||
| if !existDeviceTarget { | |||
| param = append(param, models.Parameter{ | |||
| Label: modelarts.DeviceTarget, | |||
| Value: modelarts.Ascend, | |||
| }) | |||
| } | |||
| req := &modelarts.GenerateInferenceJobReq{ | |||
| JobName: jobName, | |||
| DisplayJobName: displayJobName, | |||
| DataUrl: dataPath, | |||
| Description: description, | |||
| CodeObsPath: codeObsPath, | |||
| BootFileUrl: codeObsPath + bootFile, | |||
| BootFile: bootFile, | |||
| TrainUrl: trainUrl, | |||
| WorkServerNumber: workServerNumber, | |||
| EngineID: int64(engineID), | |||
| LogUrl: logObsPath, | |||
| PoolID: getPoolId(), | |||
| Uuid: uuid, | |||
| Parameters: param, //modelarts train parameters | |||
| CommitID: commitID, | |||
| BranchName: branchName, | |||
| Params: option.Params, | |||
| EngineName: EngineName, | |||
| LabelName: LabelName, | |||
| IsLatestVersion: isLatestVersion, | |||
| VersionCount: VersionCount, | |||
| TotalVersionCount: modelarts.TotalVersionCount, | |||
| ModelName: modelName, | |||
| ModelVersion: modelVersion, | |||
| CkptName: ckptName, | |||
| ResultUrl: resultObsPath, | |||
| Spec: spec, | |||
| DatasetName: datasetNames, | |||
| JobType: string(models.JobTypeInference), | |||
| } | |||
| jobId, err := modelarts.GenerateInferenceJob(ctx, req) | |||
| if err != nil { | |||
| log.Error("GenerateTrainJob failed:%v", err.Error()) | |||
| ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error())) | |||
| return | |||
| } | |||
| ctx.JSON(http.StatusOK, models.BaseMessageApi{Code: 0, Message: jobId}) | |||
| } | |||
| func getDatasUrlListByUUIDS(uuidStr string) ([]models.Datasurl, string, string, bool, error) { | |||
| var isMultiDataset bool | |||
| var dataUrl string | |||
| var datasetNames string | |||
| var datasUrlList []models.Datasurl | |||
| uuids := strings.Split(uuidStr, ";") | |||
| if len(uuids) > setting.MaxDatasetNum { | |||
| log.Error("the dataset count(%d) exceed the limit", len(uuids)) | |||
| return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset count exceed the limit") | |||
| } | |||
| datasetInfos := make(map[string]models.DatasetInfo) | |||
| attachs, err := models.GetAttachmentsByUUIDs(uuids) | |||
| if err != nil || len(attachs) != len(uuids) { | |||
| log.Error("GetAttachmentsByUUIDs failed: %v", err) | |||
| return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("GetAttachmentsByUUIDs failed") | |||
| } | |||
| for i, tmpUuid := range uuids { | |||
| var attach *models.Attachment | |||
| for _, tmpAttach := range attachs { | |||
| if tmpAttach.UUID == tmpUuid { | |||
| attach = tmpAttach | |||
| break | |||
| } | |||
| } | |||
| if attach == nil { | |||
| log.Error("GetAttachmentsByUUIDs failed: %v", err) | |||
| return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("GetAttachmentsByUUIDs failed") | |||
| } | |||
| fileName := strings.TrimSuffix(strings.TrimSuffix(strings.TrimSuffix(attach.Name, ".zip"), ".tar.gz"), ".tgz") | |||
| for _, datasetInfo := range datasetInfos { | |||
| if fileName == datasetInfo.Name { | |||
| log.Error("the dataset name is same: %v", attach.Name) | |||
| return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset name is same") | |||
| } | |||
| } | |||
| if len(attachs) <= 1 { | |||
| dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/" | |||
| isMultiDataset = false | |||
| } else { | |||
| dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attachs[0].UUID[0:1], attachs[0].UUID[1:2]) + "/" + attachs[0].UUID + attachs[0].UUID + "/" | |||
| datasetUrl := "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/" | |||
| datasUrlList = append(datasUrlList, models.Datasurl{ | |||
| DatasetUrl: datasetUrl, | |||
| DatasetName: fileName, | |||
| }) | |||
| isMultiDataset = true | |||
| } | |||
| if i == 0 { | |||
| datasetNames = attach.Name | |||
| } else { | |||
| datasetNames += ";" + attach.Name | |||
| } | |||
| } | |||
| return datasUrlList, dataUrl, datasetNames, isMultiDataset, nil | |||
| } | |||
| func checkInferenceJobMultiNode(userId int64, serverNum int) string { | |||
| if serverNum == 1 { | |||
| return "" | |||
| } | |||
| return "repo.modelarts.no_node_right" | |||
| } | |||
| func paramCheckCreateInferenceJob(option api.CreateTrainJobOption) error { | |||
| if !strings.HasSuffix(strings.TrimSpace(option.BootFile), ".py") { | |||
| log.Error("the boot file(%s) must be a python file", strings.TrimSpace(option.BootFile)) | |||
| return errors.New("启动文件必须是python文件") | |||
| } | |||
| if option.ModelName == "" { | |||
| log.Error("the ModelName(%d) must not be nil", option.ModelName) | |||
| return errors.New("模型名称不能为空") | |||
| } | |||
| if option.ModelVersion == "" { | |||
| log.Error("the ModelVersion(%d) must not be nil", option.ModelVersion) | |||
| return errors.New("模型版本不能为空") | |||
| } | |||
| if option.CkptName == "" { | |||
| log.Error("the CkptName(%d) must not be nil", option.CkptName) | |||
| return errors.New("权重文件不能为空") | |||
| } | |||
| if option.BranchName == "" { | |||
| log.Error("the Branch(%d) must not be nil", option.BranchName) | |||
| return errors.New("分支名不能为空") | |||
| } | |||
| if utf8.RuneCountInString(option.Description) > 255 { | |||
| log.Error("the Description length(%d) must not more than 255", option.Description) | |||
| return errors.New("描述字符不能超过255个字符") | |||
| } | |||
| return nil | |||
| } | |||
| func loadCodeAndMakeModelPath(repo *models.Repository, codePath string, branchName string, jobName string, resultPath string) string { | |||
| err := downloadCode(repo, codePath, branchName) | |||
| if err != nil { | |||
| return "cloudbrain.load_code_failed" | |||
| } | |||
| err = uploadCodeToMinio(codePath+"/", jobName, cloudbrain.CodeMountPath+"/") | |||
| if err != nil { | |||
| return "cloudbrain.load_code_failed" | |||
| } | |||
| modelPath := setting.JobPath + jobName + resultPath + "/" | |||
| err = mkModelPath(modelPath) | |||
| if err != nil { | |||
| return "cloudbrain.load_code_failed" | |||
| } | |||
| err = uploadCodeToMinio(modelPath, jobName, resultPath+"/") | |||
| if err != nil { | |||
| return "cloudbrain.load_code_failed" | |||
| } | |||
| return "" | |||
| } | |||
| func downloadCode(repo *models.Repository, codePath, branchName string) error { | |||
| //add "file:///" prefix to make the depth valid | |||
| if err := git.Clone(CLONE_FILE_PREFIX+repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName, Depth: 1}); err != nil { | |||
| log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err) | |||
| return err | |||
| } | |||
| configFile, err := os.OpenFile(codePath+"/.git/config", os.O_RDWR, 0666) | |||
| if err != nil { | |||
| log.Error("open file(%s) failed:%v", codePath+"/,git/config", err) | |||
| return err | |||
| } | |||
| defer configFile.Close() | |||
| pos := int64(0) | |||
| reader := bufio.NewReader(configFile) | |||
| for { | |||
| line, err := reader.ReadString('\n') | |||
| if err != nil { | |||
| if err == io.EOF { | |||
| log.Error("not find the remote-url") | |||
| return nil | |||
| } else { | |||
| log.Error("read error: %v", err) | |||
| return err | |||
| } | |||
| } | |||
| if strings.Contains(line, "url") && strings.Contains(line, ".git") { | |||
| originUrl := "\turl = " + repo.CloneLink().HTTPS + "\n" | |||
| if len(line) > len(originUrl) { | |||
| originUrl += strings.Repeat(" ", len(line)-len(originUrl)) | |||
| } | |||
| bytes := []byte(originUrl) | |||
| _, err := configFile.WriteAt(bytes, pos) | |||
| if err != nil { | |||
| log.Error("WriteAt failed:%v", err) | |||
| return err | |||
| } | |||
| break | |||
| } | |||
| pos += int64(len(line)) | |||
| } | |||
| return nil | |||
| } | |||
| func getInferenceJobCommand(option api.CreateTrainJobOption) (string, error) { | |||
| var command string | |||
| bootFile := strings.TrimSpace(option.BootFile) | |||
| params := option.Params | |||
| if !strings.HasSuffix(bootFile, ".py") { | |||
| log.Error("bootFile(%s) format error", bootFile) | |||
| return command, errors.New("bootFile format error") | |||
| } | |||
| var parameters models.Parameters | |||
| var param string | |||
| if len(params) != 0 { | |||
| err := json.Unmarshal([]byte(params), ¶meters) | |||
| if err != nil { | |||
| log.Error("Failed to Unmarshal params: %s (%v)", params, err) | |||
| return command, err | |||
| } | |||
| for _, parameter := range parameters.Parameter { | |||
| param += " --" + parameter.Label + "=" + parameter.Value | |||
| } | |||
| } | |||
| param += " --modelname" + "=" + option.CkptName | |||
| command += "python /code/" + bootFile + param + " > " + cloudbrain.ResultPath + "/" + option.DisplayJobName + "-" + cloudbrain.LogFile | |||
| return command, nil | |||
| } | |||
| @@ -0,0 +1,83 @@ | |||
| package cloudbrainTask | |||
| import ( | |||
| "net/http" | |||
| "code.gitea.io/gitea/models" | |||
| "code.gitea.io/gitea/modules/cloudbrain" | |||
| "code.gitea.io/gitea/modules/httplib" | |||
| "code.gitea.io/gitea/modules/log" | |||
| "code.gitea.io/gitea/modules/notification" | |||
| "code.gitea.io/gitea/modules/setting" | |||
| ) | |||
| var noteBookOKMap = make(map[int64]int, 20) | |||
| //if a task notebook url can get two times, the notebook can browser. | |||
| const successfulCount = 3 | |||
| func SyncCloudBrainOneStatus(task *models.Cloudbrain) (*models.Cloudbrain, error) { | |||
| jobResult, err := cloudbrain.GetJob(task.JobID) | |||
| if err != nil { | |||
| log.Error("GetJob failed:", err) | |||
| return task, err | |||
| } | |||
| result, err := models.ConvertToJobResultPayload(jobResult.Payload) | |||
| if err != nil { | |||
| log.Error("ConvertToJobResultPayload failed:", err) | |||
| return task, err | |||
| } | |||
| oldStatus := task.Status | |||
| if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) { | |||
| taskRoles := result.TaskRoles | |||
| taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) | |||
| task.ContainerIp = taskRes.TaskStatuses[0].ContainerIP | |||
| task.ContainerID = taskRes.TaskStatuses[0].ContainerID | |||
| } | |||
| if (result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobRunning)) || | |||
| task.Status == string(models.JobRunning) || (result.JobStatus.State == string(models.JobRunning) && isNoteBookReady(task)) { | |||
| models.ParseAndSetDurationFromCloudBrainOne(result, task) | |||
| task.Status = result.JobStatus.State | |||
| if oldStatus != task.Status { | |||
| notification.NotifyChangeCloudbrainStatus(task, oldStatus) | |||
| } | |||
| err = models.UpdateJob(task) | |||
| if err != nil { | |||
| log.Error("UpdateJob failed:", err) | |||
| return task, err | |||
| } | |||
| } | |||
| return task, nil | |||
| } | |||
| func isNoteBookReady(task *models.Cloudbrain) bool { | |||
| if task.JobType != string(models.JobTypeDebug) { | |||
| return true | |||
| } | |||
| noteBookUrl := setting.DebugServerHost + "jpylab_" + task.JobID + "_" + task.SubTaskName | |||
| r := httplib.Get(noteBookUrl) | |||
| res, err := r.Response() | |||
| if err != nil { | |||
| return false | |||
| } | |||
| if res.StatusCode == http.StatusOK { | |||
| count := noteBookOKMap[task.ID] | |||
| if count < successfulCount-1 { | |||
| noteBookOKMap[task.ID] = count + 1 | |||
| return false | |||
| } else { | |||
| delete(noteBookOKMap, task.ID) | |||
| return true | |||
| } | |||
| } | |||
| return false | |||
| } | |||
| @@ -16,7 +16,7 @@ func AddResourceQueue(req models.ResourceQueueReq) error { | |||
| } | |||
| func UpdateResourceQueue(queueId int64, req models.ResourceQueueReq) error { | |||
| if _, err := models.UpdateResourceQueueById(queueId, models.ResourceQueue{ | |||
| if _, err := models.UpdateResourceCardsTotalNum(queueId, models.ResourceQueue{ | |||
| CardsTotalNum: req.CardsTotalNum, | |||
| Remark: req.Remark, | |||
| }); err != nil { | |||
| @@ -1,20 +1,23 @@ | |||
| package resource | |||
| import ( | |||
| "encoding/json" | |||
| "errors" | |||
| "fmt" | |||
| "strconv" | |||
| "strings" | |||
| "time" | |||
| "code.gitea.io/gitea/models" | |||
| "code.gitea.io/gitea/modules/cloudbrain" | |||
| "code.gitea.io/gitea/modules/convert" | |||
| "code.gitea.io/gitea/modules/grampus" | |||
| "code.gitea.io/gitea/modules/log" | |||
| "code.gitea.io/gitea/modules/modelarts" | |||
| "code.gitea.io/gitea/modules/setting" | |||
| api "code.gitea.io/gitea/modules/structs" | |||
| "code.gitea.io/gitea/routers/response" | |||
| "code.gitea.io/gitea/services/admin/operate_log" | |||
| "encoding/json" | |||
| "errors" | |||
| "fmt" | |||
| "strconv" | |||
| "strings" | |||
| "time" | |||
| ) | |||
| func AddResourceSpecification(doerId int64, req models.ResourceSpecificationReq) error { | |||
| @@ -127,10 +130,48 @@ func GetResourceSpecificationList(opts models.SearchResourceSpecificationOptions | |||
| if err != nil { | |||
| return nil, err | |||
| } | |||
| return models.NewResourceSpecAndQueueListRes(n, r), nil | |||
| } | |||
| //GetAllDistinctResourceSpecification returns specification and queue after distinct | |||
| //totalSize is always 0 here | |||
| func GetAllDistinctResourceSpecification(opts models.SearchResourceSpecificationOptions) (*models.ResourceSpecAndQueueListRes, error) { | |||
| opts.Page = 0 | |||
| opts.PageSize = 1000 | |||
| _, r, err := models.SearchResourceSpecification(opts) | |||
| if err != nil { | |||
| return nil, err | |||
| } | |||
| nr := distinctResourceSpecAndQueue(r) | |||
| return models.NewResourceSpecAndQueueListRes(0, nr), nil | |||
| } | |||
| func distinctResourceSpecAndQueue(r []models.ResourceSpecAndQueue) []models.ResourceSpecAndQueue { | |||
| specs := make([]models.ResourceSpecAndQueue, 0, len(r)) | |||
| sourceSpecIdMap := make(map[string]models.ResourceSpecAndQueue, 0) | |||
| for i := 0; i < len(r); i++ { | |||
| spec := r[i] | |||
| if spec.SourceSpecId == "" { | |||
| specs = append(specs, spec) | |||
| continue | |||
| } | |||
| if _, has := sourceSpecIdMap[spec.SourceSpecId]; has { | |||
| //prefer to use on-shelf spec | |||
| if sourceSpecIdMap[spec.SourceSpecId].Status != spec.Status && spec.Status == models.SpecOnShelf { | |||
| for k, v := range specs { | |||
| if v.ResourceSpecification.ID == sourceSpecIdMap[spec.SourceSpecId].ResourceSpecification.ID { | |||
| specs[k] = spec | |||
| } | |||
| } | |||
| } | |||
| continue | |||
| } | |||
| specs = append(specs, spec) | |||
| sourceSpecIdMap[spec.SourceSpecId] = spec | |||
| } | |||
| return specs | |||
| } | |||
| func GetResourceSpecificationScenes(specId int64) ([]models.ResourceSceneBriefRes, error) { | |||
| r, err := models.GetSpecScenes(specId) | |||
| if err != nil { | |||
| @@ -197,6 +238,7 @@ func AddSpecOperateLog(doerId int64, operateType string, newValue, oldValue *mod | |||
| } | |||
| func FindAvailableSpecs(userId int64, opts models.FindSpecsOptions) ([]*models.Specification, error) { | |||
| opts.SpecStatus = models.SpecOnShelf | |||
| r, err := models.FindSpecs(opts) | |||
| if err != nil { | |||
| log.Error("FindAvailableSpecs error.%v", err) | |||
| @@ -210,6 +252,18 @@ func FindAvailableSpecs(userId int64, opts models.FindSpecsOptions) ([]*models.S | |||
| return specs, err | |||
| } | |||
| func FindAvailableSpecs4Show(userId int64, opts models.FindSpecsOptions) ([]*api.SpecificationShow, error) { | |||
| specs, err := FindAvailableSpecs(userId, opts) | |||
| if err != nil { | |||
| return nil, err | |||
| } | |||
| result := make([]*api.SpecificationShow, len(specs)) | |||
| for i, v := range specs { | |||
| result[i] = convert.ToSpecification(v) | |||
| } | |||
| return result, nil | |||
| } | |||
| func filterExclusiveSpecs(r []*models.Specification, userId int64) []*models.Specification { | |||
| specs := make([]*models.Specification, 0, len(r)) | |||
| specMap := make(map[int64]string, 0) | |||
| @@ -38,7 +38,7 @@ | |||
| {{.i18n.Tr "repo.model_manager"}} | |||
| <i class="dropdown icon"></i> | |||
| <div class="menu"> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a> | |||
| </div> | |||
| </div> | |||
| <div class="ui simple dropdown item" id='dropdown_explore'> | |||
| @@ -78,7 +78,7 @@ | |||
| {{.i18n.Tr "repo.model_manager"}} | |||
| <i class="dropdown icon"></i> | |||
| <div class="menu"> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a> | |||
| </div> | |||
| </div> | |||
| @@ -38,7 +38,7 @@ | |||
| {{.i18n.Tr "repo.model_manager"}} | |||
| <i class="dropdown icon"></i> | |||
| <div class="menu"> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a> | |||
| </div> | |||
| </div> | |||
| <div class="ui dropdown item" id='dropdown_explore'> | |||
| @@ -77,7 +77,7 @@ | |||
| {{.i18n.Tr "repo.model_manager"}} | |||
| <i class="dropdown icon"></i> | |||
| <div class="menu"> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a> | |||
| </div> | |||
| </div> | |||
| <div class="ui dropdown item" id='dropdown_PageHome'> | |||
| @@ -30,7 +30,7 @@ | |||
| {{.i18n.Tr "repo.model_manager"}} | |||
| <i class="dropdown icon"></i> | |||
| <div class="menu"> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a> | |||
| </div> | |||
| </div> | |||
| <div class="ui dropdown item" id='dropdown_explore'> | |||
| @@ -70,7 +70,7 @@ | |||
| {{.i18n.Tr "repo.model_manager"}} | |||
| <i class="dropdown icon"></i> | |||
| <div class="menu"> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a> | |||
| </div> | |||
| </div> | |||
| <div class="ui dropdown item" id='dropdown_PageHome'> | |||
| @@ -40,7 +40,7 @@ | |||
| {{.i18n.Tr "repo.model_manager"}} | |||
| <i class="dropdown icon"></i> | |||
| <div class="menu"> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a> | |||
| </div> | |||
| </div> | |||
| <div class="ui dropdown item" id='dropdown_explore'> | |||
| @@ -80,7 +80,7 @@ | |||
| {{.i18n.Tr "repo.model_manager"}} | |||
| <i class="dropdown icon"></i> | |||
| <div class="menu"> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">模型体验</a> | |||
| <a class="item" href="{{AppSubUrl}}/extension/tuomin/upload">{{.i18n.Tr "repo.model_experience"}}</a> | |||
| </div> | |||
| </div> | |||
| <div class="ui dropdown item" id='dropdown_PageHome'> | |||
| @@ -29,7 +29,7 @@ | |||
| <div class="ui message message-max{{.VersionName}}" style="display: none;"> | |||
| <div id="header"></div> | |||
| </div> | |||
| <div class="log-scroll-max" id="log-max{{.VersionName}}" data-version="{{.VersionName}}" style="overflow: auto;max-height: 100%;"> | |||
| <div class="log-scroll-max" id="log-max{{.VersionName}}" data-version="{{.VersionName}}" style="overflow: auto;max-height: 100%;height: 100%"> | |||
| <div class="ui inverted active dimmer"> | |||
| <div class="ui loader"></div> | |||
| </div> | |||
| @@ -284,10 +284,7 @@ | |||
| <div class="content-pad"> | |||
| <div class="ui pointing secondary menu" style="border-bottom: 1px solid rgba(34,36,38,.15);"> | |||
| <a class="active item" | |||
| data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a> | |||
| <a class="item" data-tab="second{{$k}}" | |||
| onclick="javascript:parseInfo()">{{$.i18n.Tr "repo.cloudbrain.runinfo"}}</a> | |||
| data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a> | |||
| <a class="item log_bottom" data-tab="third{{$k}}" | |||
| data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a> | |||
| @@ -504,25 +501,6 @@ | |||
| </div> | |||
| </div> | |||
| <div class="ui tab" data-tab="second{{$k}}"> | |||
| <div> | |||
| <div class="ui message message{{.VersionName}}" style="display: none;"> | |||
| <div id="header"></div> | |||
| </div> | |||
| <div class="ui attached log" id="log_state{{.VersionName}}" | |||
| style="height: 390px !important; overflow: auto;"> | |||
| <input type="hidden" id="json_value" value="{{$.result.JobStatus.AppExitDiagnostics}}"> | |||
| <input type="hidden" id="ExitDiagnostics" value="{{$.ExitDiagnostics}}"> | |||
| <span id="info_display" class="info_text"> | |||
| </span> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| <div class="ui tab" data-tab="third{{$k}}"> | |||
| <div class="file-info"> | |||
| <a id="{{.VersionName}}-log-down" | |||
| @@ -922,66 +900,7 @@ | |||
| $('.secondary.menu .item').tab(); | |||
| }); | |||
| let userName | |||
| let repoPath | |||
| let jobID | |||
| let downlaodFlag = {{ $.canDownload }} | |||
| let taskID = {{ $.task.ID }} | |||
| let realJobName = {{ $.task.JobName }} | |||
| $(document).ready(function () { | |||
| let url = window.location.href; | |||
| let urlArr = url.split('/') | |||
| userName = urlArr.slice(-5)[0] | |||
| repoPath = urlArr.slice(-4)[0] | |||
| jobID = urlArr.slice(-1)[0] | |||
| }) | |||
| function stopBubbling(e) { | |||
| e = window.event || e; | |||
| if (e.stopPropagation) { | |||
| e.stopPropagation(); //阻止事件 冒泡传播 | |||
| } else { | |||
| e.cancelBubble = true; //ie兼容 | |||
| } | |||
| } | |||
| function loadLog(version_name) { | |||
| document.getElementById("mask").style.display = "block" | |||
| let startLine = $('input[name=end_line]').val(); | |||
| if(startLine==""){ | |||
| startLine=0; | |||
| } | |||
| let endLine = $('input[name=end_line]').val(); | |||
| if(endLine==""){ | |||
| endLine = 50; | |||
| } | |||
| $.get(`/${userName}/${repoPath}/cloudbrain/train-job/${jobID}/get_log?endLine=${endLine}&startLine=${startLine}`, (data) => { | |||
| $('input[name=end_line]').val(data.EndLine) | |||
| $('input[name=start_line]').val(data.StartLine) | |||
| $(`#log_file${version_name}`).text(data.Content) | |||
| document.getElementById("mask").style.display = "none" | |||
| }).fail(function (err) { | |||
| console.log(err); | |||
| document.getElementById("mask").style.display = "none" | |||
| }); | |||
| } | |||
| function refreshStatus(version_name) { | |||
| $.get(`/api/v1/repos/${userName}/${repoPath}/cloudbrain/${taskID}?version_name=${versionname}`, (data) => { | |||
| // header status and duration | |||
| //$(`#${version_name}-duration-span`).text(data.JobDuration) | |||
| $(`#${version_name}-status-span span`).text(data.JobStatus) | |||
| $(`#${version_name}-status-span i`).attr("class", data.JobStatus) | |||
| // detail status and duration | |||
| //$('#'+version_name+'-duration').text(data.JobDuration) | |||
| $('#' + version_name + '-status').text(data.JobStatus) | |||
| loadLog(version_name) | |||
| }).fail(function (err) { | |||
| console.log(err); | |||
| }); | |||
| stopBubbling(arguments.callee.caller.arguments[0]) | |||
| } | |||
| function parseInfo() { | |||
| let jsonValue = document.getElementById("json_value").value; | |||
| @@ -238,11 +238,8 @@ | |||
| <span> | |||
| <div style="float: right;"> | |||
| {{$.CsrfTokenHtml}} | |||
| </div> | |||
| <div class="ac-display-inblock title_text acc-margin-bottom"> | |||
| <span class="cti-mgRight-sm">{{TimeSinceUnix1 .CreatedUnix}}</span> | |||
| <span class="cti-mgRight-sm"> | |||
| {{$.i18n.Tr "repo.modelarts.current_version"}}:{{.VersionName}}</span> | |||
| @@ -260,7 +257,6 @@ | |||
| <span class="refresh-status" data-tooltip="刷新" style="cursor: pointer;" data-inverted="" data-version="{{.VersionName}}"> | |||
| <i class="redo icon redo-color"></i> | |||
| </span> | |||
| </div> | |||
| <div style="float: right;"> | |||
| {{if and ($.canDownload) (ne .Status "WAITING") ($.Permission.CanWrite $.UnitTypeModelManage) }} | |||
| @@ -269,7 +265,6 @@ | |||
| {{else}} | |||
| <a class="ti-action-menu-item disabled" id="{{.VersionName}}-create-model">{{$.i18n.Tr "repo.modelarts.create_model"}}</a> | |||
| {{end}} | |||
| </div> | |||
| </span> | |||
| </span> | |||
| @@ -282,6 +277,9 @@ | |||
| <a class="active item" data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a> | |||
| <a class="item log_bottom" data-tab="second{{$k}}" data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a> | |||
| {{ if eq $.Spec.ComputeResource "NPU"}} | |||
| <a class="item metric_chart" data-tab="four{{$k}}" data-version="{{.VersionName}}" data-path="{{$.RepoRelPath}}/grampus/train-job/{{.JobID}}/metrics">{{$.i18n.Tr "cloudbrain.resource_use"}}</a> | |||
| {{end}} | |||
| <a class="item load-model-file" data-tab="third{{$k}}" data-download-flag="{{$.canDownload}}" data-path="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/model_list" data-version="{{.VersionName}}" data-parents="" data-filename="" data-init="init" >{{$.i18n.Tr "repo.model_download"}}</a> | |||
| </div> | |||
| <div class="ui tab active" data-tab="first{{$k}}"> | |||
| @@ -564,6 +562,14 @@ | |||
| </div> | |||
| </div> | |||
| <div class="ui tab" data-tab="four{{$k}}" style="position: relative;"> | |||
| <i class="ri-refresh-line metric_chart" | |||
| style="position: absolute;right: 25%;color:#3291f8;z-index:99;cursor: pointer;" | |||
| data-version="{{.VersionName}}"></i> | |||
| <div id="metric-{{.VersionName}}" style="height: 260px;width: 870px;"> | |||
| </div> | |||
| </div> | |||
| <div class="ui tab" data-tab="third{{$k}}"> | |||
| <input type="hidden" name="model{{.VersionName}}" value="-1"> | |||
| @@ -321,7 +321,7 @@ | |||
| data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a> | |||
| <a class="item log_bottom" data-tab="second{{$k}}" | |||
| data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a> | |||
| <a class="item metric_chart" data-tab="four{{$k}}" data-version="{{.VersionName}}">{{$.i18n.Tr "cloudbrain.resource_use"}}</a> | |||
| <a class="item metric_chart" data-tab="four{{$k}}" data-version="{{.VersionName}}" data-path="{{$.RepoRelPath}}/modelarts/train-job/{{.JobID}}/metric_statistics?version_name={{.VersionName}}&statistic_type=each&metrics=">{{$.i18n.Tr "cloudbrain.resource_use"}}</a> | |||
| <a class="item load-model-file" data-tab="third{{$k}}" data-download-flag="{{$.canDownload}}" data-path="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/model_list" data-version="{{.VersionName}}" data-parents="" data-filename="" data-init="init" >{{$.i18n.Tr "repo.model_download"}}</a> | |||
| </div> | |||
| <div class="ui tab active" data-tab="first{{$k}}"> | |||
| @@ -570,7 +570,8 @@ | |||
| html +="<option name=\"MindSpore\" " + getSelected(2,value) + " value=\"2\">MindSpore</option>"; | |||
| html +="<option name=\"PaddlePaddle\" " + getSelected(4,value) + " value=\"4\">PaddlePaddle</option>"; | |||
| html +="<option name=\"MXNet\" " + getSelected(6,value) + " value=\"6\">MXNet</option>"; | |||
| $('#SrcEngine').html(html); | |||
| $('#srcEngine').html(html); | |||
| srcEngineChanged(); | |||
| } | |||
| function getSelected(engineOption, modelName){ | |||
| @@ -861,8 +861,10 @@ | |||
| $('td.ti-text-form-content.spec div').text(specStr); | |||
| SPEC && $('td.ti-text-form-content.resorce_type div').text(getListValueWithKey(ACC_CARD_TYPE, SPEC.AccCardType)); | |||
| } | |||
| var oLogHref = $('#-log-down').attr('href'); | |||
| var repoPath = {{$.RepoRelPath}}; | |||
| var oLogHref = `/api/v1/repos/${repoPath}/cloudbrain`; | |||
| $('#-log-down').attr('href', oLogHref + `/${res.ID}/download_log_file`); | |||
| $('.full-log-dialog').attr('data-href', oLogHref + `/${res.ID}/download_log_file`); | |||
| if (res.ResultJson) { | |||
| try { | |||
| resultData = JSON.parse(res.ResultJson); | |||
| @@ -1,4 +1,5 @@ | |||
| <div class="{{TabSizeClass .Editorconfig .FileName}} non-diff-file-content"> | |||
| <div class="{{TabSizeClass .Editorconfig .FileName}} non-diff-file-content gallery"> | |||
| <h4 class="file-header ui top attached header"> | |||
| <div class="file-header-left"> | |||
| {{if .ReadmeInList}} | |||
| @@ -12,7 +12,7 @@ | |||
| {{.i18n.Tr "home.switch_dashboard_context"}} | |||
| </div> | |||
| <div class="scrolling menu items"> | |||
| <a class="{{if eq .ContextUser.ID .SignedUser.ID}}active selected{{end}} item" href="{{AppSubUrl}}/{{if .PageIsIssues}}issues{{else if .PageIsPulls}}pulls{{else if .PageIsMilestonesDashboard}}milestones{{end}}"> | |||
| <a class="{{if eq .ContextUser.ID .SignedUser.ID}}active selected{{end}} item" href="{{AppSubUrl}}/{{if .PageIsIssues}}issues{{else if .PageIsPulls}}pulls{{else if .PageIsMilestonesDashboard}}milestones{{else}}dashboard{{end}}"> | |||
| <img class="ui avatar image" src="{{.SignedUser.RelAvatarLink}}" width="28" height="28"> | |||
| {{.SignedUser.Name}} | |||
| </a> | |||
| @@ -489,7 +489,7 @@ export default { | |||
| let trainTaskInfo; | |||
| this.tableData = res.data.data; | |||
| for (let i = 0; i < this.tableData.length; i++) { | |||
| trainTaskInfo = JSON.parse(this.tableData[i].trainTaskInfo || '{}'); | |||
| trainTaskInfo = JSON.parse(this.tableData[i].trainTaskInfo); | |||
| this.tableData[i].cName = this.tableData[i].name; | |||
| this.tableData[i].rowKey = this.tableData[i].id + Math.random(); | |||
| this.tableData[i].engineName = this.getEngineName( | |||
| @@ -497,8 +497,7 @@ export default { | |||
| ); | |||
| // this.tableData[i].computeResource = trainTaskInfo.ComputeResource; | |||
| this.tableData[i].computeResource = this.tableData[i].type == '0' ? 'CPU/GPU' : 'NPU'; | |||
| this.tableData[i].hasChildren = | |||
| res.data.data[i].versionCount === 1 ? false : true; | |||
| this.tableData[i].hasChildren = res.data.data[i].versionCount === 1 ? false : true; | |||
| if (this.tableData[i].status !== 1) { | |||
| countStatus++; | |||
| } | |||
| @@ -120,13 +120,13 @@ | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="createdUnix" | |||
| prop="updatedUnix" | |||
| label="创建时间" | |||
| align="center" | |||
| min-width="14%" | |||
| > | |||
| <template slot-scope="scope"> | |||
| {{ scope.row.createdUnix | transformTimestamp }} | |||
| {{ scope.row.updatedUnix | transformTimestamp }} | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column align="center" min-width="21%" label="操作"> | |||
| @@ -369,13 +369,13 @@ | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="createdUnix" | |||
| prop="updatedUnix" | |||
| label="创建时间" | |||
| align="center" | |||
| min-width="14%" | |||
| > | |||
| <template slot-scope="scope"> | |||
| {{ scope.row.createdUnix | transformTimestamp }} | |||
| {{ scope.row.updatedUnix | transformTimestamp }} | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column align="center" min-width="21%" label="操作"> | |||
| @@ -595,13 +595,13 @@ | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="createdUnix" | |||
| prop="updatedUnix" | |||
| label="创建时间" | |||
| align="center" | |||
| min-width="14%" | |||
| > | |||
| <template slot-scope="scope"> | |||
| {{ scope.row.createdUnix | transformTimestamp }} | |||
| {{ scope.row.updatedUnix | transformTimestamp }} | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column align="center" min-width="21%" label="操作"> | |||
| @@ -75,9 +75,9 @@ | |||
| </a> | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column prop="createdUnix" label="创建时间" align="center" min-width="13%"> | |||
| <el-table-column prop="updatedUnix" label="创建时间" align="center" min-width="13%"> | |||
| <template slot-scope="scope"> | |||
| {{scope.row.createdUnix | transformTimestamp}} | |||
| {{scope.row.updatedUnix | transformTimestamp}} | |||
| </template> | |||
| </el-table-column> | |||
| <el-table-column align="center" min-width="23%" label="操作"> | |||
| @@ -55,6 +55,8 @@ import { Message } from "element-ui"; | |||
| import { i18nVue } from "./features/i18nVue.js"; | |||
| import './features/ad.js'; | |||
| import { Fancybox } from "./vendor/fancybox.esm.js"; | |||
| Vue.prototype.$axios = axios; | |||
| Vue.prototype.$Cookies = Cookies; | |||
| @@ -5075,12 +5077,7 @@ function initcreateRepo() { | |||
| initcreateRepo(); | |||
| function initChartsNpu() { | |||
| const url = window.location.href; | |||
| const urlArr = url.split("/"); | |||
| let userName = urlArr.slice(-5)[0]; | |||
| let repoPath = urlArr.slice(-4)[0]; | |||
| let jobID = urlArr.slice(-1)[0]; | |||
| const repoPath = $('.metric_chart').data('path') | |||
| let options = { | |||
| legend: { | |||
| data: [], | |||
| @@ -5131,7 +5128,7 @@ function initChartsNpu() { | |||
| document.getElementById(`metric-${versionName}`) | |||
| ); | |||
| $.get( | |||
| `${window.config.AppSubUrl}/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/metric_statistics?version_name=${versionName}&statistic_type=each&metrics=`, | |||
| `${window.config.AppSubUrl}/api/v1/repos/${repoPath}`, | |||
| (res) => { | |||
| let filterDta = res.MetricsInfo.filter((item) => { | |||
| return ![ | |||
| @@ -5183,3 +5180,31 @@ function initChartsNpu() { | |||
| } | |||
| initChartsNpu(); | |||
| Fancybox.bind('.gallery img', { | |||
| // Do not create a gallery | |||
| groupAttr: null, | |||
| // Do not hide page scrollbars | |||
| hideScrollbar: false, | |||
| // Disable drag to close guesture | |||
| dragToClose: false, | |||
| // Hide close button | |||
| closeButton: false, | |||
| // Disable toolbar | |||
| Toolbar: false, | |||
| // Disable zoom animation; close on click and wheel events | |||
| Image: { | |||
| zoom: false, | |||
| click: "close", | |||
| wheel: "close", | |||
| }, | |||
| // Custom animations | |||
| showClass: "fancybox-zoomIn", | |||
| hideClass: "fancybox-zoomOut", | |||
| }); | |||
| @@ -1,5 +1,6 @@ | |||
| @import "~highlight.js/styles/github.css"; | |||
| @import "./vendor/gitGraph.css"; | |||
| @import "./vendor/fancyapp.less"; | |||
| // @import "~/remixicon/fonts/remixicon.css"; | |||
| @import "_svg"; | |||
| @import "_tribute"; | |||
| @@ -0,0 +1,791 @@ | |||
| .carousel { | |||
| position: relative; | |||
| box-sizing: border-box; | |||
| } | |||
| .carousel *, | |||
| .carousel *:before, | |||
| .carousel *:after { | |||
| box-sizing: inherit; | |||
| } | |||
| .carousel.is-draggable { | |||
| cursor: move; | |||
| cursor: grab; | |||
| } | |||
| .carousel.is-dragging { | |||
| cursor: move; | |||
| cursor: grabbing; | |||
| } | |||
| .carousel__viewport { | |||
| position: relative; | |||
| overflow: hidden; | |||
| max-width: 100%; | |||
| max-height: 100%; | |||
| } | |||
| .carousel__track { | |||
| display: flex; | |||
| } | |||
| .carousel__slide { | |||
| flex: 0 0 auto; | |||
| width: var(--carousel-slide-width, 60%); | |||
| max-width: 100%; | |||
| padding: 1rem; | |||
| position: relative; | |||
| overflow-x: hidden; | |||
| overflow-y: auto; | |||
| overscroll-behavior: contain; | |||
| } | |||
| .has-dots { | |||
| margin-bottom: calc(0.5rem + 22px); | |||
| } | |||
| .carousel__dots { | |||
| margin: 0 auto; | |||
| padding: 0; | |||
| position: absolute; | |||
| top: calc(100% + 0.5rem); | |||
| left: 0; | |||
| right: 0; | |||
| display: flex; | |||
| justify-content: center; | |||
| list-style: none; | |||
| user-select: none; | |||
| } | |||
| .carousel__dots .carousel__dot { | |||
| margin: 0; | |||
| padding: 0; | |||
| display: block; | |||
| position: relative; | |||
| width: 22px; | |||
| height: 22px; | |||
| cursor: pointer; | |||
| } | |||
| .carousel__dots .carousel__dot:after { | |||
| content: ""; | |||
| width: 8px; | |||
| height: 8px; | |||
| border-radius: 50%; | |||
| position: absolute; | |||
| top: 50%; | |||
| left: 50%; | |||
| transform: translate(-50%, -50%); | |||
| background-color: currentColor; | |||
| opacity: 0.25; | |||
| transition: opacity 0.15s ease-in-out; | |||
| } | |||
| .carousel__dots .carousel__dot.is-selected:after { | |||
| opacity: 1; | |||
| } | |||
| .carousel__button { | |||
| width: var(--carousel-button-width, 48px); | |||
| height: var(--carousel-button-height, 48px); | |||
| padding: 0; | |||
| border: 0; | |||
| display: flex; | |||
| justify-content: center; | |||
| align-items: center; | |||
| pointer-events: all; | |||
| cursor: pointer; | |||
| color: var(--carousel-button-color, currentColor); | |||
| background: var(--carousel-button-bg, transparent); | |||
| border-radius: var(--carousel-button-border-radius, 50%); | |||
| box-shadow: var(--carousel-button-shadow, none); | |||
| transition: opacity 0.15s ease; | |||
| } | |||
| .carousel__button.is-prev, | |||
| .carousel__button.is-next { | |||
| position: absolute; | |||
| top: 50%; | |||
| transform: translateY(-50%); | |||
| } | |||
| .carousel__button.is-prev { | |||
| left: 10px; | |||
| } | |||
| .carousel__button.is-next { | |||
| right: 10px; | |||
| } | |||
| .carousel__button[disabled] { | |||
| cursor: default; | |||
| opacity: 0.3; | |||
| } | |||
| .carousel__button svg { | |||
| width: var(--carousel-button-svg-width, 50%); | |||
| height: var(--carousel-button-svg-height, 50%); | |||
| fill: none; | |||
| stroke: currentColor; | |||
| stroke-width: var(--carousel-button-svg-stroke-width, 1.5); | |||
| stroke-linejoin: bevel; | |||
| stroke-linecap: round; | |||
| filter: var(--carousel-button-svg-filter, none); | |||
| pointer-events: none; | |||
| } | |||
| html.with-fancybox { | |||
| scroll-behavior: auto; | |||
| } | |||
| body.compensate-for-scrollbar { | |||
| overflow: hidden !important; | |||
| touch-action: none; | |||
| } | |||
| .fancybox__container { | |||
| position: fixed; | |||
| top: 0; | |||
| left: 0; | |||
| bottom: 0; | |||
| right: 0; | |||
| direction: ltr; | |||
| margin: 0; | |||
| padding: env(safe-area-inset-top, 0px) env(safe-area-inset-right, 0px) | |||
| env(safe-area-inset-bottom, 0px) env(safe-area-inset-left, 0px); | |||
| box-sizing: border-box; | |||
| display: flex; | |||
| flex-direction: column; | |||
| color: var(--fancybox-color, #fff); | |||
| -webkit-tap-highlight-color: rgba(0, 0, 0, 0); | |||
| overflow: hidden; | |||
| z-index: 1050; | |||
| outline: none; | |||
| transform-origin: top left; | |||
| --carousel-button-width: 48px; | |||
| --carousel-button-height: 48px; | |||
| --carousel-button-svg-width: 24px; | |||
| --carousel-button-svg-height: 24px; | |||
| --carousel-button-svg-stroke-width: 2.5; | |||
| --carousel-button-svg-filter: drop-shadow(1px 1px 1px rgba(0, 0, 0, 0.4)); | |||
| } | |||
| .fancybox__container *, | |||
| .fancybox__container *::before, | |||
| .fancybox__container *::after { | |||
| box-sizing: inherit; | |||
| } | |||
| .fancybox__container :focus { | |||
| outline: none; | |||
| } | |||
| body:not(.is-using-mouse) .fancybox__container :focus { | |||
| box-shadow: 0 0 0 1px #fff, | |||
| 0 0 0 2px var(--fancybox-accent-color, rgba(1, 210, 232, 0.94)); | |||
| } | |||
| @media all and (min-width: 1024px) { | |||
| .fancybox__container { | |||
| --carousel-button-width: 48px; | |||
| --carousel-button-height: 48px; | |||
| --carousel-button-svg-width: 27px; | |||
| --carousel-button-svg-height: 27px; | |||
| } | |||
| } | |||
| .fancybox__backdrop { | |||
| position: absolute; | |||
| top: 0; | |||
| right: 0; | |||
| bottom: 0; | |||
| left: 0; | |||
| z-index: -1; | |||
| background: var(--fancybox-bg, rgba(24, 24, 27, 0.92)); | |||
| } | |||
| .fancybox__carousel { | |||
| position: relative; | |||
| flex: 1 1 auto; | |||
| min-height: 0; | |||
| height: 100%; | |||
| z-index: 10; | |||
| } | |||
| .fancybox__carousel.has-dots { | |||
| margin-bottom: calc(0.5rem + 22px); | |||
| } | |||
| .fancybox__viewport { | |||
| position: relative; | |||
| width: 100%; | |||
| height: 100%; | |||
| overflow: visible; | |||
| cursor: default; | |||
| } | |||
| .fancybox__track { | |||
| display: flex; | |||
| height: 100%; | |||
| } | |||
| .fancybox__slide { | |||
| flex: 0 0 auto; | |||
| width: 100%; | |||
| max-width: 100%; | |||
| margin: 0; | |||
| padding: 48px 8px 8px 8px; | |||
| position: relative; | |||
| overscroll-behavior: contain; | |||
| display: flex; | |||
| flex-direction: column; | |||
| outline: 0; | |||
| overflow: auto; | |||
| --carousel-button-width: 36px; | |||
| --carousel-button-height: 36px; | |||
| --carousel-button-svg-width: 22px; | |||
| --carousel-button-svg-height: 22px; | |||
| } | |||
| .fancybox__slide::before, | |||
| .fancybox__slide::after { | |||
| content: ""; | |||
| flex: 0 0 0; | |||
| margin: auto; | |||
| } | |||
| @media all and (min-width: 1024px) { | |||
| .fancybox__slide { | |||
| padding: 64px 100px; | |||
| } | |||
| } | |||
| .fancybox__content { | |||
| margin: 0 env(safe-area-inset-right, 0px) 0 env(safe-area-inset-left, 0px); | |||
| padding: 36px; | |||
| color: var(--fancybox-content-color, #374151); | |||
| background: var(--fancybox-content-bg, #fff); | |||
| position: relative; | |||
| align-self: center; | |||
| display: flex; | |||
| flex-direction: column; | |||
| z-index: 20; | |||
| } | |||
| .fancybox__content :focus:not(.carousel__button.is-close) { | |||
| outline: thin dotted; | |||
| box-shadow: none; | |||
| } | |||
| .fancybox__caption { | |||
| align-self: center; | |||
| max-width: 100%; | |||
| margin: 0; | |||
| padding: 1rem 0 0 0; | |||
| line-height: 1.375; | |||
| color: var(--fancybox-color, currentColor); | |||
| visibility: visible; | |||
| cursor: auto; | |||
| flex-shrink: 0; | |||
| overflow-wrap: anywhere; | |||
| } | |||
| .is-loading .fancybox__caption { | |||
| visibility: hidden; | |||
| } | |||
| .fancybox__container > .carousel__dots { | |||
| top: 100%; | |||
| color: var(--fancybox-color, #fff); | |||
| } | |||
| .fancybox__nav .carousel__button { | |||
| z-index: 40; | |||
| } | |||
| .fancybox__nav .carousel__button.is-next { | |||
| right: 8px; | |||
| } | |||
| @media all and (min-width: 1024px) { | |||
| .fancybox__nav .carousel__button.is-next { | |||
| right: 40px; | |||
| } | |||
| } | |||
| .fancybox__nav .carousel__button.is-prev { | |||
| left: 8px; | |||
| } | |||
| @media all and (min-width: 1024px) { | |||
| .fancybox__nav .carousel__button.is-prev { | |||
| left: 40px; | |||
| } | |||
| } | |||
| .carousel__button.is-close { | |||
| position: absolute; | |||
| top: 8px; | |||
| right: 8px; | |||
| top: calc(env(safe-area-inset-top, 0px) + 8px); | |||
| right: calc(env(safe-area-inset-right, 0px) + 8px); | |||
| z-index: 40; | |||
| } | |||
| @media all and (min-width: 1024px) { | |||
| .carousel__button.is-close { | |||
| right: 40px; | |||
| } | |||
| } | |||
| .fancybox__content > .carousel__button.is-close { | |||
| position: absolute; | |||
| top: -40px; | |||
| right: 0; | |||
| color: var(--fancybox-color, #fff); | |||
| } | |||
| .fancybox__no-click, | |||
| .fancybox__no-click button { | |||
| pointer-events: none; | |||
| } | |||
| .fancybox__spinner { | |||
| position: absolute; | |||
| top: 50%; | |||
| left: 50%; | |||
| transform: translate(-50%, -50%); | |||
| width: 50px; | |||
| height: 50px; | |||
| color: var(--fancybox-color, currentColor); | |||
| } | |||
| .fancybox__slide .fancybox__spinner { | |||
| cursor: pointer; | |||
| z-index: 1053; | |||
| } | |||
| .fancybox__spinner svg { | |||
| animation: fancybox-rotate 2s linear infinite; | |||
| transform-origin: center center; | |||
| position: absolute; | |||
| top: 0; | |||
| right: 0; | |||
| bottom: 0; | |||
| left: 0; | |||
| margin: auto; | |||
| width: 100%; | |||
| height: 100%; | |||
| } | |||
| .fancybox__spinner svg circle { | |||
| fill: none; | |||
| stroke-width: 2.75; | |||
| stroke-miterlimit: 10; | |||
| stroke-dasharray: 1, 200; | |||
| stroke-dashoffset: 0; | |||
| animation: fancybox-dash 1.5s ease-in-out infinite; | |||
| stroke-linecap: round; | |||
| stroke: currentColor; | |||
| } | |||
| @keyframes fancybox-rotate { | |||
| 100% { | |||
| transform: rotate(360deg); | |||
| } | |||
| } | |||
| @keyframes fancybox-dash { | |||
| 0% { | |||
| stroke-dasharray: 1, 200; | |||
| stroke-dashoffset: 0; | |||
| } | |||
| 50% { | |||
| stroke-dasharray: 89, 200; | |||
| stroke-dashoffset: -35px; | |||
| } | |||
| 100% { | |||
| stroke-dasharray: 89, 200; | |||
| stroke-dashoffset: -124px; | |||
| } | |||
| } | |||
| .fancybox__backdrop, | |||
| .fancybox__caption, | |||
| .fancybox__nav, | |||
| .carousel__dots, | |||
| .carousel__button.is-close { | |||
| opacity: var(--fancybox-opacity, 1); | |||
| } | |||
| .fancybox__container.is-animated[aria-hidden="false"] .fancybox__backdrop, | |||
| .fancybox__container.is-animated[aria-hidden="false"] .fancybox__caption, | |||
| .fancybox__container.is-animated[aria-hidden="false"] .fancybox__nav, | |||
| .fancybox__container.is-animated[aria-hidden="false"] .carousel__dots, | |||
| .fancybox__container.is-animated[aria-hidden="false"] | |||
| .carousel__button.is-close { | |||
| animation: 0.15s ease backwards fancybox-fadeIn; | |||
| } | |||
| .fancybox__container.is-animated.is-closing .fancybox__backdrop, | |||
| .fancybox__container.is-animated.is-closing .fancybox__caption, | |||
| .fancybox__container.is-animated.is-closing .fancybox__nav, | |||
| .fancybox__container.is-animated.is-closing .carousel__dots, | |||
| .fancybox__container.is-animated.is-closing .carousel__button.is-close { | |||
| animation: 0.15s ease both fancybox-fadeOut; | |||
| } | |||
| .fancybox-fadeIn { | |||
| animation: 0.15s ease both fancybox-fadeIn; | |||
| } | |||
| .fancybox-fadeOut { | |||
| animation: 0.1s ease both fancybox-fadeOut; | |||
| } | |||
| .fancybox-zoomInUp { | |||
| animation: 0.2s ease both fancybox-zoomInUp; | |||
| } | |||
| .fancybox-zoomOutDown { | |||
| animation: 0.15s ease both fancybox-zoomOutDown; | |||
| } | |||
| .fancybox-throwOutUp { | |||
| animation: 0.15s ease both fancybox-throwOutUp; | |||
| } | |||
| .fancybox-throwOutDown { | |||
| animation: 0.15s ease both fancybox-throwOutDown; | |||
| } | |||
| @keyframes fancybox-fadeIn { | |||
| from { | |||
| opacity: 0; | |||
| } | |||
| to { | |||
| opacity: 1; | |||
| } | |||
| } | |||
| @keyframes fancybox-fadeOut { | |||
| to { | |||
| opacity: 0; | |||
| } | |||
| } | |||
| @keyframes fancybox-zoomInUp { | |||
| from { | |||
| transform: scale(0.97) translate3d(0, 16px, 0); | |||
| opacity: 0; | |||
| } | |||
| to { | |||
| transform: scale(1) translate3d(0, 0, 0); | |||
| opacity: 1; | |||
| } | |||
| } | |||
| @keyframes fancybox-zoomOutDown { | |||
| to { | |||
| transform: scale(0.97) translate3d(0, 16px, 0); | |||
| opacity: 0; | |||
| } | |||
| } | |||
| @keyframes fancybox-throwOutUp { | |||
| to { | |||
| transform: translate3d(0, -30%, 0); | |||
| opacity: 0; | |||
| } | |||
| } | |||
| @keyframes fancybox-throwOutDown { | |||
| to { | |||
| transform: translate3d(0, 30%, 0); | |||
| opacity: 0; | |||
| } | |||
| } | |||
| .fancybox__carousel .carousel__slide { | |||
| scrollbar-width: thin; | |||
| scrollbar-color: #ccc rgba(255, 255, 255, 0.1); | |||
| } | |||
| .fancybox__carousel .carousel__slide::-webkit-scrollbar { | |||
| width: 8px; | |||
| height: 8px; | |||
| } | |||
| .fancybox__carousel .carousel__slide::-webkit-scrollbar-track { | |||
| background-color: rgba(255, 255, 255, 0.1); | |||
| } | |||
| .fancybox__carousel .carousel__slide::-webkit-scrollbar-thumb { | |||
| background-color: #ccc; | |||
| border-radius: 2px; | |||
| box-shadow: inset 0 0 4px rgba(0, 0, 0, 0.2); | |||
| } | |||
| .fancybox__carousel.is-draggable .fancybox__slide, | |||
| .fancybox__carousel.is-draggable .fancybox__slide .fancybox__content { | |||
| cursor: move; | |||
| cursor: grab; | |||
| } | |||
| .fancybox__carousel.is-dragging .fancybox__slide, | |||
| .fancybox__carousel.is-dragging .fancybox__slide .fancybox__content { | |||
| cursor: move; | |||
| cursor: grabbing; | |||
| } | |||
| .fancybox__carousel .fancybox__slide .fancybox__content { | |||
| cursor: auto; | |||
| } | |||
| .fancybox__carousel .fancybox__slide.can-zoom_in .fancybox__content { | |||
| cursor: zoom-in; | |||
| } | |||
| .fancybox__carousel .fancybox__slide.can-zoom_out .fancybox__content { | |||
| cursor: zoom-out; | |||
| } | |||
| .fancybox__carousel .fancybox__slide.is-draggable .fancybox__content { | |||
| cursor: move; | |||
| cursor: grab; | |||
| } | |||
| .fancybox__carousel .fancybox__slide.is-dragging .fancybox__content { | |||
| cursor: move; | |||
| cursor: grabbing; | |||
| } | |||
| .fancybox__image { | |||
| transform-origin: 0 0; | |||
| user-select: none; | |||
| transition: none; | |||
| } | |||
| .has-image .fancybox__content { | |||
| padding: 0; | |||
| background: rgba(0, 0, 0, 0); | |||
| min-height: 1px; | |||
| } | |||
| .is-closing .has-image .fancybox__content { | |||
| overflow: visible; | |||
| } | |||
| .has-image[data-image-fit="contain"] { | |||
| overflow: visible; | |||
| touch-action: none; | |||
| } | |||
| .has-image[data-image-fit="contain"] .fancybox__content { | |||
| flex-direction: row; | |||
| flex-wrap: wrap; | |||
| } | |||
| .has-image[data-image-fit="contain"] .fancybox__image { | |||
| max-width: 100%; | |||
| max-height: 100%; | |||
| object-fit: contain; | |||
| } | |||
| .has-image[data-image-fit="contain-w"] { | |||
| overflow-x: hidden; | |||
| overflow-y: auto; | |||
| } | |||
| .has-image[data-image-fit="contain-w"] .fancybox__content { | |||
| min-height: auto; | |||
| } | |||
| .has-image[data-image-fit="contain-w"] .fancybox__image { | |||
| max-width: 100%; | |||
| height: auto; | |||
| } | |||
| .has-image[data-image-fit="cover"] { | |||
| overflow: visible; | |||
| touch-action: none; | |||
| } | |||
| .has-image[data-image-fit="cover"] .fancybox__content { | |||
| width: 100%; | |||
| height: 100%; | |||
| } | |||
| .has-image[data-image-fit="cover"] .fancybox__image { | |||
| width: 100%; | |||
| height: 100%; | |||
| object-fit: cover; | |||
| } | |||
| .fancybox__carousel .fancybox__slide.has-iframe .fancybox__content, | |||
| .fancybox__carousel .fancybox__slide.has-map .fancybox__content, | |||
| .fancybox__carousel .fancybox__slide.has-pdf .fancybox__content, | |||
| .fancybox__carousel .fancybox__slide.has-video .fancybox__content, | |||
| .fancybox__carousel .fancybox__slide.has-html5video .fancybox__content { | |||
| max-width: 100%; | |||
| flex-shrink: 1; | |||
| min-height: 1px; | |||
| overflow: visible; | |||
| } | |||
| .fancybox__carousel .fancybox__slide.has-iframe .fancybox__content, | |||
| .fancybox__carousel .fancybox__slide.has-map .fancybox__content, | |||
| .fancybox__carousel .fancybox__slide.has-pdf .fancybox__content { | |||
| width: 100%; | |||
| height: 80%; | |||
| } | |||
| .fancybox__carousel .fancybox__slide.has-video .fancybox__content, | |||
| .fancybox__carousel .fancybox__slide.has-html5video .fancybox__content { | |||
| width: 960px; | |||
| height: 540px; | |||
| max-width: 100%; | |||
| max-height: 100%; | |||
| } | |||
| .fancybox__carousel .fancybox__slide.has-map .fancybox__content, | |||
| .fancybox__carousel .fancybox__slide.has-pdf .fancybox__content, | |||
| .fancybox__carousel .fancybox__slide.has-video .fancybox__content, | |||
| .fancybox__carousel .fancybox__slide.has-html5video .fancybox__content { | |||
| padding: 0; | |||
| background: rgba(24, 24, 27, 0.9); | |||
| color: #fff; | |||
| } | |||
| .fancybox__carousel .fancybox__slide.has-map .fancybox__content { | |||
| background: #e5e3df; | |||
| } | |||
| .fancybox__html5video, | |||
| .fancybox__iframe { | |||
| border: 0; | |||
| display: block; | |||
| height: 100%; | |||
| width: 100%; | |||
| background: rgba(0, 0, 0, 0); | |||
| } | |||
| .fancybox-placeholder { | |||
| position: absolute; | |||
| width: 1px; | |||
| height: 1px; | |||
| padding: 0; | |||
| margin: -1px; | |||
| overflow: hidden; | |||
| clip: rect(0, 0, 0, 0); | |||
| white-space: nowrap; | |||
| border-width: 0; | |||
| } | |||
| .fancybox__thumbs { | |||
| flex: 0 0 auto; | |||
| position: relative; | |||
| padding: 0px 3px; | |||
| opacity: var(--fancybox-opacity, 1); | |||
| } | |||
| .fancybox__container.is-animated[aria-hidden="false"] .fancybox__thumbs { | |||
| animation: 0.15s ease-in backwards fancybox-fadeIn; | |||
| } | |||
| .fancybox__container.is-animated.is-closing .fancybox__thumbs { | |||
| opacity: 0; | |||
| } | |||
| .fancybox__thumbs .carousel__slide { | |||
| flex: 0 0 auto; | |||
| width: var(--fancybox-thumbs-width, 96px); | |||
| margin: 0; | |||
| padding: 8px 3px; | |||
| box-sizing: content-box; | |||
| display: flex; | |||
| align-items: center; | |||
| justify-content: center; | |||
| overflow: visible; | |||
| cursor: pointer; | |||
| } | |||
| .fancybox__thumbs .carousel__slide .fancybox__thumb::after { | |||
| content: ""; | |||
| position: absolute; | |||
| top: 0; | |||
| left: 0; | |||
| right: 0; | |||
| bottom: 0; | |||
| border-width: 5px; | |||
| border-style: solid; | |||
| border-color: var(--fancybox-accent-color, rgba(34, 213, 233, 0.96)); | |||
| opacity: 0; | |||
| transition: opacity 0.15s ease; | |||
| border-radius: var(--fancybox-thumbs-border-radius, 4px); | |||
| } | |||
| .fancybox__thumbs .carousel__slide.is-nav-selected .fancybox__thumb::after { | |||
| opacity: 0.92; | |||
| } | |||
| .fancybox__thumbs .carousel__slide > * { | |||
| pointer-events: none; | |||
| user-select: none; | |||
| } | |||
| .fancybox__thumb { | |||
| position: relative; | |||
| width: 100%; | |||
| padding-top: calc(100% / (var(--fancybox-thumbs-ratio, 1.5))); | |||
| background-size: cover; | |||
| background-position: center center; | |||
| background-color: rgba(255, 255, 255, 0.1); | |||
| background-repeat: no-repeat; | |||
| border-radius: var(--fancybox-thumbs-border-radius, 4px); | |||
| } | |||
| .fancybox__toolbar { | |||
| position: absolute; | |||
| top: 0; | |||
| right: 0; | |||
| left: 0; | |||
| z-index: 20; | |||
| background: linear-gradient( | |||
| to top, | |||
| hsla(0deg, 0%, 0%, 0) 0%, | |||
| hsla(0deg, 0%, 0%, 0.006) 8.1%, | |||
| hsla(0deg, 0%, 0%, 0.021) 15.5%, | |||
| hsla(0deg, 0%, 0%, 0.046) 22.5%, | |||
| hsla(0deg, 0%, 0%, 0.077) 29%, | |||
| hsla(0deg, 0%, 0%, 0.114) 35.3%, | |||
| hsla(0deg, 0%, 0%, 0.155) 41.2%, | |||
| hsla(0deg, 0%, 0%, 0.198) 47.1%, | |||
| hsla(0deg, 0%, 0%, 0.242) 52.9%, | |||
| hsla(0deg, 0%, 0%, 0.285) 58.8%, | |||
| hsla(0deg, 0%, 0%, 0.326) 64.7%, | |||
| hsla(0deg, 0%, 0%, 0.363) 71%, | |||
| hsla(0deg, 0%, 0%, 0.394) 77.5%, | |||
| hsla(0deg, 0%, 0%, 0.419) 84.5%, | |||
| hsla(0deg, 0%, 0%, 0.434) 91.9%, | |||
| hsla(0deg, 0%, 0%, 0.44) 100% | |||
| ); | |||
| padding: 0; | |||
| touch-action: none; | |||
| display: flex; | |||
| justify-content: space-between; | |||
| --carousel-button-svg-width: 20px; | |||
| --carousel-button-svg-height: 20px; | |||
| opacity: var(--fancybox-opacity, 1); | |||
| text-shadow: var( | |||
| --fancybox-toolbar-text-shadow, | |||
| 1px 1px 1px rgba(0, 0, 0, 0.4) | |||
| ); | |||
| } | |||
| @media all and (min-width: 1024px) { | |||
| .fancybox__toolbar { | |||
| padding: 8px; | |||
| } | |||
| } | |||
| .fancybox__container.is-animated[aria-hidden="false"] .fancybox__toolbar { | |||
| animation: 0.15s ease-in backwards fancybox-fadeIn; | |||
| } | |||
| .fancybox__container.is-animated.is-closing .fancybox__toolbar { | |||
| opacity: 0; | |||
| } | |||
| .fancybox__toolbar__items { | |||
| display: flex; | |||
| } | |||
| .fancybox__toolbar__items--left { | |||
| margin-right: auto; | |||
| } | |||
| .fancybox__toolbar__items--center { | |||
| position: absolute; | |||
| left: 50%; | |||
| transform: translateX(-50%); | |||
| } | |||
| .fancybox__toolbar__items--right { | |||
| margin-left: auto; | |||
| } | |||
| @media (max-width: 640px) { | |||
| .fancybox__toolbar__items--center:not(:last-child) { | |||
| display: none; | |||
| } | |||
| } | |||
| .fancybox__counter { | |||
| min-width: 72px; | |||
| padding: 0 10px; | |||
| line-height: var(--carousel-button-height, 48px); | |||
| text-align: center; | |||
| font-size: 17px; | |||
| font-variant-numeric: tabular-nums; | |||
| -webkit-font-smoothing: subpixel-antialiased; | |||
| } | |||
| .fancybox__progress { | |||
| background: var(--fancybox-accent-color, rgba(34, 213, 233, 0.96)); | |||
| height: 3px; | |||
| left: 0; | |||
| position: absolute; | |||
| right: 0; | |||
| top: 0; | |||
| transform: scaleX(0); | |||
| transform-origin: 0; | |||
| transition-property: transform; | |||
| transition-timing-function: linear; | |||
| z-index: 30; | |||
| user-select: none; | |||
| } | |||
| .fancybox__container:fullscreen::backdrop { | |||
| opacity: 0; | |||
| } | |||
| .fancybox__button--fullscreen g:nth-child(2) { | |||
| display: none; | |||
| } | |||
| .fancybox__container:fullscreen .fancybox__button--fullscreen g:nth-child(1) { | |||
| display: none; | |||
| } | |||
| .fancybox__container:fullscreen .fancybox__button--fullscreen g:nth-child(2) { | |||
| display: block; | |||
| } | |||
| .fancybox__button--slideshow g:nth-child(2) { | |||
| display: none; | |||
| } | |||
| .fancybox__container.has-slideshow .fancybox__button--slideshow g:nth-child(1) { | |||
| display: none; | |||
| } | |||
| .fancybox__container.has-slideshow .fancybox__button--slideshow g:nth-child(2) { | |||
| display: block; | |||
| } | |||
| .gallery img { | |||
| cursor: zoom-in; | |||
| } | |||
| .fancybox__container { | |||
| --fancybox-bg: rgba(193, 201, 210, 0.7); | |||
| } | |||
| .fancybox-zoomOut { | |||
| animation: 0.2s ease-in-out fancybox-zoomOut both; | |||
| } | |||
| .fancybox-zoomIn { | |||
| animation: 0.25s ease-in-out fancybox-zoomIn both; | |||
| } | |||
| @keyframes fancybox-zoomIn { | |||
| from { | |||
| opacity: 0; | |||
| transform: scale3d(0.8, 0.8, 0.8); | |||
| } | |||
| 100% { | |||
| opacity: 1; | |||
| } | |||
| } | |||
| @keyframes fancybox-zoomOut { | |||
| from { | |||
| opacity: 1; | |||
| } | |||
| to { | |||
| opacity: 0; | |||
| transform: scale3d(0.8, 0.8, 0.8); | |||
| } | |||
| } | |||
| @@ -110,6 +110,19 @@ export const getResSpecificationList = (params) => { | |||
| }); | |||
| } | |||
| // 查询资源规格列表(所有) | |||
| // cluster 所属集群 :OpenI 启智集群,C2Net 智算集群 | |||
| // queue 所属队列id | |||
| // status 状态 : 1 待审核 2已上架 3已下架 | |||
| export const getResSpecificationListAll = (params) => { | |||
| return service({ | |||
| url: '/admin/resources/specification/list/all', | |||
| method: 'get', | |||
| params, | |||
| data: {}, | |||
| }); | |||
| } | |||
| // 同步智算网络资源池(队列) | |||
| export const syncResSpecification = () => { | |||
| return service({ | |||
| @@ -187,7 +187,7 @@ const en = { | |||
| onlyFace:'Only face', | |||
| onlyLicensePlate:'Only license plate', | |||
| dragThePictureHere:'Drag the picture here', | |||
| or:'or', | |||
| or:' or ', | |||
| clickUpload:'Click upload', | |||
| dataDesensitizationModelExperience:'Data desensitization model experience', | |||
| dataDesensitizationModelDesc:'Use AI technology to desensitize the face and license plate number in the picture. For more information about this model, please visit the project', | |||
| @@ -33,11 +33,9 @@ | |||
| drag | |||
| > | |||
| <div class="el-upload__text"> | |||
| {{ $t("dragThePictureHere") | |||
| }}<span style="color: rgba(136, 136, 136, 0.87)">{{ | |||
| $t("or") | |||
| }}</span | |||
| >{{ $t("clickUpload") }} | |||
| <span> | |||
| <span>{{ $t("dragThePictureHere") }}</span><span style="color: rgba(136, 136, 136, 0.87)">{{ $t("or") }}</span><span>{{ $t("clickUpload") }}</span> | |||
| </span> | |||
| </div> | |||
| </el-upload> | |||
| @@ -69,7 +69,9 @@ | |||
| </div> | |||
| <div class="content"> | |||
| <el-select v-model="dataInfo.SpecIds" multiple collapse-tags class="specSel"> | |||
| <el-option v-for="item in specsList" :key="item.k" :label="item.v" :value="item.k" /> | |||
| <el-option v-for="item in specsList" :label="item.v" :key="item.k" :value="item.k"> | |||
| <span v-html="item.v"></span> | |||
| </el-option> | |||
| </el-select> | |||
| </div> | |||
| </div> | |||
| @@ -87,7 +89,7 @@ | |||
| </template> | |||
| <script> | |||
| import BaseDialog from '~/components/BaseDialog.vue'; | |||
| import { getResQueueCode, getResSpecificationList, addResScene, updateResScene } from '~/apis/modules/resources'; | |||
| import { getResQueueCode, getResSpecificationListAll, addResScene, updateResScene } from '~/apis/modules/resources'; | |||
| import { JOB_TYPE, CLUSTERS, ACC_CARD_TYPE, SPECIFICATION_STATUS } from '~/const'; | |||
| import { getListValueWithKey } from '~/utils'; | |||
| @@ -167,21 +169,21 @@ export default { | |||
| const params = { | |||
| cluster: this.dataInfo.Cluster, | |||
| queue: this.dataInfo.QueueId === '-1' ? '' : this.dataInfo.QueueId, | |||
| status: 2, | |||
| page: 1, | |||
| // status: 2, | |||
| // page: 1, | |||
| }; | |||
| return getResSpecificationList(params).then(res => { | |||
| return getResSpecificationListAll(params).then(res => { | |||
| res = res.data; | |||
| if (res.Code === 0) { | |||
| const list = res.Data.List; | |||
| const data = list.map((item) => { | |||
| const Queue = item.Queue; | |||
| const Spec = item.Spec; | |||
| // const NGPU = `${Queue.ComputeResource}:${Spec.AccCardsNum === 0 ? '0' : Spec.AccCardsNum + '*' + getListValueWithKey(this.accCardTypeList, Queue.AccCardType)}`; | |||
| const NGPU = `${Queue.ComputeResource}:${Spec.AccCardsNum + '*' + getListValueWithKey(this.accCardTypeList, Queue.AccCardType)}`; | |||
| const statusStr = Spec.Status != '2' ? `<span style="color:rgb(245, 34, 45)">(${getListValueWithKey(this.statusList, Spec.Status.toString())})</span>` : ''; | |||
| return { | |||
| k: Spec.ID, | |||
| v: `${NGPU}, CPU:${Spec.CpuCores}, ${this.$t('resourcesManagement.gpuMem')}:${Spec.GPUMemGiB}GB, ${this.$t('resourcesManagement.mem')}:${Spec.MemGiB}GB, ${this.$t('resourcesManagement.shareMem')}:${Spec.ShareMemGiB}GB, ${this.$t('resourcesManagement.unitPrice')}:${Spec.UnitPrice}${this.$t('resourcesManagement.point_hr')}`, | |||
| v: `${NGPU}, CPU:${Spec.CpuCores}, ${this.$t('resourcesManagement.gpuMem')}:${Spec.GPUMemGiB}GB, ${this.$t('resourcesManagement.mem')}:${Spec.MemGiB}GB, ${this.$t('resourcesManagement.shareMem')}:${Spec.ShareMemGiB}GB, ${this.$t('resourcesManagement.unitPrice')}:${Spec.UnitPrice}${this.$t('resourcesManagement.point_hr')}${statusStr}`, | |||
| } | |||
| }); | |||
| this.specsList.splice(0, Infinity, ...data); | |||
| @@ -64,7 +64,7 @@ | |||
| header-align="center" min-width="180"> | |||
| <template slot-scope="scope"> | |||
| <div v-for="item in scope.row.SpecsList" :key="item.k"> | |||
| <span>{{ item.v }}</span> | |||
| <span v-html="item.v"></span> | |||
| </div> | |||
| </template> | |||
| </el-table-column> | |||
| @@ -100,7 +100,7 @@ | |||
| <script> | |||
| import SceneDialog from '../components/SceneDialog.vue'; | |||
| import { getQueueList, getResQueueCode, getResSceneList, updateResScene, getAiCenterList } from '~/apis/modules/resources'; | |||
| import { JOB_TYPE, CLUSTERS, ACC_CARD_TYPE } from '~/const'; | |||
| import { JOB_TYPE, CLUSTERS, ACC_CARD_TYPE, SPECIFICATION_STATUS } from '~/const'; | |||
| import { getListValueWithKey } from '~/utils'; | |||
| import { formatDate } from 'element-ui/lib/utils/date-util'; | |||
| @@ -117,6 +117,7 @@ export default { | |||
| selAiCenter: '', | |||
| aiCenterList: [{ k: '', v: this.$t('resourcesManagement.allAiCenter') }], | |||
| accCardTypeList: [...ACC_CARD_TYPE], | |||
| statusList: [{ k: '', v: this.$t('resourcesManagement.allStatus') }, ...SPECIFICATION_STATUS], | |||
| loading: false, | |||
| tableData: [], | |||
| pageInfo: { | |||
| @@ -191,11 +192,11 @@ export default { | |||
| let cluster = ''; | |||
| for (let i = 0, iLen = Specs.length; i < iLen; i++) { | |||
| const Spec = Specs[i]; | |||
| // const NGPU = `${Spec.ComputeResource}:${Spec.AccCardsNum === 0 ? '0' : Spec.AccCardsNum + '*' + getListValueWithKey(this.accCardTypeList, Spec.AccCardType)}`; | |||
| const NGPU = `${Spec.ComputeResource}:${Spec.AccCardsNum + '*' + getListValueWithKey(this.accCardTypeList, Spec.AccCardType)}`; | |||
| const statusStr = Spec.Status != '2' ? `<span style="color:rgb(245, 34, 45)">(${getListValueWithKey(this.statusList, Spec.Status.toString())})</span>` : ''; | |||
| specsList.push({ | |||
| k: Spec.ID, | |||
| v: `${NGPU}, CPU:${Spec.CpuCores}, ${this.$t('resourcesManagement.gpuMem')}:${Spec.GPUMemGiB}GB, ${this.$t('resourcesManagement.mem')}:${Spec.MemGiB}GB, ${this.$t('resourcesManagement.shareMem')}:${Spec.ShareMemGiB}GB, ${this.$t('resourcesManagement.unitPrice')}:${Spec.UnitPrice}${this.$t('resourcesManagement.point_hr')}`, | |||
| v: `${NGPU}, CPU:${Spec.CpuCores}, ${this.$t('resourcesManagement.gpuMem')}:${Spec.GPUMemGiB}GB, ${this.$t('resourcesManagement.mem')}:${Spec.MemGiB}GB, ${this.$t('resourcesManagement.shareMem')}:${Spec.ShareMemGiB}GB, ${this.$t('resourcesManagement.unitPrice')}:${Spec.UnitPrice}${this.$t('resourcesManagement.point_hr')}${statusStr}`, | |||
| }); | |||
| cluster = Spec.Cluster; | |||
| if (queueIds.indexOf(Spec.QueueId) < 0) { | |||