| @@ -10,6 +10,7 @@ import ( | |||||
| "io" | "io" | ||||
| "path" | "path" | ||||
| "strings" | "strings" | ||||
| "time" | |||||
| "code.gitea.io/gitea/modules/log" | "code.gitea.io/gitea/modules/log" | ||||
| "code.gitea.io/gitea/modules/obs" | "code.gitea.io/gitea/modules/obs" | ||||
| @@ -64,6 +65,7 @@ type AttachmentInfo struct { | |||||
| Repo *Repository `xorm:"extends"` | Repo *Repository `xorm:"extends"` | ||||
| RelAvatarLink string `xorm:"extends"` | RelAvatarLink string `xorm:"extends"` | ||||
| UserName string `xorm:"extends"` | UserName string `xorm:"extends"` | ||||
| Recommend bool `xorm:"-"` | |||||
| } | } | ||||
| type AttachmentsOptions struct { | type AttachmentsOptions struct { | ||||
| @@ -78,6 +80,7 @@ type AttachmentsOptions struct { | |||||
| JustNeedZipFile bool | JustNeedZipFile bool | ||||
| NeedRepoInfo bool | NeedRepoInfo bool | ||||
| Keyword string | Keyword string | ||||
| RecommendOnly bool | |||||
| } | } | ||||
| func (a *Attachment) AfterUpdate() { | func (a *Attachment) AfterUpdate() { | ||||
| @@ -104,6 +107,14 @@ func (a *Attachment) IncreaseDownloadCount() error { | |||||
| return nil | return nil | ||||
| } | } | ||||
| func (a *Attachment) UpdateDatasetUpdateUnix() error { | |||||
| // Update download count. | |||||
| if _, err := x.Exec("UPDATE `dataset` SET updated_unix="+fmt.Sprint(time.Now().Unix())+" WHERE id=?", a.DatasetID); err != nil { | |||||
| return fmt.Errorf("UpdateDatasetUpdateUnix: %v", err) | |||||
| } | |||||
| return nil | |||||
| } | |||||
| // APIFormat converts models.Attachment to api.Attachment | // APIFormat converts models.Attachment to api.Attachment | ||||
| func (a *Attachment) APIFormat() *api.Attachment { | func (a *Attachment) APIFormat() *api.Attachment { | ||||
| return &api.Attachment{ | return &api.Attachment{ | ||||
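UpdateDatasetUpdateUnix only touches the parent dataset's updated_unix column, so the attachment upload/commit handlers are expected to call it right after an attachment changes; that call site is outside this hunk. A minimal sketch of such a caller (the handler around it is hypothetical, only the method itself comes from this diff):

    // after the attachment row has been created or updated
    if err := attachment.UpdateDatasetUpdateUnix(); err != nil {
        log.Error("UpdateDatasetUpdateUnix(%d) failed: %v", attachment.DatasetID, err)
    }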
| @@ -570,6 +581,11 @@ func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) { | |||||
| builder.Eq{"attachment.is_private": opts.IsPrivate}, | builder.Eq{"attachment.is_private": opts.IsPrivate}, | ||||
| ) | ) | ||||
| } | } | ||||
| if opts.RecommendOnly { | |||||
| cond = cond.And(builder.In("attachment.id", builder.Select("attachment.id"). | |||||
| From("attachment"). | |||||
| Join("INNER", "dataset", "attachment.dataset_id = dataset.id and dataset.recommend=true"))) | |||||
| } | |||||
| if opts.JustNeedZipFile { | if opts.JustNeedZipFile { | ||||
| var DecompressState []int32 | var DecompressState []int32 | ||||
| @@ -618,6 +634,7 @@ func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) { | |||||
| if err != nil { | if err != nil { | ||||
| return nil, 0, fmt.Errorf("GetDatasetByID failed error: %v", err) | return nil, 0, fmt.Errorf("GetDatasetByID failed error: %v", err) | ||||
| } | } | ||||
| attachment.Recommend = dataset.Recommend | |||||
| repo, err := GetRepositoryByID(dataset.RepoID) | repo, err := GetRepositoryByID(dataset.RepoID) | ||||
| if err == nil { | if err == nil { | ||||
| attachment.Repo = repo | attachment.Repo = repo | ||||
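The new RecommendOnly flag narrows the attachment listing to datasets flagged recommend=true via the INNER JOIN subquery above. A minimal usage sketch (the other option values are illustrative, not taken from this diff):

    infos, total, err := models.Attachments(&models.AttachmentsOptions{
        IsPrivate:     false,
        NeedRepoInfo:  true,
        RecommendOnly: true, // only attachments whose dataset is recommended
    })
    if err == nil {
        log.Info("found %d recommended attachments (total %d)", len(infos), total)
    }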
| @@ -580,6 +580,8 @@ type CommitImageParams struct { | |||||
| Topics []string | Topics []string | ||||
| CloudBrainType int | CloudBrainType int | ||||
| UID int64 | UID int64 | ||||
| Place string | |||||
| Type int | |||||
| } | } | ||||
| type CommitImageResult struct { | type CommitImageResult struct { | ||||
| @@ -1629,12 +1631,12 @@ func CloudbrainAll(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { | |||||
| var err error | var err error | ||||
| condition := "cloudbrain.user_id = `user`.id" | condition := "cloudbrain.user_id = `user`.id" | ||||
| if len(opts.Keyword) == 0 { | if len(opts.Keyword) == 0 { | ||||
| count, err = sess.Where(cond).Count(new(Cloudbrain)) | |||||
| count, err = sess.Unscoped().Where(cond).Count(new(Cloudbrain)) | |||||
| } else { | } else { | ||||
| lowerKeyWord := strings.ToLower(opts.Keyword) | lowerKeyWord := strings.ToLower(opts.Keyword) | ||||
| cond = cond.And(builder.Or(builder.Like{"LOWER(cloudbrain.job_name)", lowerKeyWord}, builder.Like{"LOWER(cloudbrain.display_job_name)", lowerKeyWord}, builder.Like{"`user`.lower_name", lowerKeyWord})) | cond = cond.And(builder.Or(builder.Like{"LOWER(cloudbrain.job_name)", lowerKeyWord}, builder.Like{"LOWER(cloudbrain.display_job_name)", lowerKeyWord}, builder.Like{"`user`.lower_name", lowerKeyWord})) | ||||
| count, err = sess.Table(&Cloudbrain{}).Where(cond). | |||||
| count, err = sess.Table(&Cloudbrain{}).Unscoped().Where(cond). | |||||
| Join("left", "`user`", condition).Count(new(CloudbrainInfo)) | Join("left", "`user`", condition).Count(new(CloudbrainInfo)) | ||||
| } | } | ||||
| @@ -567,12 +567,12 @@ func isImageStaring(e Engine, userID, imageID int64) bool { | |||||
| } | } | ||||
| func RecommendImage(imageId int64, recommond bool) error { | func RecommendImage(imageId int64, recommond bool) error { | ||||
| image := Image{Type: getRecommondType(recommond)} | |||||
| image := Image{Type: GetRecommondType(recommond)} | |||||
| _, err := x.ID(imageId).Cols("type").Update(image) | _, err := x.ID(imageId).Cols("type").Update(image) | ||||
| return err | return err | ||||
| } | } | ||||
| func getRecommondType(recommond bool) int { | |||||
| func GetRecommondType(recommond bool) int { | |||||
| if recommond { | if recommond { | ||||
| return RECOMMOND_TYPE | return RECOMMOND_TYPE | ||||
| @@ -23,7 +23,8 @@ type Dataset struct { | |||||
| Category string | Category string | ||||
| Description string `xorm:"TEXT"` | Description string `xorm:"TEXT"` | ||||
| DownloadTimes int64 | DownloadTimes int64 | ||||
| NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"` | |||||
| NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"` | |||||
| Recommend bool `xorm:"INDEX NOT NULL DEFAULT false"` | |||||
| License string | License string | ||||
| Task string | Task string | ||||
| ReleaseID int64 `xorm:"INDEX"` | ReleaseID int64 `xorm:"INDEX"` | ||||
| @@ -99,6 +100,7 @@ type SearchDatasetOptions struct { | |||||
| OwnerID int64 | OwnerID int64 | ||||
| RepoID int64 | RepoID int64 | ||||
| IncludePublic bool | IncludePublic bool | ||||
| RecommendOnly bool | |||||
| Category string | Category string | ||||
| Task string | Task string | ||||
| License string | License string | ||||
| @@ -132,6 +134,13 @@ func CreateDataset(dataset *Dataset) (err error) { | |||||
| } | } | ||||
| func RecommendDataset(dataSetId int64, recommend bool) error { | |||||
| dataset := Dataset{Recommend: recommend} | |||||
| _, err := x.ID(dataSetId).Cols("recommend").Update(dataset) | |||||
| return err | |||||
| } | |||||
| func SearchDataset(opts *SearchDatasetOptions) (DatasetList, int64, error) { | func SearchDataset(opts *SearchDatasetOptions) (DatasetList, int64, error) { | ||||
| cond := SearchDatasetCondition(opts) | cond := SearchDatasetCondition(opts) | ||||
| return SearchDatasetByCondition(opts, cond) | return SearchDatasetByCondition(opts, cond) | ||||
| @@ -146,6 +155,9 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { | |||||
| if opts.RepoID > 0 { | if opts.RepoID > 0 { | ||||
| cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID}) | cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID}) | ||||
| } | } | ||||
| if opts.RecommendOnly { | |||||
| cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly}) | |||||
| } | |||||
| if opts.IncludePublic { | if opts.IncludePublic { | ||||
| cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) | cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) | ||||
| @@ -198,7 +210,7 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da | |||||
| defer sess.Close() | defer sess.Close() | ||||
| datasets := make(DatasetList, 0, opts.PageSize) | datasets := make(DatasetList, 0, opts.PageSize) | ||||
| selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars" | |||||
| selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars,dataset.recommend" | |||||
| count, err := sess.Distinct("dataset.id").Join("INNER", "repository", "repository.id = dataset.repo_id"). | count, err := sess.Distinct("dataset.id").Join("INNER", "repository", "repository.id = dataset.repo_id"). | ||||
| Join("INNER", "attachment", "attachment.dataset_id=dataset.id"). | Join("INNER", "attachment", "attachment.dataset_id=dataset.id"). | ||||
| @@ -12,15 +12,19 @@ import ( | |||||
| // env keys for git hooks need | // env keys for git hooks need | ||||
| const ( | const ( | ||||
| EnvRepoName = "GITEA_REPO_NAME" | |||||
| EnvRepoUsername = "GITEA_REPO_USER_NAME" | |||||
| EnvRepoIsWiki = "GITEA_REPO_IS_WIKI" | |||||
| EnvPusherName = "GITEA_PUSHER_NAME" | |||||
| EnvPusherEmail = "GITEA_PUSHER_EMAIL" | |||||
| EnvPusherID = "GITEA_PUSHER_ID" | |||||
| EnvKeyID = "GITEA_KEY_ID" | |||||
| EnvIsDeployKey = "GITEA_IS_DEPLOY_KEY" | |||||
| EnvIsInternal = "GITEA_INTERNAL_PUSH" | |||||
| EnvRepoName = "GITEA_REPO_NAME" | |||||
| EnvRepoUsername = "GITEA_REPO_USER_NAME" | |||||
| EnvRepoIsWiki = "GITEA_REPO_IS_WIKI" | |||||
| EnvPusherName = "GITEA_PUSHER_NAME" | |||||
| EnvPusherEmail = "GITEA_PUSHER_EMAIL" | |||||
| EnvPusherID = "GITEA_PUSHER_ID" | |||||
| EnvKeyID = "GITEA_KEY_ID" | |||||
| EnvIsDeployKey = "GITEA_IS_DEPLOY_KEY" | |||||
| EnvIsInternal = "GITEA_INTERNAL_PUSH" | |||||
| EnvRepoSize = "REPO_CURRENT_SIZE" | |||||
| EnvRepoMaxFileSize = "REPO_MAX_FILE_SIZE" | |||||
| EnvRepoMaxSize = "REPO_MAX_SIZE" | |||||
| EnvPushSizeCheckFlag = "PUSH_SIZE_CHECK_FLAG" | |||||
| ) | ) | ||||
| // InternalPushingEnvironment returns an os environment to switch off hooks on push | // InternalPushingEnvironment returns an os environment to switch off hooks on push | ||||
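The four new keys presumably let the server pass repository size limits down to the push hooks; the consumer side is not in this hunk, so the following is only a sketch of how a hook could read them back (imports of "os" and "strconv" assumed, value format and flag spelling are assumptions):

    // hypothetical pre-receive check built on the new env keys
    func pushSizeCheckEnabled() bool {
        return os.Getenv(models.EnvPushSizeCheckFlag) == "true" // flag value is an assumption
    }
    func repoOverSizeLimit() bool {
        cur, _ := strconv.ParseInt(os.Getenv(models.EnvRepoSize), 10, 64)
        limit, _ := strconv.ParseInt(os.Getenv(models.EnvRepoMaxSize), 10, 64)
        return limit > 0 && cur > limit
    }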
| @@ -82,17 +82,27 @@ type UserBusinessAnalysisAll struct { | |||||
| DataDate string `xorm:"NULL"` | DataDate string `xorm:"NULL"` | ||||
| //cloudbraintask | //cloudbraintask | ||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| } | } | ||||
| type UserBusinessAnalysis struct { | type UserBusinessAnalysis struct { | ||||
| @@ -159,17 +169,27 @@ type UserBusinessAnalysis struct { | |||||
| DataDate string `xorm:"NULL"` | DataDate string `xorm:"NULL"` | ||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| } | } | ||||
| type UserBusinessAnalysisQueryOptions struct { | type UserBusinessAnalysisQueryOptions struct { | ||||
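The new columns (user_index_primitive, focus_other_user, collect_dataset, and so on) have to exist in the statistics database before insertTable writes them. This diff does not show that migration; if the statistics engine syncs these structs at startup, it would amount to something like:

    // assumed migration step for the statistics DB (not part of this diff)
    if err := xStatistic.Sync2(new(UserBusinessAnalysisAll), new(UserBusinessAnalysis)); err != nil {
        log.Error("sync statistics tables failed: %v", err)
    }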
| @@ -410,8 +430,10 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||||
| log.Info("truncate all data from table: " + tableName) | log.Info("truncate all data from table: " + tableName) | ||||
| statictisSess.Exec("TRUNCATE TABLE " + tableName) | statictisSess.Exec("TRUNCATE TABLE " + tableName) | ||||
| log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05")) | |||||
| log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05")) | |||||
| StartTimeNextDay := pageStartTime.AddDate(0, 0, 1) | |||||
| EndTimeNextDay := pageEndTime.AddDate(0, 0, 1) | |||||
| log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05") + " nextDay:" + StartTimeNextDay.Format("2006-01-02 15:04:05")) | |||||
| log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05") + " nextDay:" + EndTimeNextDay.Format("2006-01-02 15:04:05")) | |||||
| start_unix := pageStartTime.Unix() | start_unix := pageStartTime.Unix() | ||||
| end_unix := pageEndTime.Unix() | end_unix := pageEndTime.Unix() | ||||
| @@ -426,8 +448,8 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||||
| CommentCountMap := queryComment(start_unix, end_unix) | CommentCountMap := queryComment(start_unix, end_unix) | ||||
| FocusRepoCountMap := queryWatch(start_unix, end_unix) | FocusRepoCountMap := queryWatch(start_unix, end_unix) | ||||
| StarRepoCountMap := queryStar(start_unix, end_unix) | StarRepoCountMap := queryStar(start_unix, end_unix) | ||||
| WatchedCountMap := queryFollow(start_unix, end_unix) | |||||
| CommitCodeSizeMap := queryCommitCodeSize(start_unix, end_unix) | |||||
| WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) | |||||
| CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix()) | |||||
| CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix) | CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix) | ||||
| SolveIssueCountMap := querySolveIssue(start_unix, end_unix) | SolveIssueCountMap := querySolveIssue(start_unix, end_unix) | ||||
| CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix) | CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix) | ||||
| @@ -436,6 +458,12 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||||
| OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix) | OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix) | ||||
| CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) | CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) | ||||
| AiModelManageMap := queryUserModel(start_unix, end_unix) | AiModelManageMap := queryUserModel(start_unix, end_unix) | ||||
| CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix) | |||||
| RecommendDataset := queryRecommedDataSet(start_unix, end_unix) | |||||
| CollectImage, CollectedImage := queryImageStars(start_unix, end_unix) | |||||
| RecommendImage := queryRecommedImage(start_unix, end_unix) | |||||
| DataDate := currentTimeNow.Format("2006-01-02") + " 00:01" | DataDate := currentTimeNow.Format("2006-01-02") + " 00:01" | ||||
| cond := "type != 1 and is_active=true" | cond := "type != 1 and is_active=true" | ||||
| @@ -472,6 +500,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||||
| dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap) | dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap) | ||||
| dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap) | dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap) | ||||
| dateRecordAll.FocusRepoCount = getMapValue(dateRecordAll.ID, FocusRepoCountMap) | dateRecordAll.FocusRepoCount = getMapValue(dateRecordAll.ID, FocusRepoCountMap) | ||||
| dateRecordAll.FocusOtherUser = getMapValue(dateRecordAll.ID, WatchOtherMap) | |||||
| dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap) | dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap) | ||||
| dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap) | dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap) | ||||
| dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap) | dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap) | ||||
| @@ -496,13 +525,20 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||||
| dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap) | dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap) | ||||
| dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) | dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) | ||||
| dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap) | dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap) | ||||
| dateRecordAll.UserIndex = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight) | |||||
| userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndex | |||||
| if maxUserIndex < dateRecordAll.UserIndex { | |||||
| maxUserIndex = dateRecordAll.UserIndex | |||||
| dateRecordAll.CollectDataset = getMapValue(dateRecordAll.ID, CollectDataset) | |||||
| dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset) | |||||
| dateRecordAll.RecommendDataset = getMapValue(dateRecordAll.ID, RecommendDataset) | |||||
| dateRecordAll.CollectImage = getMapValue(dateRecordAll.ID, CollectImage) | |||||
| dateRecordAll.CollectedImage = getMapValue(dateRecordAll.ID, CollectedImage) | |||||
| dateRecordAll.RecommendImage = getMapValue(dateRecordAll.ID, RecommendImage) | |||||
| dateRecordAll.UserIndexPrimitive = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight) | |||||
| userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndexPrimitive | |||||
| if maxUserIndex < dateRecordAll.UserIndexPrimitive { | |||||
| maxUserIndex = dateRecordAll.UserIndexPrimitive | |||||
| } | } | ||||
| if minUserIndex > dateRecordAll.UserIndex { | |||||
| minUserIndex = dateRecordAll.UserIndex | |||||
| if minUserIndex > dateRecordAll.UserIndexPrimitive { | |||||
| minUserIndex = dateRecordAll.UserIndexPrimitive | |||||
| } | } | ||||
| dateRecordBatch = append(dateRecordBatch, dateRecordAll) | dateRecordBatch = append(dateRecordBatch, dateRecordAll) | ||||
| if len(dateRecordBatch) >= BATCH_INSERT_SIZE { | if len(dateRecordBatch) >= BATCH_INSERT_SIZE { | ||||
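The raw weighted score now goes into UserIndexPrimitive while userIndexMap plus maxUserIndex/minUserIndex are still collected, which suggests the displayed UserIndex is min-max rescaled in a later pass that sits outside this hunk. A plausible sketch of that rescaling (entirely an assumption, including the helper name):

    for id, raw := range userIndexMap {
        normalized := 0.0
        if maxUserIndex > minUserIndex {
            normalized = (raw - minUserIndex) / (maxUserIndex - minUserIndex)
        }
        updateUserIndex(tableName, id, normalized) // hypothetical write-back helper
    }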
| @@ -552,7 +588,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static | |||||
| insertBatchSql := "INSERT INTO public." + tableName + | insertBatchSql := "INSERT INTO public." + tableName + | ||||
| "(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " + | "(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " + | ||||
| "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location) " + | |||||
| "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive) " + | |||||
| "VALUES" | "VALUES" | ||||
| for i, record := range dateRecords { | for i, record := range dateRecords { | ||||
| @@ -560,7 +596,8 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static | |||||
| ", " + fmt.Sprint(record.IssueCount) + ", " + fmt.Sprint(record.CommentCount) + ", " + fmt.Sprint(record.FocusRepoCount) + ", " + fmt.Sprint(record.StarRepoCount) + | ", " + fmt.Sprint(record.IssueCount) + ", " + fmt.Sprint(record.CommentCount) + ", " + fmt.Sprint(record.FocusRepoCount) + ", " + fmt.Sprint(record.StarRepoCount) + | ||||
| ", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) + | ", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) + | ||||
| ", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) + | ", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) + | ||||
| ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "')" | |||||
| ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "'," + | |||||
| fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ")" | |||||
| if i < (len(dateRecords) - 1) { | if i < (len(dateRecords) - 1) { | ||||
| insertBatchSql += "," | insertBatchSql += "," | ||||
| } | } | ||||
| @@ -628,7 +665,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||||
| CommentCountMap := queryComment(start_unix, end_unix) | CommentCountMap := queryComment(start_unix, end_unix) | ||||
| FocusRepoCountMap := queryWatch(start_unix, end_unix) | FocusRepoCountMap := queryWatch(start_unix, end_unix) | ||||
| StarRepoCountMap := queryStar(start_unix, end_unix) | StarRepoCountMap := queryStar(start_unix, end_unix) | ||||
| WatchedCountMap := queryFollow(start_unix, end_unix) | |||||
| WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) | |||||
| CommitCodeSizeMap, err := GetAllUserKPIStats() | CommitCodeSizeMap, err := GetAllUserKPIStats() | ||||
| if err != nil { | if err != nil { | ||||
| @@ -643,6 +680,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||||
| OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix) | OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix) | ||||
| CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) | CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) | ||||
| AiModelManageMap := queryUserModel(start_unix, end_unix) | AiModelManageMap := queryUserModel(start_unix, end_unix) | ||||
| CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix) | |||||
| RecommendDataset := queryRecommedDataSet(start_unix, end_unix) | |||||
| CollectImage, CollectedImage := queryImageStars(start_unix, end_unix) | |||||
| RecommendImage := queryRecommedImage(start_unix, end_unix) | |||||
| statictisSess := xStatistic.NewSession() | statictisSess := xStatistic.NewSession() | ||||
| defer statictisSess.Close() | defer statictisSess.Close() | ||||
| @@ -683,13 +726,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||||
| dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap) | dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap) | ||||
| dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap) | dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap) | ||||
| dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap) | dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap) | ||||
| dateRecord.FocusOtherUser = getMapValue(dateRecord.ID, WatchOtherMap) | |||||
| if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok { | if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok { | ||||
| dateRecord.CommitCodeSize = 0 | dateRecord.CommitCodeSize = 0 | ||||
| } else { | } else { | ||||
| dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines) | dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines) | ||||
| } | } | ||||
| dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap) | dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap) | ||||
| dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap) | dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap) | ||||
| dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap) | dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap) | ||||
| @@ -715,7 +757,15 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||||
| dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) | dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) | ||||
| dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap) | dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap) | ||||
| dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap) | dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap) | ||||
| dateRecord.UserIndex = getUserIndex(dateRecord, ParaWeight) | |||||
| dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset) | |||||
| dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset) | |||||
| dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset) | |||||
| dateRecord.CollectImage = getMapValue(dateRecord.ID, CollectImage) | |||||
| dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage) | |||||
| dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage) | |||||
| dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight) | |||||
| setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord) | setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord) | ||||
| _, err = statictisSess.Insert(&dateRecord) | _, err = statictisSess.Insert(&dateRecord) | ||||
| if err != nil { | if err != nil { | ||||
| @@ -765,7 +815,7 @@ func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, en | |||||
| userMetrics["TotalActivateRegistUser"] = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) + 1 | userMetrics["TotalActivateRegistUser"] = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) + 1 | ||||
| } | } | ||||
| if dateRecord.UserIndex > 0 || dateRecord.LoginCount > 0 { | |||||
| if getUserActivate(dateRecord) > 0 { | |||||
| userMetrics["HasActivityUser"] = getMapKeyStringValue("HasActivityUser", userMetrics) + 1 | userMetrics["HasActivityUser"] = getMapKeyStringValue("HasActivityUser", userMetrics) + 1 | ||||
| } | } | ||||
| @@ -802,7 +852,12 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight | |||||
| result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) | result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) | ||||
| result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) | result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) | ||||
| result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) | result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) | ||||
| result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) | |||||
| codeLine := float64(dateRecord.CommitCodeSize) | |||||
| limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000) | |||||
| if codeLine >= limitCodeLine { | |||||
| codeLine = limitCodeLine | |||||
| } | |||||
| result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01) | |||||
| result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) | result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) | ||||
| result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) | result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) | ||||
| result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) | result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) | ||||
| @@ -810,6 +865,34 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight | |||||
| result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) | result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) | ||||
| result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) | result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) | ||||
| result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1) | |||||
| result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1) | |||||
| result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2) | |||||
| result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1) | |||||
| result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1) | |||||
| result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2) | |||||
| return result | |||||
| } | |||||
| func getUserActivate(dateRecord UserBusinessAnalysis) int { | |||||
| var result int | |||||
| result += dateRecord.CodeMergeCount | |||||
| result += dateRecord.CommitCount | |||||
| result += dateRecord.IssueCount | |||||
| result += dateRecord.CommentCount | |||||
| result += dateRecord.FocusRepoCount | |||||
| result += dateRecord.StarRepoCount | |||||
| result += dateRecord.SolveIssueCount | |||||
| result += dateRecord.EncyclopediasCount | |||||
| result += dateRecord.CreateRepoCount | |||||
| result += dateRecord.CloudBrainTaskNum | |||||
| result += dateRecord.CommitModelCount | |||||
| result += dateRecord.CommitDatasetNum | |||||
| result += dateRecord.FocusOtherUser | |||||
| result += dateRecord.CollectDataset | |||||
| result += dateRecord.CollectImage | |||||
| result += dateRecord.CommitCodeSize | |||||
| return result | return result | ||||
| } | } | ||||
| @@ -831,7 +914,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64 | |||||
| result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) | result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) | ||||
| result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) | result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) | ||||
| result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) | result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) | ||||
| result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) | |||||
| codeLine := float64(dateRecord.CommitCodeSize) | |||||
| limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000) | |||||
| if codeLine >= limitCodeLine { | |||||
| codeLine = limitCodeLine | |||||
| } | |||||
| result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01) | |||||
| result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) | result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) | ||||
| result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) | result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) | ||||
| result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) | result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) | ||||
| @@ -839,6 +927,13 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64 | |||||
| result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) | result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) | ||||
| result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) | result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) | ||||
| result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1) | |||||
| result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1) | |||||
| result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2) | |||||
| result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1) | |||||
| result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1) | |||||
| result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2) | |||||
| return result | return result | ||||
| } | } | ||||
| @@ -1129,17 +1224,18 @@ func queryStar(start_unix int64, end_unix int64) map[int64]int { | |||||
| return resultMap | return resultMap | ||||
| } | } | ||||
| func queryFollow(start_unix int64, end_unix int64) map[int64]int { | |||||
| func queryFollow(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { | |||||
| sess := x.NewSession() | sess := x.NewSession() | ||||
| defer sess.Close() | defer sess.Close() | ||||
| resultMap := make(map[int64]int) | resultMap := make(map[int64]int) | ||||
| resultFocusedByOtherMap := make(map[int64]int) | |||||
| cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | ||||
| count, err := sess.Where(cond).Count(new(Follow)) | count, err := sess.Where(cond).Count(new(Follow)) | ||||
| if err != nil { | if err != nil { | ||||
| log.Info("query follow error. return.") | log.Info("query follow error. return.") | ||||
| return resultMap | |||||
| return resultMap, resultFocusedByOtherMap | |||||
| } | } | ||||
| var indexTotal int64 | var indexTotal int64 | ||||
| indexTotal = 0 | indexTotal = 0 | ||||
| @@ -1155,6 +1251,11 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int { | |||||
| } else { | } else { | ||||
| resultMap[followRecord.FollowID] += 1 | resultMap[followRecord.FollowID] += 1 | ||||
| } | } | ||||
| if _, ok := resultFocusedByOtherMap[followRecord.UserID]; !ok { | |||||
| resultFocusedByOtherMap[followRecord.UserID] = 1 | |||||
| } else { | |||||
| resultFocusedByOtherMap[followRecord.UserID] += 1 | |||||
| } | |||||
| } | } | ||||
| indexTotal += PAGE_SIZE | indexTotal += PAGE_SIZE | ||||
| @@ -1163,7 +1264,215 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int { | |||||
| } | } | ||||
| } | } | ||||
| return resultMap | |||||
| return resultMap, resultFocusedByOtherMap | |||||
| } | |||||
| func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int { | |||||
| sess := x.NewSession() | |||||
| defer sess.Close() | |||||
| userIdDdatasetMap := make(map[int64]int) | |||||
| cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and recommend=true" | |||||
| count, err := sess.Where(cond).Count(new(Dataset)) | |||||
| if err != nil { | |||||
| log.Info("query recommend dataset error. return.") | |||||
| return userIdDdatasetMap | |||||
| } | |||||
| var indexTotal int64 | |||||
| indexTotal = 0 | |||||
| for { | |||||
| sess.Select("id,user_id,recommend").Where(cond).Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||||
| datasetList := make([]*Dataset, 0) | |||||
| sess.Find(&datasetList) | |||||
| log.Info("query datasetList size=" + fmt.Sprint(len(datasetList))) | |||||
| for _, datasetRecord := range datasetList { | |||||
| if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok { | |||||
| userIdDdatasetMap[datasetRecord.UserID] = 1 | |||||
| } else { | |||||
| userIdDdatasetMap[datasetRecord.UserID] += 1 | |||||
| } | |||||
| } | |||||
| indexTotal += PAGE_SIZE | |||||
| if indexTotal >= count { | |||||
| break | |||||
| } | |||||
| } | |||||
| return userIdDdatasetMap | |||||
| } | |||||
| func queryAllDataSet() (map[int64]int64, map[int64]int64) { | |||||
| sess := x.NewSession() | |||||
| defer sess.Close() | |||||
| datasetUserIdMap := make(map[int64]int64) | |||||
| userIdDdatasetMap := make(map[int64]int64) | |||||
| count, err := sess.Count(new(Dataset)) | |||||
| if err != nil { | |||||
| log.Info("query dataset error. return.") | |||||
| return datasetUserIdMap, userIdDdatasetMap | |||||
| } | |||||
| var indexTotal int64 | |||||
| indexTotal = 0 | |||||
| for { | |||||
| sess.Select("id,user_id").Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||||
| datasetList := make([]*Dataset, 0) | |||||
| sess.Find(&datasetList) | |||||
| log.Info("query datasetList size=" + fmt.Sprint(len(datasetList))) | |||||
| for _, datasetRecord := range datasetList { | |||||
| datasetUserIdMap[datasetRecord.ID] = datasetRecord.UserID | |||||
| if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok { | |||||
| userIdDdatasetMap[datasetRecord.UserID] = 1 | |||||
| } else { | |||||
| userIdDdatasetMap[datasetRecord.UserID] += 1 | |||||
| } | |||||
| } | |||||
| indexTotal += PAGE_SIZE | |||||
| if indexTotal >= count { | |||||
| break | |||||
| } | |||||
| } | |||||
| return datasetUserIdMap, userIdDdatasetMap | |||||
| } | |||||
| func queryRecommedImage(start_unix int64, end_unix int64) map[int64]int { | |||||
| sess := x.NewSession() | |||||
| defer sess.Close() | |||||
| userIdImageMap := make(map[int64]int) | |||||
| cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and type=5" | |||||
| count, err := sess.Where(cond).Count(new(Image)) | |||||
| if err != nil { | |||||
| log.Info("query recommend image error. return.") | |||||
| return userIdImageMap | |||||
| } | |||||
| var indexTotal int64 | |||||
| indexTotal = 0 | |||||
| for { | |||||
| sess.Select("id,uid,type").Where(cond).Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||||
| imageList := make([]*Image, 0) | |||||
| sess.Find(&imageList) | |||||
| log.Info("query imageList size=" + fmt.Sprint(len(imageList))) | |||||
| for _, imageRecord := range imageList { | |||||
| if _, ok := userIdImageMap[imageRecord.UID]; !ok { | |||||
| userIdImageMap[imageRecord.UID] = 1 | |||||
| } else { | |||||
| userIdImageMap[imageRecord.UID] += 1 | |||||
| } | |||||
| } | |||||
| indexTotal += PAGE_SIZE | |||||
| if indexTotal >= count { | |||||
| break | |||||
| } | |||||
| } | |||||
| return userIdImageMap | |||||
| } | |||||
| func queryAllImage() (map[int64]int64, map[int64]int64) { | |||||
| sess := x.NewSession() | |||||
| defer sess.Close() | |||||
| imageUserIdMap := make(map[int64]int64) | |||||
| userIdDImageMap := make(map[int64]int64) | |||||
| count, err := sess.Count(new(Image)) | |||||
| if err != nil { | |||||
| log.Info("query image error. return.") | |||||
| return imageUserIdMap, userIdDImageMap | |||||
| } | |||||
| var indexTotal int64 | |||||
| indexTotal = 0 | |||||
| for { | |||||
| sess.Select("id,uid").Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||||
| imageList := make([]*Image, 0) | |||||
| sess.Find(&imageList) | |||||
| log.Info("query imageList size=" + fmt.Sprint(len(imageList))) | |||||
| for _, imageRecord := range imageList { | |||||
| imageUserIdMap[imageRecord.ID] = imageRecord.UID | |||||
| if _, ok := userIdDImageMap[imageRecord.UID]; !ok { | |||||
| userIdDImageMap[imageRecord.UID] = 1 | |||||
| } else { | |||||
| userIdDImageMap[imageRecord.UID] += 1 | |||||
| } | |||||
| } | |||||
| indexTotal += PAGE_SIZE | |||||
| if indexTotal >= count { | |||||
| break | |||||
| } | |||||
| } | |||||
| return imageUserIdMap, userIdDImageMap | |||||
| } | |||||
+func queryDatasetStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
+ sess := x.NewSession()
+ defer sess.Close()
+ datasetCollect := make(map[int64]int)
+ datasetCollected := make(map[int64]int)
+ datasetUserIdMap, _ := queryAllDataSet()
+ cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
+ count, err := sess.Where(cond).Count(new(DatasetStar))
+ if err != nil {
+  log.Info("query dataset star error. return.")
+  return datasetCollect, datasetCollected
+ }
+ var indexTotal int64
+ indexTotal = 0
+ for {
+  sess.Select("id,uid,dataset_id").Table(new(DatasetStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+  datasetStarList := make([]*DatasetStar, 0)
+  sess.Find(&datasetStarList)
+  log.Info("query datasetStarList size=" + fmt.Sprint(len(datasetStarList)))
+  for _, datasetStarRecord := range datasetStarList {
+   if _, ok := datasetCollect[datasetStarRecord.UID]; !ok {
+    datasetCollect[datasetStarRecord.UID] = 1
+   } else {
+    datasetCollect[datasetStarRecord.UID] += 1
+   }
+   if _, ok := datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]]; !ok {
+    datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] = 1
+   } else {
+    datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] += 1
+   }
+  }
+  indexTotal += PAGE_SIZE
+  if indexTotal >= count {
+   break
+  }
+ }
+ return datasetCollect, datasetCollected
+}
+func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
+ sess := x.NewSession()
+ defer sess.Close()
+ imageCollect := make(map[int64]int)
+ imageCollected := make(map[int64]int)
+ imageUserIdMap, _ := queryAllImage()
+ cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
+ count, err := sess.Where(cond).Count(new(ImageStar))
+ if err != nil {
+  log.Info("query image star error. return.")
+  return imageCollect, imageCollected
+ }
+ var indexTotal int64
+ indexTotal = 0
+ for {
+  sess.Select("id,uid,image_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+  imageStarList := make([]*ImageStar, 0)
+  sess.Find(&imageStarList)
+  log.Info("query imageStarList size=" + fmt.Sprint(len(imageStarList)))
+  for _, imageStarRecord := range imageStarList {
+   if _, ok := imageCollect[imageStarRecord.UID]; !ok {
+    imageCollect[imageStarRecord.UID] = 1
+   } else {
+    imageCollect[imageStarRecord.UID] += 1
+   }
+   if _, ok := imageCollected[imageUserIdMap[imageStarRecord.ImageID]]; !ok {
+    imageCollected[imageUserIdMap[imageStarRecord.ImageID]] = 1
+   } else {
+    imageCollected[imageUserIdMap[imageStarRecord.ImageID]] += 1
+   }
+  }
+  indexTotal += PAGE_SIZE
+  if indexTotal >= count {
+   break
+  }
+ }
+ return imageCollect, imageCollected
 }
 func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
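All of the new query helpers follow the same pattern as the existing ones: count the matching rows, then page through them with Limit(PAGE_SIZE, offset) and aggregate per user ID. The per-user counts are then folded into the record with getMapValue, for example (illustrative, mirroring the assignments earlier in this diff):

    collect, collected := queryDatasetStars(start_unix, end_unix)
    record.CollectDataset = getMapValue(record.ID, collect)     // stars the user gave
    record.CollectedDataset = getMapValue(record.ID, collected) // stars the user's own datasets received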
| @@ -45,17 +45,26 @@ type UserBusinessAnalysisCurrentYear struct { | |||||
| Name string `xorm:"NOT NULL"` | Name string `xorm:"NOT NULL"` | ||||
| DataDate string `xorm:"NULL"` | DataDate string `xorm:"NULL"` | ||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| } | } | ||||
| type UserBusinessAnalysisLast30Day struct { | type UserBusinessAnalysisLast30Day struct { | ||||
| @@ -101,17 +110,26 @@ type UserBusinessAnalysisLast30Day struct { | |||||
| Name string `xorm:"NOT NULL"` | Name string `xorm:"NOT NULL"` | ||||
| DataDate string `xorm:"NULL"` | DataDate string `xorm:"NULL"` | ||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| } | } | ||||
| type UserBusinessAnalysisLastMonth struct { | type UserBusinessAnalysisLastMonth struct { | ||||
| @@ -157,17 +175,26 @@ type UserBusinessAnalysisLastMonth struct { | |||||
| Name string `xorm:"NOT NULL"` | Name string `xorm:"NOT NULL"` | ||||
| DataDate string `xorm:"NULL"` | DataDate string `xorm:"NULL"` | ||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| } | } | ||||
| type UserBusinessAnalysisCurrentMonth struct { | type UserBusinessAnalysisCurrentMonth struct { | ||||
| @@ -213,17 +240,26 @@ type UserBusinessAnalysisCurrentMonth struct { | |||||
| Name string `xorm:"NOT NULL"` | Name string `xorm:"NOT NULL"` | ||||
| DataDate string `xorm:"NULL"` | DataDate string `xorm:"NULL"` | ||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| } | } | ||||
| type UserBusinessAnalysisCurrentWeek struct { | type UserBusinessAnalysisCurrentWeek struct { | ||||
| @@ -269,17 +305,27 @@ type UserBusinessAnalysisCurrentWeek struct { | |||||
| Name string `xorm:"NOT NULL"` | Name string `xorm:"NOT NULL"` | ||||
| DataDate string `xorm:"NULL"` | DataDate string `xorm:"NULL"` | ||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| } | } | ||||
| type UserBusinessAnalysisYesterday struct { | type UserBusinessAnalysisYesterday struct { | ||||
| @@ -325,17 +371,27 @@ type UserBusinessAnalysisYesterday struct { | |||||
| Name string `xorm:"NOT NULL"` | Name string `xorm:"NOT NULL"` | ||||
| DataDate string `xorm:"NULL"` | DataDate string `xorm:"NULL"` | ||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` | |||||
| UserLocation string `xorm:"NULL"` | |||||
| FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||||
| } | } | ||||
| type UserAnalysisPara struct { | type UserAnalysisPara struct { | ||||
| @@ -33,6 +33,16 @@ type CommitImageCloudBrainForm struct { | |||||
| Topics string `form:"topics"` | Topics string `form:"topics"` | ||||
| } | } | ||||
| type CommitAdminImageCloudBrainForm struct { | |||||
| Description string `form:"description" binding:"Required"` | |||||
| Type int `form:"type" binding:"Required"` | |||||
| Tag string `form:"tag" binding:"Required;MaxSize(100)" ` | |||||
| IsPrivate bool `form:"isPrivate" binding:"Required"` | |||||
| Topics string `form:"topics"` | |||||
| Place string `form:"place" binding:"Required"` | |||||
| IsRecommend bool `form:"isRecommend" binding:"Required"` | |||||
| } | |||||
| type EditImageCloudBrainForm struct { | type EditImageCloudBrainForm struct { | ||||
| ID int64 `form:"id" binding:"Required"` | ID int64 `form:"id" binding:"Required"` | ||||
| Description string `form:"description" binding:"Required"` | Description string `form:"description" binding:"Required"` | ||||
| @@ -312,12 +312,51 @@ sendjob: | |||||
| return nil | return nil | ||||
| }) | }) | ||||
| if err == nil { | if err == nil { | ||||
| go updateImageStatus(image, isSetCreatedUnix, createTime) | go updateImageStatus(image, isSetCreatedUnix, createTime) | ||||
| } | } | ||||
| return err | return err | ||||
| } | } | ||||
| func CommitAdminImage(params models.CommitImageParams) error { | |||||
| exist, err := models.IsImageExist(params.ImageTag) | |||||
| if err != nil { | |||||
| return fmt.Errorf("resty CommitImage: %v", err) | |||||
| } | |||||
| if exist { | |||||
| return models.ErrorImageTagExist{ | |||||
| Tag: params.ImageTag, | |||||
| } | |||||
| } | |||||
| image := models.Image{ | |||||
| CloudbrainType: params.CloudBrainType, | |||||
| UID: params.UID, | |||||
| IsPrivate: params.IsPrivate, | |||||
| Tag: params.ImageTag, | |||||
| Description: params.ImageDescription, | |||||
| Place: params.Place, | |||||
| Status: models.IMAGE_STATUS_SUCCESS, | |||||
| Type: params.Type, | |||||
| } | |||||
| err = models.WithTx(func(ctx models.DBContext) error { | |||||
| if err := models.CreateLocalImage(&image); err != nil { | |||||
| log.Error("Failed to insert image record.", err) | |||||
| return fmt.Errorf("resty CommitImage: %v", err) | |||||
| } | |||||
| if err := models.SaveImageTopics(image.ID, params.Topics...); err != nil { | |||||
| log.Error("Failed to insert image record.", err) | |||||
| return fmt.Errorf("resty CommitImage: %v", err) | |||||
| } | |||||
| return nil | |||||
| }) | |||||
| return err | |||||
| } | |||||
| func updateImageStatus(image models.Image, isSetCreatedUnix bool, createTime time.Time) { | func updateImageStatus(image models.Image, isSetCreatedUnix bool, createTime time.Time) { | ||||
| attemps := 5 | attemps := 5 | ||||
| commitSuccess := false | commitSuccess := false | ||||
| @@ -19,7 +19,11 @@ import ( | |||||
| "xorm.io/builder" | "xorm.io/builder" | ||||
| ) | ) | ||||
| func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) { | |||||
| const ( | |||||
| SIZE_LIMIT_SCRIPT_NAME = "size_limit" | |||||
| ) | |||||
| func getHookTemplates() (hookNames, hookTpls, giteaHookTpls, sizeLimitTpls []string) { | |||||
| hookNames = []string{"pre-receive", "update", "post-receive"} | hookNames = []string{"pre-receive", "update", "post-receive"} | ||||
| hookTpls = []string{ | hookTpls = []string{ | ||||
| fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType), | fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType), | ||||
| @@ -31,6 +35,11 @@ func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) { | |||||
| fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf), | fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf), | ||||
| fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf), | fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf), | ||||
| } | } | ||||
| sizeLimitTpls = []string{ | |||||
| fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=1\n\n\nstatus=\"$EXIT_SUCCESS\"\n\n# skip this hook entirely if shell check is not open\ncheck_flag=${PUSH_SIZE_CHECK_FLAG}\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"${REPO_MAX_FILE_SIZE}\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \"<oldref> <newref> <refname>\\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. are referenced by\n# another branch or tag).\n\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" --tags=\\* | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)')\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\n# rewrite IFS to seperate line in $files\nIFS=$'\\n'\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n unset IFS\n temp_array=(${file})\n # add all commit files size\n push_size=`expr $push_size + ${temp_array[2]}`\n if [[ ${temp_array[2]} -gt $maxsize ]]; then\n\t if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\n\t fi\n\t echo -e \"\\033[31m- ${temp_array[3]} \\033[0m (ref: ${refname}) \"\n fi\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\n\nsizelimit_mb=\"${REPO_MAX_SIZE}\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nreposize_b=${REPO_CURRENT_SIZE}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS\n", setting.ScriptType, setting.CustomConf), | |||||
| fmt.Sprintf(""), | |||||
| fmt.Sprintf(""), | |||||
| } | |||||
| return | return | ||||
| } | } | ||||
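The size_limit script above reads its limits from environment variables rather than from git config. For reference, a sketch of the assumed mapping between those variables and the models.Env* constants used later in this diff; the constant definitions themselves are outside the shown hunks, so the string values below are assumptions.

    // Assumed values only; the real constants are defined in the models package.
    const (
        EnvRepoSize          = "REPO_CURRENT_SIZE"    // size of the repo directory on the server, in bytes
        EnvRepoMaxSize       = "REPO_MAX_SIZE"        // setting.Repository.RepoMaxSize, in MB
        EnvRepoMaxFileSize   = "REPO_MAX_FILE_SIZE"   // setting.Repository.Upload.FileMaxSize, in MB
        EnvPushSizeCheckFlag = "PUSH_SIZE_CHECK_FLAG" // setting.Repository.Upload.ShellFlag; 1 enables the check
    )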
| @@ -41,7 +50,7 @@ func CreateDelegateHooks(repoPath string) error { | |||||
| // createDelegateHooks creates all the hooks scripts for the repo | // createDelegateHooks creates all the hooks scripts for the repo | ||||
| func createDelegateHooks(repoPath string) (err error) { | func createDelegateHooks(repoPath string) (err error) { | ||||
| hookNames, hookTpls, giteaHookTpls := getHookTemplates() | |||||
| hookNames, hookTpls, giteaHookTpls, sizeLimitTpls := getHookTemplates() | |||||
| hookDir := filepath.Join(repoPath, "hooks") | hookDir := filepath.Join(repoPath, "hooks") | ||||
| for i, hookName := range hookNames { | for i, hookName := range hookNames { | ||||
| @@ -74,8 +83,26 @@ func createDelegateHooks(repoPath string) (err error) { | |||||
| if err = ensureExecutable(newHookPath); err != nil { | if err = ensureExecutable(newHookPath); err != nil { | ||||
| return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err) | return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err) | ||||
| } | } | ||||
| if err = writeHookTpl(generateHookScriptPath(hookDir, hookName, SIZE_LIMIT_SCRIPT_NAME), sizeLimitTpls[i]); err != nil { | |||||
| return err | |||||
| } | |||||
| } | |||||
| return nil | |||||
| } | |||||
| func writeHookTpl(hookPath, content string) error { | |||||
| if content == "" { | |||||
| return nil | |||||
| } | |||||
| if err := ioutil.WriteFile(hookPath, []byte(content), 0777); err != nil { | |||||
| return fmt.Errorf("write new hook file '%s': %v", hookPath, err) | |||||
| } | } | ||||
| if err := ensureExecutable(hookPath); err != nil { | |||||
| return fmt.Errorf("Unable to set %s executable. Error %v", hookPath, err) | |||||
| } | |||||
| return nil | return nil | ||||
| } | } | ||||
| @@ -101,7 +128,7 @@ func ensureExecutable(filename string) error { | |||||
| // CheckDelegateHooks checks the hooks scripts for the repo | // CheckDelegateHooks checks the hooks scripts for the repo | ||||
| func CheckDelegateHooks(repoPath string) ([]string, error) { | func CheckDelegateHooks(repoPath string) ([]string, error) { | ||||
| hookNames, hookTpls, giteaHookTpls := getHookTemplates() | |||||
| hookNames, hookTpls, giteaHookTpls, sizeLimitTpls := getHookTemplates() | |||||
| hookDir := filepath.Join(repoPath, "hooks") | hookDir := filepath.Join(repoPath, "hooks") | ||||
| results := make([]string, 0, 10) | results := make([]string, 0, 10) | ||||
| @@ -146,10 +173,34 @@ func CheckDelegateHooks(repoPath string) ([]string, error) { | |||||
| if !checkExecutable(newHookPath) { | if !checkExecutable(newHookPath) { | ||||
| results = append(results, fmt.Sprintf("new hook file %s is not executable", newHookPath)) | results = append(results, fmt.Sprintf("new hook file %s is not executable", newHookPath)) | ||||
| } | } | ||||
| if results, err = checkHookFile(generateHookScriptPath(hookDir, hookName, SIZE_LIMIT_SCRIPT_NAME), sizeLimitTpls[i], results); err != nil { |||||
| return results, err | |||||
| } | |||||
| } | } | ||||
| return results, nil | return results, nil | ||||
| } | } | ||||
| func generateHookScriptPath(hookDir, hookName, fileName string) string { | |||||
| return filepath.Join(hookDir, hookName+".d", fileName) | |||||
| } | |||||
| func checkHookFile(filePath, tpl string, results []string) ([]string, error) { |||||
| if tpl == "" { |||||
| return results, nil |||||
| } |||||
| contents, err := ioutil.ReadFile(filePath) |||||
| if err != nil { |||||
| return results, err |||||
| } |||||
| if string(contents) != tpl { |||||
| results = append(results, fmt.Sprintf("old hook file %s is out of date", filePath)) |||||
| } |||||
| if !checkExecutable(filePath) { |||||
| results = append(results, fmt.Sprintf("old hook file %s is not executable", filePath)) |||||
| } |||||
| return results, nil |||||
| } |||||
| // SyncRepositoryHooks rewrites all repositories' pre-receive, update and post-receive hooks | // SyncRepositoryHooks rewrites all repositories' pre-receive, update and post-receive hooks | ||||
| // to make sure the binary and custom conf path are up-to-date. | // to make sure the binary and custom conf path are up-to-date. | ||||
| func SyncRepositoryHooks(ctx context.Context) error { | func SyncRepositoryHooks(ctx context.Context) error { | ||||
| @@ -56,6 +56,7 @@ var ( | |||||
| FileMaxSize int64 | FileMaxSize int64 | ||||
| MaxFiles int | MaxFiles int | ||||
| TotalMaxSize int64 | TotalMaxSize int64 | ||||
| ShellFlag int | |||||
| } `ini:"-"` | } `ini:"-"` | ||||
| // Repository local settings | // Repository local settings | ||||
| @@ -125,6 +126,7 @@ var ( | |||||
| FileMaxSize int64 | FileMaxSize int64 | ||||
| MaxFiles int | MaxFiles int | ||||
| TotalMaxSize int64 | TotalMaxSize int64 | ||||
| ShellFlag int | |||||
| }{ | }{ | ||||
| Enabled: true, | Enabled: true, | ||||
| TempPath: "data/tmp/uploads", | TempPath: "data/tmp/uploads", | ||||
| @@ -132,6 +134,7 @@ var ( | |||||
| FileMaxSize: 30, | FileMaxSize: 30, | ||||
| MaxFiles: 10, | MaxFiles: 10, | ||||
| TotalMaxSize: 1024, | TotalMaxSize: 1024, | ||||
| ShellFlag: 0, | |||||
| }, | }, | ||||
| // Repository local settings | // Repository local settings | ||||
| @@ -69,8 +69,17 @@ func sessionHandler(session ssh.Session) { | |||||
| os.Environ(), | os.Environ(), | ||||
| "SSH_ORIGINAL_COMMAND="+command, | "SSH_ORIGINAL_COMMAND="+command, | ||||
| "SKIP_MINWINSVC=1", | "SKIP_MINWINSVC=1", | ||||
| models.EnvRepoMaxFileSize+"="+fmt.Sprint(setting.Repository.Upload.FileMaxSize), | |||||
| models.EnvRepoMaxSize+"="+fmt.Sprint(setting.Repository.RepoMaxSize), | |||||
| models.EnvPushSizeCheckFlag+"="+fmt.Sprint(setting.Repository.Upload.ShellFlag), | |||||
| ) | ) | ||||
| if strings.HasPrefix(command, "git-receive-pack") { | |||||
| repo := getRepoFromCommandStr(command) | |||||
| if repo != nil { | |||||
| cmd.Env = append(cmd.Env, models.EnvRepoSize+"="+fmt.Sprint(repo.Size)) | |||||
| } | |||||
| } | |||||
| stdout, err := cmd.StdoutPipe() | stdout, err := cmd.StdoutPipe() | ||||
| if err != nil { | if err != nil { | ||||
| log.Error("SSH: StdoutPipe: %v", err) | log.Error("SSH: StdoutPipe: %v", err) | ||||
| @@ -131,6 +140,23 @@ func sessionHandler(session ssh.Session) { | |||||
| } | } | ||||
| } | } | ||||
| func getRepoFromCommandStr(command string) *models.Repository { | |||||
| repoPath := strings.TrimPrefix(command, "git-receive-pack '") | |||||
| repoPath = strings.TrimSuffix(repoPath, ".git'") | |||||
| if repoPath != "" { | |||||
| nameArray := strings.Split(repoPath, "/") | |||||
| if len(nameArray) >= 2 { | |||||
| ownerName := nameArray[0] | |||||
| repoName := nameArray[1] | |||||
| if repo, err := models.GetRepositoryByOwnerAndName(ownerName, repoName); err == nil { | |||||
| return repo | |||||
| } | |||||
| } | |||||
| } | |||||
| return nil | |||||
| } | |||||
| func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool { | func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool { | ||||
| if ctx.User() != setting.SSH.BuiltinServerUser { | if ctx.User() != setting.SSH.BuiltinServerUser { | ||||
| return false | return false | ||||
| @@ -2,6 +2,7 @@ package storage | |||||
| import ( | import ( | ||||
| "encoding/xml" | "encoding/xml" | ||||
| "errors" | |||||
| "path" | "path" | ||||
| "sort" | "sort" | ||||
| "strconv" | "strconv" | ||||
| @@ -129,7 +130,7 @@ func NewMultiPartUpload(uuid string) (string, error) { | |||||
| return core.NewMultipartUpload(bucketName, objectName, miniov6.PutObjectOptions{}) | return core.NewMultipartUpload(bucketName, objectName, miniov6.PutObjectOptions{}) | ||||
| } | } | ||||
| func CompleteMultiPartUpload(uuid string, uploadID string) (string, error) { | |||||
| func CompleteMultiPartUpload(uuid string, uploadID string, totalChunks int) (string, error) { | |||||
| client, core, err := getClients() | client, core, err := getClients() | ||||
| if err != nil { | if err != nil { | ||||
| log.Error("getClients failed:", err.Error()) | log.Error("getClients failed:", err.Error()) | ||||
| @@ -146,6 +147,11 @@ func CompleteMultiPartUpload(uuid string, uploadID string) (string, error) { | |||||
| return "", err | return "", err | ||||
| } | } | ||||
| if len(partInfos) != totalChunks { | |||||
| log.Error("ListObjectParts number(%d) is not equal the set total chunk number(%d)", len(partInfos), totalChunks) | |||||
| return "", errors.New("the parts is not complete") | |||||
| } | |||||
| var complMultipartUpload completeMultipartUpload | var complMultipartUpload completeMultipartUpload | ||||
| for _, partInfo := range partInfos { | for _, partInfo := range partInfos { | ||||
| complMultipartUpload.Parts = append(complMultipartUpload.Parts, miniov6.CompletePart{ | complMultipartUpload.Parts = append(complMultipartUpload.Parts, miniov6.CompletePart{ | ||||
| @@ -59,21 +59,55 @@ func ObsHasObject(path string) (bool, error) { | |||||
| return hasObject, nil | return hasObject, nil | ||||
| } | } | ||||
| func listAllParts(uuid, uploadID, key string) (output *obs.ListPartsOutput, err error) { | |||||
| output = &obs.ListPartsOutput{} | |||||
| partNumberMarker := 0 | |||||
| for { | |||||
| temp, err := ObsCli.ListParts(&obs.ListPartsInput{ | |||||
| Bucket: setting.Bucket, | |||||
| Key: key, | |||||
| UploadId: uploadID, | |||||
| MaxParts: MAX_LIST_PARTS, | |||||
| PartNumberMarker: partNumberMarker, | |||||
| }) | |||||
| if err != nil { | |||||
| log.Error("ListParts failed:", err.Error()) | |||||
| return output, err | |||||
| } | |||||
| partNumberMarker = temp.NextPartNumberMarker | |||||
| log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, temp.MaxParts, temp.PartNumberMarker, temp.NextPartNumberMarker, len(temp.Parts)) | |||||
| for _, partInfo := range temp.Parts { | |||||
| output.Parts = append(output.Parts, obs.Part{ | |||||
| PartNumber: partInfo.PartNumber, | |||||
| ETag: partInfo.ETag, | |||||
| }) | |||||
| } | |||||
| if !temp.IsTruncated { |||||
| break |||||
| } |||||
| } | |||||
| return output, nil | |||||
| } | |||||
| func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) { | func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) { | ||||
| key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") | key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") | ||||
| output, err := ObsCli.ListParts(&obs.ListPartsInput{ | |||||
| Bucket: setting.Bucket, | |||||
| Key: key, | |||||
| UploadId: uploadID, | |||||
| }) | |||||
| allParts, err := listAllParts(uuid, uploadID, key) | |||||
| if err != nil { | if err != nil { | ||||
| log.Error("ListParts failed:", err.Error()) | |||||
| log.Error("listAllParts failed: %v", err) | |||||
| return "", err | return "", err | ||||
| } | } | ||||
| var chunks string | var chunks string | ||||
| for _, partInfo := range output.Parts { | |||||
| for _, partInfo := range allParts.Parts { | |||||
| chunks += strconv.Itoa(partInfo.PartNumber) + "-" + partInfo.ETag + "," | chunks += strconv.Itoa(partInfo.PartNumber) + "-" + partInfo.ETag + "," | ||||
| } | } | ||||
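listAllParts pages through ListParts with PartNumberMarker/NextPartNumberMarker until IsTruncated is false, so callers see every uploaded part even when there are more than MAX_LIST_PARTS of them. A minimal usage sketch of the completeness check that CompleteObsMultiPartUpload (next hunk) performs with it; identifiers are as in this diff, and totalChunks is assumed to come from the stored FileChunk record.

    // Sketch: confirm every expected chunk was uploaded before completing the upload.
    allParts, err := listAllParts(uuid, uploadID, key)
    if err != nil {
        return err
    }
    if len(allParts.Parts) != totalChunks {
        return errors.New("the parts are not complete")
    }
    // only now hand allParts.Parts to CompleteMultipartUpload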
| @@ -94,45 +128,25 @@ func NewObsMultiPartUpload(uuid, fileName string) (string, error) { | |||||
| return output.UploadId, nil | return output.UploadId, nil | ||||
| } | } | ||||
| func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error { | |||||
| func CompleteObsMultiPartUpload(uuid, uploadID, fileName string, totalChunks int) error { | |||||
| input := &obs.CompleteMultipartUploadInput{} | input := &obs.CompleteMultipartUploadInput{} | ||||
| input.Bucket = setting.Bucket | input.Bucket = setting.Bucket | ||||
| input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") | input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") | ||||
| input.UploadId = uploadID | input.UploadId = uploadID | ||||
| partNumberMarker := 0 | |||||
| for { | |||||
| output, err := ObsCli.ListParts(&obs.ListPartsInput{ | |||||
| Bucket: setting.Bucket, | |||||
| Key: input.Key, | |||||
| UploadId: uploadID, | |||||
| MaxParts: MAX_LIST_PARTS, | |||||
| PartNumberMarker: partNumberMarker, | |||||
| }) | |||||
| if err != nil { | |||||
| log.Error("ListParts failed:", err.Error()) | |||||
| return err | |||||
| } | |||||
| partNumberMarker = output.NextPartNumberMarker | |||||
| log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, output.MaxParts, output.PartNumberMarker, output.NextPartNumberMarker, len(output.Parts)) | |||||
| for _, partInfo := range output.Parts { | |||||
| input.Parts = append(input.Parts, obs.Part{ | |||||
| PartNumber: partInfo.PartNumber, | |||||
| ETag: partInfo.ETag, | |||||
| }) | |||||
| } | |||||
| if len(output.Parts) < output.MaxParts { | |||||
| break | |||||
| } else { | |||||
| continue | |||||
| } | |||||
| allParts, err := listAllParts(uuid, uploadID, input.Key) | |||||
| if err != nil { | |||||
| log.Error("listAllParts failed: %v", err) | |||||
| return err | |||||
| } | |||||
| break | |||||
| if len(allParts.Parts) != totalChunks { | |||||
| log.Error("listAllParts number(%d) is not equal the set total chunk number(%d)", len(allParts.Parts), totalChunks) | |||||
| return errors.New("the parts is not complete") | |||||
| } | } | ||||
| input.Parts = allParts.Parts | |||||
| output, err := ObsCli.CompleteMultipartUpload(input) | output, err := ObsCli.CompleteMultipartUpload(input) | ||||
| if err != nil { | if err != nil { | ||||
| log.Error("CompleteMultipartUpload failed:", err.Error()) | log.Error("CompleteMultipartUpload failed:", err.Error()) | ||||
| @@ -507,8 +507,16 @@ static.CloudBrainTaskNum=CloudBrain Task Count | |||||
| static.CloudBrainRunTime=CloudBrain Run Time | static.CloudBrainRunTime=CloudBrain Run Time | ||||
| static.CommitDatasetNum=Commit Dataset Count | static.CommitDatasetNum=Commit Dataset Count | ||||
| static.CommitModelCount=Commit Model Count | static.CommitModelCount=Commit Model Count | ||||
| static.UserIndex=User Index | |||||
| static.UserIndex=Normalized User Index |||||
| static.UserIndexPrimitive=User Index | |||||
| static.countdate=Count Date | static.countdate=Count Date | ||||
| static.FocusOtherUser=Focus Other User Count | |||||
| static.CollectDataset=Collect Dataset Count | |||||
| static.CollectedDataset=Collected Dataset Count | |||||
| static.RecommendDataset=Recommended Dataset Count | |||||
| static.CollectImage=Collect Image Count | |||||
| static.CollectedImage=Collected Image Count | |||||
| static.RecommendImage=Recommended Image Count | |||||
| static.all=All | static.all=All | ||||
| static.public.user_business_analysis_current_month=Current_Month | static.public.user_business_analysis_current_month=Current_Month | ||||
| static.public.user_business_analysis_current_week=Current_Week | static.public.user_business_analysis_current_week=Current_Week | ||||
| @@ -1397,6 +1405,7 @@ issues.filter_sort.feweststars = Fewest stars | |||||
| issues.filter_sort.mostforks = Most forks | issues.filter_sort.mostforks = Most forks | ||||
| issues.filter_sort.fewestforks = Fewest forks | issues.filter_sort.fewestforks = Fewest forks | ||||
| issues.filter_sort.downloadtimes = Most downloaded | issues.filter_sort.downloadtimes = Most downloaded | ||||
| issues.filter_sort.moststars = Most stars |||||
| issues.action_open = Open | issues.action_open = Open | ||||
| issues.action_close = Close | issues.action_close = Close | ||||
| issues.action_label = Label | issues.action_label = Label | ||||
| @@ -2510,11 +2519,16 @@ repos.contributor=Contributor | |||||
| repos.yes=Yes | repos.yes=Yes | ||||
| repos.no=No | repos.no=No | ||||
| images.recommend = Recommend | |||||
| images.unrecommend = Unrecommend | |||||
| datasets.dataset_manage_panel= Dataset Manage | datasets.dataset_manage_panel= Dataset Manage | ||||
| datasets.owner=Owner | datasets.owner=Owner | ||||
| datasets.name=name | datasets.name=name | ||||
| datasets.private=Private | datasets.private=Private | ||||
| datasets.recommend=Set as recommended |||||
| datasets.unrecommend=Cancel recommendation |||||
| datasets.only_recommend = Only show platform recommendations | |||||
| cloudbrain.all_task_types=All Task Types | cloudbrain.all_task_types=All Task Types | ||||
| cloudbrain.all_computing_resources=All Computing Resources | cloudbrain.all_computing_resources=All Computing Resources | ||||
| @@ -2862,7 +2876,7 @@ mirror_sync_create = synced new reference <a href="%s/src/%s">%[2]s</a> to <a hr | |||||
| mirror_sync_delete = synced and deleted reference <code>%[2]s</code> at <a href="%[1]s">%[3]s</a> from mirror | mirror_sync_delete = synced and deleted reference <code>%[2]s</code> at <a href="%[1]s">%[3]s</a> from mirror | ||||
| approve_pull_request = `approved <a href="%s/pulls/%s">%s#%[2]s</a>` | approve_pull_request = `approved <a href="%s/pulls/%s">%s#%[2]s</a>` | ||||
| reject_pull_request = `suggested changes for <a href="%s/pulls/%s">%s#%[2]s</a>` | reject_pull_request = `suggested changes for <a href="%s/pulls/%s">%s#%[2]s</a>` | ||||
| upload_dataset=`upload dataset <a href="%s/datasets?type=%s">%s</a>` | |||||
| upload_dataset=`upload dataset <a href="%s/datasets">%s</a>` | |||||
| task_gpudebugjob=`created CPU/GPU type debugging task<a href="%s/cloudbrain/%s">%s</a>` | task_gpudebugjob=`created CPU/GPU type debugging task<a href="%s/cloudbrain/%s">%s</a>` | ||||
| task_npudebugjob=`created NPU type debugging task <a href="%s/modelarts/notebook/%s">%s</a>` | task_npudebugjob=`created NPU type debugging task <a href="%s/modelarts/notebook/%s">%s</a>` | ||||
| task_nputrainjob=`created NPU training task<a href="%s/modelarts/train-job/%s">%s</a>` | task_nputrainjob=`created NPU training task<a href="%s/modelarts/train-job/%s">%s</a>` | ||||
| @@ -2972,6 +2986,7 @@ snn4imagenet_path = Snn4imagenet script path | |||||
| brainscore_path = Brainscore script path | brainscore_path = Brainscore script path | ||||
| start_command = Start command | start_command = Start command | ||||
| choose_mirror = select mirror or enter mirror path | choose_mirror = select mirror or enter mirror path | ||||
| input_mirror = Please enter the image path |||||
| select_dataset = select dataset | select_dataset = select dataset | ||||
| specification = specification | specification = specification | ||||
| select_specification = select specification | select_specification = select specification | ||||
| @@ -512,8 +512,16 @@ static.CloudBrainTaskNum=云脑任务数 | |||||
| static.CloudBrainRunTime=云脑运行时间(小时) | static.CloudBrainRunTime=云脑运行时间(小时) | ||||
| static.CommitDatasetNum=上传(提交)数据集文件数 | static.CommitDatasetNum=上传(提交)数据集文件数 | ||||
| static.CommitModelCount=提交模型数 | static.CommitModelCount=提交模型数 | ||||
| static.UserIndex=用户指数 | |||||
| static.UserIndex=归一化用户指数 | |||||
| static.UserIndexPrimitive=用户指数 | |||||
| static.countdate=系统统计时间 | static.countdate=系统统计时间 | ||||
| static.FocusOtherUser=关注他人数 | |||||
| static.CollectDataset=收藏数据集 | |||||
| static.CollectedDataset=被收藏数据集 | |||||
| static.RecommendDataset=被推荐数据集数 | |||||
| static.CollectImage=收藏镜像数 | |||||
| static.CollectedImage=被收藏镜像数 | |||||
| static.RecommendImage=被推荐镜像数 | |||||
| static.all=所有 | static.all=所有 | ||||
| static.public.user_business_analysis_current_month=本月 | static.public.user_business_analysis_current_month=本月 | ||||
| static.public.user_business_analysis_current_week=本周 | static.public.user_business_analysis_current_week=本周 | ||||
| @@ -1409,6 +1417,7 @@ issues.filter_sort.feweststars=点赞由少到多 | |||||
| issues.filter_sort.mostforks=派生由多到少 | issues.filter_sort.mostforks=派生由多到少 | ||||
| issues.filter_sort.fewestforks=派生由少到多 | issues.filter_sort.fewestforks=派生由少到多 | ||||
| issues.filter_sort.downloadtimes=下载次数 | issues.filter_sort.downloadtimes=下载次数 | ||||
| issues.filter_sort.moststars=收藏数量 | |||||
| issues.action_open=开启 | issues.action_open=开启 | ||||
| issues.action_close=关闭 | issues.action_close=关闭 | ||||
| issues.action_label=标签 | issues.action_label=标签 | ||||
| @@ -2520,11 +2529,16 @@ repos.contributor=贡献者数 | |||||
| repos.yes=是 | repos.yes=是 | ||||
| repos.no=否 | repos.no=否 | ||||
| images.recommend = 推荐 | |||||
| images.unrecommend = 不推荐 | |||||
| datasets.dataset_manage_panel=数据集管理 | datasets.dataset_manage_panel=数据集管理 | ||||
| datasets.owner=所有者 | datasets.owner=所有者 | ||||
| datasets.name=名称 | datasets.name=名称 | ||||
| datasets.private=私有 | datasets.private=私有 | ||||
| datasets.recommend=设为推荐 | |||||
| datasets.unrecommend=取消推荐 | |||||
| datasets.only_recommend = 仅显示平台推荐 | |||||
| cloudbrain.all_task_types=全部任务类型 | cloudbrain.all_task_types=全部任务类型 | ||||
| cloudbrain.all_computing_resources=全部计算资源 | cloudbrain.all_computing_resources=全部计算资源 | ||||
| @@ -2872,7 +2886,7 @@ mirror_sync_create=从镜像同步了新的引用 <a href="%s/src/%s">%[2]s</a> | |||||
| mirror_sync_delete=从镜像同步并从 <a href="%[1]s">%[3]s</a> 删除了引用 <code>%[2]s</code> | mirror_sync_delete=从镜像同步并从 <a href="%[1]s">%[3]s</a> 删除了引用 <code>%[2]s</code> | ||||
| approve_pull_request=`同意了 <a href="%s/pulls/%s">%s#%[2]s</a>` | approve_pull_request=`同意了 <a href="%s/pulls/%s">%s#%[2]s</a>` | ||||
| reject_pull_request=`建议变更 <a href="%s/pulls/%s">%s#%[2]s</a>` | reject_pull_request=`建议变更 <a href="%s/pulls/%s">%s#%[2]s</a>` | ||||
| upload_dataset=`上传了数据集文件 <a href="%s/datasets?type=%s">%s</a>` | |||||
| upload_dataset=`上传了数据集文件 <a href="%s/datasets">%s</a>` | |||||
| task_gpudebugjob=`创建了CPU/GPU类型调试任务 <a href="%s/cloudbrain/%s">%s</a>` | task_gpudebugjob=`创建了CPU/GPU类型调试任务 <a href="%s/cloudbrain/%s">%s</a>` | ||||
| task_npudebugjob=`创建了NPU类型调试任务 <a href="%s/modelarts/notebook/%s">%s</a>` | task_npudebugjob=`创建了NPU类型调试任务 <a href="%s/modelarts/notebook/%s">%s</a>` | ||||
| task_nputrainjob=`创建了NPU类型训练任务 <a href="%s/modelarts/train-job/%s">%s</a>` | task_nputrainjob=`创建了NPU类型训练任务 <a href="%s/modelarts/train-job/%s">%s</a>` | ||||
| @@ -2982,6 +2996,7 @@ snn4imagenet_path = snn4imagenet脚本存放路径 | |||||
| brainscore_path = brainscore脚本存放路径 | brainscore_path = brainscore脚本存放路径 | ||||
| start_command = 启动命令 | start_command = 启动命令 | ||||
| choose_mirror = 选择镜像或输入镜像地址 | choose_mirror = 选择镜像或输入镜像地址 | ||||
| input_mirror = 请输入云脑镜像地址 | |||||
| select_dataset = 选择数据集 | select_dataset = 选择数据集 | ||||
| specification = 规格 | specification = 规格 | ||||
| select_specification = 选择资源规格 | select_specification = 选择资源规格 | ||||
| @@ -99,6 +99,11 @@ socket.onmessage = function (e) { | |||||
| console.log("receive action type=" + record.OpType + " name=" + actionName + " but user is null."); | console.log("receive action type=" + record.OpType + " name=" + actionName + " but user is null."); | ||||
| continue; | continue; | ||||
| } | } | ||||
| if(record.OpType == "24"){ | |||||
| if(record.Content.indexOf("true") != -1){ | |||||
| continue; | |||||
| } | |||||
| } | |||||
| var recordPrefix = getMsg(record); | var recordPrefix = getMsg(record); | ||||
| if(record.OpType == "6" || record.OpType == "10" || record.OpType == "12" || record.OpType == "13"){ | if(record.OpType == "6" || record.OpType == "10" || record.OpType == "12" || record.OpType == "13"){ | ||||
| html += recordPrefix + actionName; | html += recordPrefix + actionName; | ||||
| @@ -162,7 +167,7 @@ socket.onmessage = function (e) { | |||||
| function getTaskLink(record){ | function getTaskLink(record){ | ||||
| var re = getRepoLink(record); | var re = getRepoLink(record); | ||||
| if(record.OpType == 24){ | if(record.OpType == 24){ | ||||
| re = re + "/datasets?type=" + record.Content; | |||||
| re = re + "/datasets"; | |||||
| }else if(record.OpType == 25){ | }else if(record.OpType == 25){ | ||||
| re = re + "/cloudbrain/" + record.Content; | re = re + "/cloudbrain/" + record.Content; | ||||
| }else if(record.OpType == 26){ | }else if(record.OpType == 26){ | ||||
| @@ -101,16 +101,20 @@ function initPageInfo(){ | |||||
| function searchItem(type,sortType){ | function searchItem(type,sortType){ | ||||
| console.log("enter item 2."); | console.log("enter item 2."); | ||||
| currentSearchKeyword = document.getElementById("keyword_input").value; | |||||
| if(!isEmpty(currentSearchKeyword)){ | |||||
| initPageInfo(); | |||||
| currentSearchTableName = itemType[type]; | |||||
| currentSearchSortBy = sortBy[sortType]; | |||||
| currentSearchAscending = sortAscending[sortType]; | |||||
| OnlySearchLabel =false; | |||||
| page(currentPage); | |||||
| if(OnlySearchLabel){ | |||||
| doSearchLabel(currentSearchTableName,currentSearchKeyword,sortBy[sortType],sortAscending[sortType]) | |||||
| }else{ | }else{ | ||||
| emptySearch(); | |||||
| currentSearchKeyword = document.getElementById("keyword_input").value; | |||||
| if(!isEmpty(currentSearchKeyword)){ | |||||
| initPageInfo(); | |||||
| currentSearchTableName = itemType[type]; | |||||
| currentSearchSortBy = sortBy[sortType]; | |||||
| currentSearchAscending = sortAscending[sortType]; | |||||
| OnlySearchLabel =false; | |||||
| page(currentPage); | |||||
| }else{ | |||||
| emptySearch(); | |||||
| } | |||||
| } | } | ||||
| } | } | ||||
| @@ -806,17 +810,14 @@ var repoAndOrgEN={ | |||||
| function page(current){ | function page(current){ | ||||
| currentPage=current; | currentPage=current; | ||||
| doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel); | doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel); | ||||
| } | } | ||||
| function nextPage(){ | function nextPage(){ | ||||
| currentPage = currentPage+1; | currentPage = currentPage+1; | ||||
| console.log("currentPage=" + currentPage); | console.log("currentPage=" + currentPage); | ||||
| if(currentPage >= endIndex){ | |||||
| startIndex=startIndex+1; | |||||
| endIndex = endIndex +1; | |||||
| } | |||||
| page(currentPage); | page(currentPage); | ||||
| } | } | ||||
| @@ -824,10 +825,6 @@ function page(current){ | |||||
| console.log("currentPage=" + currentPage); | console.log("currentPage=" + currentPage); | ||||
| if(currentPage > 1){ | if(currentPage > 1){ | ||||
| currentPage = currentPage-1; | currentPage = currentPage-1; | ||||
| if(currentPage <= startIndex && startIndex > 1){ | |||||
| startIndex = startIndex -1; | |||||
| endIndex = endIndex - 1; | |||||
| } | |||||
| console.log("currentPage=" + (currentPage)); | console.log("currentPage=" + (currentPage)); | ||||
| page(currentPage); | page(currentPage); | ||||
| } | } | ||||
| @@ -862,7 +859,7 @@ function getYPosition(e){ | |||||
| showTip(getLabel(isZh,"search_input_large_0"),"warning",left+5,top); | showTip(getLabel(isZh,"search_input_large_0"),"warning",left+5,top); | ||||
| } | } | ||||
| else if(goNum<=totalPage){ | else if(goNum<=totalPage){ | ||||
| page(goNum); | |||||
| page(parseInt(goNum,10)); | |||||
| } | } | ||||
| else{ | else{ | ||||
| showTip(getLabel(isZh,"search_input_maxed"),"warning",left+5,top); | showTip(getLabel(isZh,"search_input_maxed"),"warning",left+5,top); | ||||
| @@ -884,9 +881,14 @@ function getYPosition(e){ | |||||
| var html =""; | var html =""; | ||||
| console.log("currentPage=" + currentPage); | console.log("currentPage=" + currentPage); | ||||
| console.log("privateTotal=" + privateTotal); | console.log("privateTotal=" + privateTotal); | ||||
| // if(totalPage==0){ | |||||
| // return; | |||||
| // } | |||||
| startIndex = currentPage -1; | |||||
| if(startIndex < 1){ | |||||
| startIndex = 1; | |||||
| } | |||||
| endIndex = currentPage + 2; | |||||
| if(endIndex >= totalPage){ | |||||
| endIndex = totalPage; | |||||
| } | |||||
| html += "<span class=\"item\">" + getLabel(isZh,"search_input_total") + " " + totalNum + " " + getLabel(isZh,"search_srtip") + "</span>" | html += "<span class=\"item\">" + getLabel(isZh,"search_input_total") + " " + totalNum + " " + getLabel(isZh,"search_srtip") + "</span>" | ||||
| if(currentPage > 1){ | if(currentPage > 1){ | ||||
| html += "<a class=\"item navigation\" href=\"javascript:page(1)\"><span class=\"navigation_label\">" + getLabel(isZh,"search_home_page") + "</span></a>"; | html += "<a class=\"item navigation\" href=\"javascript:page(1)\"><span class=\"navigation_label\">" + getLabel(isZh,"search_home_page") + "</span></a>"; | ||||
| @@ -908,6 +910,11 @@ function getYPosition(e){ | |||||
| } | } | ||||
| } | } | ||||
| if (endIndex < totalPage-1){ | |||||
| html += "..."; | |||||
| html += "<a id=\"page_" + totalPage+ "\" class=\"item\" href=\"javascript:page(" + totalPage +")\">" + totalPage + "</a>"; | |||||
| } | |||||
| if(currentPage >=totalPage){ | if(currentPage >=totalPage){ | ||||
| html += "<a class=\"disabled item navigation\" href=\"javascript:nextPage()\"><i class=\"icon right arrow\"></i></a>"; | html += "<a class=\"disabled item navigation\" href=\"javascript:nextPage()\"><i class=\"icon right arrow\"></i></a>"; | ||||
| html += "<a class=\"disabled item navigation\" href=\"javascript:page(" + totalPage + ")\"><span class=\"navigation_label\">" + getLabel(isZh,"search_last_page") + "</span></a>"; | html += "<a class=\"disabled item navigation\" href=\"javascript:page(" + totalPage + ")\"><span class=\"navigation_label\">" + getLabel(isZh,"search_last_page") + "</span></a>"; | ||||
| @@ -21,6 +21,7 @@ import ( | |||||
| const ( | const ( | ||||
| tplCloudBrains base.TplName = "admin/cloudbrain/list" | tplCloudBrains base.TplName = "admin/cloudbrain/list" | ||||
| tplImages base.TplName = "admin/cloudbrain/images" | tplImages base.TplName = "admin/cloudbrain/images" | ||||
| tplCommitImages base.TplName = "admin/cloudbrain/imagecommit" | |||||
| EXCEL_DATE_FORMAT = "20060102150405" | EXCEL_DATE_FORMAT = "20060102150405" | ||||
| CREATE_TIME_FORMAT = "2006/01/02 15:04:05" | CREATE_TIME_FORMAT = "2006/01/02 15:04:05" | ||||
| ) | ) | ||||
| @@ -114,6 +115,12 @@ func Images(ctx *context.Context) { | |||||
| } | } | ||||
| func CloudBrainCommitImageShow(ctx *context.Context) { | |||||
| ctx.Data["PageIsAdminImages"] = true | |||||
| ctx.HTML(200, tplCommitImages) | |||||
| } | |||||
| func DownloadCloudBrains(ctx *context.Context) { | func DownloadCloudBrains(ctx *context.Context) { | ||||
| page := 1 | page := 1 | ||||
| @@ -1,6 +1,8 @@ | |||||
| package admin | package admin | ||||
| import ( | import ( | ||||
| "net/http" | |||||
| "strconv" | |||||
| "strings" | "strings" | ||||
| "code.gitea.io/gitea/models" | "code.gitea.io/gitea/models" | ||||
| @@ -49,6 +51,8 @@ func Datasets(ctx *context.Context) { | |||||
| orderBy = models.SearchOrderBySizeReverse | orderBy = models.SearchOrderBySizeReverse | ||||
| case "size": | case "size": | ||||
| orderBy = models.SearchOrderBySize | orderBy = models.SearchOrderBySize | ||||
| case "downloadtimes": | |||||
| orderBy = models.SearchOrderByDownloadTimes | |||||
| case "moststars": | case "moststars": | ||||
| orderBy = models.SearchOrderByStarsReverse | orderBy = models.SearchOrderByStarsReverse | ||||
| case "feweststars": | case "feweststars": | ||||
| @@ -70,6 +74,7 @@ func Datasets(ctx *context.Context) { | |||||
| PageSize: setting.UI.ExplorePagingNum, | PageSize: setting.UI.ExplorePagingNum, | ||||
| }, | }, | ||||
| Keyword: keyword, | Keyword: keyword, | ||||
| RecommendOnly: ctx.QueryBool("recommend"), | |||||
| SearchOrderBy: orderBy, | SearchOrderBy: orderBy, | ||||
| }) | }) | ||||
| if err != nil { | if err != nil { | ||||
| @@ -80,7 +85,7 @@ func Datasets(ctx *context.Context) { | |||||
| ctx.Data["Keyword"] = keyword | ctx.Data["Keyword"] = keyword | ||||
| ctx.Data["Total"] = count | ctx.Data["Total"] = count | ||||
| ctx.Data["Datasets"] = datasets | ctx.Data["Datasets"] = datasets | ||||
| ctx.Data["Recommend"] = ctx.QueryBool("recommend") | |||||
| pager := context.NewPagination(int(count), setting.UI.ExplorePagingNum, page, 5) | pager := context.NewPagination(int(count), setting.UI.ExplorePagingNum, page, 5) | ||||
| pager.SetDefaultParams(ctx) | pager.SetDefaultParams(ctx) | ||||
| ctx.Data["Page"] = pager | ctx.Data["Page"] = pager | ||||
| @@ -88,6 +93,23 @@ func Datasets(ctx *context.Context) { | |||||
| ctx.HTML(200, tplDatasets) | ctx.HTML(200, tplDatasets) | ||||
| } | } | ||||
| func DatasetAction(ctx *context.Context) { | |||||
| var err error | |||||
| datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64) | |||||
| switch ctx.Params(":action") { | |||||
| case "recommend": | |||||
| err = models.RecommendDataset(datasetId, true) | |||||
| case "unrecommend": | |||||
| err = models.RecommendDataset(datasetId, false) | |||||
| } | |||||
| if err != nil { | |||||
| ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action")))) | |||||
| } else { | |||||
| ctx.JSON(http.StatusOK, models.BaseOKMessage) | |||||
| } | |||||
| } | |||||
| func DeleteDataset(ctx *context.Context) { | func DeleteDataset(ctx *context.Context) { | ||||
| dataset, err := models.GetDatasetByID(ctx.QueryInt64("id")) | dataset, err := models.GetDatasetByID(ctx.QueryInt64("id")) | ||||
| if err != nil { | if err != nil { | ||||
| @@ -23,7 +23,7 @@ func DownloadCloudBrainBoard(ctx *context.Context) { | |||||
| _, total, err := models.CloudbrainAll(&models.CloudbrainsOptions{ | _, total, err := models.CloudbrainAll(&models.CloudbrainsOptions{ | ||||
| ListOptions: models.ListOptions{ | ListOptions: models.ListOptions{ | ||||
| Page: page, | Page: page, | ||||
| PageSize: 1, | |||||
| PageSize: pageSize, | |||||
| }, | }, | ||||
| Type: models.TypeCloudBrainAll, | Type: models.TypeCloudBrainAll, | ||||
| NeedRepoInfo: false, | NeedRepoInfo: false, | ||||
| @@ -331,6 +331,7 @@ func ExploreDatasets(ctx *context.Context) { | |||||
| Task: task, | Task: task, | ||||
| License: license, | License: license, | ||||
| OwnerID: ownerID, | OwnerID: ownerID, | ||||
| RecommendOnly: ctx.QueryBool("recommend"), | |||||
| ListOptions: models.ListOptions{ | ListOptions: models.ListOptions{ | ||||
| Page: page, | Page: page, | ||||
| PageSize: 30, | PageSize: 30, | ||||
| @@ -357,6 +358,7 @@ func ExploreDatasets(ctx *context.Context) { | |||||
| ctx.Data["Category"] = category | ctx.Data["Category"] = category | ||||
| ctx.Data["Task"] = task | ctx.Data["Task"] = task | ||||
| ctx.Data["License"] = license | ctx.Data["License"] = license | ||||
| ctx.Data["Recommend"] = ctx.QueryBool("recommend") | |||||
| pager.SetDefaultParams(ctx) | pager.SetDefaultParams(ctx) | ||||
| ctx.Data["Page"] = pager | ctx.Data["Page"] = pager | ||||
| @@ -11,7 +11,6 @@ import ( | |||||
| "fmt" | "fmt" | ||||
| "mime/multipart" | "mime/multipart" | ||||
| "net/http" | "net/http" | ||||
| "path" | |||||
| "strconv" | "strconv" | ||||
| "strings" | "strings" | ||||
| @@ -830,20 +829,6 @@ func GetMultipartUploadUrl(ctx *context.Context) { | |||||
| }) | }) | ||||
| } | } | ||||
| func GetObsKey(ctx *context.Context) { | |||||
| uuid := gouuid.NewV4().String() | |||||
| key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, uuid)), "/") | |||||
| ctx.JSON(200, map[string]string{ | |||||
| "uuid": uuid, | |||||
| "key": key, | |||||
| "access_key_id": setting.AccessKeyID, | |||||
| "secret_access_key": setting.SecretAccessKey, | |||||
| "server": setting.Endpoint, | |||||
| "bucket": setting.Bucket, | |||||
| }) | |||||
| } | |||||
| func CompleteMultipart(ctx *context.Context) { | func CompleteMultipart(ctx *context.Context) { | ||||
| uuid := ctx.Query("uuid") | uuid := ctx.Query("uuid") | ||||
| uploadID := ctx.Query("uploadID") | uploadID := ctx.Query("uploadID") | ||||
| @@ -870,13 +855,13 @@ func CompleteMultipart(ctx *context.Context) { | |||||
| } | } | ||||
| if typeCloudBrain == models.TypeCloudBrainOne { | if typeCloudBrain == models.TypeCloudBrainOne { | ||||
| _, err = storage.CompleteMultiPartUpload(uuid, uploadID) | |||||
| _, err = storage.CompleteMultiPartUpload(uuid, uploadID, fileChunk.TotalChunks) | |||||
| if err != nil { | if err != nil { | ||||
| ctx.Error(500, fmt.Sprintf("CompleteMultiPartUpload failed: %v", err)) | ctx.Error(500, fmt.Sprintf("CompleteMultiPartUpload failed: %v", err)) | ||||
| return | return | ||||
| } | } | ||||
| } else { | } else { | ||||
| err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName) | |||||
| err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName, fileChunk.TotalChunks) | |||||
| if err != nil { | if err != nil { | ||||
| ctx.Error(500, fmt.Sprintf("CompleteObsMultiPartUpload failed: %v", err)) | ctx.Error(500, fmt.Sprintf("CompleteObsMultiPartUpload failed: %v", err)) | ||||
| return | return | ||||
| @@ -907,10 +892,9 @@ func CompleteMultipart(ctx *context.Context) { | |||||
| ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err)) | ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err)) | ||||
| return | return | ||||
| } | } | ||||
| attachment.UpdateDatasetUpdateUnix() | |||||
| repository, _ := models.GetRepositoryByID(dataset.RepoID) | repository, _ := models.GetRepositoryByID(dataset.RepoID) | ||||
| notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(attachment.Type), attachment.Name, models.ActionUploadAttachment) | |||||
| notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment) | |||||
| if attachment.DatasetID != 0 { | if attachment.DatasetID != 0 { | ||||
| if isCanDecompress(attachment.Name) { | if isCanDecompress(attachment.Name) { | ||||
| if typeCloudBrain == models.TypeCloudBrainOne { | if typeCloudBrain == models.TypeCloudBrainOne { | ||||
| @@ -947,34 +931,6 @@ func CompleteMultipart(ctx *context.Context) { | |||||
| }) | }) | ||||
| } | } | ||||
| func UpdateMultipart(ctx *context.Context) { | |||||
| uuid := ctx.Query("uuid") | |||||
| partNumber := ctx.QueryInt("chunkNumber") | |||||
| etag := ctx.Query("etag") | |||||
| fileChunk, err := models.GetFileChunkByUUID(uuid) | |||||
| if err != nil { | |||||
| if models.IsErrFileChunkNotExist(err) { | |||||
| ctx.Error(404) | |||||
| } else { | |||||
| ctx.ServerError("GetFileChunkByUUID", err) | |||||
| } | |||||
| return | |||||
| } | |||||
| fileChunk.CompletedParts = append(fileChunk.CompletedParts, strconv.Itoa(partNumber)+"-"+strings.Replace(etag, "\"", "", -1)) | |||||
| err = models.UpdateFileChunk(fileChunk) | |||||
| if err != nil { | |||||
| ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err)) | |||||
| return | |||||
| } | |||||
| ctx.JSON(200, map[string]string{ | |||||
| "result_code": "0", | |||||
| }) | |||||
| } | |||||
| func HandleUnDecompressAttachment() { | func HandleUnDecompressAttachment() { | ||||
| attachs, err := models.GetUnDecompressAttachments() | attachs, err := models.GetUnDecompressAttachments() | ||||
| if err != nil { | if err != nil { | ||||
| @@ -59,6 +59,7 @@ var ( | |||||
| ) | ) | ||||
| const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types" | const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types" | ||||
| const CLONE_FILE_PREFIX = "file:///" | |||||
| var benchmarkTypesMap = make(map[string]*models.BenchmarkTypes, 0) | var benchmarkTypesMap = make(map[string]*models.BenchmarkTypes, 0) | ||||
| @@ -702,6 +703,53 @@ func CloudBrainCommitImageCheck(ctx *context.Context, form auth.CommitImageCloud | |||||
| } | } | ||||
| func CloudBrainAdminCommitImage(ctx *context.Context, form auth.CommitAdminImageCloudBrainForm) { | |||||
| if !NamePattern.MatchString(form.Tag) { | |||||
| ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err"))) | |||||
| return | |||||
| } | |||||
| if utf8.RuneCountInString(form.Description) > 255 { | |||||
| ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 255))) | |||||
| return | |||||
| } | |||||
| validTopics, errMessage := checkTopics(form.Topics) | |||||
| if errMessage != "" { | |||||
| ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr(errMessage))) | |||||
| return | |||||
| } | |||||
| err := cloudbrain.CommitAdminImage(models.CommitImageParams{ | |||||
| CommitImageCloudBrainParams: models.CommitImageCloudBrainParams{ | |||||
| ImageDescription: form.Description, | |||||
| ImageTag: form.Tag, | |||||
| }, | |||||
| IsPrivate: form.IsPrivate, | |||||
| CloudBrainType: form.Type, | |||||
| Topics: validTopics, | |||||
| UID: ctx.User.ID, | |||||
| Type: models.GetRecommondType(form.IsRecommend), | |||||
| Place: form.Place, | |||||
| }) | |||||
| if err != nil { | |||||
| log.Error("CommitImagefailed") | |||||
| if models.IsErrImageTagExist(err) { | |||||
| ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_exist"))) | |||||
| } else if models.IsErrorImageCommitting(err) { | |||||
| ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_committing"))) | |||||
| } else { | |||||
| ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_commit_fail"))) | |||||
| } | |||||
| return | |||||
| } | |||||
| ctx.JSON(200, models.BaseOKMessage) | |||||
| } | |||||
| func CloudBrainCommitImage(ctx *context.Context, form auth.CommitImageCloudBrainForm) { | func CloudBrainCommitImage(ctx *context.Context, form auth.CommitImageCloudBrainForm) { | ||||
| if !NamePattern.MatchString(form.Tag) { | if !NamePattern.MatchString(form.Tag) { | ||||
| @@ -1142,7 +1190,8 @@ func GetRate(ctx *context.Context) { | |||||
| } | } | ||||
| func downloadCode(repo *models.Repository, codePath, branchName string) error { | func downloadCode(repo *models.Repository, codePath, branchName string) error { | ||||
| if err := git.Clone(repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName}); err != nil { | |||||
| //add "file:///" prefix to make the depth valid | |||||
| if err := git.Clone(CLONE_FILE_PREFIX+repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName, Depth: 1}); err != nil { | |||||
| log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err) | log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err) | ||||
| return err | return err | ||||
| } | } | ||||
| @@ -1202,7 +1251,7 @@ func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepo | |||||
| return err | return err | ||||
| } | } | ||||
| if err := git.Clone(repoExt.RepoPath(), codePath, git.CloneRepoOptions{}); err != nil { | |||||
| if err := git.Clone(CLONE_FILE_PREFIX+repoExt.RepoPath(), codePath, git.CloneRepoOptions{Depth: 1}); err != nil { | |||||
| log.Error("Failed to clone repository: %s (%v)", repoExt.FullName(), err) | log.Error("Failed to clone repository: %s (%v)", repoExt.FullName(), err) | ||||
| return err | return err | ||||
| } | } | ||||
| @@ -358,6 +358,7 @@ func MyDatasets(ctx *context.Context) { | |||||
| NeedIsPrivate: false, | NeedIsPrivate: false, | ||||
| JustNeedZipFile: true, | JustNeedZipFile: true, | ||||
| NeedRepoInfo: true, | NeedRepoInfo: true, | ||||
| RecommendOnly: ctx.QueryBool("recommend"), | |||||
| }) | }) | ||||
| if err != nil { | if err != nil { | ||||
| ctx.ServerError("datasets", err) | ctx.ServerError("datasets", err) | ||||
| @@ -398,6 +399,7 @@ func PublicDataset(ctx *context.Context) { | |||||
| Type: cloudbrainType, | Type: cloudbrainType, | ||||
| JustNeedZipFile: true, | JustNeedZipFile: true, | ||||
| NeedRepoInfo: true, | NeedRepoInfo: true, | ||||
| RecommendOnly: ctx.QueryBool("recommend"), | |||||
| }) | }) | ||||
| if err != nil { | if err != nil { | ||||
| ctx.ServerError("datasets", err) | ctx.ServerError("datasets", err) | ||||
| @@ -454,6 +456,7 @@ func MyFavoriteDataset(ctx *context.Context) { | |||||
| Type: cloudbrainType, | Type: cloudbrainType, | ||||
| JustNeedZipFile: true, | JustNeedZipFile: true, | ||||
| NeedRepoInfo: true, | NeedRepoInfo: true, | ||||
| RecommendOnly: ctx.QueryBool("recommend"), | |||||
| }) | }) | ||||
| if err != nil { | if err != nil { | ||||
| ctx.ServerError("datasets", err) | ctx.ServerError("datasets", err) | ||||
| @@ -256,6 +256,10 @@ func HTTP(ctx *context.Context) { | |||||
| models.EnvPusherName + "=" + authUser.Name, | models.EnvPusherName + "=" + authUser.Name, | ||||
| models.EnvPusherID + fmt.Sprintf("=%d", authUser.ID), | models.EnvPusherID + fmt.Sprintf("=%d", authUser.ID), | ||||
| models.EnvIsDeployKey + "=false", | models.EnvIsDeployKey + "=false", | ||||
| models.EnvRepoSize + "=" + fmt.Sprint(repo.Size), | |||||
| models.EnvRepoMaxFileSize + "=" + fmt.Sprint(setting.Repository.Upload.FileMaxSize), | |||||
| models.EnvRepoMaxSize + "=" + fmt.Sprint(setting.Repository.RepoMaxSize), | |||||
| models.EnvPushSizeCheckFlag + "=" + fmt.Sprint(setting.Repository.Upload.ShellFlag), | |||||
| } | } | ||||
| if !authUser.KeepEmailPrivate { | if !authUser.KeepEmailPrivate { | ||||
| environ = append(environ, models.EnvPusherEmail+"="+authUser.Email) | environ = append(environ, models.EnvPusherEmail+"="+authUser.Email) | ||||
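These additions export the repository's current size, the configured limits, and a check flag into the push hook environment so the receiving side can enforce a quota. The concrete variable names behind models.EnvRepoSize, models.EnvRepoMaxSize and models.EnvPushSizeCheckFlag are not visible in this diff, so the strings in the following hook-side sketch are placeholders only:

```go
// Hypothetical consumer of the new environment variables, for illustration;
// the real names and units are defined by the models.Env* constants.
package main

import (
	"fmt"
	"os"
	"strconv"
)

func main() {
	if os.Getenv("PUSH_SIZE_CHECK_FLAG") != "true" { // placeholder name
		return
	}
	size, _ := strconv.ParseInt(os.Getenv("REPO_SIZE"), 10, 64)        // placeholder name
	maxSize, _ := strconv.ParseInt(os.Getenv("REPO_MAX_SIZE"), 10, 64) // placeholder name
	if maxSize > 0 && size > maxSize {
		fmt.Fprintln(os.Stderr, "push rejected: repository size quota exceeded")
		os.Exit(1)
	}
}
```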
| @@ -247,7 +247,9 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm | |||||
| func NotebookShow(ctx *context.Context) { | func NotebookShow(ctx *context.Context) { | ||||
| ctx.Data["PageIsCloudBrain"] = true | ctx.Data["PageIsCloudBrain"] = true | ||||
| debugListType := ctx.Query("debugListType") | debugListType := ctx.Query("debugListType") | ||||
| if debugListType == "" { | |||||
| debugListType = "all" | |||||
| } | |||||
| var ID = ctx.Params(":id") | var ID = ctx.Params(":id") | ||||
| task, err := models.GetCloudbrainByIDWithDeleted(ID) | task, err := models.GetCloudbrainByIDWithDeleted(ID) | ||||
| if err != nil { | if err != nil { | ||||
| @@ -1027,10 +1029,8 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) | |||||
| gitRepo, _ := git.OpenRepository(repo.RepoPath()) | gitRepo, _ := git.OpenRepository(repo.RepoPath()) | ||||
| commitID, _ := gitRepo.GetBranchCommitID(branch_name) | commitID, _ := gitRepo.GetBranchCommitID(branch_name) | ||||
| if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{ | |||||
| Branch: branch_name, | |||||
| }); err != nil { | |||||
| log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err) | |||||
| if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { | |||||
| log.Error("downloadCode failed, server timed out: %s (%v)", repo.FullName(), err) | |||||
| trainJobErrorNewDataPrepare(ctx, form) | trainJobErrorNewDataPrepare(ctx, form) | ||||
| ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsTrainJobNew, &form) | ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsTrainJobNew, &form) | ||||
| return | return | ||||
| @@ -1245,9 +1245,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ | |||||
| gitRepo, _ := git.OpenRepository(repo.RepoPath()) | gitRepo, _ := git.OpenRepository(repo.RepoPath()) | ||||
| commitID, _ := gitRepo.GetBranchCommitID(branch_name) | commitID, _ := gitRepo.GetBranchCommitID(branch_name) | ||||
| if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{ | |||||
| Branch: branch_name, | |||||
| }); err != nil { | |||||
| if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { | |||||
| log.Error("Failed git clone repo to local(!: %s (%v)", repo.FullName(), err) | log.Error("Failed git clone repo to local(!: %s (%v)", repo.FullName(), err) | ||||
| versionErrorDataPrepare(ctx, form) | versionErrorDataPrepare(ctx, form) | ||||
| ctx.RenderWithErr("Failed git clone repo to local!", tplModelArtsTrainJobVersionNew, &form) | ctx.RenderWithErr("Failed git clone repo to local!", tplModelArtsTrainJobVersionNew, &form) | ||||
| @@ -1475,9 +1473,9 @@ func paramCheckCreateTrainJob(form auth.CreateModelArtsTrainJobForm) error { | |||||
| return errors.New("启动文件必须是python文件") | return errors.New("启动文件必须是python文件") | ||||
| } | } | ||||
| if form.WorkServerNumber > 25 || form.WorkServerNumber < 1 { | |||||
| log.Error("the WorkServerNumber(%d) must be in (1,25)", form.WorkServerNumber) | |||||
| return errors.New("计算节点数必须在1-25之间") | |||||
| if form.WorkServerNumber > 2 || form.WorkServerNumber < 1 { | |||||
| log.Error("the WorkServerNumber(%d) must be in (1,2)", form.WorkServerNumber) | |||||
| return errors.New("计算节点数必须在1-2之间") | |||||
| } | } | ||||
| if form.BranchName == "" { | if form.BranchName == "" { | ||||
| log.Error("the branch must not be null!", form.BranchName) | log.Error("the branch must not be null!", form.BranchName) | ||||
| @@ -1493,9 +1491,9 @@ func paramCheckCreateInferenceJob(form auth.CreateModelArtsInferenceJobForm) err | |||||
| return errors.New("启动文件必须是python文件") | return errors.New("启动文件必须是python文件") | ||||
| } | } | ||||
| if form.WorkServerNumber > 25 || form.WorkServerNumber < 1 { | |||||
| log.Error("the WorkServerNumber(%d) must be in (1,25)", form.WorkServerNumber) | |||||
| return errors.New("计算节点数必须在1-25之间") | |||||
| if form.WorkServerNumber > 2 || form.WorkServerNumber < 1 { | |||||
| log.Error("the WorkServerNumber(%d) must be in (1,2)", form.WorkServerNumber) | |||||
| return errors.New("计算节点数必须在1-2之间") | |||||
| } | } | ||||
| if form.ModelName == "" { | if form.ModelName == "" { | ||||
| @@ -1874,9 +1872,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference | |||||
| gitRepo, _ := git.OpenRepository(repo.RepoPath()) | gitRepo, _ := git.OpenRepository(repo.RepoPath()) | ||||
| commitID, _ := gitRepo.GetBranchCommitID(branch_name) | commitID, _ := gitRepo.GetBranchCommitID(branch_name) | ||||
| if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{ | |||||
| Branch: branch_name, | |||||
| }); err != nil { | |||||
| if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { | |||||
| log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err) | log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err) | ||||
| inferenceJobErrorNewDataPrepare(ctx, form) | inferenceJobErrorNewDataPrepare(ctx, form) | ||||
| ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsInferenceJobNew, &form) | ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsInferenceJobNew, &form) | ||||
| @@ -19,6 +19,130 @@ const ( | |||||
| PAGE_SIZE = 2000 | PAGE_SIZE = 2000 | ||||
| ) | ) | ||||
| func getExcelHeader(ctx *context.Context) map[string]string { | |||||
| excelHeader := make([]string, 0) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.id")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.name")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndex")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndexPrimitive")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.codemergecount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.commitcount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.issuecount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.commentcount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.focusrepocount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.starrepocount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.logincount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.watchedcount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.commitcodesize")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.solveissuecount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.encyclopediascount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.createrepocount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.openiindex")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainTaskNum")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainRunTime")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CommitDatasetNum")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CommitModelCount")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.FocusOtherUser")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CollectDataset")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedDataset")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendDataset")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CollectImage")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedImage")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendImage")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.registdate")) | |||||
| excelHeader = append(excelHeader, ctx.Tr("user.static.countdate")) | |||||
| excelHeaderMap := make(map[string]string, 0) | |||||
| var i byte | |||||
| i = 0 | |||||
| for _, value := range excelHeader { | |||||
| excelColumn := getColumn(i) + fmt.Sprint(1) | |||||
| log.Info("excelColumn=" + excelColumn) | |||||
| excelHeaderMap[excelColumn] = value | |||||
| i++ | |||||
| } | |||||
| return excelHeaderMap | |||||
| } | |||||
| func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysisAll) { | |||||
| rows := fmt.Sprint(row) | |||||
| var tmp byte | |||||
| tmp = 0 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive)) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CodeMergeCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.IssueCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommentCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusRepoCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.StarRepoCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.WatchedCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCodeSize) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SolveIssueCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.EncyclopediasCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CreateRepoCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CloudBrainTaskNum) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitDatasetNum) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedDataset) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendDataset) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectImage) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedImage) | |||||
| tmp = tmp + 1 | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage) | |||||
| tmp = tmp + 1 | |||||
| formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3]) | |||||
| tmp = tmp + 1 | |||||
| formatTime = userRecord.DataDate | |||||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime) | |||||
| } | |||||
| func getColumn(tmp byte) string { | |||||
| var tmpA byte | |||||
| tmpA = 'A' | |||||
| if tmp < 26 { | |||||
| return string(tmpA + tmp) | |||||
| } else { | |||||
| return "A" + string(tmpA+(tmp-26)) | |||||
| } | |||||
| } | |||||
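getExcelHeader builds cell coordinates by concatenating getColumn(i) with the row number, so the 30 headers land in A1 through AD1, and writeExcel fills the same columns for each data row. Note that getColumn only covers 52 columns (A-Z, then AA-AZ); a hypothetical general-purpose variant, should the report ever grow past that, could look like this:

```go
package main

import "fmt"

// excelColumnName converts a zero-based column index to an Excel column name
// of any width: 0 -> A, 25 -> Z, 26 -> AA, 51 -> AZ, 52 -> BA, and so on.
func excelColumnName(n int) string {
	name := ""
	for n >= 0 {
		name = string(rune('A'+n%26)) + name
		n = n/26 - 1
	}
	return name
}

func main() {
	for _, n := range []int{0, 25, 26, 51, 52} {
		fmt.Println(n, excelColumnName(n)) // 0 A, 25 Z, 26 AA, 51 AZ, 52 BA
	}
}
```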
| func queryUserDataPage(ctx *context.Context, tableName string, queryObj interface{}) { | func queryUserDataPage(ctx *context.Context, tableName string, queryObj interface{}) { | ||||
| page := ctx.QueryInt("page") | page := ctx.QueryInt("page") | ||||
| if page <= 0 { | if page <= 0 { | ||||
| @@ -37,30 +161,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac | |||||
| sheetName := ctx.Tr("user.static.sheetname") | sheetName := ctx.Tr("user.static.sheetname") | ||||
| index := xlsx.NewSheet(sheetName) | index := xlsx.NewSheet(sheetName) | ||||
| xlsx.DeleteSheet("Sheet1") | xlsx.DeleteSheet("Sheet1") | ||||
| dataHeader := map[string]string{ | |||||
| "A1": ctx.Tr("user.static.id"), | |||||
| "B1": ctx.Tr("user.static.name"), | |||||
| "C1": ctx.Tr("user.static.codemergecount"), | |||||
| "D1": ctx.Tr("user.static.UserIndex"), | |||||
| "E1": ctx.Tr("user.static.commitcount"), | |||||
| "F1": ctx.Tr("user.static.issuecount"), | |||||
| "G1": ctx.Tr("user.static.commentcount"), | |||||
| "H1": ctx.Tr("user.static.focusrepocount"), | |||||
| "I1": ctx.Tr("user.static.starrepocount"), | |||||
| "J1": ctx.Tr("user.static.logincount"), | |||||
| "K1": ctx.Tr("user.static.watchedcount"), | |||||
| "L1": ctx.Tr("user.static.commitcodesize"), | |||||
| "M1": ctx.Tr("user.static.solveissuecount"), | |||||
| "N1": ctx.Tr("user.static.encyclopediascount"), | |||||
| "O1": ctx.Tr("user.static.createrepocount"), | |||||
| "P1": ctx.Tr("user.static.openiindex"), | |||||
| "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), | |||||
| "R1": ctx.Tr("user.static.CloudBrainRunTime"), | |||||
| "S1": ctx.Tr("user.static.CommitDatasetNum"), | |||||
| "T1": ctx.Tr("user.static.CommitModelCount"), | |||||
| "U1": ctx.Tr("user.static.registdate"), | |||||
| "V1": ctx.Tr("user.static.countdate"), | |||||
| } | |||||
| dataHeader := getExcelHeader(ctx) | |||||
| for k, v := range dataHeader { | for k, v := range dataHeader { | ||||
| // set the cell value | // set the cell value | ||||
| xlsx.SetCellValue(sheetName, k, v) | xlsx.SetCellValue(sheetName, k, v) | ||||
| @@ -74,31 +175,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac | |||||
| log.Info("return count=" + fmt.Sprint(count)) | log.Info("return count=" + fmt.Sprint(count)) | ||||
| for _, userRecord := range re { | for _, userRecord := range re { | ||||
| row++ | row++ | ||||
| rows := fmt.Sprint(row) | |||||
| xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) | |||||
| xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) | |||||
| xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) | |||||
| xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||||
| xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) | |||||
| xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) | |||||
| xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) | |||||
| xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) | |||||
| xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) | |||||
| xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) | |||||
| xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) | |||||
| xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) | |||||
| xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) | |||||
| xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) | |||||
| xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) | |||||
| xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) | |||||
| xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) | |||||
| xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) | |||||
| xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) | |||||
| xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) | |||||
| formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") | |||||
| xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) | |||||
| formatTime = userRecord.DataDate | |||||
| xlsx.SetCellValue(sheetName, "V"+rows, formatTime) | |||||
| writeExcel(row, xlsx, sheetName, userRecord) | |||||
| } | } | ||||
| indexTotal += PAGE_SIZE | indexTotal += PAGE_SIZE | ||||
| @@ -236,62 +313,16 @@ func QueryUserStaticDataPage(ctx *context.Context) { | |||||
| sheetName := ctx.Tr("user.static.sheetname") | sheetName := ctx.Tr("user.static.sheetname") | ||||
| index := xlsx.NewSheet(sheetName) | index := xlsx.NewSheet(sheetName) | ||||
| xlsx.DeleteSheet("Sheet1") | xlsx.DeleteSheet("Sheet1") | ||||
| dataHeader := map[string]string{ | |||||
| "A1": ctx.Tr("user.static.id"), | |||||
| "B1": ctx.Tr("user.static.name"), | |||||
| "C1": ctx.Tr("user.static.codemergecount"), | |||||
| "D1": ctx.Tr("user.static.UserIndex"), | |||||
| "E1": ctx.Tr("user.static.commitcount"), | |||||
| "F1": ctx.Tr("user.static.issuecount"), | |||||
| "G1": ctx.Tr("user.static.commentcount"), | |||||
| "H1": ctx.Tr("user.static.focusrepocount"), | |||||
| "I1": ctx.Tr("user.static.starrepocount"), | |||||
| "J1": ctx.Tr("user.static.logincount"), | |||||
| "K1": ctx.Tr("user.static.watchedcount"), | |||||
| "L1": ctx.Tr("user.static.commitcodesize"), | |||||
| "M1": ctx.Tr("user.static.solveissuecount"), | |||||
| "N1": ctx.Tr("user.static.encyclopediascount"), | |||||
| "O1": ctx.Tr("user.static.createrepocount"), | |||||
| "P1": ctx.Tr("user.static.openiindex"), | |||||
| "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), | |||||
| "R1": ctx.Tr("user.static.CloudBrainRunTime"), | |||||
| "S1": ctx.Tr("user.static.CommitDatasetNum"), | |||||
| "T1": ctx.Tr("user.static.CommitModelCount"), | |||||
| "U1": ctx.Tr("user.static.registdate"), | |||||
| "V1": ctx.Tr("user.static.countdate"), | |||||
| } | |||||
| dataHeader := getExcelHeader(ctx) | |||||
| for k, v := range dataHeader { | for k, v := range dataHeader { | ||||
| // set the cell value | // set the cell value | ||||
| xlsx.SetCellValue(sheetName, k, v) | xlsx.SetCellValue(sheetName, k, v) | ||||
| } | } | ||||
| for i, userRecord := range re { | for i, userRecord := range re { | ||||
| rows := fmt.Sprint(i + 2) | |||||
| xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) | |||||
| xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) | |||||
| xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) | |||||
| xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) | |||||
| xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) | |||||
| xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) | |||||
| xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) | |||||
| xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) | |||||
| xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) | |||||
| xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) | |||||
| xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) | |||||
| xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) | |||||
| xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) | |||||
| xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) | |||||
| xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) | |||||
| xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) | |||||
| xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) | |||||
| xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) | |||||
| xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) | |||||
| xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) | |||||
| formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") | |||||
| xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) | |||||
| formatTime = userRecord.DataDate | |||||
| xlsx.SetCellValue(sheetName, "V"+rows, formatTime) | |||||
| row := i + 2 | |||||
| writeExcel(row, xlsx, sheetName, userRecord) | |||||
| } | } | ||||
| // set the sheet opened by default | // set the sheet opened by default | ||||
| @@ -525,6 +525,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||||
| m.Group("/datasets", func() { | m.Group("/datasets", func() { | ||||
| m.Get("", admin.Datasets) | m.Get("", admin.Datasets) | ||||
| m.Put("/:id/action/:action", admin.DatasetAction) | |||||
| // m.Post("/delete", admin.DeleteDataset) | // m.Post("/delete", admin.DeleteDataset) | ||||
| }) | }) | ||||
| m.Group("/cloudbrains", func() { | m.Group("/cloudbrains", func() { | ||||
| @@ -534,6 +535,8 @@ func RegisterRoutes(m *macaron.Macaron) { | |||||
| m.Group("/images", func() { | m.Group("/images", func() { | ||||
| m.Get("", admin.Images) | m.Get("", admin.Images) | ||||
| m.Get("/data", repo.GetAllImages) | m.Get("/data", repo.GetAllImages) | ||||
| m.Get("/commit_image", admin.CloudBrainCommitImageShow) | |||||
| m.Post("/commit_image", bindIgnErr(auth.CommitAdminImageCloudBrainForm{}), repo.CloudBrainAdminCommitImage) | |||||
| }) | }) | ||||
| m.Put("/image/:id/action/:action", image.Action) | m.Put("/image/:id/action/:action", image.Action) | ||||
| @@ -608,12 +611,11 @@ func RegisterRoutes(m *macaron.Macaron) { | |||||
| m.Put("/obs_proxy_multipart", repo.PutOBSProxyUpload) | m.Put("/obs_proxy_multipart", repo.PutOBSProxyUpload) | ||||
| m.Get("/obs_proxy_download", repo.GetOBSProxyDownload) | m.Get("/obs_proxy_download", repo.GetOBSProxyDownload) | ||||
| m.Get("/get_multipart_url", repo.GetMultipartUploadUrl) | m.Get("/get_multipart_url", repo.GetMultipartUploadUrl) | ||||
| m.Post("/complete_multipart", repo.CompleteMultipart) | |||||
| m.Post("/update_chunk", repo.UpdateMultipart) | |||||
| }, reqSignIn) | }, reqSignIn) | ||||
| m.Group("/attachments", func() { | m.Group("/attachments", func() { | ||||
| m.Post("/decompress_done_notify", repo.UpdateAttachmentDecompressState) | m.Post("/decompress_done_notify", repo.UpdateAttachmentDecompressState) | ||||
| m.Post("/complete_multipart", repo.CompleteMultipart) | |||||
| }) | }) | ||||
| m.Group("/attachments", func() { | m.Group("/attachments", func() { | ||||
| @@ -183,7 +183,7 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int) | |||||
| topicsQuery := elastic.NewMatchQuery("topics", Key) | topicsQuery := elastic.NewMatchQuery("topics", Key) | ||||
| boolQ.Should(topicsQuery) | boolQ.Should(topicsQuery) | ||||
| res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context()) | |||||
| res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context()) | |||||
| if err == nil { | if err == nil { | ||||
| searchJson, _ := json.Marshal(res) | searchJson, _ := json.Marshal(res) | ||||
| log.Info("searchJson=" + string(searchJson)) | log.Info("searchJson=" + string(searchJson)) | ||||
| @@ -200,15 +200,18 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int) | |||||
| } | } | ||||
| } | } | ||||
| func getSort(SortBy string, ascending bool) elastic.Sorter { | |||||
| var sort elastic.Sorter | |||||
| sort = elastic.NewScoreSort() | |||||
| if SortBy != "" { | |||||
| if SortBy == "default" { | |||||
| return sort | |||||
| func getSort(SortBy string, ascending bool, secondSortBy string, secondAscending bool) []elastic.Sorter { | |||||
| sort := make([]elastic.Sorter, 0) | |||||
| if SortBy == "default" || SortBy == "" { | |||||
| sort = append(sort, elastic.NewScoreSort()) | |||||
| if secondSortBy != "" { | |||||
| log.Info("SortBy=" + SortBy + " secondSortBy=" + secondSortBy) | |||||
| sort = append(sort, elastic.NewFieldSort(secondSortBy).Order(secondAscending)) | |||||
| } | } | ||||
| return elastic.NewFieldSort(SortBy).Order(ascending) | |||||
| } else { | |||||
| sort = append(sort, elastic.NewFieldSort(SortBy).Order(ascending)) | |||||
| } | } | ||||
| log.Info("sort size=" + fmt.Sprint(len(sort))) | |||||
| return sort | return sort | ||||
| } | } | ||||
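getSort now returns a slice of sorters instead of a single one, so a secondary field sort can break ties behind the primary score or field sort, and every call site spreads the result into SortBy with `...`. A minimal usage sketch, assuming an initialized elastic client and a prepared bool query (the import path for the elastic package is an assumption):

```go
package search // hypothetical package name, for illustration only

import (
	"context"

	"github.com/olivere/elastic/v7" // assumption: the olivere client used by this code
)

// searchWithDefaultSort shows how the variadic result of getSort is consumed:
// "default" keeps the relevance score first, then updated_unix.keyword breaks ties.
func searchWithDefaultSort(ctx context.Context, client *elastic.Client, q elastic.Query) (*elastic.SearchResult, error) {
	sorters := getSort("default", false, "updated_unix.keyword", false)
	return client.Search("repository-es-index").
		Query(q).
		SortBy(sorters...).
		From(0).
		Size(20).
		Do(ctx)
}
```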
| @@ -308,7 +311,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa | |||||
| topicsQuery := elastic.NewMatchQuery("topics", Key).Boost(1).QueryName("f_third") | topicsQuery := elastic.NewMatchQuery("topics", Key).Boost(1).QueryName("f_third") | ||||
| boolQ.Should(nameQuery, descriptionQuery, topicsQuery) | boolQ.Should(nameQuery, descriptionQuery, topicsQuery) | ||||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context()) | |||||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context()) | |||||
| if err == nil { | if err == nil { | ||||
| searchJson, _ := json.Marshal(res) | searchJson, _ := json.Marshal(res) | ||||
| log.Info("searchJson=" + string(searchJson)) | log.Info("searchJson=" + string(searchJson)) | ||||
| @@ -330,7 +333,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa | |||||
| } else { | } else { | ||||
| log.Info("query all content.") | log.Info("query all content.") | ||||
| // the sort attribute must specify {"timestamp":{"unmapped_type":"date"}} | // the sort attribute must specify {"timestamp":{"unmapped_type":"date"}} | ||||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context()) | |||||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context()) | |||||
| if err == nil { | if err == nil { | ||||
| searchJson, _ := json.Marshal(res) | searchJson, _ := json.Marshal(res) | ||||
| log.Info("searchJson=" + string(searchJson)) | log.Info("searchJson=" + string(searchJson)) | ||||
| @@ -691,7 +694,7 @@ func searchUserOrOrg(ctx *context.Context, TableName string, Key string, Page in | |||||
| boolQ.Must(UserOrOrgQuery) | boolQ.Must(UserOrOrgQuery) | ||||
| } | } | ||||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context()) | |||||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context()) | |||||
| if err == nil { | if err == nil { | ||||
| searchJson, _ := json.Marshal(res) | searchJson, _ := json.Marshal(res) | ||||
| log.Info("searchJson=" + string(searchJson)) | log.Info("searchJson=" + string(searchJson)) | ||||
| @@ -849,7 +852,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, | |||||
| fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third") | fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third") | ||||
| categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth") | categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth") | ||||
| boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery) | boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery) | ||||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context()) | |||||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context()) | |||||
| if err == nil { | if err == nil { | ||||
| searchJson, _ := json.Marshal(res) | searchJson, _ := json.Marshal(res) | ||||
| log.Info("searchJson=" + string(searchJson)) | log.Info("searchJson=" + string(searchJson)) | ||||
| @@ -864,7 +867,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, | |||||
| } else { | } else { | ||||
| log.Info("query all datasets.") | log.Info("query all datasets.") | ||||
| // the sort attribute must specify {"timestamp":{"unmapped_type":"date"}} | // the sort attribute must specify {"timestamp":{"unmapped_type":"date"}} | ||||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context()) | |||||
| res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context()) | |||||
| if err == nil { | if err == nil { | ||||
| searchJson, _ := json.Marshal(res) | searchJson, _ := json.Marshal(res) | ||||
| log.Info("searchJson=" + string(searchJson)) | log.Info("searchJson=" + string(searchJson)) | ||||
| @@ -1057,7 +1060,7 @@ func searchIssueOrPr(ctx *context.Context, TableName string, Key string, Page in | |||||
| boolQ.Must(isIssueQuery) | boolQ.Must(isIssueQuery) | ||||
| } | } | ||||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context()) | |||||
| res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context()) | |||||
| if err == nil { | if err == nil { | ||||
| searchJson, _ := json.Marshal(res) | searchJson, _ := json.Marshal(res) | ||||
| log.Info("searchJson=" + string(searchJson)) | log.Info("searchJson=" + string(searchJson)) | ||||
| @@ -804,6 +804,7 @@ func Cloudbrains(ctx *context.Context) { | |||||
| repos, _, err := models.SearchRepository(&models.SearchRepoOptions{ | repos, _, err := models.SearchRepository(&models.SearchRepoOptions{ | ||||
| Actor: ctx.User, | Actor: ctx.User, | ||||
| OwnerID: ctxUser.ID, | OwnerID: ctxUser.ID, | ||||
| Private: true, | |||||
| }) | }) | ||||
| if err != nil { | if err != nil { | ||||
| ctx.ServerError("SearchRepository", err) | ctx.ServerError("SearchRepository", err) | ||||
| @@ -106,9 +106,9 @@ func Profile(ctx *context.Context) { | |||||
| for _, org := range orgs { | for _, org := range orgs { | ||||
| _, repoCount, err := models.SearchRepository(&models.SearchRepoOptions{ | _, repoCount, err := models.SearchRepository(&models.SearchRepoOptions{ | ||||
| OwnerID: org.ID, | |||||
| Private: ctx.IsSigned, | |||||
| Actor: ctx.User, | |||||
| OwnerID: org.ID, | |||||
| Private: ctx.IsSigned, | |||||
| Actor: ctx.User, | |||||
| }) | }) | ||||
| if err != nil { | if err != nil { | ||||
| ctx.ServerError("SearchRepository", err) | ctx.ServerError("SearchRepository", err) | ||||
| @@ -175,6 +175,8 @@ func Profile(ctx *context.Context) { | |||||
| orderBy = models.SearchOrderByAlphabeticallyReverse | orderBy = models.SearchOrderByAlphabeticallyReverse | ||||
| case "alphabetically": | case "alphabetically": | ||||
| orderBy = models.SearchOrderByAlphabetically | orderBy = models.SearchOrderByAlphabetically | ||||
| case "downloadtimes": | |||||
| orderBy = models.SearchOrderByDownloadTimes | |||||
| case "moststars": | case "moststars": | ||||
| orderBy = models.SearchOrderByStarsReverse | orderBy = models.SearchOrderByStarsReverse | ||||
| case "feweststars": | case "feweststars": | ||||
| @@ -10,7 +10,7 @@ import ( | |||||
| "github.com/elliotchance/orderedmap" | "github.com/elliotchance/orderedmap" | ||||
| ) | ) | ||||
| var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 25, 26, 27, 28, 29, 30, 31} | |||||
| var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31} | |||||
| type ClientsManager struct { | type ClientsManager struct { | ||||
| Clients *orderedmap.OrderedMap | Clients *orderedmap.OrderedMap | ||||
| @@ -0,0 +1,128 @@ | |||||
| <style> | |||||
| .label_color{ | |||||
| color:#505559 !important; | |||||
| width: 6% !important; | |||||
| text-align: center; | |||||
| } | |||||
| </style> | |||||
| {{template "base/head" .}} | |||||
| <div id="mask"> | |||||
| <div id="loadingPage"> | |||||
| <div class="rect1"></div> | |||||
| <div class="rect2"></div> | |||||
| <div class="rect3"></div> | |||||
| <div class="rect4"></div> | |||||
| <div class="rect5"></div> | |||||
| </div> | |||||
| </div> | |||||
| <div class="repository"> | |||||
| {{template "repo/header" .}} | |||||
| <div class="alert"></div> | |||||
| <div class="ui container"> | |||||
| <div> | |||||
| <div class="ui negative message" style="display: none;"> | |||||
| </div> | |||||
| <div class="ui info message" style="display: none;"> | |||||
| </div> | |||||
| <div class="ui positive message" style="display: none;"> | |||||
| </div> | |||||
| <h4 class="ui top attached header"> | |||||
| {{.i18n.Tr "repo.submit_image"}} | |||||
| </h4> | |||||
| <div class="submit-image-tmplvalue" style="display: none;" data-link="{{$.Link}}" data-edit-page="{{.PageIsAdminImages}}"></div> | |||||
| <div class="ui attached segment" style="padding: 2em 3em;padding-bottom: 7rem;"> | |||||
| <div class="ui form" id="form_image"> | |||||
| <input type="hidden" name="edit" value="edit"> | |||||
| {{.CsrfTokenHtml}} | |||||
| <div class="inline field"> | |||||
| <label class="label_color" for="">{{$.i18n.Tr "dataset.dataset_available_clusters"}}</label> | |||||
| <div class="ui basic label" style="border: none !important;color:#3291f8;"> | |||||
| <svg class="svg" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="14" height="14"><path fill="none" d="M0 0h24v24H0z"></path><path d="M4 3h16a1 1 0 0 1 1 1v7H3V4a1 1 0 0 1 1-1zM3 13h18v7a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1v-7zm4 3v2h3v-2H7zM7 6v2h3V6H7z"></path></svg> | |||||
| CPU/GPU | |||||
| </div> | |||||
| <input type="hidden" value="{{.Type}}" name="type"> | |||||
| </div> | |||||
| <div class="inline required field"> | |||||
| <label class="label_color" for="">{{$.i18n.Tr "repo.images.name"}}</label> | |||||
| <input type="text" name="tag" required placeholder="{{$.i18n.Tr "repo.images.name_placerholder"}}" style="width: 80%;" maxlength="100"> | |||||
| <span class="tooltips" style="display: block;padding-left: 0.5rem;">{{.i18n.Tr "repo.images.name_rule"}}</span> | |||||
| </div> | |||||
| <div class="inline required field"> | |||||
| <label class="label_color" for="">{{$.i18n.Tr "repo.images"}}</label> | |||||
| <input type="text" name="place" required placeholder="{{$.i18n.Tr "cloudbrain.input_mirror"}}" style="width: 80%;" maxlength="100"> | |||||
| </div> | |||||
| <div class="inline required field"> | |||||
| <label class="label_color" for="">{{$.i18n.Tr "dataset.description"}}</label> | |||||
| <textarea style="width: 80%;" required id="description" name="description" rows="3" maxlength="255" placeholder={{.i18n.Tr "repo.modelarts.train_job.new_place"}} onchange="this.value=this.value.substring(0, 255)" onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 255)"></textarea> | |||||
| </div> | |||||
| <div class="inline field" style="display: flex;align-items: center;"> | |||||
| <label class="label_color" for="">{{$.i18n.Tr "repo.model.manage.label"}}</label> | |||||
| <div class="ui multiple search selection dropdown" id="dropdown_image" style="width: 80%;"> | |||||
| <input type="hidden" name="topics" value="" required> | |||||
| <div class="default text" id="default_text">{{.i18n.Tr "repo.repo_label_helpe"}}</div> | |||||
| <div class="menu" id="course_label_item"></div> | |||||
| </div> | |||||
| </div> | |||||
| <span class="tooltips" style="display: block;padding-left: 0.5rem;margin-top: 0.5rem;margin-bottom: 1rem;">{{.i18n.Tr "repo.image.label_tooltips"}}</span> | |||||
| <div class="inline fields"> | |||||
| <label class="label_color" for="" style="visibility: hidden;"></label> | |||||
| <div class="field"> | |||||
| <div class="ui radio checkbox"> | |||||
| <input type="radio" name="isRecommend" checked="checked" value="true"> | |||||
| <label>{{.i18n.Tr "admin.images.recommend"}}</label> | |||||
| </div> | |||||
| </div> | |||||
| <div class="field" style="flex: 0.15;"> | |||||
| <div class="ui radio checkbox" > | |||||
| <input type="radio" name="isRecommend" value="false"> | |||||
| <label>{{.i18n.Tr "admin.images.unrecommend"}}</label> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| <div class="inline fields"> | |||||
| <label class="label_color" for="" style="visibility: hidden;"></label> | |||||
| <div class="field"> | |||||
| <div class="ui radio checkbox"> | |||||
| <input type="radio" name="isPrivate" checked="checked" value="false"> | |||||
| <label>{{.i18n.Tr "org.settings.visibility.public"}}</label> | |||||
| </div> | |||||
| </div> | |||||
| <div class="field" style="flex: 0.15;"> | |||||
| <div class="ui radio checkbox" > | |||||
| <input type="radio" name="isPrivate" value="true"> | |||||
| <label>{{.i18n.Tr "home.show_private"}}</label> | |||||
| </div> | |||||
| </div> | |||||
| <div class="field"> | |||||
| <span class="label_color">{{.i18n.Tr "repo.images.public_tooltips"}}</span> | |||||
| </div> | |||||
| </div> | |||||
| <div class="inline required field" style="padding-top: 2rem;"> | |||||
| <label class="label_color" for="" style="visibility: hidden;"></label> | |||||
| <button class="ui create_image green button" type="button"> | |||||
| {{.i18n.Tr "repo.cloudbrain.commit_image"}} | |||||
| </button> | |||||
| <a class="ui button" id="cancel_submit_image">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| <!-- Confirmation modal --> | |||||
| <div> | |||||
| <div class="ui modal image_confirm_submit"> | |||||
| <div class="header">{{.i18n.Tr "repo.submit_image"}}</div> | |||||
| <div class="content text red center"> | |||||
| <p><i class="exclamation icon"></i>{{.i18n.Tr "repo.image_overwrite"}}</p> | |||||
| </div> | |||||
| <div class="actions"> | |||||
| <button class="ui deny small button">{{.i18n.Tr "cloudbrain.operate_cancel"}}</button> | |||||
| <button class="ui green small approve button">{{.i18n.Tr "cloudbrain.operate_confirm"}}</button> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| {{template "base/footer" .}} | |||||
| @@ -162,9 +162,9 @@ | |||||
| {{end}} | {{end}} | ||||
| </div> | </div> | ||||
| <!-- Delete task --> | <!-- Delete task --> | ||||
| <form class="ui compact buttons" id="delForm-{{$JobID}}" action='{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Name}}{{if eq .JobType "BENCHMARK"}}/cloudbrain/benchmark{{else if eq .JobType "DEBUG"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}{{else if eq .JobType "TRAIN"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}/train-job{{end}}/{{$JobID}}/del?isadminpage=true' method="post"> | |||||
| <form class="ui compact buttons" id="delForm-{{$JobID}}" action='{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Name}}{{if eq .JobType "BENCHMARK"}}/cloudbrain/benchmark{{else if or (eq .JobType "SNN4IMAGENET") (eq .JobType "BRAINSCORE")}}/cloudbrain{{else if eq .JobType "DEBUG"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}{{else if eq .JobType "TRAIN"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}/train-job{{end}}/{{$JobID}}/del?isadminpage=true' method="post"> | |||||
| {{$.CsrfTokenHtml}} | {{$.CsrfTokenHtml}} | ||||
| <a style="padding: 0.5rem 1rem;margin-left:0.2rem" id="ai-delete-{{$JobID}}" data-repopath="{{.Repo.OwnerName}}/{{.Repo.Name}}/modelarts/inference-job/{{$JobID}}/del_version?isadminpage=true" data-version="{{.VersionName}}" class="ui basic ai_delete blue button" style="border-radius: .28571429rem;"> | |||||
| <a style="padding: 0.5rem 1rem;margin-left:0.2rem" id="ai-delete-{{$JobID}}" data-repopath="{{.Repo.OwnerName}}/{{.Repo.Name}}/modelarts/inference-job/{{$JobID}}/del_version?isadminpage=true" data-version="" class="ui basic ai_delete blue button" style="border-radius: .28571429rem;"> | |||||
| {{$.i18n.Tr "repo.delete"}} | {{$.i18n.Tr "repo.delete"}} | ||||
| </a> | </a> | ||||
| </form> | </form> | ||||
| @@ -3,12 +3,23 @@ | |||||
| {{template "admin/navbar" .}} | {{template "admin/navbar" .}} | ||||
| <div class="ui container"> | <div class="ui container"> | ||||
| {{template "base/alert" .}} | {{template "base/alert" .}} | ||||
| <div class="ui negative message" style="display: none;"> | |||||
| </div> | |||||
| <h4 class="ui top attached header"> | <h4 class="ui top attached header"> | ||||
| {{.i18n.Tr "admin.datasets.dataset_manage_panel"}} ({{.i18n.Tr "admin.total" .Total}}) | {{.i18n.Tr "admin.datasets.dataset_manage_panel"}} ({{.i18n.Tr "admin.total" .Total}}) | ||||
| </h4> | </h4> | ||||
| <div class="ui attached segment"> | <div class="ui attached segment"> | ||||
| {{template "admin/dataset/search" .}} | {{template "admin/dataset/search" .}} | ||||
| </div> | </div> | ||||
| <div class="ui attached segment"> | |||||
| <div class="ui ten wide column"> | |||||
| <div class="ui checkbox" id="dataset_check"> | |||||
| <input type="checkbox"> | |||||
| <label>{{.i18n.Tr "admin.datasets.only_recommend"}}</label> | |||||
| </div> | |||||
| </div> | |||||
| </div> | |||||
| <div class="ui attached table segment"> | <div class="ui attached table segment"> | ||||
| <table class="ui very basic striped table"> | <table class="ui very basic striped table"> | ||||
| <thead> | <thead> | ||||
| @@ -24,10 +35,10 @@ | |||||
| {{range .Datasets}} | {{range .Datasets}} | ||||
| <tr> | <tr> | ||||
| <td>{{.ID}}</td> | <td>{{.ID}}</td> | ||||
| <td><a href="{{AppSubUrl}}/">{{.Title}}</a></td> | |||||
| <td style="display: flex;align-items: center;"><a href="{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Alias}}/datasets">{{.Title}}</a>{{if .Recommend}}<img src="/img/jian.svg" style="margin-left: 0.5rem;">{{end}}</td> | |||||
| <td><i class="fa fa{{if .IsPrivate}}-check{{end}}-square-o"></i></td> | <td><i class="fa fa{{if .IsPrivate}}-check{{end}}-square-o"></i></td> | ||||
| <td><span title="{{.CreatedUnix.FormatLong}}">{{.CreatedUnix.FormatShort}}</span></td> | <td><span title="{{.CreatedUnix.FormatLong}}">{{.CreatedUnix.FormatShort}}</span></td> | ||||
| <td></td> | |||||
| <td>{{if .Recommend}}<span class="set_dataset" style="color: rgb(250, 140, 22);cursor: pointer;" data-url="{{$.Link}}/{{.ID}}/action/unrecommend">{{$.i18n.Tr "admin.datasets.unrecommend"}}</span>{{else}}<span class="set_dataset" style="color: rgb(19, 194, 141);cursor: pointer;" data-url="{{$.Link}}/{{.ID}}/action/recommend">{{$.i18n.Tr "admin.datasets.recommend"}}</span>{{end}}</td> | |||||
| </tr> | </tr> | ||||
| {{end}} | {{end}} | ||||
| </tbody> | </tbody> | ||||
| @@ -37,16 +48,4 @@ | |||||
| {{template "base/paginate" .}} | {{template "base/paginate" .}} | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui small basic delete modal"> | |||||
| <div class="ui icon header"> | |||||
| <i class="trash icon"></i> | |||||
| {{.i18n.Tr "dataset.settings.delete"}} | |||||
| </div> | |||||
| <div class="content"> | |||||
| <p>{{.i18n.Tr "dataset.settings.delete_desc"}}</p> | |||||
| {{.i18n.Tr "dataset.settings.delete_notices_2" `<span class="name"></span>` | Safe}}<br> | |||||
| </div> | |||||
| {{template "base/delete_modal_actions" .}} | |||||
| </div> | |||||
| {{template "base/footer" .}} | {{template "base/footer" .}} | ||||
| @@ -6,18 +6,18 @@ | |||||
| <i class="dropdown icon"></i> | <i class="dropdown icon"></i> | ||||
| </span> | </span> | ||||
| <div class="menu"> | <div class="menu"> | ||||
| <a class='{{if or (eq .SortType "oldest") (not .SortType)}}active{{end}} item' href='{{$.Link}}?sort=oldest&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.filter_sort.oldest"}}</a> | |||||
| <a class='{{if eq .SortType "newest"}}active{{end}} item' href='{{$.Link}}?sort=newest&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.filter_sort.latest"}}</a> | |||||
| <a class='{{if eq .SortType "alphabetically"}}active{{end}} item' href='{{$.Link}}?sort=alphabetically&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.label.filter_sort.alphabetically"}}</a> | |||||
| <a class='{{if eq .SortType "reversealphabetically"}}active{{end}} item' href='{{$.Link}}?sort=reversealphabetically&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.label.filter_sort.reverse_alphabetically"}}</a> | |||||
| <a class='{{if eq .SortType "recentupdate"}}active{{end}} item' href='{{$.Link}}?sort=recentupdate&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.filter_sort.recentupdate"}}</a> | |||||
| <a class='{{if eq .SortType "leastupdate"}}active{{end}} item' href='{{$.Link}}?sort=leastupdate&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.filter_sort.leastupdate"}}</a> | |||||
| <a class='{{if eq .SortType "moststars"}}active{{end}} item' href='{{$.Link}}?sort=moststars&q={{$.Keyword}}&tab={{$.TabName}}'>{{.i18n.Tr "repo.issues.filter_sort.moststars"}}</a> | |||||
| <a class='{{if eq .SortType "feweststars"}}active{{end}} item' href='{{$.Link}}?sort=feweststars&q={{$.Keyword}}&tab={{$.TabName}}'>{{.i18n.Tr "repo.issues.filter_sort.feweststars"}}</a> | |||||
| <a class='{{if eq .SortType "mostforks"}}active{{end}} item' href='{{$.Link}}?sort=mostforks&q={{$.Keyword}}&tab={{$.TabName}}'>{{.i18n.Tr "repo.issues.filter_sort.mostforks"}}</a> | |||||
| <a class='{{if eq .SortType "fewestforks"}}active{{end}} item' href='{{$.Link}}?sort=fewestforks&q={{$.Keyword}}&tab={{$.TabName}}'>{{.i18n.Tr "repo.issues.filter_sort.fewestforks"}}</a> | |||||
| <a class='{{if eq .SortType "size"}}active{{end}} item' href='{{$.Link}}?sort=size&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.label.filter_sort.by_size"}}</a> | |||||
| <a class='{{if eq .SortType "reversesize"}}active{{end}} item' href='{{$.Link}}?sort=reversesize&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.label.filter_sort.reverse_by_size"}}</a> | |||||
| <a class='{{if or (eq .SortType "oldest") (not .SortType)}}active{{end}} item' href='{{$.Link}}?sort=oldest&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.oldest"}}</a> | |||||
| <a class='{{if eq .SortType "newest"}}active{{end}} item' href='{{$.Link}}?sort=newest&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.latest"}}</a> | |||||
| <a class='{{if eq .SortType "alphabetically"}}active{{end}} item' href='{{$.Link}}?sort=alphabetically&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.label.filter_sort.alphabetically"}}</a> | |||||
| <a class='{{if eq .SortType "reversealphabetically"}}active{{end}} item' href='{{$.Link}}?sort=reversealphabetically&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.label.filter_sort.reverse_alphabetically"}}</a> | |||||
| <a class='{{if eq .SortType "recentupdate"}}active{{end}} item' href='{{$.Link}}?sort=recentupdate&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.recentupdate"}}</a> | |||||
| <a class='{{if eq .SortType "leastupdate"}}active{{end}} item' href='{{$.Link}}?sort=leastupdate&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.leastupdate"}}</a> | |||||
| <a class='{{if eq .SortType "moststars"}}active{{end}} item' href='{{$.Link}}?sort=moststars&q={{$.Keyword}}&tab={{$.TabName}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.moststars"}}</a> | |||||
| <a class='{{if eq .SortType "feweststars"}}active{{end}} item' href='{{$.Link}}?sort=feweststars&q={{$.Keyword}}&tab={{$.TabName}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.feweststars"}}</a> | |||||
| <a class='{{if eq .SortType "mostforks"}}active{{end}} item' href='{{$.Link}}?sort=mostforks&q={{$.Keyword}}&tab={{$.TabName}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.mostforks"}}</a> | |||||
| <a class='{{if eq .SortType "fewestforks"}}active{{end}} item' href='{{$.Link}}?sort=fewestforks&q={{$.Keyword}}&tab={{$.TabName}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.fewestforks"}}</a> | |||||
| <a class='{{if eq .SortType "size"}}active{{end}} item' href='{{$.Link}}?sort=size&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.label.filter_sort.by_size"}}</a> | |||||
| <a class='{{if eq .SortType "reversesize"}}active{{end}} item' href='{{$.Link}}?sort=reversesize&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.label.filter_sort.reverse_by_size"}}</a> | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| @@ -26,4 +26,4 @@ | |||||
| <input name="q" value="{{.Keyword}}" placeholder='{{.i18n.Tr "explore.search"}}...' autofocus> | <input name="q" value="{{.Keyword}}" placeholder='{{.i18n.Tr "explore.search"}}...' autofocus> | ||||
| <button class="ui blue button">{{.i18n.Tr "explore.search"}}</button> | <button class="ui blue button">{{.i18n.Tr "explore.search"}}</button> | ||||
| </div> | </div> | ||||
| </form> | |||||
| </form> | |||||
| @@ -23,7 +23,7 @@ | |||||
| <el-tab-pane label="{{.i18n.Tr "dataset.current_project"}}" name="first"> | <el-tab-pane label="{{.i18n.Tr "dataset.current_project"}}" name="first"> | ||||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in currentRepoDataset" :key="index"> | <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in currentRepoDataset" :key="index"> | ||||
| <div style="width: 90%;"> | <div style="width: 90%;"> | ||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias} </span><span class="panel_dataset_name">${dataset.Name} </span></div> | |||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name} </span></div> | |||||
| <div style="margin-top: 8px;display: flex;"> | <div style="margin-top: 8px;display: flex;"> | ||||
| <a :title="dataset.UserName" style="cursor: default;"> | <a :title="dataset.UserName" style="cursor: default;"> | ||||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | ||||
| @@ -49,7 +49,7 @@ | |||||
| <el-tab-pane label="{{.i18n.Tr "dataset.owner_dataset"}}" name="second"> | <el-tab-pane label="{{.i18n.Tr "dataset.owner_dataset"}}" name="second"> | ||||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myDataset" :key="index"> | <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myDataset" :key="index"> | ||||
| <div style="width: 90%;"> | <div style="width: 90%;"> | ||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="margin-top: 8px;display: flex;"> | <div style="margin-top: 8px;display: flex;"> | ||||
| <a :title="dataset.UserName" style="cursor: default;"> | <a :title="dataset.UserName" style="cursor: default;"> | ||||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | ||||
| @@ -74,7 +74,7 @@ | |||||
| <el-tab-pane label="{{.i18n.Tr "dataset.public_dataset"}}" name="third"> | <el-tab-pane label="{{.i18n.Tr "dataset.public_dataset"}}" name="third"> | ||||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in publicDataset" :key="index"> | <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in publicDataset" :key="index"> | ||||
| <div style="width: 90%;"> | <div style="width: 90%;"> | ||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="margin-top: 8px;display: flex;"> | <div style="margin-top: 8px;display: flex;"> | ||||
| <a :title="dataset.UserName" style="cursor: default;"> | <a :title="dataset.UserName" style="cursor: default;"> | ||||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | ||||
| @@ -99,7 +99,7 @@ | |||||
| <el-tab-pane label="{{.i18n.Tr "dataset.I_liked"}}" name="fourth"> | <el-tab-pane label="{{.i18n.Tr "dataset.I_liked"}}" name="fourth"> | ||||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myFavoriteDataset" :key="index"> | <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myFavoriteDataset" :key="index"> | ||||
| <div style="width: 90%;"> | <div style="width: 90%;"> | ||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="margin-top: 8px;display: flex;"> | <div style="margin-top: 8px;display: flex;"> | ||||
| <a :title="dataset.UserName" style="cursor: default;"> | <a :title="dataset.UserName" style="cursor: default;"> | ||||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | ||||
| @@ -23,7 +23,7 @@ | |||||
| <el-tab-pane label="{{.i18n.Tr "dataset.current_project"}}" name="first"> | <el-tab-pane label="{{.i18n.Tr "dataset.current_project"}}" name="first"> | ||||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in currentRepoDataset" :key="index"> | <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in currentRepoDataset" :key="index"> | ||||
| <div style="width: 90%;"> | <div style="width: 90%;"> | ||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias} </span><span class="panel_dataset_name">${dataset.Name} </span></div> | |||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias} </span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name} </span></div> | |||||
| <div style="margin-top: 8px;display: flex;"> | <div style="margin-top: 8px;display: flex;"> | ||||
| <a :title="dataset.UserName" style="cursor: default;"> | <a :title="dataset.UserName" style="cursor: default;"> | ||||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | ||||
| @@ -49,7 +49,7 @@ | |||||
| <el-tab-pane label="{{.i18n.Tr "dataset.owner_dataset"}}" name="second"> | <el-tab-pane label="{{.i18n.Tr "dataset.owner_dataset"}}" name="second"> | ||||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myDataset" :key="index"> | <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myDataset" :key="index"> | ||||
| <div style="width: 90%;"> | <div style="width: 90%;"> | ||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="margin-top: 8px;display: flex;"> | <div style="margin-top: 8px;display: flex;"> | ||||
| <a :title="dataset.UserName" style="cursor: default;"> | <a :title="dataset.UserName" style="cursor: default;"> | ||||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | ||||
| @@ -74,7 +74,7 @@ | |||||
| <el-tab-pane label="{{.i18n.Tr "dataset.public_dataset"}}" name="third"> | <el-tab-pane label="{{.i18n.Tr "dataset.public_dataset"}}" name="third"> | ||||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in publicDataset" :key="index"> | <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in publicDataset" :key="index"> | ||||
| <div style="width: 90%;"> | <div style="width: 90%;"> | ||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="margin-top: 8px;display: flex;"> | <div style="margin-top: 8px;display: flex;"> | ||||
| <a :title="dataset.UserName" style="cursor: default;"> | <a :title="dataset.UserName" style="cursor: default;"> | ||||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | ||||
| @@ -99,7 +99,7 @@ | |||||
| <el-tab-pane label="{{.i18n.Tr "dataset.I_liked"}}" name="fourth"> | <el-tab-pane label="{{.i18n.Tr "dataset.I_liked"}}" name="fourth"> | ||||
| <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myFavoriteDataset" :key="index"> | <div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myFavoriteDataset" :key="index"> | ||||
| <div style="width: 90%;"> | <div style="width: 90%;"> | ||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div> | |||||
| <div style="margin-top: 8px;display: flex;"> | <div style="margin-top: 8px;display: flex;"> | ||||
| <a :title="dataset.UserName" style="cursor: default;"> | <a :title="dataset.UserName" style="cursor: default;"> | ||||
| <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | <img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink"> | ||||
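Note: the five template hunks above are the same change repeated for each tab of the dataset selection panel (current project, my datasets, public, liked) — when a row's `Recommend` flag is set, a `/img/jian.svg` badge is rendered between the owner/alias label and the dataset name. A minimal sketch of the data shape the markup relies on; only the field names come from the patch, the sample values are made up:

    // Minimal sketch: assumes the panel endpoint returns rows shaped like AttachmentInfo
    // (Name, Recommend, Repo.OwnerName, Repo.Alias). Sample data is illustrative only.
    const datasets = [
      { Name: 'MNIST.zip', Recommend: true,  Repo: { OwnerName: 'opendata', Alias: 'mnist' } },
      { Name: 'notes.zip', Recommend: false, Repo: { OwnerName: 'alice',    Alias: 'demo'  } },
    ];

    // The template's v-if="dataset.Recommend" reduces to this per-row boolean check.
    const recommended = datasets.filter((d) => d.Recommend);
    console.log(recommended.map((d) => `${d.Repo.OwnerName}/${d.Repo.Alias} - ${d.Name}`));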
| @@ -121,11 +121,12 @@ | |||||
| <i class="dropdown icon"></i> | <i class="dropdown icon"></i> | ||||
| </span> | </span> | ||||
| <div class="menu"> | <div class="menu"> | ||||
| <a class="{{if eq .SortType "newest"}}active{{end}} item" href="{{$.Link}}?sort=newest&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.latest"}}</a> | |||||
| <a class="{{if eq .SortType "oldest"}}active{{end}} item" href="{{$.Link}}?sort=oldest&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.oldest"}}</a> | |||||
| <a class="{{if eq .SortType "recentupdate"}}active{{end}} item" href="{{$.Link}}?sort=recentupdate&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.recentupdate"}}</a> | |||||
| <a class="{{if eq .SortType "leastupdate"}}active{{end}} item" href="{{$.Link}}?sort=leastupdate&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.leastupdate"}}</a> | |||||
| <!-- <a class="{{if eq .SortType "downloadtimes"}}active{{end}} item" href="{{$.Link}}?sort=downloadtimes&q={{$.Keyword}}&tab={{$.TabName}}">{{.i18n.Tr "repo.issues.filter_sort.downloadtimes"}}</a> --> | |||||
| <a class="{{if eq .SortType "newest"}}active{{end}} item" href="{{$.Link}}?sort=newest&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.latest"}}</a> | |||||
| <a class="{{if eq .SortType "oldest"}}active{{end}} item" href="{{$.Link}}?sort=oldest&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.oldest"}}</a> | |||||
| <a class="{{if eq .SortType "recentupdate"}}active{{end}} item" href="{{$.Link}}?sort=recentupdate&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.recentupdate"}}</a> | |||||
| <a class="{{if eq .SortType "leastupdate"}}active{{end}} item" href="{{$.Link}}?sort=leastupdate&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.leastupdate"}}</a> | |||||
| <a class="{{if eq .SortType "downloadtimes"}}active{{end}} item" href="{{$.Link}}?sort=downloadtimes&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.downloadtimes"}}</a> | |||||
| <a class="{{if eq .SortType "moststars"}}active{{end}} item" href="{{$.Link}}?sort=moststars&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.moststars"}}</a> | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
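Note: every sort link in the dropdown now also carries the `recommend` query parameter, the previously commented-out download-count sort is restored, and a most-stars option is added. A hedged sketch of assembling the same query string on the client, using the parameter names from the template (the helper itself is illustrative, not project code):

    // Builds the query string the template renders server-side; runs in Node or a browser.
    function buildSortLink(base, params) {
      const query = new URLSearchParams(params).toString();
      return `${base}?${query}`;
    }

    console.log(buildSortLink('/explore/datasets', {
      sort: 'recentupdate',
      q: 'mnist',
      tab: '',
      category: '',
      task: '',
      license: '',
      recommend: 'true',
    }));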
| @@ -139,6 +140,7 @@ | |||||
| {{end}} | {{end}} | ||||
| <div class="ui row" style="clear: both;" id="dataset-base"> | <div class="ui row" style="clear: both;" id="dataset-base"> | ||||
| <el-checkbox v-model="checked" style="padding: 0.5rem 1rem;" @change="handleCheckedChange" >仅显示平台推荐</el-checkbox> | |||||
| <div class="ui two cards"> | <div class="ui two cards"> | ||||
| {{range $k, $v :=.Datasets}} | {{range $k, $v :=.Datasets}} | ||||
| <div class="ui card" @click="gotoDataset('{{.Repo.Link}}/datasets')" style="cursor: pointer;box-shadow: 0px 4px 4px 0px rgba(232,232,232,0.6);border: 1px solid rgba(232, 232, 232, 1);"> | <div class="ui card" @click="gotoDataset('{{.Repo.Link}}/datasets')" style="cursor: pointer;box-shadow: 0px 4px 4px 0px rgba(232,232,232,0.6);border: 1px solid rgba(232, 232, 232, 1);"> | ||||
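Note: the new checkbox (label 仅显示平台推荐, roughly "show platform-recommended only") is wired to `handleCheckedChange`, whose body is not part of this hunk. One plausible implementation, assuming the handler simply toggles the same `recommend` query parameter used by the sort links above — a browser-only sketch, not the project's code:

    // Assumed implementation of handleCheckedChange: reload the list with or without recommend=true.
    function handleCheckedChange(checked) {
      const url = new URL(window.location.href);
      if (checked) {
        url.searchParams.set('recommend', 'true');
      } else {
        url.searchParams.delete('recommend');
      }
      window.location.href = url.toString();
    }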
| @@ -161,7 +163,7 @@ | |||||
| </span> | </span> | ||||
| {{end}} | {{end}} | ||||
| </div> | </div> | ||||
| <div style="font-size: 16px;color:#0366D6;font-family: SourceHanSansSC-medium;height: 27px;font-weight: bold;">{{.Title}}</div> | |||||
| <div style="font-size: 16px;color:#0366D6;font-family: SourceHanSansSC-medium;height: 27px;font-weight: bold;display: flex;align-items: center"><span title="{{.Title}}" class="nowrap" style="display: inline-block;">{{.Title}}</span>{{if .Recommend}}<img src="/img/jian.svg" style="margin-left: 0.5rem;">{{end}}</div> | |||||
| {{if or (.Category) (.Task) (.License)}} | {{if or (.Category) (.Task) (.License)}} | ||||
| <div style="font-size: 12px;margin-top: 5px;"> | <div style="font-size: 12px;margin-top: 5px;"> | ||||
| {{if .Category}} | {{if .Category}} | ||||
| @@ -179,7 +179,26 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <input id="store_category" type="hidden" name="get_benchmark_category"> | <input id="store_category" type="hidden" name="get_benchmark_category"> | ||||
| <div class="inline required field"> | |||||
| <label>{{.i18n.Tr "repo.modelarts.code_version"}}</label> | |||||
| <select class="ui dropdown width80 left2" id="code_version" name="branch_name"> | |||||
| {{if .branch_name}} | |||||
| <option name="branch_name" value="{{.branch_name}}">{{.branch_name}}</option> | |||||
| {{range $k, $v :=.Branches}} | |||||
| {{ if ne $v $.branch_name }} | |||||
| <option name="branch_name" value="{{$v}}">{{$v}}</option> | |||||
| {{end}} | |||||
| {{end}} | |||||
| {{else}} | |||||
| <option name="branch_name" value="{{.branchName}}">{{.branchName}}</option> | |||||
| {{range $k, $v :=.Branches}} | |||||
| {{ if ne $v $.branchName }} | |||||
| <option name="branch_name" value="{{$v}}">{{$v}}</option> | |||||
| {{end}} | |||||
| {{end}} | |||||
| {{end}} | |||||
| </select> | |||||
| </div> | |||||
| <div class="inline required field"> | <div class="inline required field"> | ||||
| <label>{{.i18n.Tr "cloudbrain.gpu_type"}}</label> | <label>{{.i18n.Tr "cloudbrain.gpu_type"}}</label> | ||||
| <select id="cloudbrain_gpu_type" class="ui search dropdown" placeholder="选择GPU类型" style='width:385px' name="gpu_type"> | <select id="cloudbrain_gpu_type" class="ui search dropdown" placeholder="选择GPU类型" style='width:385px' name="gpu_type"> | ||||
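Note: the new code-version dropdown preselects the branch stored with the task (`.branch_name`) when it exists, otherwise the repository default (`.branchName`), and then lists every remaining branch once, skipping the preselected value so it is not duplicated. The same ordering rule restated as a small helper (illustrative names):

    // Sketch of the option ordering the template encodes: preselected branch first,
    // then every other branch exactly once.
    function branchOptions(branches, preselected) {
      return [preselected, ...branches.filter((b) => b !== preselected)];
    }

    console.log(branchOptions(['master', 'dev', 'feature-x'], 'dev'));
    // -> ['dev', 'master', 'feature-x']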
| @@ -187,7 +187,7 @@ td, th { | |||||
| {{.i18n.Tr "repo.cloudbrain"}} | {{.i18n.Tr "repo.cloudbrain"}} | ||||
| </a> | </a> | ||||
| <div class="divider"> / </div> | <div class="divider"> / </div> | ||||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}"> | |||||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all"> | |||||
| {{$.i18n.Tr "repo.modelarts.notebook"}} | {{$.i18n.Tr "repo.modelarts.notebook"}} | ||||
| </a> | </a> | ||||
| <div class="divider"> / </div> | <div class="divider"> / </div> | ||||
| @@ -281,7 +281,7 @@ | |||||
| </span> | </span> | ||||
| <el-dropdown-menu slot="dropdown"> | <el-dropdown-menu slot="dropdown"> | ||||
| <el-dropdown-item class="clipboard" data-clipboard-text="{{.DownloadURL}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_url"}}</el-dropdown-item> | <el-dropdown-item class="clipboard" data-clipboard-text="{{.DownloadURL}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_url"}}</el-dropdown-item> | ||||
| <el-dropdown-item class="clipboard" data-clipboard-text="{{.Md5}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_md5"}}</el-dropdown-item> | |||||
| <!-- <el-dropdown-item class="clipboard" data-clipboard-text="{{.Md5}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_md5"}}</el-dropdown-item>--> | |||||
| {{if and ($.CanWrite) (eq .DecompressState 1) }} | {{if and ($.CanWrite) (eq .DecompressState 1) }} | ||||
| <el-dropdown-item @click.native="gotoAnnotate('{{$.RepoLink}}','{{.UUID}}',{{.Type}})">{{$.i18n.Tr "dataset.annotation"}}</el-dropdown-item> | <el-dropdown-item @click.native="gotoAnnotate('{{$.RepoLink}}','{{.UUID}}',{{.Type}})">{{$.i18n.Tr "dataset.annotation"}}</el-dropdown-item> | ||||
| {{end}} | {{end}} | ||||
| @@ -193,7 +193,7 @@ td, th { | |||||
| {{.i18n.Tr "repo.cloudbrain"}} | {{.i18n.Tr "repo.cloudbrain"}} | ||||
| </a> | </a> | ||||
| <div class="divider"> / </div> | <div class="divider"> / </div> | ||||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}"> | |||||
| <a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all"> | |||||
| {{$.i18n.Tr "repo.modelarts.notebook"}} | {{$.i18n.Tr "repo.modelarts.notebook"}} | ||||
| </a> | </a> | ||||
| <div class="divider"> / </div> | <div class="divider"> / </div> | ||||
| @@ -233,8 +233,13 @@ | |||||
| <div class="ui labeled input" style="width: 5%;"> | <div class="ui labeled input" style="width: 5%;"> | ||||
| <input style="border-radius: 0;text-align: center;" name="work_server_number" id="trainjob_work_server_num" tabindex="3" autofocus required maxlength="255" value="1" readonly> | |||||
| <input style="border-radius: 0;text-align: center;" type="hidden" name="work_server_number" id="trainjob_work_server_num" tabindex="3" autofocus required maxlength="255" value="1" readonly> | |||||
| <div class="field" id="trainjob_work_server_num_select" name="work_server_number_select"> | |||||
| <select class="ui dropdown width" style='width: 100%;' name="work_server_id"> | |||||
| <option name="server_id" value="1">1</option> | |||||
| <option name="server_id" value="2">2</option> | |||||
| </select> | |||||
| </div> | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| @@ -263,19 +268,20 @@ | |||||
| $('.menu .item') | $('.menu .item') | ||||
| .tab(); | .tab(); | ||||
| let sever_num = $('#trainjob_work_server_num') | |||||
| $('.add').click(function(){ | |||||
| sever_num.val(parseInt(sever_num.val())+1) | |||||
| if(sever_num.val()>=26){ | |||||
| sever_num.val(parseInt(sever_num.val())-1) | |||||
| } | |||||
| }) | |||||
| $('.min').click(function(){ | |||||
| sever_num.val(parseInt(sever_num.val())-1) | |||||
| if(sever_num.val()<=0){ | |||||
| sever_num.val(parseInt(sever_num.val())+1) | |||||
| } | |||||
| }) | |||||
| // let sever_num = $("#trainjob_work_server_num_select .text").text() //$('#trainjob_work_server_num') | |||||
| // console.log("sever_num:",sever_num) | |||||
| // $('.add').click(function(){ | |||||
| // sever_num.val(parseInt(sever_num.val())+1) | |||||
| // if(sever_num.val()>=26){ | |||||
| // sever_num.val(parseInt(sever_num.val())-1) | |||||
| // } | |||||
| // }) | |||||
| // $('.min').click(function(){ | |||||
| // sever_num.val(parseInt(sever_num.val())-1) | |||||
| // if(sever_num.val()<=0){ | |||||
| // sever_num.val(parseInt(sever_num.val())+1) | |||||
| // } | |||||
| // }) | |||||
| // 参数增加、删除、修改、保存 | // 参数增加、删除、修改、保存 | ||||
| function Add_parameter(i){ | function Add_parameter(i){ | ||||
| value = '<div class="two fields width85" id= "para'+ i +'">' + | value = '<div class="two fields width85" id= "para'+ i +'">' + | ||||
| @@ -349,7 +355,7 @@ | |||||
| // $("select[name='pool_id']").val(parameters[i]); | // $("select[name='pool_id']").val(parameters[i]); | ||||
| // break; | // break; | ||||
| case (6): | case (6): | ||||
| $("input[name='work_server_number']").val(parameters[i]); | |||||
| // $("input[name='work_server_number']").val(parameters[i]); | |||||
| break; | break; | ||||
| } | } | ||||
| } | } | ||||
| @@ -456,6 +462,10 @@ | |||||
| $("input#ai_engine_name").val(name1) | $("input#ai_engine_name").val(name1) | ||||
| $("input#ai_flaver_name").val(name2) | $("input#ai_flaver_name").val(name2) | ||||
| let val_server_num_select = $("#trainjob_work_server_num_select .text").text() | |||||
| // console.log("val_server_num_select:",val_server_num_select) | |||||
| $("input#trainjob_work_server_num").val(val_server_num_select) | |||||
| } | } | ||||
| $('.ui.create_train_job.green.button').click(function(e) { | $('.ui.create_train_job.green.button').click(function(e) { | ||||
| get_name() | get_name() | ||||
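Note: because the worker-count input is now hidden, `get_name()` copies the text of the selected dropdown item into it right before submit, so the backend keeps receiving `work_server_number` as before. An equivalent sync in plain DOM APIs (selectors match the patch; the wrapper function is only a sketch):

    // Copy the Fomantic dropdown's selected text into the hidden work_server_number field.
    function syncWorkerCount() {
      const selected = document.querySelector('#trainjob_work_server_num_select .text');
      const hidden = document.querySelector('input#trainjob_work_server_num');
      if (selected && hidden) {
        hidden.value = selected.textContent.trim();
      }
    }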
| @@ -249,7 +249,7 @@ td, th { | |||||
| <div class="ui pointing secondary menu" style="border-bottom: 1px solid rgba(34,36,38,.15);"> | <div class="ui pointing secondary menu" style="border-bottom: 1px solid rgba(34,36,38,.15);"> | ||||
| <a class="active item" data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a> | <a class="active item" data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a> | ||||
| <a class="item" data-tab="second{{$k}}" onclick="loadLog({{.VersionName}})">{{$.i18n.Tr "repo.modelarts.log"}}</a> | |||||
| <a class="item log_bottom" data-tab="second{{$k}}" data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a> | |||||
| <a class="item" data-tab="third{{$k}}" onclick="loadModelFile({{.VersionName}},'','','init')">{{$.i18n.Tr "repo.model_download"}}</a> | <a class="item" data-tab="third{{$k}}" onclick="loadModelFile({{.VersionName}},'','','init')">{{$.i18n.Tr "repo.model_download"}}</a> | ||||
| </div> | </div> | ||||
| <div class="ui tab active" data-tab="first{{$k}}"> | <div class="ui tab active" data-tab="first{{$k}}"> | ||||
| @@ -420,7 +420,13 @@ td, th { | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <div class="ui tab" data-tab="second{{$k}}"> | <div class="ui tab" data-tab="second{{$k}}"> | ||||
| <div> | |||||
| <div style="position: relative;"> | |||||
| <span> | |||||
| <a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | |||||
| </span> | |||||
| <span> | |||||
| <a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" class="log_bottom" data-version="{{.VersionName}}"><i class="icon-to-bottom"></i></a> | |||||
| </span> | |||||
| <div class="ui message message{{.VersionName}}" style="display: none;"> | <div class="ui message message{{.VersionName}}" style="display: none;"> | ||||
| <div id="header"></div> | <div id="header"></div> | ||||
| </div> | </div> | ||||
| @@ -861,9 +867,12 @@ td, th { | |||||
| console.log(err); | console.log(err); | ||||
| }); | }); | ||||
| } | } | ||||
| if(scrollTop == 0 && scrollLeft==0){ | |||||
| if(scrollTop == 1 && scrollLeft==0){ | |||||
| let start_line = $(`#log${version_name} input[name=start_line]`).val() | let start_line = $(`#log${version_name} input[name=start_line]`).val() | ||||
| $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${start_line}&lines=50&order=asc`, (data) => { | $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${start_line}&lines=50&order=asc`, (data) => { | ||||
| // prepend the previous 50 log lines once the viewer is scrolled back to the top | |||||
| if (data.Lines == 0){ | if (data.Lines == 0){ | ||||
| $(`.message${version_name} #header`).text('您已翻阅至日志顶部') | $(`.message${version_name} #header`).text('您已翻阅至日志顶部') | ||||
| $(`.message${version_name}`).css('display', 'block') | $(`.message${version_name}`).css('display', 'block') | ||||
| @@ -879,4 +888,73 @@ td, th { | |||||
| }); | }); | ||||
| } | } | ||||
| } | } | ||||
| function scrollAnimation(dom, currentY, targetY, currentX) { | |||||
| let needScrollTop = targetY - currentY; | |||||
| let _currentY = currentY; | |||||
| setTimeout(() => { | |||||
| // 一次调用滑动帧数,每次调用会不一样 | |||||
| //取总距离的十分之一 | |||||
| const dist = Math.ceil(needScrollTop / 10); | |||||
| _currentY += dist; | |||||
| //移动一个十分之一 | |||||
| console.log(_currentY, targetY) | |||||
| dom.scrollTo(currentX || 0, _currentY); | |||||
| // 如果移动幅度小于十个像素,直接移动,否则递归调用,实现动画效果 | |||||
| if (needScrollTop > 10 || needScrollTop < -10) { | |||||
| scrollAnimation(dom, _currentY, targetY) | |||||
| } else { | |||||
| dom.scrollTo(0, targetY) | |||||
| } | |||||
| }, 1) | |||||
| } | |||||
| $('.log_top').click(function(){ | |||||
| let logContentDom = document.querySelector('.log') | |||||
| if(!logContentDom) | |||||
| return | |||||
| let version_name = $('.log_top').data('version') | |||||
| $(`#log_file${version_name}`).siblings('pre').remove() | |||||
| $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=asc`, (data) => { | |||||
| $(`#log${version_name} input[name=end_line]`).val(data.EndLine) //如果变动就改变所对应的值 | |||||
| $(`#log${version_name} input[name=start_line]`).val(data.StartLine) | |||||
| $(`#log${version_name}`).prepend('<pre>' + data.Content) | |||||
| scrollAnimation(logContentDom, logContentDom.scrollTop, 0); | |||||
| }) | |||||
| }) | |||||
| $('.log_bottom').click(function(){ | |||||
| let logContentDom = document.querySelector('.log') | |||||
| let version_name = $('.log_bottom').data('version') | |||||
| console.log($(`#log${version_name}`).siblings('pre')) | |||||
| $(`#log_file${version_name}`).siblings('pre').remove() | |||||
| $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=desc`, (data) => { | |||||
| $(`#log${version_name} input[name=end_line]`).val(data.EndLine) //如果变动就改变所对应的值 | |||||
| $(`#log${version_name} input[name=start_line]`).val(data.StartLine) | |||||
| $(`#log${version_name}`).append('<pre>' + data.Content) | |||||
| $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${data.EndLine}&lines=50&order=desc`, (data) => { | |||||
| if (data.Lines == 0){ | |||||
| $(`.message${version_name} #header`).text('您已翻阅至日志底部') | |||||
| $(`.message${version_name}`).css('display', 'block') | |||||
| setTimeout(function(){ | |||||
| $(`.message${version_name}`).css('display', 'none') | |||||
| }, 1000) | |||||
| }else{ | |||||
| if($(`#log${version_name} input[name=end_line]`).val()==data.EndLine){ | |||||
| return | |||||
| } | |||||
| else{ | |||||
| $(`#log${version_name} input[name=end_line]`).val(data.EndLine) | |||||
| $(`#log${version_name}`).append('<pre>' + data.Content) | |||||
| } | |||||
| } | |||||
| }).fail(function(err) { | |||||
| console.log(err); | |||||
| }); | |||||
| scrollAnimation(logContentDom, logContentDom.scrollTop+1, logContentDom.scrollHeight - logContentDom.clientHeight); | |||||
| }) | |||||
| }) | |||||
| </script> | </script> | ||||
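Note: `scrollAnimation` eases toward the target by moving one tenth of the remaining distance on a 1 ms timer and snaps once it is within 10 px, while the new top/bottom buttons refetch the first or last 50 log lines (`order=asc` / `order=desc`) before scrolling. The same easing written with `requestAnimationFrame`, purely as an illustrative alternative to the nested `setTimeout` recursion:

    // Sketch: "move 1/10 of the remaining distance each frame" easing, frame-driven.
    function scrollToY(dom, targetY) {
      function step() {
        const remaining = targetY - dom.scrollTop;
        if (Math.abs(remaining) <= 10) {
          dom.scrollTo(dom.scrollLeft, targetY); // close enough: snap to the target
          return;
        }
        dom.scrollTo(dom.scrollLeft, dom.scrollTop + Math.ceil(remaining / 10));
        requestAnimationFrame(step);
      }
      requestAnimationFrame(step);
    }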
| @@ -147,9 +147,9 @@ | |||||
| {{end}} | {{end}} | ||||
| </div> | </div> | ||||
| <!-- 删除任务 --> | <!-- 删除任务 --> | ||||
| <form class="ui compact buttons" id="delForm-{{$JobID}}" action='{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Name}}{{if eq .JobType "BENCHMARK"}}/cloudbrain/benchmark{{else if eq .JobType "DEBUG"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}{{else if eq .JobType "TRAIN"}}{{if eq .ComputeResource "NPU"}}/modelarts/train-job{{else}}/cloudbrain/train-job{{end}}{{end}}/{{$JobID}}/del?ishomepage=true' method="post"> | |||||
| <form class="ui compact buttons" id="delForm-{{$JobID}}" action='{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Name}}{{if eq .JobType "BENCHMARK"}}/cloudbrain/benchmark{{else if or (eq .JobType "SNN4IMAGENET") (eq .JobType "BRAINSCORE")}}/cloudbrain{{else if eq .JobType "DEBUG"}}{{if eq .ComputeResource "NPU"}}/modelarts/notebook{{else}}/cloudbrain{{end}}{{else if eq .JobType "TRAIN"}}{{if eq .ComputeResource "NPU"}}/modelarts/train-job{{else}}/cloudbrain/train-job{{end}}{{end}}/{{$JobID}}/del?ishomepage=true' method="post"> | |||||
| {{$.CsrfTokenHtml}} | {{$.CsrfTokenHtml}} | ||||
| <a style="padding: 0.5rem 1rem;margin-left:0.2rem" id="ai-delete-{{$JobID}}" data-repopath="{{.Repo.OwnerName}}/{{.Repo.Name}}/modelarts/inference-job/{{$JobID}}/del_version?ishomepage=true" data-version="{{.VersionName}}" class="ui basic ai_delete blue button" style="border-radius: .28571429rem;"> | |||||
| <a style="padding: 0.5rem 1rem;margin-left:0.2rem" id="ai-delete-{{$JobID}}" data-repopath="{{.Repo.OwnerName}}/{{.Repo.Name}}/modelarts/inference-job/{{$JobID}}/del_version?ishomepage=true" data-version="" class="ui basic ai_delete blue button" style="border-radius: .28571429rem;"> | |||||
| {{$.i18n.Tr "repo.delete"}} | {{$.i18n.Tr "repo.delete"}} | ||||
| </a> | </a> | ||||
| </form> | </form> | ||||
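Note: the delete form's action now also routes SNN4IMAGENET and BRAINSCORE jobs to `/cloudbrain`, and the version attribute on the delete link is emptied. The routing rule packed into that one template expression, restated as a small helper (illustrative only; the template remains the source of truth):

    // Mirrors the template conditional; the job id and "/del?ishomepage=true" suffix
    // are appended separately by the template.
    function deleteRoute(jobType, computeResource) {
      if (jobType === 'BENCHMARK') return '/cloudbrain/benchmark';
      if (jobType === 'SNN4IMAGENET' || jobType === 'BRAINSCORE') return '/cloudbrain';
      if (jobType === 'DEBUG') return computeResource === 'NPU' ? '/modelarts/notebook' : '/cloudbrain';
      if (jobType === 'TRAIN') return computeResource === 'NPU' ? '/modelarts/train-job' : '/cloudbrain/train-job';
      return '';
    }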
| @@ -71,7 +71,7 @@ | |||||
| {{ $index := index .GetIssueInfos 0}} | {{ $index := index .GetIssueInfos 0}} | ||||
| {{$.i18n.Tr "action.comment_pull" .GetRepoLink $index .ShortRepoPath | Str2html}} | {{$.i18n.Tr "action.comment_pull" .GetRepoLink $index .ShortRepoPath | Str2html}} | ||||
| {{else if eq .GetOpType 24}} | {{else if eq .GetOpType 24}} | ||||
| {{$.i18n.Tr "action.upload_dataset" .GetRepoLink .Content .RefName | Str2html}} | |||||
| {{$.i18n.Tr "action.upload_dataset" .GetRepoLink .RefName | Str2html}} | |||||
| {{else if eq .GetOpType 25}} | {{else if eq .GetOpType 25}} | ||||
| {{$.i18n.Tr "action.task_gpudebugjob" .GetRepoLink .Content .RefName | Str2html}} | {{$.i18n.Tr "action.task_gpudebugjob" .GetRepoLink .Content .RefName | Str2html}} | ||||
| {{else if eq .GetOpType 26}} | {{else if eq .GetOpType 26}} | ||||
| @@ -27,6 +27,7 @@ import createDropzone from '../features/dropzone.js'; | |||||
| const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config; | const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config; | ||||
| const chunkSize = 1024 * 1024 * 64; | const chunkSize = 1024 * 1024 * 64; | ||||
| const md5ChunkSize = 1024 * 1024 * 1; | |||||
| export default { | export default { | ||||
| props:{ | props:{ | ||||
| @@ -190,10 +191,12 @@ export default { | |||||
| let currentChunk = 0; | let currentChunk = 0; | ||||
| const time = new Date().getTime(); | const time = new Date().getTime(); | ||||
| // console.log('计算MD5...') | |||||
| this.status = this.dropzoneParams.data('md5-computing'); | this.status = this.dropzoneParams.data('md5-computing'); | ||||
| file.totalChunkCounts = chunks; | file.totalChunkCounts = chunks; | ||||
| loadNext(); | |||||
| if (file.size==0) { | |||||
| file.totalChunkCounts = 1 | |||||
| } | |||||
| loadMd5Next(); | |||||
| fileReader.onload = (e) => { | fileReader.onload = (e) => { | ||||
| fileLoaded.call(this, e); | fileLoaded.call(this, e); | ||||
| @@ -207,13 +210,12 @@ export default { | |||||
| spark.append(e.target.result); // Append array buffer | spark.append(e.target.result); // Append array buffer | ||||
| currentChunk++; | currentChunk++; | ||||
| if (currentChunk < chunks) { | if (currentChunk < chunks) { | ||||
| // console.log(`第${currentChunk}分片解析完成, 开始第${currentChunk +1}/${chunks}分片解析`); | |||||
| this.status = `${this.dropzoneParams.data('loading-file')} ${( | this.status = `${this.dropzoneParams.data('loading-file')} ${( | ||||
| (currentChunk / chunks) * | (currentChunk / chunks) * | ||||
| 100 | 100 | ||||
| ).toFixed(2)}% (${currentChunk}/${chunks})`; | ).toFixed(2)}% (${currentChunk}/${chunks})`; | ||||
| this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2)); | this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2)); | ||||
| loadNext(); | |||||
| loadMd5Next(); | |||||
| return; | return; | ||||
| } | } | ||||
| @@ -235,6 +237,13 @@ export default { | |||||
| start + chunkSize >= file.size ? file.size : start + chunkSize; | start + chunkSize >= file.size ? file.size : start + chunkSize; | ||||
| fileReader.readAsArrayBuffer(blobSlice.call(file, start, end)); | fileReader.readAsArrayBuffer(blobSlice.call(file, start, end)); | ||||
| } | } | ||||
| function loadMd5Next() { | |||||
| const start = currentChunk * chunkSize; | |||||
| const end = | |||||
| start + md5ChunkSize >= file.size ? file.size : start + md5ChunkSize; | |||||
| fileReader.readAsArrayBuffer(blobSlice.call(file, start, end)); | |||||
| } | |||||
| }, | }, | ||||
| async computeMD5Success(md5edFile) { | async computeMD5Success(md5edFile) { | ||||
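Note: the MD5 pass no longer hashes whole 64 MB chunks — `loadMd5Next` still advances by `chunkSize` but reads only the first `md5ChunkSize` (1 MB) of each chunk, so the resulting identifier is a sampled fingerprint rather than a full-file MD5; the zero-byte special case forces `totalChunkCounts` to 1 so the reader fires at least once. A sketch of the byte ranges that end up being hashed under the constants in the patch:

    // Which byte ranges the sampled-MD5 pass reads, given the patch's constants.
    const chunkSize = 1024 * 1024 * 64;   // upload chunk size
    const md5ChunkSize = 1024 * 1024;     // bytes actually hashed per chunk

    function sampledRanges(fileSize) {
      const chunks = Math.max(1, Math.ceil(fileSize / chunkSize)); // zero-byte files count as one chunk
      const ranges = [];
      for (let i = 0; i < chunks; i++) {
        const start = i * chunkSize;
        const end = Math.min(fileSize, start + md5ChunkSize);
        ranges.push([start, end]);
      }
      return ranges;
    }

    console.log(sampledRanges(3 * chunkSize + 5)); // 4 chunks; up to 1 MB sampled from each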
| @@ -1,484 +0,0 @@ | |||||
| <template> | |||||
| <div class="dropzone-wrapper dataset-files"> | |||||
| <div | |||||
| id="dataset" | |||||
| class="dropzone" | |||||
| /> | |||||
| <p class="upload-info"> | |||||
| {{ file_status_text }} | |||||
| <strong class="success text red">{{ status }}</strong> | |||||
| </p> | |||||
| <p>说明:<br> | |||||
| - 只有zip格式的数据集才能发起云脑任务;<br> | |||||
| - 云脑1提供 <span class="text blue">CPU / GPU</span> 资源,云脑2提供 <span class="text blue">Ascend NPU</span> 资源;调试使用的数据集也需要上传到对应的环境。 | |||||
| </p> | |||||
| </div> | |||||
| </template> | |||||
| <script> | |||||
| /* eslint-disable eqeqeq */ | |||||
| // import Dropzone from 'dropzone/dist/dropzone.js'; | |||||
| // import 'dropzone/dist/dropzone.css' | |||||
| import SparkMD5 from 'spark-md5'; | |||||
| import axios from 'axios'; | |||||
| import qs from 'qs'; | |||||
| import createDropzone from '../features/dropzone.js'; | |||||
| const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config; | |||||
| const CloudBrainType = 1; | |||||
| export default { | |||||
| data() { | |||||
| return { | |||||
| dropzoneUploader: null, | |||||
| maxFiles: 1, | |||||
| maxFilesize: 1 * 1024 * 1024 * 1024 * 1024, | |||||
| acceptedFiles: '*/*', | |||||
| progress: 0, | |||||
| status: '', | |||||
| dropzoneParams: {}, | |||||
| file_status_text: '' | |||||
| }; | |||||
| }, | |||||
| async mounted() { | |||||
| this.dropzoneParams = $('div#minioUploader-params'); | |||||
| this.file_status_text = this.dropzoneParams.data('file-status'); | |||||
| this.status = this.dropzoneParams.data('file-init-status'); | |||||
| let previewTemplate = ''; | |||||
| previewTemplate += '<div class="dz-preview dz-file-preview">\n '; | |||||
| previewTemplate += ' <div class="dz-details">\n '; | |||||
| previewTemplate += ' <div class="dz-filename">'; | |||||
| previewTemplate += | |||||
| ' <span data-dz-name data-dz-thumbnail></span>'; | |||||
| previewTemplate += ' </div>\n '; | |||||
| previewTemplate += ' <div class="dz-size" data-dz-size style="white-space: nowrap"></div>\n '; | |||||
| previewTemplate += ' </div>\n '; | |||||
| previewTemplate += ' <div class="dz-progress ui active progress">'; | |||||
| previewTemplate += | |||||
| ' <div class="dz-upload bar" data-dz-uploadprogress><div class="progress"></div></div>\n '; | |||||
| previewTemplate += ' </div>\n '; | |||||
| previewTemplate += ' <div class="dz-success-mark">'; | |||||
| previewTemplate += ' <span>上传成功</span>'; | |||||
| previewTemplate += ' </div>\n '; | |||||
| previewTemplate += ' <div class="dz-error-mark">'; | |||||
| previewTemplate += ' <span>上传失败</span>'; | |||||
| previewTemplate += ' </div>\n '; | |||||
| previewTemplate += ' <div class="dz-error-message">'; | |||||
| previewTemplate += ' <span data-dz-errormessage></span>'; | |||||
| previewTemplate += ' </div>\n'; | |||||
| previewTemplate += '</div>'; | |||||
| const $dropzone = $('div#dataset'); | |||||
| console.log('createDropzone'); | |||||
| const dropzoneUploader = await createDropzone($dropzone[0], { | |||||
| url: '/todouploader', | |||||
| maxFiles: this.maxFiles, | |||||
| maxFilesize: this.maxFileSize, | |||||
| timeout: 0, | |||||
| autoQueue: false, | |||||
| dictDefaultMessage: this.dropzoneParams.data('default-message'), | |||||
| dictInvalidFileType: this.dropzoneParams.data('invalid-input-type'), | |||||
| dictFileTooBig: this.dropzoneParams.data('file-too-big'), | |||||
| dictRemoveFile: this.dropzoneParams.data('remove-file'), | |||||
| previewTemplate | |||||
| }); | |||||
| dropzoneUploader.on('addedfile', (file) => { | |||||
| setTimeout(() => { | |||||
| // eslint-disable-next-line no-unused-expressions | |||||
| file.accepted && this.onFileAdded(file); | |||||
| }, 200); | |||||
| }); | |||||
| dropzoneUploader.on('maxfilesexceeded', function (file) { | |||||
| if (this.files[0].status !== 'success') { | |||||
| alert(this.dropzoneParams.data('waitting-uploading')); | |||||
| this.removeFile(file); | |||||
| return; | |||||
| } | |||||
| this.removeAllFiles(); | |||||
| this.addFile(file); | |||||
| }); | |||||
| this.dropzoneUploader = dropzoneUploader; | |||||
| }, | |||||
| methods: { | |||||
| resetStatus() { | |||||
| this.progress = 0; | |||||
| this.status = ''; | |||||
| }, | |||||
| updateProgress(file, progress) { | |||||
| file.previewTemplate.querySelector( | |||||
| '.dz-upload' | |||||
| ).style.width = `${progress}%`; | |||||
| }, | |||||
| emitDropzoneSuccess(file) { | |||||
| file.status = 'success'; | |||||
| this.dropzoneUploader.emit('success', file); | |||||
| this.dropzoneUploader.emit('complete', file); | |||||
| }, | |||||
| emitDropzoneFailed(file) { | |||||
| this.status = this.dropzoneParams.data('falied'); | |||||
| file.status = 'error'; | |||||
| this.dropzoneUploader.emit('error', file); | |||||
| // this.dropzoneUploader.emit('complete', file); | |||||
| }, | |||||
| onFileAdded(file) { | |||||
| file.datasetId = document | |||||
| .getElementById('datasetId') | |||||
| .getAttribute('datasetId'); | |||||
| this.resetStatus(); | |||||
| this.computeMD5(file); | |||||
| }, | |||||
| finishUpload(file) { | |||||
| this.emitDropzoneSuccess(file); | |||||
| setTimeout(() => { | |||||
| window.location.reload(); | |||||
| }, 1000); | |||||
| }, | |||||
| computeMD5(file) { | |||||
| this.resetStatus(); | |||||
| const blobSlice = | |||||
| File.prototype.slice || | |||||
| File.prototype.mozSlice || | |||||
| File.prototype.webkitSlice, | |||||
| chunkSize = 1024 * 1024 * 64, | |||||
| chunks = Math.ceil(file.size / chunkSize), | |||||
| spark = new SparkMD5.ArrayBuffer(), | |||||
| fileReader = new FileReader(); | |||||
| let currentChunk = 0; | |||||
| const time = new Date().getTime(); | |||||
| // console.log('计算MD5...') | |||||
| this.status = this.dropzoneParams.data('md5-computing'); | |||||
| file.totalChunkCounts = chunks; | |||||
| loadNext(); | |||||
| fileReader.onload = (e) => { | |||||
| fileLoaded.call(this, e); | |||||
| }; | |||||
| fileReader.onerror = (err) => { | |||||
| console.warn('oops, something went wrong.', err); | |||||
| file.cancel(); | |||||
| }; | |||||
| function fileLoaded(e) { | |||||
| spark.append(e.target.result); // Append array buffer | |||||
| currentChunk++; | |||||
| if (currentChunk < chunks) { | |||||
| // console.log(`第${currentChunk}分片解析完成, 开始第${currentChunk +1}/${chunks}分片解析`); | |||||
| this.status = `${this.dropzoneParams.data('loading-file')} ${( | |||||
| (currentChunk / chunks) * | |||||
| 100 | |||||
| ).toFixed(2)}% (${currentChunk}/${chunks})`; | |||||
| this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2)); | |||||
| loadNext(); | |||||
| return; | |||||
| } | |||||
| const md5 = spark.end(); | |||||
| console.log( | |||||
| `MD5计算完成:${file.name} \nMD5:${md5} \n分片:${chunks} 大小:${ | |||||
| file.size | |||||
| } 用时:${(new Date().getTime() - time) / 1000} s` | |||||
| ); | |||||
| spark.destroy(); // 释放缓存 | |||||
| file.uniqueIdentifier = md5; // 将文件md5赋值给文件唯一标识 | |||||
| file.cmd5 = false; // 取消计算md5状态 | |||||
| this.computeMD5Success(file); | |||||
| } | |||||
| function loadNext() { | |||||
| const start = currentChunk * chunkSize; | |||||
| const end = | |||||
| start + chunkSize >= file.size ? file.size : start + chunkSize; | |||||
| fileReader.readAsArrayBuffer(blobSlice.call(file, start, end)); | |||||
| } | |||||
| }, | |||||
| async computeMD5Success(md5edFile) { | |||||
| const file = await this.getSuccessChunks(md5edFile); | |||||
| try { | |||||
| if (file.uploadID == '' || file.uuid == '') { | |||||
| // 未上传过 | |||||
| await this.newMultiUpload(file); | |||||
| if (file.uploadID != '' && file.uuid != '') { | |||||
| file.chunks = ''; | |||||
| this.multipartUpload(file); | |||||
| } else { | |||||
| // 失败如何处理 | |||||
| return; | |||||
| } | |||||
| return; | |||||
| } | |||||
| if (file.uploaded == '1') { | |||||
| // 已上传成功 | |||||
| // 秒传 | |||||
| if (file.attachID == '0') { | |||||
| // 删除数据集记录,未删除文件 | |||||
| await addAttachment(file); | |||||
| } | |||||
| //不同数据集上传同一个文件 | |||||
| if (file.datasetID != '' ) { | |||||
| if (file.datasetName != "" && file.realName != "") { | |||||
| var info = "该文件已上传,对应数据集(" + file.datasetName + ")-文件(" + file.realName + ")"; | |||||
| window.alert(info); | |||||
| window.location.reload(); | |||||
| } | |||||
| } | |||||
| console.log('文件已上传完成'); | |||||
| this.progress = 100; | |||||
| this.status = this.dropzoneParams.data('upload-complete'); | |||||
| this.finishUpload(file); | |||||
| } else { | |||||
| // 断点续传 | |||||
| this.multipartUpload(file); | |||||
| } | |||||
| } catch (error) { | |||||
| this.emitDropzoneFailed(file); | |||||
| console.log(error); | |||||
| } | |||||
| async function addAttachment(file) { | |||||
| return await axios.post( | |||||
| '/attachments/add', | |||||
| qs.stringify({ | |||||
| uuid: file.uuid, | |||||
| file_name: file.name, | |||||
| size: file.size, | |||||
| dataset_id: file.datasetId, | |||||
| type: CloudBrainType, | |||||
| _csrf: csrf, | |||||
| }) | |||||
| ); | |||||
| } | |||||
| }, | |||||
| async getSuccessChunks(file) { | |||||
| const params = { | |||||
| params: { | |||||
| md5: file.uniqueIdentifier, | |||||
| type: CloudBrainType, | |||||
| file_name: file.name, | |||||
| _csrf: csrf | |||||
| } | |||||
| }; | |||||
| try { | |||||
| const response = await axios.get('/attachments/get_chunks', params); | |||||
| file.uploadID = response.data.uploadID; | |||||
| file.uuid = response.data.uuid; | |||||
| file.uploaded = response.data.uploaded; | |||||
| file.chunks = response.data.chunks; | |||||
| file.attachID = response.data.attachID; | |||||
| file.datasetID = response.data.datasetID; | |||||
| file.datasetName = response.data.datasetName; | |||||
| file.realName = response.data.fileName; | |||||
| return file; | |||||
| } catch (error) { | |||||
| this.emitDropzoneFailed(file); | |||||
| console.log('getSuccessChunks catch: ', error); | |||||
| return null; | |||||
| } | |||||
| }, | |||||
| async newMultiUpload(file) { | |||||
| const res = await axios.get('/attachments/new_multipart', { | |||||
| params: { | |||||
| totalChunkCounts: file.totalChunkCounts, | |||||
| md5: file.uniqueIdentifier, | |||||
| size: file.size, | |||||
| fileType: file.type, | |||||
| type: CloudBrainType, | |||||
| file_name: file.name, | |||||
| _csrf: csrf | |||||
| } | |||||
| }); | |||||
| file.uploadID = res.data.uploadID; | |||||
| file.uuid = res.data.uuid; | |||||
| }, | |||||
| multipartUpload(file) { | |||||
| const blobSlice = | |||||
| File.prototype.slice || | |||||
| File.prototype.mozSlice || | |||||
| File.prototype.webkitSlice, | |||||
| chunkSize = 1024 * 1024 * 64, | |||||
| chunks = Math.ceil(file.size / chunkSize), | |||||
| fileReader = new FileReader(), | |||||
| time = new Date().getTime(); | |||||
| let currentChunk = 0; | |||||
| function loadNext() { | |||||
| const start = currentChunk * chunkSize; | |||||
| const end = | |||||
| start + chunkSize >= file.size ? file.size : start + chunkSize; | |||||
| fileReader.readAsArrayBuffer(blobSlice.call(file, start, end)); | |||||
| } | |||||
| function checkSuccessChunks() { | |||||
| const index = successChunks.indexOf((currentChunk + 1).toString()); | |||||
| if (index == -1) { | |||||
| return false; | |||||
| } | |||||
| return true; | |||||
| } | |||||
| async function getUploadChunkUrl(currentChunk, partSize) { | |||||
| const res = await axios.get('/attachments/get_multipart_url', { | |||||
| params: { | |||||
| uuid: file.uuid, | |||||
| uploadID: file.uploadID, | |||||
| size: partSize, | |||||
| chunkNumber: currentChunk + 1, | |||||
| type: CloudBrainType, | |||||
| file_name: file.name, | |||||
| _csrf: csrf | |||||
| } | |||||
| }); | |||||
| urls[currentChunk] = res.data.url; | |||||
| } | |||||
| async function uploadMinio(url, e) { | |||||
| let urls = []; | |||||
| const res = await axios.put(url, e.target.result, { | |||||
| headers: { | |||||
| 'Content-Type': '' | |||||
| }}); | |||||
| etags[currentChunk] = res.headers.etag; | |||||
| } | |||||
| async function uploadMinioNewMethod(url,e){ | |||||
| var xhr = new XMLHttpRequest(); | |||||
| xhr.open('PUT', url, false); | |||||
| xhr.setRequestHeader('Content-Type', '') | |||||
| xhr.send(e.target.result); | |||||
| var etagValue = xhr.getResponseHeader('ETag'); | |||||
| //console.log(etagValue); | |||||
| etags[currentChunk] = etagValue; | |||||
| } | |||||
| async function updateChunk(currentChunk) { | |||||
| await axios.post( | |||||
| '/attachments/update_chunk', | |||||
| qs.stringify({ | |||||
| uuid: file.uuid, | |||||
| chunkNumber: currentChunk + 1, | |||||
| etag: etags[currentChunk], | |||||
| type: CloudBrainType, | |||||
| _csrf: csrf | |||||
| }) | |||||
| ); | |||||
| } | |||||
| async function uploadChunk(e) { | |||||
| try { | |||||
| if (!checkSuccessChunks()) { | |||||
| const start = currentChunk * chunkSize; | |||||
| const partSize = | |||||
| start + chunkSize >= file.size ? file.size - start : chunkSize; | |||||
| // 获取分片上传url | |||||
| await getUploadChunkUrl(currentChunk, partSize); | |||||
| if (urls[currentChunk] != '') { | |||||
| // 上传到minio | |||||
| await uploadMinioNewMethod(urls[currentChunk], e); | |||||
| if (etags[currentChunk] != '') { | |||||
| // 更新数据库:分片上传结果 | |||||
| //await updateChunk(currentChunk); | |||||
| } else { | |||||
| console.log("上传到minio uploadChunk etags[currentChunk] == ''");// TODO | |||||
| } | |||||
| } else { | |||||
| console.log("uploadChunk urls[currentChunk] != ''");// TODO | |||||
| } | |||||
| } | |||||
| } catch (error) { | |||||
| this.emitDropzoneFailed(file); | |||||
| console.log(error); | |||||
| } | |||||
| } | |||||
| async function completeUpload() { | |||||
| return await axios.post( | |||||
| '/attachments/complete_multipart', | |||||
| qs.stringify({ | |||||
| uuid: file.uuid, | |||||
| uploadID: file.uploadID, | |||||
| file_name: file.name, | |||||
| size: file.size, | |||||
| dataset_id: file.datasetId, | |||||
| type: CloudBrainType, | |||||
| _csrf: csrf | |||||
| }) | |||||
| ); | |||||
| } | |||||
| const successChunks = []; | |||||
| let successParts = []; | |||||
| successParts = file.chunks.split(','); | |||||
| for (let i = 0; i < successParts.length; i++) { | |||||
| successChunks[i] = successParts[i].split('-')[0]; | |||||
| } | |||||
| const urls = []; // TODO const ? | |||||
| const etags = []; | |||||
| console.log('上传分片...'); | |||||
| this.status = this.dropzoneParams.data('uploading'); | |||||
| loadNext(); | |||||
| fileReader.onload = async (e) => { | |||||
| await uploadChunk(e); | |||||
| fileReader.abort(); | |||||
| currentChunk++; | |||||
| if (currentChunk < chunks) { | |||||
| console.log( | |||||
| `第${currentChunk}个分片上传完成, 开始第${currentChunk + | |||||
| 1}/${chunks}个分片上传` | |||||
| ); | |||||
| this.progress = Math.ceil((currentChunk / chunks) * 100); | |||||
| this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2)); | |||||
| this.status = `${this.dropzoneParams.data('uploading')} ${( | |||||
| (currentChunk / chunks) * | |||||
| 100 | |||||
| ).toFixed(2)}%`; | |||||
| await loadNext(); | |||||
| } else { | |||||
| await completeUpload(); | |||||
| console.log( | |||||
| `文件上传完成:${file.name} \n分片:${chunks} 大小:${ | |||||
| file.size | |||||
| } 用时:${(new Date().getTime() - time) / 1000} s` | |||||
| ); | |||||
| this.progress = 100; | |||||
| this.status = this.dropzoneParams.data('upload-complete'); | |||||
| this.finishUpload(file); | |||||
| } | |||||
| }; | |||||
| } | |||||
| } | |||||
| }; | |||||
| </script> | |||||
| <style> | |||||
| .dropzone-wrapper { | |||||
| margin: 0; | |||||
| } | |||||
| .ui .dropzone { | |||||
| border: 2px dashed #0087f5; | |||||
| box-shadow: none !important; | |||||
| padding: 0; | |||||
| min-height: 5rem; | |||||
| border-radius: 4px; | |||||
| } | |||||
| .dataset .dataset-files #dataset .dz-preview.dz-file-preview, | |||||
| .dataset .dataset-files #dataset .dz-preview.dz-processing { | |||||
| display: flex; | |||||
| align-items: center; | |||||
| } | |||||
| .dataset .dataset-files #dataset .dz-preview { | |||||
| border-bottom: 1px solid #dadce0; | |||||
| min-height: 0; | |||||
| } | |||||
| .upload-info{ | |||||
| margin-top: 0.2em; | |||||
| } | |||||
| </style> | |||||
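Note: the block above deletes an entire single-file component that carried the browser side of the chunked-upload protocol: look up existing chunks by MD5, open a multipart upload, request a presigned URL per chunk, PUT the bytes, then complete the upload. The endpoint paths below appear in the removed code; everything else in this condensed sketch (names, control flow, omitted etag bookkeeping and error handling) is illustrative rather than a drop-in replacement:

    // Condensed sketch of the chunked-upload handshake the removed component implemented.
    import axios from 'axios';
    import qs from 'qs';

    async function uploadInChunks(file, md5, csrf, type, chunkSize = 1024 * 1024 * 64) {
      // 1. Ask the server whether this file (by MD5) was seen before and which chunks exist.
      const { data: state } = await axios.get('/attachments/get_chunks', {
        params: { md5, type, file_name: file.name, _csrf: csrf },
      });
      if (state.uploaded === '1') return state; // already stored: nothing to transfer

      // 2. Open a new multipart upload if there is no resumable one.
      let { uploadID, uuid } = state;
      const totalChunkCounts = Math.max(1, Math.ceil(file.size / chunkSize));
      if (!uploadID || !uuid) {
        const { data } = await axios.get('/attachments/new_multipart', {
          params: { totalChunkCounts, md5, size: file.size, fileType: file.type, type, file_name: file.name, _csrf: csrf },
        });
        ({ uploadID, uuid } = data);
      }

      // 3. For every chunk: fetch a presigned URL, then PUT the bytes straight to storage.
      for (let i = 0; i < totalChunkCounts; i++) {
        const start = i * chunkSize;
        const end = Math.min(file.size, start + chunkSize);
        const { data: urlData } = await axios.get('/attachments/get_multipart_url', {
          params: { uuid, uploadID, size: end - start, chunkNumber: i + 1, type, file_name: file.name, _csrf: csrf },
        });
        await axios.put(urlData.url, file.slice(start, end), { headers: { 'Content-Type': '' } });
      }

      // 4. Tell the server the multipart upload is complete so it can assemble the object.
      return axios.post('/attachments/complete_multipart', qs.stringify({
        uuid, uploadID, file_name: file.name, size: file.size, dataset_id: file.datasetId, type, _csrf: csrf,
      }));
    }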
| @@ -63,19 +63,28 @@ | |||||
| </template> | </template> | ||||
| </el-table-column> | </el-table-column> | ||||
| <el-table-column | <el-table-column | ||||
| prop="CodeMergeCount" | |||||
| label="PR数" | |||||
| prop="UserIndex" | |||||
| label="归一化用户指数" | |||||
| width="120px" | |||||
| align="center"> | align="center"> | ||||
| </el-table-column> | |||||
| <template slot-scope="scope"> | |||||
| {{scope.row.UserIndex | rounding}} | |||||
| </template> | |||||
| </el-table-column> | |||||
| <el-table-column | <el-table-column | ||||
| prop="UserIndex" | |||||
| prop="UserIndexPrimitive" | |||||
| label="用户指数" | label="用户指数" | ||||
| width="120px" | width="120px" | ||||
| align="center"> | align="center"> | ||||
| <template slot-scope="scope"> | <template slot-scope="scope"> | ||||
| {{scope.row.UserIndex | rounding}} | |||||
| {{scope.row.UserIndexPrimitive | rounding}} | |||||
| </template> | </template> | ||||
| </el-table-column> | </el-table-column> | ||||
| <el-table-column | |||||
| prop="CodeMergeCount" | |||||
| label="PR数" | |||||
| align="center"> | |||||
| </el-table-column> | |||||
| <el-table-column | <el-table-column | ||||
| prop="CommitCount" | prop="CommitCount" | ||||
| label="commit数" | label="commit数" | ||||
| @@ -160,6 +169,48 @@ | |||||
| label="提交模型数" | label="提交模型数" | ||||
| width="120px" | width="120px" | ||||
| align="center"> | align="center"> | ||||
| </el-table-column> | |||||
| <el-table-column | |||||
| prop="FocusOtherUser" | |||||
| label="关注他人数" | |||||
| width="120px" | |||||
| align="center"> | |||||
| </el-table-column> | |||||
| <el-table-column | |||||
| prop="CollectDataset" | |||||
| label="收藏数据集" | |||||
| width="120px" | |||||
| align="center"> | |||||
| </el-table-column> | |||||
| <el-table-column | |||||
| prop="CollectedDataset" | |||||
| label="被收藏数据集" | |||||
| width="120px" | |||||
| align="center"> | |||||
| </el-table-column> | |||||
| <el-table-column | |||||
| prop="RecommendDataset" | |||||
| label="被推荐数据集数" | |||||
| width="120px" | |||||
| align="center"> | |||||
| </el-table-column> | |||||
| <el-table-column | |||||
| prop="CollectImage" | |||||
| label="收藏镜像数" | |||||
| width="120px" | |||||
| align="center"> | |||||
| </el-table-column> | |||||
| <el-table-column | |||||
| prop="CollectedImage" | |||||
| label="被收藏镜像数" | |||||
| width="120px" | |||||
| align="center"> | |||||
| </el-table-column> | |||||
| <el-table-column | |||||
| prop="RecommendImage" | |||||
| label="被推荐镜像数" | |||||
| width="120px" | |||||
| align="center"> | |||||
| </el-table-column> | </el-table-column> | ||||
| <el-table-column | <el-table-column | ||||
| prop="RegistDate" | prop="RegistDate" | ||||
| @@ -214,7 +265,7 @@ | |||||
| value_time: '', | value_time: '', | ||||
| search:'', | search:'', | ||||
| data:'', | data:'', | ||||
| columns: [{title: 'ID',key: 'ID'},{title: '用户名',key: 'Name'},{title: 'PR数',key: 'CodeMergeCount'},{title: 'commit数',key:'CommitCount'},{title: '提出任务数',key: 'IssueCount'},{title: '评论数',key: 'CommentCount'},{title: '关注项目数',key: 'FocusRepoCount'},{title: '点赞项目数',key: 'StarRepoCount'},{title: '登录次数',key: 'LoginCount'},{title:'关注者数',key:'WatchedCount'},{title:'commit代码行数',key:'CommitCodeSize'},{title:'已解决任务数',key:'SolveIssueCount'},{title:'百科页面贡献次数',key:'EncyclopediasCount'},{title:'创建项目',key:'CreateRepoCount'},{title:'用户注册时间',key:'RegistDate'},{title:'云脑任务数',key:'CloudBrainTaskNum'},{title:'云脑运行时间(小时)',key:'CloudBrainRunTime'},{title:'上传(提交)数据集文件数',key:'CommitDatasetNum'},{title:'提交模型数',key:'CommitModelCount'},{title:'用户指数',key:'UserIndex'},{title:'系统统计时间',key:'CountDate'}], | |||||
| columns: [{title: 'ID',key: 'ID'},{title: '用户名',key: 'Name'},{title: 'PR数',key: 'CodeMergeCount'},{title: 'commit数',key:'CommitCount'},{title: '提出任务数',key: 'IssueCount'},{title: '评论数',key: 'CommentCount'},{title: '关注项目数',key: 'FocusRepoCount'},{title: '点赞项目数',key: 'StarRepoCount'},{title: '登录次数',key: 'LoginCount'},{title:'关注者数',key:'WatchedCount'},{title:'commit代码行数',key:'CommitCodeSize'},{title:'已解决任务数',key:'SolveIssueCount'},{title:'百科页面贡献次数',key:'EncyclopediasCount'},{title:'创建项目',key:'CreateRepoCount'},{title:'用户注册时间',key:'RegistDate'},{title:'云脑任务数',key:'CloudBrainTaskNum'},{title:'云脑运行时间(小时)',key:'CloudBrainRunTime'},{title:'上传(提交)数据集文件数',key:'CommitDatasetNum'},{title:'提交模型数',key:'CommitModelCount'},{title:'归一化用户指数',key:'UserIndex'},{title:'用户指数',key:'UserIndexPrimitive'},{title:'关注他人数',key:'FocusOtherUser'},{title:'收藏数据集',key:'CollectDataset'},{title:'被收藏数据集',key:'CollectedDataset'},{title:'被推荐数据集数',key:'RecommendDataset'},{title:'收藏镜像数',key:'CollectImage'},{title:'被收藏镜像数',key:'CollectedImage'},{title:'被推荐镜像数',key:'RecommendImage'},{title:'系统统计时间',key:'CountDate'}], | |||||
| blob:'', | blob:'', | ||||
| fileName:'', | fileName:'', | ||||
| dynamic:7, | dynamic:7, | ||||
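Note: the statistics table and the export column list now distinguish the raw user index (`UserIndexPrimitive`) from a normalized one (`UserIndex`, labelled 归一化用户指数, i.e. normalized user index) and add several engagement columns (follows, collected/recommended datasets and images). The patch does not show how the normalization is computed; assuming it is a simple min-max rescaling, it would look like the sketch below (assumption only, not confirmed by the code):

    // Assumption: UserIndex is UserIndexPrimitive rescaled to [0, 1] by min-max normalization.
    function normalize(values) {
      const min = Math.min(...values);
      const max = Math.max(...values);
      if (max === min) return values.map(() => 0);
      return values.map((v) => (v - min) / (max - min));
    }

    console.log(normalize([3.2, 7.5, 12.1])); // -> [0, 0.483..., 1]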
| @@ -1,6 +1,7 @@ | |||||
| <template> | <template> | ||||
| <div> | |||||
| <div > | |||||
| <div class="ui container" style="width: 80%;"> | <div class="ui container" style="width: 80%;"> | ||||
| <div class="ui grid"> | |||||
| <div class="row" style="border: 1px solid #d4d4d5;margin-top: 15px;padding-top: 0;"> | <div class="row" style="border: 1px solid #d4d4d5;margin-top: 15px;padding-top: 0;"> | ||||
| <div class="ui attached segment"> | <div class="ui attached segment"> | ||||
| @@ -11,12 +12,10 @@ | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| <el-row style="padding: 1rem;"> | |||||
| <el-col :span="2" style="margin-right: 1rem;"> | |||||
| <el-checkbox v-model="checked" style="padding: 0.5rem 1rem;">仅显示平台推荐</el-checkbox> | |||||
| </el-col> | |||||
| <el-col :span="6"> | |||||
| <el-dropdown @command="handleCommand" trigger="click" style="border: 1px solid rgba(34,36,38,.15);border-radius: 4px;padding: 0.5rem 1rem;"> | |||||
| <div class="ui ten wide column" style="margin: 1rem 0;"> | |||||
| <el-checkbox v-model="checked" style="padding: 0.5rem 1rem;">仅显示平台推荐</el-checkbox> | |||||
| <el-dropdown @command="handleCommand" trigger="click" style="border: 1px solid rgba(34,36,38,.15);border-radius: 4px;padding: 0.5rem 1rem;"> | |||||
| <span class="el-dropdown-link"> | <span class="el-dropdown-link"> | ||||
| {{dropdownPrivate}}<i class="el-icon-caret-bottom el-icon--right"></i> | {{dropdownPrivate}}<i class="el-icon-caret-bottom el-icon--right"></i> | ||||
| </span> | </span> | ||||
| @@ -25,10 +24,12 @@ | |||||
| <el-dropdown-item :command="{label:'公开',private:false}">公开</el-dropdown-item> | <el-dropdown-item :command="{label:'公开',private:false}">公开</el-dropdown-item> | ||||
| <el-dropdown-item :command="{label:'公开',private:true}">私有</el-dropdown-item> | <el-dropdown-item :command="{label:'公开',private:true}">私有</el-dropdown-item> | ||||
| </el-dropdown-menu> | </el-dropdown-menu> | ||||
| </el-dropdown> | |||||
| </el-col> | |||||
| </el-row> | |||||
| <el-row> | |||||
| </el-dropdown> | |||||
| </div> | |||||
| <div class="ui six wide column right aligned" style="margin: 1rem 0;"> | |||||
| <a class="ui blue small button" href="/admin/images/commit_image">创建云脑镜像</a> | |||||
| </div> | |||||
| <div class="ui sixteen wide column" style="padding: 0;"> | |||||
| <el-table | <el-table | ||||
| :data="tableDataCustom" | :data="tableDataCustom" | ||||
| style="width: 100%" | style="width: 100%" | ||||
| @@ -116,12 +117,8 @@ | |||||
| <svg width="1.4em" height="1.4em" viewBox="0 0 32 32" class="heart-stroke"><path d="M4.4 6.54c-1.761 1.643-2.6 3.793-2.36 6.056.24 2.263 1.507 4.521 3.663 6.534a29110.9 29110.9 0 0010.296 9.633l10.297-9.633c2.157-2.013 3.424-4.273 3.664-6.536.24-2.264-.599-4.412-2.36-6.056-1.73-1.613-3.84-2.29-6.097-1.955-1.689.25-3.454 1.078-5.105 2.394l-.4.319-.398-.319c-1.649-1.316-3.414-2.143-5.105-2.394a7.612 7.612 0 00-1.113-.081c-1.838 0-3.541.694-4.983 2.038z"></path></svg> | <svg width="1.4em" height="1.4em" viewBox="0 0 32 32" class="heart-stroke"><path d="M4.4 6.54c-1.761 1.643-2.6 3.793-2.36 6.056.24 2.263 1.507 4.521 3.663 6.534a29110.9 29110.9 0 0010.296 9.633l10.297-9.633c2.157-2.013 3.424-4.273 3.664-6.536.24-2.264-.599-4.412-2.36-6.056-1.73-1.613-3.84-2.29-6.097-1.955-1.689.25-3.454 1.078-5.105 2.394l-.4.319-.398-.319c-1.649-1.316-3.414-2.143-5.105-2.394a7.612 7.612 0 00-1.113-.081c-1.838 0-3.541.694-4.983 2.038z"></path></svg> | ||||
| <span style="line-height: 2;margin-left:0.3rem;">{{scope.row.numStars}}</span> | <span style="line-height: 2;margin-left:0.3rem;">{{scope.row.numStars}}</span> | ||||
| </div> | </div> | ||||
| <template v-if="!scope.row.isPrivate"> | |||||
| <span style="padding: 0 1rem;color: rgb(250, 140, 22);cursor:pointer;" v-if="scope.row.type==5" @click="unSetRecommend(scope.$index,scope.row.id)">取消推荐</span> | <span style="padding: 0 1rem;color: rgb(250, 140, 22);cursor:pointer;" v-if="scope.row.type==5" @click="unSetRecommend(scope.$index,scope.row.id)">取消推荐</span> | ||||
| <span style="padding: 0 1rem;color: rgb(19, 194, 141);cursor:pointer;" v-else @click="setRecommend(scope.$index,scope.row.id)">设为推荐</span> | |||||
| </template> | |||||
| <span style="padding: 0 1rem;color: rgb(19, 194, 141);cursor:pointer;" v-if="scope.row.type!==5 && !scope.row.isPrivate" @click="setRecommend(scope.$index,scope.row.id)">设为推荐</span> | |||||
| <span style="padding: 0 1rem;color:#0366d6;cursor:pointer;" @click="copyUrl(scope.row.place)">复制地址</span> | <span style="padding: 0 1rem;color:#0366d6;cursor:pointer;" @click="copyUrl(scope.row.place)">复制地址</span> | ||||
| <div style="padding-left:1rem;cursor:pointer;"> | <div style="padding-left:1rem;cursor:pointer;"> | ||||
| <el-dropdown size="medium"> | <el-dropdown size="medium"> | ||||
| @@ -138,7 +135,7 @@ | |||||
| </template> | </template> | ||||
| </el-table-column> | </el-table-column> | ||||
| </el-table> | </el-table> | ||||
| </el-row> | |||||
| </div> | |||||
| <div class="ui container" style="padding:2rem 0;text-align:center"> | <div class="ui container" style="padding:2rem 0;text-align:center"> | ||||
| <el-pagination | <el-pagination | ||||
| background | background | ||||
| @@ -152,6 +149,7 @@ | |||||
| </el-pagination> | </el-pagination> | ||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| </div> | |||||
| </div> | </div> | ||||
| </div> | </div> | ||||
| @@ -26,6 +26,15 @@ export default async function initImage(){ | |||||
| } | } | ||||
| ] | ] | ||||
| }, | }, | ||||
| place:{ | |||||
| identifier : 'place', | |||||
| rules: [ | |||||
| { | |||||
| type: 'empty', | |||||
| } | |||||
| ] | |||||
| }, | |||||
| } | } | ||||
| }) | }) | ||||
| } | } | ||||
| @@ -75,8 +84,9 @@ export default async function initImage(){ | |||||
| type:'POST', | type:'POST', | ||||
| data:formData, | data:formData, | ||||
| success:function(res){ | success:function(res){ | ||||
| console.log(res) | |||||
| if(res.Code===1){ | if(res.Code===1){ | ||||
| $('.ui.info.message').text(res.Message).show().delay(1500).fadeOut(); | |||||
| $('.ui.negative.message').text(res.Message).show().delay(2500).fadeOut(); | |||||
| }else if(res.Code==0){ | }else if(res.Code==0){ | ||||
| if(location.href.indexOf('imageAdmin')!==-1){ | if(location.href.indexOf('imageAdmin')!==-1){ | ||||
| location.href = `${window.config.AppSubUrl}/admin/images` | location.href = `${window.config.AppSubUrl}/admin/images` | ||||
| @@ -105,6 +115,11 @@ export default async function initImage(){ | |||||
| $("textarea[name='description']").parent().addClass('error') | $("textarea[name='description']").parent().addClass('error') | ||||
| return false | return false | ||||
| } | } | ||||
| if($("input[name='place']").length>0&&!$("input[name='place']").val()){ | |||||
| console.log("1111111",$("input[name='place']")) | |||||
| $("input[name='place']").parent().addClass('error') | |||||
| return false | |||||
| } | |||||
| const postData = { | const postData = { | ||||
| _csrf:$("input[name='_csrf']").val(), | _csrf:$("input[name='_csrf']").val(), | ||||
| @@ -115,6 +130,10 @@ export default async function initImage(){ | |||||
| topics:$("input[name='topics']").val(), | topics:$("input[name='topics']").val(), | ||||
| id:$("input[name='id']").val() | id:$("input[name='id']").val() | ||||
| } | } | ||||
| if($("input[name='place']").val()&&$("input[name='isRecommend']:checked").val()){ | |||||
| postData.isRecommend = $("input[name='isRecommend']:checked").val() | |||||
| postData.place = $("input[name='place']").val() | |||||
| } | |||||
| let formData = $params(postData) | let formData = $params(postData) | ||||
| if($("input[name='edit']").val()=="edit"){ | if($("input[name='edit']").val()=="edit"){ | ||||
| postImage(formData) | postImage(formData) | ||||
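isRecommend and place are only merged into postData when the place input has a value and the isRecommend input is checked; $params (defined elsewhere in this file) is then assumed to url-encode the object before it is POSTed. A sketch of that serialization under that assumption:

    // Assumed shape of the $params helper used above: convert a plain object
    // into an application/x-www-form-urlencoded string.
    function $params(obj) {
      const search = new URLSearchParams();
      Object.keys(obj).forEach((key) => {
        if (obj[key] !== undefined && obj[key] !== null) {
          search.append(key, obj[key]);
        }
      });
      return search.toString();
    }

    // $params({ _csrf: 'token', place: 'dockerhub.com/x/y:1.0', isRecommend: 'true' })
    // -> "_csrf=token&place=dockerhub.com%2Fx%2Fy%3A1.0&isRecommend=true"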
| @@ -143,15 +162,16 @@ export default async function initImage(){ | |||||
| } | } | ||||
| }) | }) | ||||
| } | } | ||||
| return false | |||||
| }) | }) | ||||
| $('#cancel_submit_image').click(()=>{ | $('#cancel_submit_image').click(()=>{ | ||||
| console.log(pageform) | |||||
| if(link.includes('cloudbrain')){ | if(link.includes('cloudbrain')){ | ||||
| let repoLink = link.split('cloudbrain')[0] | let repoLink = link.split('cloudbrain')[0] | ||||
| location.href = `${window.config.AppSubUrl}${repoLink}debugjob?debugListType=all` | location.href = `${window.config.AppSubUrl}${repoLink}debugjob?debugListType=all` | ||||
| }else if(pageform=='imageSquare'){ | }else if(pageform=='imageSquare'){ | ||||
| location.href = `${window.config.AppSubUrl}/explore/images?type=myimage` | location.href = `${window.config.AppSubUrl}/explore/images?type=myimage` | ||||
| }else if(pageform=='imageAdmin'){ | |||||
| }else if(pageform){ | |||||
| location.href = `${window.config.AppSubUrl}/admin/images` | location.href = `${window.config.AppSubUrl}/admin/images` | ||||
| } | } | ||||
| }) | }) | ||||
| @@ -34,7 +34,6 @@ import { | |||||
| } from './features/notification.js'; | } from './features/notification.js'; | ||||
| import {createCodeEditor} from './features/codeeditor.js'; | import {createCodeEditor} from './features/codeeditor.js'; | ||||
| import MinioUploader from './components/MinioUploader.vue'; | import MinioUploader from './components/MinioUploader.vue'; | ||||
| import ObsUploader from './components/ObsUploader.vue'; | |||||
| import EditAboutInfo from './components/EditAboutInfo.vue'; | import EditAboutInfo from './components/EditAboutInfo.vue'; | ||||
| // import Images from './components/Images.vue'; | // import Images from './components/Images.vue'; | ||||
| import EditTopics from './components/EditTopics.vue'; | import EditTopics from './components/EditTopics.vue'; | ||||
| @@ -2974,7 +2973,6 @@ $(document).ready(async () => { | |||||
| initCodeView(); | initCodeView(); | ||||
| initVueApp(); | initVueApp(); | ||||
| initVueUploader(); | initVueUploader(); | ||||
| initObsUploader(); | |||||
| initVueDataset(); | initVueDataset(); | ||||
| initVueEditAbout(); | initVueEditAbout(); | ||||
| initVueEditTopic(); | initVueEditTopic(); | ||||
| @@ -3718,6 +3716,63 @@ function initVueEditAbout() { | |||||
| } | } | ||||
| function initVueDataset() { | function initVueDataset() { | ||||
| if($('#dataset_check').length){ | |||||
| if(location.search.indexOf('recommend=true')!==-1){ | |||||
| $('#dataset_check').checkbox('set checked') | |||||
| }else{ | |||||
| $('#dataset_check').checkbox('set unchecked') | |||||
| } | |||||
| $('#dataset_check').checkbox({ | |||||
| onChecked: function() { | |||||
| if(location.search){ | |||||
| const params = new URLSearchParams(location.search) | |||||
| if(params.has('recommend')){ | |||||
| params.delete('recommend') | |||||
| location.href = AppSubUrl + location.pathname + '?' + params.toString() + '&recommend=true' | |||||
| }else{ | |||||
| location.href = `${window.config.AppSubUrl}/admin/datasets${location.search}&recommend=true` | |||||
| } | |||||
| }else{ | |||||
| location.href = `${window.config.AppSubUrl}/admin/datasets?recommend=true` | |||||
| } | |||||
| }, | |||||
| onUnchecked: function() { | |||||
| if(location.search=='?recommend=true'){ | |||||
| location.href = AppSubUrl + location.pathname | |||||
| }else{ | |||||
| const params = new URLSearchParams(location.search) | |||||
| params.delete('recommend') | |||||
| location.href = AppSubUrl + location.pathname + '?' + params.toString() | |||||
| } | |||||
| }, | |||||
| }) | |||||
| } | |||||
| $('.set_dataset').on('click', function(){ | |||||
| const $this = $(this); | |||||
| let link = $this.data('url') | |||||
| $.ajax({ | |||||
| url:link, | |||||
| type:'PUT', | |||||
| success:function(res){ | |||||
| // Code 0 means the recommend flag was updated; go back to the admin dataset list | |||||
| if(res.Code==0){ | |||||
| window.location.href = `${window.config.AppSubUrl}/admin/datasets` | |||||
| }else{ | |||||
| $('.ui.negative.message').text(res.Message).show().delay(1500).fadeOut(); | |||||
| } | |||||
| }, | |||||
| error: function(xhr){ | |||||
| // hide loading | |||||
| // only executed when the request fails (status code is not 200) | |||||
| $('.ui.negative.message').html(xhr.responseText).show().delay(1500).fadeOut(); | |||||
| console.log(xhr) | |||||
| }, | |||||
| complete:function(xhr){ | |||||
| // $("#mask").css({"display":"none","z-index":"1"}) | |||||
| } | |||||
| }) | |||||
| }); | |||||
| const el = document.getElementById('dataset-base'); | const el = document.getElementById('dataset-base'); | ||||
| if (!el) { | if (!el) { | ||||
| return; | return; | ||||
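Each element with the set_dataset class is expected to carry its target endpoint in a data-url attribute; the handler above issues a PUT and reloads the admin list when the response Code is 0. A framework-free sketch of the same round trip — the fetch-based wiring below is an illustration, not the code this change ships:

    // Hypothetical fetch() equivalent of the jQuery .set_dataset handler.
    document.querySelectorAll('.set_dataset').forEach((el) => {
      el.addEventListener('click', async () => {
        const res = await fetch(el.dataset.url, { method: 'PUT' });
        const body = await res.json().catch(() => ({}));
        if (res.ok && body.Code === 0) {
          window.location.href = `${window.config.AppSubUrl}/admin/datasets`;
        } else {
          // surface the server message the same way the existing handler does
          $('.ui.negative.message').text(body.Message || res.statusText).show().delay(1500).fadeOut();
        }
      });
    });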
| @@ -3775,24 +3830,13 @@ function initVueDataset() { | |||||
| if(document.getElementById('dataset-file-desc')){ | if(document.getElementById('dataset-file-desc')){ | ||||
| dataset_file_desc = document.getElementById('dataset-file-desc').value | dataset_file_desc = document.getElementById('dataset-file-desc').value | ||||
| } | } | ||||
| // getEditInit(){ | |||||
| // if($('#dataset-edit-value')){ | |||||
| // $this = $('#dataset-edit-value') | |||||
| // this.ruleForm.title = $this.data('edit-title') || '' | |||||
| // this.ruleForm.description = $this.data('edit-description') || '' | |||||
| // this.ruleForm.category = $this.data('edit-category') || '' | |||||
| // this.ruleForm.task = $this.data('edit-task') || '' | |||||
| // this.ruleForm.license = $this.data('edit-license') || '' | |||||
| // this.ruleForm.id = $this.data('edit-id')|| '' | |||||
| // } | |||||
| // }, | |||||
| new Vue({ | new Vue({ | ||||
| delimiters: ['${', '}'], | delimiters: ['${', '}'], | ||||
| el, | el, | ||||
| data: { | data: { | ||||
| suburl: AppSubUrl, | suburl: AppSubUrl, | ||||
| url:'', | url:'', | ||||
| checked:false, | |||||
| type:0, | type:0, | ||||
| desc:'', | desc:'', | ||||
| descfile:'', | descfile:'', | ||||
| @@ -3870,8 +3914,7 @@ function initVueDataset() { | |||||
| }, | }, | ||||
| }, | }, | ||||
| components: { | components: { | ||||
| MinioUploader, | |||||
| ObsUploader | |||||
| MinioUploader | |||||
| }, | }, | ||||
| mounted(){ | mounted(){ | ||||
| // if(document.getElementById('postPath')){ | // if(document.getElementById('postPath')){ | ||||
| @@ -3890,6 +3933,12 @@ function initVueDataset() { | |||||
| this.getCurrentRepoDataset(this.repolink,this.cloudbrainType) | this.getCurrentRepoDataset(this.repolink,this.cloudbrainType) | ||||
| } | } | ||||
| const params = new URLSearchParams(location.search) | |||||
| if (params.has('recommend') && params.get('recommend')=='true'){ | |||||
| this.checked = true | |||||
| }else{ | |||||
| this.checked = false | |||||
| } | |||||
| }, | }, | ||||
| created(){ | created(){ | ||||
| if(document.getElementById('postPath')){ | if(document.getElementById('postPath')){ | ||||
| @@ -3930,6 +3979,30 @@ function initVueDataset() { | |||||
| } | } | ||||
| }, | }, | ||||
| handleCheckedChange(val){ | |||||
| if(val){ | |||||
| if(location.search){ | |||||
| const params = new URLSearchParams(location.search) | |||||
| if(params.has('recommend')){ | |||||
| params.delete('recommend') | |||||
| let search = params.toString() | |||||
| location.href = `${AppSubUrl}/explore/datasets?${search}&recommend=${val}` | |||||
| }else{ | |||||
| location.href = `${AppSubUrl}/explore/datasets${location.search}&recommend=${val}` | |||||
| } | |||||
| }else{ | |||||
| location.href = `${AppSubUrl}/explore/datasets?recommend=${val}` | |||||
| } | |||||
| }else{ | |||||
| if(location.search=='?recommend=true'){ | |||||
| location.href = AppSubUrl + location.pathname | |||||
| }else{ | |||||
| const params = new URLSearchParams(location.search) | |||||
| params.delete('recommend') | |||||
| location.href = AppSubUrl + location.pathname + '?' + params.toString() | |||||
| } | |||||
| } | |||||
| }, | |||||
| createDataset(formName){ | createDataset(formName){ | ||||
| let _this = this | let _this = this | ||||
| this.$refs[formName].validate((valid)=>{ | this.$refs[formName].validate((valid)=>{ | ||||
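handleCheckedChange and the #dataset_check handlers above both rebuild the query string by hand when the recommend filter is toggled. A small helper the toggle could be factored into — the function name is a suggestion, and the joining of AppSubUrl with location.pathname simply mirrors what the existing handlers do:

    // Hypothetical refactor: return the current URL with recommend=true added
    // or removed while every other query parameter is preserved.
    function toggleRecommendParam(enabled) {
      const params = new URLSearchParams(location.search);
      params.delete('recommend');
      if (enabled) {
        params.set('recommend', 'true');
      }
      const query = params.toString();
      return AppSubUrl + location.pathname + (query ? `?${query}` : '');
    }

    // usage inside the Vue method:
    // handleCheckedChange(val) { location.href = toggleRecommendParam(val); }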
| @@ -4396,19 +4469,6 @@ function initVueDataAnalysis() { | |||||
| render: h => h(DataAnalysis) | render: h => h(DataAnalysis) | ||||
| }); | }); | ||||
| } | } | ||||
| // newly added | |||||
| function initObsUploader() { | |||||
| const el = document.getElementById('obsUploader'); | |||||
| if (!el) { | |||||
| return; | |||||
| } | |||||
| new Vue({ | |||||
| el: '#obsUploader', | |||||
| components: {ObsUploader}, | |||||
| template: '<ObsUploader />' | |||||
| }); | |||||
| } | |||||
| function initVueWxAutorize() { | function initVueWxAutorize() { | ||||
| const el = document.getElementById('WxAutorize'); | const el = document.getElementById('WxAutorize'); | ||||
| if (!el) { | if (!el) { | ||||
| @@ -248,7 +248,22 @@ footer .column{margin-bottom:0!important; padding-bottom:0!important;} | |||||
| .icon-bind{background-position: -550px -52px;} | .icon-bind{background-position: -550px -52px;} | ||||
| .icon-unbind{background-position: -568px -52px;} | .icon-unbind{background-position: -568px -52px;} | ||||
| .CREATING, .STOPPING, .DELETING, .STARTING, i.WAITING ,.INIT,.KILLING{display:inline-block;background-image:url('/img/loading.gif');background-repeat:no-repeat;width:16px;height:16px;background-size:16px 16px;margin-right:5px;} | .CREATING, .STOPPING, .DELETING, .STARTING, i.WAITING ,.INIT,.KILLING{display:inline-block;background-image:url('/img/loading.gif');background-repeat:no-repeat;width:16px;height:16px;background-size:16px 16px;margin-right:5px;} | ||||
| .icon-to-top{ | |||||
| background:url("/img/icons.svg"); | |||||
| background-position: -540px -208px; | |||||
| width: 30px; | |||||
| height: 30px; | |||||
| display: inline-block; | |||||
| cursor: pointer; | |||||
| } | |||||
| .icon-to-bottom{ | |||||
| background:url("/img/icons.svg"); | |||||
| background-position: -574px -208px; | |||||
| width: 30px; | |||||
| height: 30px; | |||||
| display: inline-block; | |||||
| cursor: pointer; | |||||
| } | |||||
| i.COMPLETED,i.SUCCEEDED{display:inline-block;width:18px;height:18px;background:url("/img/icons.svg");background-position: -496px -52px;background-position: -441px -52px;} | i.COMPLETED,i.SUCCEEDED{display:inline-block;width:18px;height:18px;background:url("/img/icons.svg");background-position: -496px -52px;background-position: -441px -52px;} | ||||
| .text_over{ | .text_over{ | ||||
| overflow: hidden; | overflow: hidden; | ||||