diff --git a/models/dataset.go b/models/dataset.go index e841261c7..d3a142742 100755 --- a/models/dataset.go +++ b/models/dataset.go @@ -155,10 +155,6 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { if opts.RepoID > 0 { cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID}) } - if opts.RecommendOnly { - cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly}) - } - if opts.IncludePublic { cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) cond = cond.And(builder.Eq{"attachment.is_private": false}) @@ -197,6 +193,10 @@ func generateFilterCond(opts *SearchDatasetOptions, cond builder.Cond) builder.C cond = cond.And(builder.Eq{"dataset.license": opts.License}) } + if opts.RecommendOnly { + cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly}) + } + return cond } diff --git a/models/helper_environment.go b/models/helper_environment.go index bc9d4c8fc..7248f6f61 100644 --- a/models/helper_environment.go +++ b/models/helper_environment.go @@ -12,15 +12,19 @@ import ( // env keys for git hooks need const ( - EnvRepoName = "GITEA_REPO_NAME" - EnvRepoUsername = "GITEA_REPO_USER_NAME" - EnvRepoIsWiki = "GITEA_REPO_IS_WIKI" - EnvPusherName = "GITEA_PUSHER_NAME" - EnvPusherEmail = "GITEA_PUSHER_EMAIL" - EnvPusherID = "GITEA_PUSHER_ID" - EnvKeyID = "GITEA_KEY_ID" - EnvIsDeployKey = "GITEA_IS_DEPLOY_KEY" - EnvIsInternal = "GITEA_INTERNAL_PUSH" + EnvRepoName = "GITEA_REPO_NAME" + EnvRepoUsername = "GITEA_REPO_USER_NAME" + EnvRepoIsWiki = "GITEA_REPO_IS_WIKI" + EnvPusherName = "GITEA_PUSHER_NAME" + EnvPusherEmail = "GITEA_PUSHER_EMAIL" + EnvPusherID = "GITEA_PUSHER_ID" + EnvKeyID = "GITEA_KEY_ID" + EnvIsDeployKey = "GITEA_IS_DEPLOY_KEY" + EnvIsInternal = "GITEA_INTERNAL_PUSH" + EnvRepoSize = "REPO_CURRENT_SIZE" + EnvRepoMaxFileSize = "REPO_MAX_FILE_SIZE" + EnvRepoMaxSize = "REPO_MAX_SIZE" + EnvPushSizeCheckFlag = "PUSH_SIZE_CHECK_FLAG" ) // InternalPushingEnvironment returns an os environment to switch off 
hooks on push diff --git a/models/issue.go b/models/issue.go index 19f00d5f3..3ed49ce42 100755 --- a/models/issue.go +++ b/models/issue.go @@ -775,6 +775,41 @@ func (issue *Issue) ChangeContent(doer *User, content string) (err error) { return sess.Commit() } +// ChangeRef changes issue ref, as the given user. +func (issue *Issue) ChangeRef(doer *User, newRef string) (err error) { + oldRef := issue.Ref + issue.Ref = newRef + if oldRef == newRef { + return nil + } + + sess := x.NewSession() + defer sess.Close() + if err = sess.Begin(); err != nil { + return err + } + + if err = updateIssueCols(sess, issue, "ref"); err != nil { + sess.Rollback() + return fmt.Errorf("UpdateIssueCols: %v", err) + } + + var opts = &CreateCommentOptions{ + Type: CommentTypeRef, + Doer: doer, + Repo: issue.Repo, + Issue: issue, + OldRef: oldRef, + NewRef: newRef, + } + if _, err = createComment(sess, opts); err != nil { + sess.Rollback() + return err + } + + return sess.Commit() +} + // GetTasks returns the amount of tasks in the issues content func (issue *Issue) GetTasks() int { return len(issueTasksPat.FindAllStringIndex(issue.Content, -1)) diff --git a/models/issue_comment.go b/models/issue_comment.go index 60d38452c..8197eba85 100755 --- a/models/issue_comment.go +++ b/models/issue_comment.go @@ -90,6 +90,8 @@ const ( CommentTypeReviewRequest // merge pull request CommentTypeMergePull + // Ref changed + CommentTypeRef ) // CommentTag defines comment tag type diff --git a/models/repo_list.go b/models/repo_list.go index 5bf0ecf03..253cc968c 100755 --- a/models/repo_list.go +++ b/models/repo_list.go @@ -221,6 +221,7 @@ const ( SearchOrderByHot SearchOrderBy = "(num_watches + num_stars + num_forks + clone_cnt) DESC" SearchOrderByActive SearchOrderBy = "(num_issues + num_pulls + num_commit) DESC" SearchOrderByWatches SearchOrderBy = "num_watches DESC" + SearchOrderByDefault SearchOrderBy = "recommend desc,num_stars DESC,updated_unix DESC" ) // SearchRepositoryCondition creates a query 
condition according search repository options diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index 34d84555c..2d7592baf 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -82,17 +82,27 @@ type UserBusinessAnalysisAll struct { DataDate string `xorm:"NULL"` //cloudbraintask - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysis struct { @@ -159,17 +169,27 @@ type UserBusinessAnalysis struct { DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int 
`xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysisQueryOptions struct { @@ -410,8 +430,10 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS log.Info("truncate all data from table: " + tableName) statictisSess.Exec("TRUNCATE TABLE " + tableName) - log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05")) - log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05")) + StartTimeNextDay := pageStartTime.AddDate(0, 0, 1) + EndTimeNextDay := pageEndTime.AddDate(0, 0, 1) + log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05") + " nextDay:" + 
StartTimeNextDay.Format("2006-01-02 15:04:05")) + log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05") + " nextDay:" + EndTimeNextDay.Format("2006-01-02 15:04:05")) start_unix := pageStartTime.Unix() end_unix := pageEndTime.Unix() @@ -426,8 +448,8 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS CommentCountMap := queryComment(start_unix, end_unix) FocusRepoCountMap := queryWatch(start_unix, end_unix) StarRepoCountMap := queryStar(start_unix, end_unix) - WatchedCountMap := queryFollow(start_unix, end_unix) - CommitCodeSizeMap := queryCommitCodeSize(start_unix, end_unix) + WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) + CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix()) CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix) SolveIssueCountMap := querySolveIssue(start_unix, end_unix) CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix) @@ -436,6 +458,12 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix) CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) AiModelManageMap := queryUserModel(start_unix, end_unix) + + CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix) + RecommendDataset := queryRecommedDataSet(start_unix, end_unix) + CollectImage, CollectedImage := queryImageStars(start_unix, end_unix) + RecommendImage := queryRecommedImage(start_unix, end_unix) + DataDate := currentTimeNow.Format("2006-01-02") + " 00:01" cond := "type != 1 and is_active=true" @@ -472,6 +500,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap) dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap) dateRecordAll.FocusRepoCount = 
getMapValue(dateRecordAll.ID, FocusRepoCountMap) + dateRecordAll.FocusOtherUser = getMapValue(dateRecordAll.ID, WatchOtherMap) dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap) dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap) dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap) @@ -496,13 +525,20 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap) dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap) - dateRecordAll.UserIndex = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight) - userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndex - if maxUserIndex < dateRecordAll.UserIndex { - maxUserIndex = dateRecordAll.UserIndex + dateRecordAll.CollectDataset = getMapValue(dateRecordAll.ID, CollectDataset) + dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset) + dateRecordAll.RecommendDataset = getMapValue(dateRecordAll.ID, RecommendDataset) + dateRecordAll.CollectImage = getMapValue(dateRecordAll.ID, CollectImage) + dateRecordAll.CollectedImage = getMapValue(dateRecordAll.ID, CollectedImage) + dateRecordAll.RecommendImage = getMapValue(dateRecordAll.ID, RecommendImage) + + dateRecordAll.UserIndexPrimitive = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight) + userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndexPrimitive + if maxUserIndex < dateRecordAll.UserIndexPrimitive { + maxUserIndex = dateRecordAll.UserIndexPrimitive } - if minUserIndex > dateRecordAll.UserIndex { - minUserIndex = dateRecordAll.UserIndex + if minUserIndex > dateRecordAll.UserIndexPrimitive { + minUserIndex = dateRecordAll.UserIndexPrimitive } dateRecordBatch = 
append(dateRecordBatch, dateRecordAll) if len(dateRecordBatch) >= BATCH_INSERT_SIZE { @@ -552,7 +588,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static insertBatchSql := "INSERT INTO public." + tableName + "(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " + - "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location) " + + "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive) " + "VALUES" for i, record := range dateRecords { @@ -560,7 +596,8 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static ", " + fmt.Sprint(record.IssueCount) + ", " + fmt.Sprint(record.CommentCount) + ", " + fmt.Sprint(record.FocusRepoCount) + ", " + fmt.Sprint(record.StarRepoCount) + ", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) + ", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) + - ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + 
fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "')" + ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "'," + + fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ")" if i < (len(dateRecords) - 1) { insertBatchSql += "," } @@ -628,7 +665,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, CommentCountMap := queryComment(start_unix, end_unix) FocusRepoCountMap := queryWatch(start_unix, end_unix) StarRepoCountMap := queryStar(start_unix, end_unix) - WatchedCountMap := queryFollow(start_unix, end_unix) + WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) 
CommitCodeSizeMap, err := GetAllUserKPIStats() if err != nil { @@ -643,6 +680,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix) CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) AiModelManageMap := queryUserModel(start_unix, end_unix) + + CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix) + RecommendDataset := queryRecommedDataSet(start_unix, end_unix) + CollectImage, CollectedImage := queryImageStars(start_unix, end_unix) + RecommendImage := queryRecommedImage(start_unix, end_unix) + statictisSess := xStatistic.NewSession() defer statictisSess.Close() @@ -683,13 +726,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap) dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap) dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap) - + dateRecord.FocusOtherUser = getMapValue(dateRecord.ID, WatchOtherMap) if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok { dateRecord.CommitCodeSize = 0 } else { dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines) } - dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap) dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap) dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap) @@ -715,7 +757,15 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap) dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap) - dateRecord.UserIndex = getUserIndex(dateRecord, 
ParaWeight) + + dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset) + dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset) + dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset) + dateRecord.CollectImage = getMapValue(dateRecord.ID, CollectImage) + dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage) + dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage) + + dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight) setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord) _, err = statictisSess.Insert(&dateRecord) if err != nil { @@ -765,7 +815,7 @@ func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, en userMetrics["TotalActivateRegistUser"] = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) + 1 } - if dateRecord.UserIndex > 0 || dateRecord.LoginCount > 0 { + if getUserActivate(dateRecord) > 0 { userMetrics["HasActivityUser"] = getMapKeyStringValue("HasActivityUser", userMetrics) + 1 } @@ -802,7 +852,12 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) - result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) + codeLine := float64(dateRecord.CommitCodeSize) + limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000) + if codeLine >= limitCodeLine { + codeLine = limitCodeLine + } + result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01) result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) result += float64(dateRecord.EncyclopediasCount) * 
getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) @@ -810,6 +865,34 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) + result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1) + result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1) + result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2) + result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1) + result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1) + result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2) + + return result +} + +func getUserActivate(dateRecord UserBusinessAnalysis) int { + var result int + result += dateRecord.CodeMergeCount + result += dateRecord.CommitCount + result += dateRecord.IssueCount + result += dateRecord.CommentCount + result += dateRecord.FocusRepoCount + result += dateRecord.StarRepoCount + result += dateRecord.SolveIssueCount + result += dateRecord.EncyclopediasCount + result += dateRecord.CreateRepoCount + result += dateRecord.CloudBrainTaskNum + result += dateRecord.CommitModelCount + result += dateRecord.CommitDatasetNum + result += dateRecord.FocusOtherUser + result += dateRecord.CollectDataset + result += dateRecord.CollectImage + result += dateRecord.CommitCodeSize return result } @@ -831,12 +914,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64 result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 
0.1) result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) - codeLine := float64(dateRecord.CommitCodeSize) / 1000 - limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 100) + codeLine := float64(dateRecord.CommitCodeSize) + limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000) if codeLine >= limitCodeLine { codeLine = limitCodeLine } - result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) + result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01) result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) @@ -844,6 +927,13 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64 result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) + result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1) + result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1) + result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2) + result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1) + result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1) + result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2) + return result } @@ -1134,17 +1224,18 @@ func queryStar(start_unix int64, end_unix int64) 
map[int64]int { return resultMap } -func queryFollow(start_unix int64, end_unix int64) map[int64]int { +func queryFollow(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { sess := x.NewSession() defer sess.Close() resultMap := make(map[int64]int) + resultFocusedByOtherMap := make(map[int64]int) cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) count, err := sess.Where(cond).Count(new(Follow)) if err != nil { log.Info("query follow error. return.") - return resultMap + return resultMap, resultFocusedByOtherMap } var indexTotal int64 indexTotal = 0 @@ -1160,6 +1251,11 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int { } else { resultMap[followRecord.FollowID] += 1 } + if _, ok := resultFocusedByOtherMap[followRecord.UserID]; !ok { + resultFocusedByOtherMap[followRecord.UserID] = 1 + } else { + resultFocusedByOtherMap[followRecord.UserID] += 1 + } } indexTotal += PAGE_SIZE @@ -1168,7 +1264,215 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int { } } - return resultMap + return resultMap, resultFocusedByOtherMap +} + +func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int { + sess := x.NewSession() + defer sess.Close() + userIdDdatasetMap := make(map[int64]int) + cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and recommend=true" + count, err := sess.Where(cond).Count(new(Dataset)) + if err != nil { + log.Info("query recommend dataset error. 
return.") + return userIdDdatasetMap + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,user_id,recommend").Where(cond).Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + datasetList := make([]*Dataset, 0) + sess.Find(&datasetList) + log.Info("query datasetList size=" + fmt.Sprint(len(datasetList))) + for _, datasetRecord := range datasetList { + if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok { + userIdDdatasetMap[datasetRecord.UserID] = 1 + } else { + userIdDdatasetMap[datasetRecord.UserID] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return userIdDdatasetMap +} + +func queryAllDataSet() (map[int64]int64, map[int64]int64) { + sess := x.NewSession() + defer sess.Close() + datasetUserIdMap := make(map[int64]int64) + userIdDdatasetMap := make(map[int64]int64) + count, err := sess.Count(new(Dataset)) + if err != nil { + log.Info("query dataset error. return.") + return datasetUserIdMap, userIdDdatasetMap + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,user_id").Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + datasetList := make([]*Dataset, 0) + sess.Find(&datasetList) + log.Info("query datasetList size=" + fmt.Sprint(len(datasetList))) + for _, datasetRecord := range datasetList { + datasetUserIdMap[datasetRecord.ID] = datasetRecord.UserID + if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok { + userIdDdatasetMap[datasetRecord.UserID] = 1 + } else { + userIdDdatasetMap[datasetRecord.UserID] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return datasetUserIdMap, userIdDdatasetMap +} + +func queryRecommedImage(start_unix int64, end_unix int64) map[int64]int { + sess := x.NewSession() + defer sess.Close() + userIdImageMap := make(map[int64]int) + cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and type=5" + count, err := 
sess.Where(cond).Count(new(Image)) + if err != nil { + log.Info("query recommend image error. return.") + return userIdImageMap + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,uid,type").Where(cond).Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + imageList := make([]*Image, 0) + sess.Find(&imageList) + log.Info("query imageList size=" + fmt.Sprint(len(imageList))) + for _, imageRecord := range imageList { + if _, ok := userIdImageMap[imageRecord.UID]; !ok { + userIdImageMap[imageRecord.UID] = 1 + } else { + userIdImageMap[imageRecord.UID] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return userIdImageMap +} + +func queryAllImage() (map[int64]int64, map[int64]int64) { + sess := x.NewSession() + defer sess.Close() + imageUserIdMap := make(map[int64]int64) + userIdDImageMap := make(map[int64]int64) + count, err := sess.Count(new(Image)) + if err != nil { + log.Info("query image error. return.") + return imageUserIdMap, userIdDImageMap + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,uid").Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + imageList := make([]*Image, 0) + sess.Find(&imageList) + log.Info("query imageList size=" + fmt.Sprint(len(imageList))) + for _, imageRecord := range imageList { + imageUserIdMap[imageRecord.ID] = imageRecord.UID + if _, ok := userIdDImageMap[imageRecord.UID]; !ok { + userIdDImageMap[imageRecord.UID] = 1 + } else { + userIdDImageMap[imageRecord.UID] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return imageUserIdMap, userIdDImageMap +} + +func queryDatasetStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { + sess := x.NewSession() + defer sess.Close() + datasetCollect := make(map[int64]int) + datasetCollected := make(map[int64]int) + datasetUserIdMap, _ := queryAllDataSet() + cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + 
fmt.Sprint(end_unix) + count, err := sess.Where(cond).Count(new(DatasetStar)) + if err != nil { + log.Info("query follow error. return.") + return datasetCollect, datasetCollected + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,uid,dataset_id").Table(new(DatasetStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + datasetStarList := make([]*DatasetStar, 0) + sess.Find(&datasetStarList) + log.Info("query datasetStarList size=" + fmt.Sprint(len(datasetStarList))) + for _, datasetStarRecord := range datasetStarList { + if _, ok := datasetCollect[datasetStarRecord.UID]; !ok { + datasetCollect[datasetStarRecord.UID] = 1 + } else { + datasetCollect[datasetStarRecord.UID] += 1 + } + if _, ok := datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]]; !ok { + datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] = 1 + } else { + datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return datasetCollect, datasetCollected +} + +func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { + sess := x.NewSession() + defer sess.Close() + imageCollect := make(map[int64]int) + imageCollected := make(map[int64]int) + imageUserIdMap, _ := queryAllImage() + cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + count, err := sess.Where(cond).Count(new(ImageStar)) + if err != nil { + log.Info("query follow error. 
return.") + return imageCollect, imageCollected + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,uid,image_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + imageStarList := make([]*ImageStar, 0) + sess.Find(&imageStarList) + log.Info("query imageStarList size=" + fmt.Sprint(len(imageStarList))) + for _, imageStarRecord := range imageStarList { + if _, ok := imageCollect[imageStarRecord.UID]; !ok { + imageCollect[imageStarRecord.UID] = 1 + } else { + imageCollect[imageStarRecord.UID] += 1 + } + if _, ok := imageCollected[imageUserIdMap[imageStarRecord.ImageID]]; !ok { + imageCollected[imageUserIdMap[imageStarRecord.ImageID]] = 1 + } else { + imageCollected[imageUserIdMap[imageStarRecord.ImageID]] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return imageCollect, imageCollected } func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { diff --git a/models/user_business_struct.go b/models/user_business_struct.go index 17d9f046f..86aecd545 100644 --- a/models/user_business_struct.go +++ b/models/user_business_struct.go @@ -45,17 +45,26 @@ type UserBusinessAnalysisCurrentYear struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL 
DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysisLast30Day struct { @@ -101,17 +110,26 @@ type UserBusinessAnalysisLast30Day struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + 
UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysisLastMonth struct { @@ -157,17 +175,26 @@ type UserBusinessAnalysisLastMonth struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 
0"` } type UserBusinessAnalysisCurrentMonth struct { @@ -213,17 +240,26 @@ type UserBusinessAnalysisCurrentMonth struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysisCurrentWeek struct { @@ -269,17 +305,27 @@ type UserBusinessAnalysisCurrentWeek struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - 
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysisYesterday struct { @@ -325,17 +371,27 @@ type UserBusinessAnalysisYesterday struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL 
DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserAnalysisPara struct { diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go index eaf680c65..f1d3cb344 100755 --- a/modules/cloudbrain/cloudbrain.go +++ b/modules/cloudbrain/cloudbrain.go @@ -475,6 +475,7 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e ComputeResource: task.ComputeResource, CreatedUnix: createTime, UpdatedUnix: createTime, + BranchName: task.BranchName, } err = models.RestartCloudbrain(task, newTask) diff --git a/modules/repository/hooks.go b/modules/repository/hooks.go index 6050f21f7..7bcc5b550 100644 --- a/modules/repository/hooks.go +++ b/modules/repository/hooks.go @@ -19,7 +19,11 @@ import ( "xorm.io/builder" ) -func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) { +const ( + SIZE_LIMIT_SCRIPT_NAME = "size_limit" +) + +func getHookTemplates() (hookNames, hookTpls, giteaHookTpls, sizeLimitTpls []string) { hookNames = []string{"pre-receive", "update", "post-receive"} hookTpls = []string{ fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename 
$0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType), @@ -31,6 +35,11 @@ func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) { fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf), fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf), } + sizeLimitTpls = []string{ + fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=1\n\n\nstatus=\"$EXIT_SUCCESS\"\n\n# skip this hook entirely if shell check is not open\ncheck_flag=${PUSH_SIZE_CHECK_FLAG}\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"${REPO_MAX_FILE_SIZE}\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. 
are referenced by\n# another branch or tag).\n\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" --tags=\\* | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)')\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\n# rewrite IFS to separate lines in $files\nIFS=$'\\n'\nfor file in $files; do\n # IFS must be unset before temp_array=(${file}), otherwise word splitting is wrong\n unset IFS\n temp_array=(${file})\n # add all commit files size\n push_size=`expr $push_size + ${temp_array[2]}`\n if [[ ${temp_array[2]} -gt $maxsize ]]; then\n\t if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\n\t fi\n\t echo -e \"\\033[31m- ${temp_array[3]} \\033[0m (ref: ${refname}) \"\n fi\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\n\nsizelimit_mb=\"${REPO_MAX_SIZE}\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size here means the size of the repo directory on the server \nreposize_b=${REPO_CURRENT_SIZE}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is larger than $sizelimit_mb Mb\"\n exit $EXIT_FAILURE\nfi\n\n\nexit
$EXIT_SUCCESS\n", setting.ScriptType, setting.CustomConf), + fmt.Sprintf(""), + fmt.Sprintf(""), + } return } @@ -41,7 +50,7 @@ func CreateDelegateHooks(repoPath string) error { // createDelegateHooks creates all the hooks scripts for the repo func createDelegateHooks(repoPath string) (err error) { - hookNames, hookTpls, giteaHookTpls := getHookTemplates() + hookNames, hookTpls, giteaHookTpls, sizeLimitTpls := getHookTemplates() hookDir := filepath.Join(repoPath, "hooks") for i, hookName := range hookNames { @@ -74,8 +83,26 @@ func createDelegateHooks(repoPath string) (err error) { if err = ensureExecutable(newHookPath); err != nil { return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err) } + + if err = writeHookTpl(generateHookScriptPath(hookDir, hookName, SIZE_LIMIT_SCRIPT_NAME), sizeLimitTpls[i]); err != nil { + return err + } + } + + return nil +} + +func writeHookTpl(hookPath, content string) error { + if content == "" { + return nil + } + if err := ioutil.WriteFile(hookPath, []byte(content), 0777); err != nil { + return fmt.Errorf("write new hook file '%s': %v", hookPath, err) } + if err := ensureExecutable(hookPath); err != nil { + return fmt.Errorf("Unable to set %s executable. 
Error %v", hookPath, err) + } return nil } @@ -101,7 +128,7 @@ func ensureExecutable(filename string) error { // CheckDelegateHooks checks the hooks scripts for the repo func CheckDelegateHooks(repoPath string) ([]string, error) { - hookNames, hookTpls, giteaHookTpls := getHookTemplates() + hookNames, hookTpls, giteaHookTpls, sizeLimitTpls := getHookTemplates() hookDir := filepath.Join(repoPath, "hooks") results := make([]string, 0, 10) @@ -146,10 +173,34 @@ func CheckDelegateHooks(repoPath string) ([]string, error) { if !checkExecutable(newHookPath) { results = append(results, fmt.Sprintf("new hook file %s is not executable", newHookPath)) } + if err = checkHookFile(generateHookScriptPath(hookDir, hookName, SIZE_LIMIT_SCRIPT_NAME), sizeLimitTpls[i], results); err != nil { + return results, err + } } return results, nil } +func generateHookScriptPath(hookDir, hookName, fileName string) string { + return filepath.Join(hookDir, hookName+".d", fileName) +} + +func checkHookFile(filePath, tpl string, results []string) error { + if tpl == "" { + return nil + } + contents, err := ioutil.ReadFile(filePath) + if err != nil { + return err + } + if string(contents) != tpl { + results = append(results, fmt.Sprintf("old hook file %s is out of date", filePath)) + } + if !checkExecutable(filePath) { + results = append(results, fmt.Sprintf("old hook file %s is not executable", filePath)) + } + return nil +} + // SyncRepositoryHooks rewrites all repositories' pre-receive, update and post-receive hooks // to make sure the binary and custom conf path are up-to-date. 
func SyncRepositoryHooks(ctx context.Context) error { diff --git a/modules/setting/repository.go b/modules/setting/repository.go index dceb48f16..ee4f8b379 100644 --- a/modules/setting/repository.go +++ b/modules/setting/repository.go @@ -56,6 +56,7 @@ var ( FileMaxSize int64 MaxFiles int TotalMaxSize int64 + ShellFlag int } `ini:"-"` // Repository local settings @@ -125,6 +126,7 @@ var ( FileMaxSize int64 MaxFiles int TotalMaxSize int64 + ShellFlag int }{ Enabled: true, TempPath: "data/tmp/uploads", @@ -132,6 +134,7 @@ var ( FileMaxSize: 30, MaxFiles: 10, TotalMaxSize: 1024, + ShellFlag: 0, }, // Repository local settings diff --git a/modules/ssh/ssh.go b/modules/ssh/ssh.go index e7a694683..ac590a057 100644 --- a/modules/ssh/ssh.go +++ b/modules/ssh/ssh.go @@ -69,8 +69,17 @@ func sessionHandler(session ssh.Session) { os.Environ(), "SSH_ORIGINAL_COMMAND="+command, "SKIP_MINWINSVC=1", + models.EnvRepoMaxFileSize+"="+fmt.Sprint(setting.Repository.Upload.FileMaxSize), + models.EnvRepoMaxSize+"="+fmt.Sprint(setting.Repository.RepoMaxSize), + models.EnvPushSizeCheckFlag+"="+fmt.Sprint(setting.Repository.Upload.ShellFlag), ) + if strings.HasPrefix(command, "git-receive-pack") { + repo := getRepoFromCommandStr(command) + if repo != nil { + cmd.Env = append(cmd.Env, models.EnvRepoSize+"="+fmt.Sprint(repo.Size)) + } + } stdout, err := cmd.StdoutPipe() if err != nil { log.Error("SSH: StdoutPipe: %v", err) @@ -131,6 +140,23 @@ func sessionHandler(session ssh.Session) { } } +func getRepoFromCommandStr(command string) *models.Repository { + repoPath := strings.TrimPrefix(command, "git-receive-pack '") + repoPath = strings.TrimSuffix(repoPath, ".git'") + if repoPath != "" { + nameArray := strings.Split(repoPath, "/") + if len(nameArray) >= 2 { + ownerName := nameArray[0] + repoName := nameArray[1] + if repo, err := models.GetRepositoryByOwnerAndName(ownerName, repoName); err == nil { + return repo + } + } + } + return nil + +} + func publicKeyHandler(ctx ssh.Context, key 
ssh.PublicKey) bool { if ctx.User() != setting.SSH.BuiltinServerUser { return false diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 006a1e046..dbb9354aa 100755 --- a/modules/templates/helper.go +++ b/modules/templates/helper.go @@ -40,6 +40,14 @@ import ( "github.com/editorconfig/editorconfig-core-go/v2" ) +const ( + REF_HEADS_PREFIX = "refs/heads/" + REF_TAGS_PREFIX = "refs/tags/" + REF_TYPE_BRANCH = "branch" + REF_TYPE_TAG = "tag" + REF_TYPE_PATTERN = "(refs/heads/|refs/tags/)" +) + // Used from static.go && dynamic.go var mailSubjectSplit = regexp.MustCompile(`(?m)^-{3,}[\s]*$`) @@ -317,6 +325,8 @@ func NewFuncMap() []template.FuncMap { "DatasetPathJoin": func(arr []string, index int, seq string) string { return strings.Join(arr[1:index+1], seq) }, + "GetRefType": GetRefType, + "GetRefName": GetRefName, }} } @@ -444,10 +454,12 @@ func SafeJS(raw string) template.JS { func Str2html(raw string) template.HTML { return template.HTML(markup.Sanitize(raw)) } + // -func subOne(length int)int{ - return length-1 +func subOne(length int) int { + return length - 1 } + // Escape escapes a HTML string func Escape(raw string) string { return html.EscapeString(raw) @@ -758,3 +770,18 @@ func licenses() []string { func tasks() []string { return []string{"machine_translation", "question_answering_system", "information_retrieval", "knowledge_graph", "text_annotation", "text_categorization", "emotion_analysis", "language_modeling", "speech_recognition", "automatic_digest", "information_extraction", "description_generation", "image_classification", "face_recognition", "image_search", "target_detection", "image_description_generation", "vehicle_license_plate_recognition", "medical_image_analysis", "unmanned", "unmanned_security", "drone", "vr_ar", "2_d_vision", "2.5_d_vision", "3_d_reconstruction", "image_processing", "video_processing", "visual_input_system", "speech_coding", "speech_enhancement", "speech_synthesis"} } + +func GetRefType(ref string) 
string { + if strings.HasPrefix(ref, REF_HEADS_PREFIX) { + return REF_TYPE_BRANCH + } + if strings.HasPrefix(ref, REF_TAGS_PREFIX) { + return REF_TYPE_TAG + } + return "" +} + +func GetRefName(ref string) string { + reg := regexp.MustCompile(REF_TYPE_PATTERN) + return reg.ReplaceAllString(ref, "") +} diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 6d1ac24c7..ddddf89d6 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -507,8 +507,16 @@ static.CloudBrainTaskNum=CloudBrain Task Count static.CloudBrainRunTime=CloudBrain Run Time static.CommitDatasetNum=Commit Dataset Count static.CommitModelCount=Commit Model Count -static.UserIndex=User Index +static.UserIndex=Normalized user index +static.UserIndexPrimitive=User Index static.countdate=Count Date +static.FocusOtherUser=Focus Other User Count +static.CollectDataset=Collect Dataset Count +static.CollectedDataset=Collected Dataset Count +static.RecommendDataset=Recommended Dataset Count +static.CollectImage=Collect Image Count +static.CollectedImage=Collected Image Count +static.RecommendImage=Recommended Image Count static.all=All static.public.user_business_analysis_current_month=Current_Month static.public.user_business_analysis_current_week=Current_Week @@ -1335,6 +1343,7 @@ issues.new.labels = Labels issues.new.add_labels_title = Apply labels issues.new.no_label = No Label issues.new.clear_labels = Clear labels +issues.new.clear_branch_tag = Clear branch or tag issues.new.no_items = No items issues.new.milestone = Milestone issues.new.add_milestone_title = Set milestone @@ -1364,6 +1373,13 @@ issues.remove_label_at = removed the
" + getLabel(isZh,"search_input_total") + " " + totalNum + " " + getLabel(isZh,"search_srtip") + "" if(currentPage > 1){ html += "" + getLabel(isZh,"search_home_page") + ""; diff --git a/routers/home.go b/routers/home.go index 5dec05ebe..500ffbbd6 100755 --- a/routers/home.go +++ b/routers/home.go @@ -309,9 +309,11 @@ func ExploreDatasets(ctx *context.Context) { orderBy = models.SearchOrderByStarsReverse case "feweststars": orderBy = models.SearchOrderByStars + case "default": + orderBy = models.SearchOrderByDefault default: - ctx.Data["SortType"] = "recentupdate" - orderBy = models.SearchOrderByRecentUpdated + ctx.Data["SortType"] = "default" + orderBy = models.SearchOrderByDefault } keyword := strings.Trim(ctx.Query("q"), " ") diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 4ef205af2..b3b07f352 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -40,13 +40,11 @@ const ( tplCloudBrainBenchmarkNew base.TplName = "repo/cloudbrain/benchmark/new" tplCloudBrainBenchmarkShow base.TplName = "repo/cloudbrain/benchmark/show" - tplCloudBrainImageSubmit base.TplName = "repo/cloudbrain/image/submit" - tplCloudBrainImageEdit base.TplName = "repo/cloudbrain/image/edit" - + tplCloudBrainImageSubmit base.TplName = "repo/cloudbrain/image/submit" + tplCloudBrainImageEdit base.TplName = "repo/cloudbrain/image/edit" tplCloudBrainTrainJobNew base.TplName = "repo/cloudbrain/trainjob/new" tplCloudBrainTrainJobShow base.TplName = "repo/cloudbrain/trainjob/show" - ) var ( @@ -601,6 +599,7 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo ctx.Data["dataset_path"] = cloudbrain.DataSetMountPath ctx.Data["model_path"] = cloudbrain.ModelMountPath ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task) + ctx.Data["branchName"] = task.BranchName ctx.HTML(200, tpName) } @@ -1402,11 +1401,11 @@ func SyncCloudbrainStatus() { maxDuration = setting.MaxDuration } - if task.Duration >= maxDuration { - 
log.Info("begin to stop job(%s), because of the duration", task.JobName) + if task.Duration >= maxDuration && task.JobType != string(models.JobTypeTrain) { + log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName) err = cloudbrain.StopJob(task.JobID) if err != nil { - log.Error("StopJob(%s) failed:%v", task.JobName, err) + log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err) continue } task.Status = string(models.JobStopped) @@ -1416,7 +1415,8 @@ func SyncCloudbrainStatus() { task.ComputeAndSetDuration() err = models.UpdateJob(task) if err != nil { - log.Error("UpdateJob(%s) failed:%v", task.JobName, err) + log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err) + continue } } } diff --git a/routers/repo/http.go b/routers/repo/http.go index 87406a2c3..d96fea82e 100644 --- a/routers/repo/http.go +++ b/routers/repo/http.go @@ -256,6 +256,10 @@ func HTTP(ctx *context.Context) { models.EnvPusherName + "=" + authUser.Name, models.EnvPusherID + fmt.Sprintf("=%d", authUser.ID), models.EnvIsDeployKey + "=false", + models.EnvRepoSize + "=" + fmt.Sprint(repo.Size), + models.EnvRepoMaxFileSize + "=" + fmt.Sprint(setting.Repository.Upload.FileMaxSize), + models.EnvRepoMaxSize + "=" + fmt.Sprint(setting.Repository.RepoMaxSize), + models.EnvPushSizeCheckFlag + "=" + fmt.Sprint(setting.Repository.Upload.ShellFlag), } if !authUser.KeepEmailPrivate { environ = append(environ, models.EnvPusherEmail+"="+authUser.Email) diff --git a/routers/repo/issue.go b/routers/repo/issue.go index 42a6b9609..d28936594 100755 --- a/routers/repo/issue.go +++ b/routers/repo/issue.go @@ -432,7 +432,7 @@ func RetrieveRepoMetas(ctx *context.Context, repo *models.Repository, isPull boo return nil } - brs, _, err := ctx.Repo.GitRepo.GetBranches(0,0) + brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 0) if err != nil { ctx.ServerError("GetBranches", err) return nil @@ -1302,6 +1302,35 @@ func UpdateIssueContent(ctx *context.Context) { }) } +// UpdateIssueRef change 
issue's code reference +func UpdateIssueRef(ctx *context.Context) { + issues := getActionIssues(ctx) + if ctx.Written() { + return + } + + issue := issues[0] + if issue == nil { + log.Error("UpdateIssueRef param error ") + return + } + + if !ctx.IsSigned || (ctx.User.ID != issue.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) { + ctx.Error(403) + return + } + + ref := ctx.Query("id") + if err := issue_service.ChangeRef(issue, ctx.User, ref); err != nil { + ctx.ServerError("ChangeRef", err) + return + } + + ctx.JSON(200, map[string]interface{}{ + "ref": issue.Ref, + }) +} + // UpdateIssueMilestone change issue's milestone func UpdateIssueMilestone(ctx *context.Context) { issues := getActionIssues(ctx) diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index b713f385f..81e1664a4 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -51,6 +51,9 @@ const ( func DebugJobIndex(ctx *context.Context) { listType := ctx.Query("debugListType") + if listType == "" { + listType = models.AllResource + } ctx.Data["ListType"] = listType MustEnableCloudbrain(ctx) repo := ctx.Repo.Repository @@ -1473,9 +1476,9 @@ func paramCheckCreateTrainJob(form auth.CreateModelArtsTrainJobForm) error { return errors.New("启动文件必须是python文件") } - if form.WorkServerNumber > 25 || form.WorkServerNumber < 1 { - log.Error("the WorkServerNumber(%d) must be in (1,25)", form.WorkServerNumber) - return errors.New("计算节点数必须在1-25之间") + if form.WorkServerNumber > 2 || form.WorkServerNumber < 1 { + log.Error("the WorkServerNumber(%d) must be in (1,2)", form.WorkServerNumber) + return errors.New("计算节点数必须在1-2之间") } if form.BranchName == "" { log.Error("the branch must not be null!", form.BranchName) @@ -1491,9 +1494,9 @@ func paramCheckCreateInferenceJob(form auth.CreateModelArtsInferenceJobForm) err return errors.New("启动文件必须是python文件") } - if form.WorkServerNumber > 25 || form.WorkServerNumber < 1 { - log.Error("the WorkServerNumber(%d) must be in (1,25)", 
form.WorkServerNumber) - return errors.New("计算节点数必须在1-25之间") + if form.WorkServerNumber > 2 || form.WorkServerNumber < 1 { + log.Error("the WorkServerNumber(%d) must be in (1,2)", form.WorkServerNumber) + return errors.New("计算节点数必须在1-2之间") } if form.ModelName == "" { diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 995465b09..2280e8288 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -19,6 +19,130 @@ const ( PAGE_SIZE = 2000 ) +func getExcelHeader(ctx *context.Context) map[string]string { + excelHeader := make([]string, 0) + excelHeader = append(excelHeader, ctx.Tr("user.static.id")) + excelHeader = append(excelHeader, ctx.Tr("user.static.name")) + excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndex")) + excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndexPrimitive")) + excelHeader = append(excelHeader, ctx.Tr("user.static.codemergecount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.commitcount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.issuecount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.commentcount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.focusrepocount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.starrepocount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.logincount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.watchedcount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.commitcodesize")) + excelHeader = append(excelHeader, ctx.Tr("user.static.solveissuecount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.encyclopediascount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.createrepocount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.openiindex")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainTaskNum")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainRunTime")) + 
excelHeader = append(excelHeader, ctx.Tr("user.static.CommitDatasetNum")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CommitModelCount")) + + excelHeader = append(excelHeader, ctx.Tr("user.static.FocusOtherUser")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectDataset")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedDataset")) + excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendDataset")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectImage")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedImage")) + excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendImage")) + + excelHeader = append(excelHeader, ctx.Tr("user.static.registdate")) + excelHeader = append(excelHeader, ctx.Tr("user.static.countdate")) + + excelHeaderMap := make(map[string]string, 0) + var i byte + i = 0 + for _, value := range excelHeader { + excelColumn := getColumn(i) + fmt.Sprint(1) + log.Info("excelColumn=" + excelColumn) + excelHeaderMap[excelColumn] = value + i++ + } + return excelHeaderMap +} + +func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysisAll) { + rows := fmt.Sprint(row) + var tmp byte + tmp = 0 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CodeMergeCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.IssueCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, 
userRecord.CommentCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.StarRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.WatchedCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCodeSize) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SolveIssueCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.EncyclopediasCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CreateRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CloudBrainTaskNum) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitDatasetNum) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectImage) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedImage) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage) + tmp = tmp 
+ 1 + formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3]) + tmp = tmp + 1 + + formatTime = userRecord.DataDate + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime) +} +func getColumn(tmp byte) string { + var tmpA byte + tmpA = 'A' + if tmp < 26 { + return string(tmpA + tmp) + } else { + return "A" + string(tmpA+(tmp-26)) + } +} + func queryUserDataPage(ctx *context.Context, tableName string, queryObj interface{}) { page := ctx.QueryInt("page") if page <= 0 { @@ -37,30 +161,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac sheetName := ctx.Tr("user.static.sheetname") index := xlsx.NewSheet(sheetName) xlsx.DeleteSheet("Sheet1") - dataHeader := map[string]string{ - "A1": ctx.Tr("user.static.id"), - "B1": ctx.Tr("user.static.name"), - "C1": ctx.Tr("user.static.UserIndex"), - "D1": ctx.Tr("user.static.codemergecount"), - "E1": ctx.Tr("user.static.commitcount"), - "F1": ctx.Tr("user.static.issuecount"), - "G1": ctx.Tr("user.static.commentcount"), - "H1": ctx.Tr("user.static.focusrepocount"), - "I1": ctx.Tr("user.static.starrepocount"), - "J1": ctx.Tr("user.static.logincount"), - "K1": ctx.Tr("user.static.watchedcount"), - "L1": ctx.Tr("user.static.commitcodesize"), - "M1": ctx.Tr("user.static.solveissuecount"), - "N1": ctx.Tr("user.static.encyclopediascount"), - "O1": ctx.Tr("user.static.createrepocount"), - "P1": ctx.Tr("user.static.openiindex"), - "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), - "R1": ctx.Tr("user.static.CloudBrainRunTime"), - "S1": ctx.Tr("user.static.CommitDatasetNum"), - "T1": ctx.Tr("user.static.CommitModelCount"), - "U1": ctx.Tr("user.static.registdate"), - "V1": ctx.Tr("user.static.countdate"), - } + dataHeader := getExcelHeader(ctx) for k, v := range dataHeader { //设置单元格的值 xlsx.SetCellValue(sheetName, k, v) @@ -74,31 +175,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj 
interfac log.Info("return count=" + fmt.Sprint(count)) for _, userRecord := range re { row++ - rows := fmt.Sprint(row) - xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) - xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) - xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) - xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) - xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) - xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) - xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) - xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) - xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) - xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) - xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) - xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) - xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) - xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) - xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) - xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) - xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) - xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) - xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) - formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) - formatTime = userRecord.DataDate - xlsx.SetCellValue(sheetName, "V"+rows, formatTime) + writeExcel(row, xlsx, sheetName, userRecord) } indexTotal += PAGE_SIZE @@ -236,62 +313,16 @@ func QueryUserStaticDataPage(ctx *context.Context) { sheetName := ctx.Tr("user.static.sheetname") index := xlsx.NewSheet(sheetName) 
xlsx.DeleteSheet("Sheet1") - dataHeader := map[string]string{ - "A1": ctx.Tr("user.static.id"), - "B1": ctx.Tr("user.static.name"), - "C1": ctx.Tr("user.static.UserIndex"), - "D1": ctx.Tr("user.static.codemergecount"), - "E1": ctx.Tr("user.static.commitcount"), - "F1": ctx.Tr("user.static.issuecount"), - "G1": ctx.Tr("user.static.commentcount"), - "H1": ctx.Tr("user.static.focusrepocount"), - "I1": ctx.Tr("user.static.starrepocount"), - "J1": ctx.Tr("user.static.logincount"), - "K1": ctx.Tr("user.static.watchedcount"), - "L1": ctx.Tr("user.static.commitcodesize"), - "M1": ctx.Tr("user.static.solveissuecount"), - "N1": ctx.Tr("user.static.encyclopediascount"), - "O1": ctx.Tr("user.static.createrepocount"), - "P1": ctx.Tr("user.static.openiindex"), - "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), - "R1": ctx.Tr("user.static.CloudBrainRunTime"), - "S1": ctx.Tr("user.static.CommitDatasetNum"), - "T1": ctx.Tr("user.static.CommitModelCount"), - "U1": ctx.Tr("user.static.registdate"), - "V1": ctx.Tr("user.static.countdate"), - } + + dataHeader := getExcelHeader(ctx) for k, v := range dataHeader { //设置单元格的值 xlsx.SetCellValue(sheetName, k, v) } for i, userRecord := range re { - rows := fmt.Sprint(i + 2) - - xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) - xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) - xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) - xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) - xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) - xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) - xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) - xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) - xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) - xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) - xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) - xlsx.SetCellValue(sheetName, "L"+rows, 
userRecord.CommitCodeSize) - xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) - xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) - xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) - xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) - xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) - xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) - xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) - formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) - formatTime = userRecord.DataDate - xlsx.SetCellValue(sheetName, "V"+rows, formatTime) + row := i + 2 + writeExcel(row, xlsx, sheetName, userRecord) } //设置默认打开的表单 diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 8929666e5..912ed6238 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -892,6 +892,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/labels", reqRepoIssuesOrPullsWriter, repo.UpdateIssueLabel) m.Post("/milestone", reqRepoIssuesOrPullsWriter, repo.UpdateIssueMilestone) m.Post("/assignee", reqRepoIssuesOrPullsWriter, repo.UpdateIssueAssignee) + m.Post("/ref", reqRepoIssuesOrPullsWriter, repo.UpdateIssueRef) m.Post("/request_review", reqRepoIssuesOrPullsReader, repo.UpdatePullReviewRequest) m.Post("/status", reqRepoIssuesOrPullsWriter, repo.UpdateIssueStatus) m.Post("/resolve_conversation", reqRepoIssuesOrPullsReader, repo.UpdateResolveConversation) diff --git a/services/issue/content.go b/services/issue/content.go index 1081e30b5..387930449 100644 --- a/services/issue/content.go +++ b/services/issue/content.go @@ -21,3 +21,12 @@ func ChangeContent(issue *models.Issue, doer *models.User, content string) (err return nil } + +// ChangeRef changes 
issue ref, as the given user. +func ChangeRef(issue *models.Issue, doer *models.User, ref string) (err error) { + if err := issue.ChangeRef(doer, ref); err != nil { + return err + } + + return nil +} diff --git a/templates/admin/cloudbrain/imagecommit.tmpl b/templates/admin/cloudbrain/imagecommit.tmpl index e504f08b0..21ab73104 100644 --- a/templates/admin/cloudbrain/imagecommit.tmpl +++ b/templates/admin/cloudbrain/imagecommit.tmpl @@ -29,7 +29,7 @@

{{.i18n.Tr "repo.submit_image"}}

- +
@@ -125,5 +125,4 @@
-{{template "base/footer" .}} - \ No newline at end of file +{{template "base/footer" .}} \ No newline at end of file diff --git a/templates/admin/cloudbrain/images.html b/templates/admin/cloudbrain/images.tmpl similarity index 100% rename from templates/admin/cloudbrain/images.html rename to templates/admin/cloudbrain/images.tmpl diff --git a/templates/admin/dataset/list.tmpl b/templates/admin/dataset/list.tmpl index 3d32ec7a7..9e4e72b68 100644 --- a/templates/admin/dataset/list.tmpl +++ b/templates/admin/dataset/list.tmpl @@ -4,7 +4,6 @@
{{template "base/alert" .}}

{{.i18n.Tr "admin.datasets.dataset_manage_panel"}} ({{.i18n.Tr "admin.total" .Total}}) @@ -12,6 +11,15 @@
{{template "admin/dataset/search" .}}
+
+
+
+ + +
+
+
+
@@ -27,7 +35,7 @@ {{range .Datasets}} - + @@ -40,4 +48,4 @@ {{template "base/paginate" .}} -{{template "base/footer" .}} \ No newline at end of file +{{template "base/footer" .}} diff --git a/templates/admin/dataset/search.tmpl b/templates/admin/dataset/search.tmpl index 5d01d836c..6b42fb6cf 100644 --- a/templates/admin/dataset/search.tmpl +++ b/templates/admin/dataset/search.tmpl @@ -6,18 +6,18 @@ @@ -26,4 +26,4 @@ - + \ No newline at end of file diff --git a/templates/explore/datasets.tmpl b/templates/explore/datasets.tmpl index 3ffb0ec73..5739ebb10 100644 --- a/templates/explore/datasets.tmpl +++ b/templates/explore/datasets.tmpl @@ -121,12 +121,13 @@ @@ -140,15 +141,17 @@ {{end}}
+ 仅显示平台推荐
{{range $k, $v :=.Datasets}}
- {{.Repo.OwnerName}} / {{.Repo.Alias}}{{if .Recommend}}{{end}} - + {{.Repo.OwnerName}} / {{.Repo.Alias}} {{if $.IsSigned}} + + {{.DownloadTimes}}
@@ -156,6 +159,8 @@
{{else}} + + {{.DownloadTimes}}
@@ -163,7 +168,7 @@
{{end}}
-
{{.Title}}
+
{{.Title}}{{if .Recommend}}{{end}}
{{if or (.Category) (.Task) (.License)}}
{{if .Category}} diff --git a/templates/mail/auth/activate.tmpl b/templates/mail/auth/activate.tmpl old mode 100644 new mode 100755 index 37fdbd7c7..d2996f38d --- a/templates/mail/auth/activate.tmpl +++ b/templates/mail/auth/activate.tmpl @@ -11,5 +11,6 @@

{{AppUrl}}user/activate?code={{.Code}}

Not working? Try copying and pasting it to your browser.

© {{AppName}}

+

退订(TD)

diff --git a/templates/mail/auth/activate_email.tmpl b/templates/mail/auth/activate_email.tmpl old mode 100644 new mode 100755 index ebcaa0ee7..50ef9adcd --- a/templates/mail/auth/activate_email.tmpl +++ b/templates/mail/auth/activate_email.tmpl @@ -11,5 +11,6 @@

{{AppUrl}}user/activate_email?code={{.Code}}&email={{.Email}}

Not working? Try copying and pasting it to your browser.

© {{AppName}}

+

退订(TD)

diff --git a/templates/mail/auth/register_notify.tmpl b/templates/mail/auth/register_notify.tmpl old mode 100644 new mode 100755 index ea1857030..83950c260 --- a/templates/mail/auth/register_notify.tmpl +++ b/templates/mail/auth/register_notify.tmpl @@ -11,5 +11,6 @@

{{AppUrl}}user/login

If this account has been created for you, please set your password first.

© {{AppName}}

+

退订(TD)

diff --git a/templates/mail/auth/reset_passwd.tmpl b/templates/mail/auth/reset_passwd.tmpl old mode 100644 new mode 100755 index e01d57cea..f24c1dc31 --- a/templates/mail/auth/reset_passwd.tmpl +++ b/templates/mail/auth/reset_passwd.tmpl @@ -12,5 +12,6 @@

{{AppUrl}}user/recover_account?code={{.Code}}

Not working? Try copying and pasting it to your browser.

© {{AppName}}

+

退订(TD)

diff --git a/templates/mail/issue/assigned.tmpl b/templates/mail/issue/assigned.tmpl old mode 100644 new mode 100755 index d302a16f2..bd9fd015d --- a/templates/mail/issue/assigned.tmpl +++ b/templates/mail/issue/assigned.tmpl @@ -15,6 +15,8 @@ ---
View it on {{AppName}}. +
+ 退订(TD)

diff --git a/templates/mail/issue/default.tmpl b/templates/mail/issue/default.tmpl old mode 100644 new mode 100755 index 7cd397527..e3b426d40 --- a/templates/mail/issue/default.tmpl +++ b/templates/mail/issue/default.tmpl @@ -53,6 +53,8 @@ ---
View it on {{AppName}}. +
+ 退订(TD)

diff --git a/templates/mail/notify/collaborator.tmpl b/templates/mail/notify/collaborator.tmpl old mode 100644 new mode 100755 index 947b40439..d878a0ff5 --- a/templates/mail/notify/collaborator.tmpl +++ b/templates/mail/notify/collaborator.tmpl @@ -15,6 +15,8 @@ ---
View it on {{AppName}}. +
+ 退订(TD)

diff --git a/templates/repo/attachment/upload.tmpl b/templates/repo/attachment/upload.tmpl index 42aac99df..56dc52417 100644 --- a/templates/repo/attachment/upload.tmpl +++ b/templates/repo/attachment/upload.tmpl @@ -13,17 +13,15 @@ {{.CsrfTokenHtml}} - CPU/GPU - NPU - + CPU/GPU + NPU - - - + +
+ + + +
{{.ID}}{{.Title}}{{if .Recommend}}{{end}}{{.Title}}{{if .Recommend}}{{end}} {{.CreatedUnix.FormatShort}} {{if .Recommend}}{{$.i18n.Tr "admin.datasets.unrecommend"}}{{else}}{{$.i18n.Tr "admin.datasets.recommend"}}{{end}}
+ {{$.i18n.Tr "repo.modelarts.code_version"}} + +
+ {{.BranchName}} +
+
{{$.i18n.Tr "cloudbrain.gpu_type"}} diff --git a/templates/repo/issue/branch_selector_field.tmpl b/templates/repo/issue/branch_selector_field.tmpl index 4f80c13e5..265c14765 100644 --- a/templates/repo/issue/branch_selector_field.tmpl +++ b/templates/repo/issue/branch_selector_field.tmpl @@ -14,12 +14,12 @@
- + {{svg "octicon-git-branch" 16}} {{.i18n.Tr "repo.branches"}} - + {{.i18n.Tr "repo.tags"}} diff --git a/templates/repo/issue/view_content/comments.tmpl b/templates/repo/issue/view_content/comments.tmpl index e3c7df674..796054005 100644 --- a/templates/repo/issue/view_content/comments.tmpl +++ b/templates/repo/issue/view_content/comments.tmpl @@ -594,5 +594,40 @@ {{end}}
+ {{else if eq .Type 29}} +
+ {{svg "octicon-git-branch" 16}} + + + + + {{.Poster.GetDisplayName}} + + {{ $refOldName:= GetRefName .OldRef }} + {{ $refNewName:= GetRefName .NewRef }} + + {{if .OldRef }} + {{if .NewRef }} + {{$.i18n.Tr "repo.issues.change_branch_tag_at" ($refOldName|Escape) ($refNewName|Escape) $createdStr | Safe}} + {{else}} + {{ $getRefOldType:= GetRefType .OldRef }} + {{ if eq $getRefOldType "branch"}} + {{$.i18n.Tr "repo.issues.remove_branch_at" ($refOldName|Escape) $createdStr | Safe}} + {{else}} + {{$.i18n.Tr "repo.issues.remove_tag_at" ($refOldName|Escape) $createdStr | Safe}} + {{end}} + {{end}} + {{else}} + {{if .NewRef}} + {{ $getRefNewType:= GetRefType .NewRef }} + {{ if eq $getRefNewType "branch"}} + {{$.i18n.Tr "repo.issues.add_branch_at" ($refNewName|Escape) $createdStr | Safe}} + {{else}} + {{$.i18n.Tr "repo.issues.add_tag_at" ($refNewName|Escape) $createdStr | Safe}} + {{end}} + {{end}} + {{end}} + +
{{end}} {{end}} diff --git a/templates/repo/issue/view_content/sidebar.tmpl b/templates/repo/issue/view_content/sidebar.tmpl index bcc69a48b..e6a61a567 100644 --- a/templates/repo/issue/view_content/sidebar.tmpl +++ b/templates/repo/issue/view_content/sidebar.tmpl @@ -1,6 +1,52 @@
- {{template "repo/issue/branch_selector_field" .}} + + {{if and (not .Issue.IsPull) (not .PageIsComparePull)}} + + + +
+ {{end}} {{if .Issue.IsPull }} @@ -600,3 +646,4 @@
{{end}} {{end}} + diff --git a/templates/repo/modelarts/trainjob/new.tmpl b/templates/repo/modelarts/trainjob/new.tmpl index a37c5712a..cff51c524 100755 --- a/templates/repo/modelarts/trainjob/new.tmpl +++ b/templates/repo/modelarts/trainjob/new.tmpl @@ -233,8 +233,13 @@
- - + +
+ +
@@ -263,19 +268,20 @@ $('.menu .item') .tab(); - let sever_num = $('#trainjob_work_server_num') - $('.add').click(function(){ - sever_num.val(parseInt(sever_num.val())+1) - if(sever_num.val()>=26){ - sever_num.val(parseInt(sever_num.val())-1) - } - }) - $('.min').click(function(){ - sever_num.val(parseInt(sever_num.val())-1) - if(sever_num.val()<=0){ - sever_num.val(parseInt(sever_num.val())+1) - } - }) + // let sever_num = $("#trainjob_work_server_num_select .text").text() //$('#trainjob_work_server_num') + // console.log("sever_num:",sever_num) + // $('.add').click(function(){ + // sever_num.val(parseInt(sever_num.val())+1) + // if(sever_num.val()>=26){ + // sever_num.val(parseInt(sever_num.val())-1) + // } + // }) + // $('.min').click(function(){ + // sever_num.val(parseInt(sever_num.val())-1) + // if(sever_num.val()<=0){ + // sever_num.val(parseInt(sever_num.val())+1) + // } + // }) // 参数增加、删除、修改、保存 function Add_parameter(i){ value = '
' + @@ -349,7 +355,7 @@ // $("select[name='pool_id']").val(parameters[i]); // break; case (6): - $("input[name='work_server_number']").val(parameters[i]); + // $("input[name='work_server_number']").val(parameters[i]); break; } } @@ -456,6 +462,10 @@ $("input#ai_engine_name").val(name1) $("input#ai_flaver_name").val(name2) + let val_server_num_select = $("#trainjob_work_server_num_select .text").text() + // console.log("val_server_num_select:",val_server_num_select) + $("input#trainjob_work_server_num").val(val_server_num_select) + } $('.ui.create_train_job.green.button').click(function(e) { get_name() diff --git a/templates/repo/modelarts/trainjob/show.tmpl b/templates/repo/modelarts/trainjob/show.tmpl index 88cabb4ab..3b370b26d 100755 --- a/templates/repo/modelarts/trainjob/show.tmpl +++ b/templates/repo/modelarts/trainjob/show.tmpl @@ -249,7 +249,7 @@ td, th {
@@ -422,15 +422,15 @@ td, th {
- + - + -
+

@@ -836,15 +836,28 @@ td, th {
             html += "
" $(`#dir_list${version_name}`).append(html) } + function debounce(fn,delay){ + let timer; + return (...args) => { + // 判断定时器是否存在,清除定时器 + if (timer) { + clearTimeout(timer); + } + // 重新调用setTimeout + timer = setTimeout(() => { + fn.apply(this, args); + }, delay); + }; + } + const fn = debounce(logScroll, 500) function logScroll(version_name) { - let container = document.querySelector(`#log${version_name}`) let scrollTop = container.scrollTop let scrollHeight = container.scrollHeight let clientHeight = container.clientHeight let scrollLeft = container.scrollLeft - if((parseInt(scrollTop) + clientHeight == scrollHeight || parseInt(scrollTop) + clientHeight +1 == scrollHeight || parseInt(scrollTop) + clientHeight - 1 == scrollHeight) && (scrollLeft===0)){ + if(((parseInt(scrollTop) + clientHeight == scrollHeight || parseInt(scrollTop) + clientHeight +1 == scrollHeight || parseInt(scrollTop) + clientHeight - 1 == scrollHeight)) && parseInt(scrollTop)!==0 && scrollLeft==0){ let end_line = $(`#log${version_name} input[name=end_line]`).val() $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${end_line}&lines=50&order=desc`, (data) => { if (data.Lines == 0){ @@ -867,7 +880,7 @@ td, th { console.log(err); }); } - if(scrollTop == 0 && scrollLeft==0){ + if([0,1,2,3,4,5,6,7,8,9,10].includes(scrollTop) && scrollLeft==0){ let start_line = $(`#log${version_name} input[name=start_line]`).val() $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${start_line}&lines=50&order=asc`, (data) => { if (data.Lines == 0){ @@ -894,48 +907,69 @@ td, th { const dist = Math.ceil(needScrollTop / 10); _currentY += dist; //移动一个十分之一 - console.log(_currentY, targetY) - dom.scrollTo(currentX || 0, _currentY); + dom.scrollTo(currentX || 0, _currentY,'smooth'); // 如果移动幅度小于十个像素,直接移动,否则递归调用,实现动画效果 if (needScrollTop > 10 || needScrollTop < -10) { scrollAnimation(dom, 
_currentY, targetY) } else { - dom.scrollTo(0, targetY) + dom.scrollTo(0, targetY,'smooth') } }, 1) } - $('#log_top').click(function(){ - let logContentDom = document.querySelector('.log') - if(!logContentDom) - return - scrollAnimation(logContentDom, logContentDom.scrollTop, 0); + $('.log_top').click(function(){ + // let logContentDom = document.querySelector('.log') + // if(!logContentDom) + // return + // let version_name = $('.log_top').data('version') + let version_name = $(this).data('version') + let logContentDom = document.querySelector(`#log${version_name}`) + + $(`#log_file${version_name}`).siblings('pre').remove() + $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=asc`, (data) => { + + $(`#log${version_name} input[name=end_line]`).val(data.EndLine) //如果变动就改变所对应的值 + $(`#log${version_name} input[name=start_line]`).val(data.StartLine) + $(`#log${version_name}`).prepend('
' + data.Content)
+            $(`.message${version_name} #header`).text('您已翻阅至日志顶部')
+            $(`.message${version_name}`).css('display', 'block')
+            setTimeout(function(){
+                $(`.message${version_name}`).css('display', 'none')
+            }, 1000)
+            scrollAnimation(logContentDom, logContentDom.scrollTop, 0);
+        })
+
     })
-    $('#log_bottom').click(function(){
-        let logContentDom = document.querySelector('.log')
-			if(!logContentDom)
-				return
-			//如果内容撑大到可以滚动,则触发滚动
-			// if(!['10','11','12'].includes(context.taskInfo.statusCode)){
-            //     context.getLogContent(0, context.lines, 'goDown')
-            // }
-            if(logContentDom.scrollHeight > logContentDom.clientHeight){
-                console.log("1111")
-                scrollAnimation(logContentDom, logContentDom.scrollTop, logContentDom.scrollHeight - logContentDom.clientHeight);
-            }
-			else{
-                logScroll(version_name)
-                logContentDom.scrollTo(0,logContentDom.scrollTop-1);
-            }
-			// if(this.checkCurrentCanScrollBottom()){
-			// 	// this.goDown();
-				
-				
-			// 	this.scrollAnimation(logContentDom, logContentDom.scrollTop, logContentDom.scrollHeight - logContentDom.clientHeight);
-			// }else{
-			// 	//如果内容不足,则往后追加内容
-			// 	this.goDown();
-			// 	logContentDom.scrollTo(0,logContentDom.scrollTop-1);
-			// }
+    $('.log_bottom').click(function(e){
+        let version_name = $(this).data('version')
+        let logContentDom = document.querySelector(`#log${version_name}`)
+        $(`#log_file${version_name}`).siblings('pre').remove()
+        $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=desc`, (data) => {
+            
+            $(`#log${version_name} input[name=end_line]`).val(data.EndLine)   //如果变动就改变所对应的值
+            $(`#log${version_name} input[name=start_line]`).val(data.StartLine)
+            $(`#log${version_name}`).append('
' + data.Content)
+            $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${data.EndLine}&lines=50&order=desc`, (data) => {
+                if (data.Lines == 0){
+                    $(`.message${version_name} #header`).text('您已翻阅至日志底部')
+                    $(`.message${version_name}`).css('display', 'block')
+                    setTimeout(function(){
+                        $(`.message${version_name}`).css('display', 'none')
+                    }, 1000)
+                }else{
+                    if(end_line===data.EndLine){
+                        return
+                    }
+                    else{
+                        $(`#log${version_name} input[name=end_line]`).val(data.EndLine)
+                        $(`#log${version_name}`).append('
' + data.Content)
+                    }
+
+                }
+            }).fail(function(err) {
+                console.log(err);
+            });
+            scrollAnimation(logContentDom, logContentDom.scrollTop+1, logContentDom.scrollHeight - logContentDom.clientHeight);
+        })
     })
 
diff --git a/web_src/js/components/MinioUploader.vue b/web_src/js/components/MinioUploader.vue
index b47700e20..167bb8c5a 100755
--- a/web_src/js/components/MinioUploader.vue
+++ b/web_src/js/components/MinioUploader.vue
@@ -1,25 +1,31 @@