From 260232e417828f0ca6ea784517ed517577a29104 Mon Sep 17 00:00:00 2001
From: zhoupzh
Date: Tue, 19 Apr 2022 10:12:24 +0800
Subject: [PATCH 01/63] fix issue

---
 web_src/js/components/MinioUploader.vue | 23 +++++++++++++++++++----
 1 file changed, 19 insertions(+), 4 deletions(-)

diff --git a/web_src/js/components/MinioUploader.vue b/web_src/js/components/MinioUploader.vue
index 8c33608e7..d6c21e535 100755
--- a/web_src/js/components/MinioUploader.vue
+++ b/web_src/js/components/MinioUploader.vue
@@ -8,7 +8,7 @@
 {{ file_status_text }} {{ status }}

- {{upload}} + {{upload}} {{cancel}} + {{if and (not .Issue.IsPull) (not .PageIsComparePull)}} + + + +
+ {{end}} {{if .Issue.IsPull }} From eaaa456e5f940370e8fcf7c09476e1a65889e785 Mon Sep 17 00:00:00 2001 From: zouap Date: Thu, 21 Apr 2022 16:01:31 +0800 Subject: [PATCH 09/63] =?UTF-8?q?=E6=8F=90=E4=BA=A4=E4=BB=A3=E7=A0=81?= =?UTF-8?q?=E3=80=82=E8=A7=A3=E5=86=B3Bug?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: zouap --- models/user_business_analysis.go | 66 ++++++++------- models/user_business_struct.go | 140 ++++++++++++++++--------------- public/home/search.js | 20 ++--- 3 files changed, 118 insertions(+), 108 deletions(-) diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index 1bd597bf5..316fec78b 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -82,17 +82,19 @@ type UserBusinessAnalysisAll struct { DataDate string `xorm:"NULL"` //cloudbraintask - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` CollectDataset int `xorm:"NOT NULL DEFAULT 0"` @@ -167,17 +169,19 @@ type UserBusinessAnalysis struct { DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` CollectDataset int `xorm:"NOT NULL DEFAULT 0"` @@ -528,13 +532,13 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS dateRecordAll.CollectedImage = getMapValue(dateRecordAll.ID, CollectedImage) 
dateRecordAll.RecommendImage = getMapValue(dateRecordAll.ID, RecommendImage) - dateRecordAll.UserIndex = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight) - userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndex - if maxUserIndex < dateRecordAll.UserIndex { - maxUserIndex = dateRecordAll.UserIndex + dateRecordAll.UserIndexPrimitive = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight) + userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndexPrimitive + if maxUserIndex < dateRecordAll.UserIndexPrimitive { + maxUserIndex = dateRecordAll.UserIndexPrimitive } - if minUserIndex > dateRecordAll.UserIndex { - minUserIndex = dateRecordAll.UserIndex + if minUserIndex > dateRecordAll.UserIndexPrimitive { + minUserIndex = dateRecordAll.UserIndexPrimitive } dateRecordBatch = append(dateRecordBatch, dateRecordAll) if len(dateRecordBatch) >= BATCH_INSERT_SIZE { @@ -584,7 +588,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static insertBatchSql := "INSERT INTO public." + tableName + "(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " + - "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image) " + + "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive) " + "VALUES" for i, record := range dateRecords { @@ -593,7 +597,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static ", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) + ", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) + ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "'," + - fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + 
")" + fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ")" if i < (len(dateRecords) - 1) { insertBatchSql += "," } @@ -761,7 +765,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage) dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage) - dateRecord.UserIndex = getUserIndex(dateRecord, ParaWeight) + dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight) setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord) _, err = statictisSess.Insert(&dateRecord) if err != nil { diff --git a/models/user_business_struct.go b/models/user_business_struct.go index bcf2f06c0..86aecd545 100644 --- a/models/user_business_struct.go +++ b/models/user_business_struct.go @@ -45,17 +45,18 @@ type UserBusinessAnalysisCurrentYear struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + UserLocation string `xorm:"NULL"` FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` CollectDataset int `xorm:"NOT NULL DEFAULT 0"` @@ -109,17 +110,18 @@ type UserBusinessAnalysisLast30Day struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` 
+ UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + UserLocation string `xorm:"NULL"` FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` CollectDataset int `xorm:"NOT NULL DEFAULT 0"` @@ -173,17 +175,18 @@ type UserBusinessAnalysisLastMonth struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + UserLocation string `xorm:"NULL"` FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` CollectDataset int `xorm:"NOT NULL DEFAULT 0"` @@ -237,17 +240,18 @@ type UserBusinessAnalysisCurrentMonth struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + UserLocation string `xorm:"NULL"` FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` CollectDataset int `xorm:"NOT NULL DEFAULT 0"` @@ -301,17 +305,19 @@ type UserBusinessAnalysisCurrentWeek struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob 
int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` CollectDataset int `xorm:"NOT NULL DEFAULT 0"` @@ -365,17 +371,19 @@ type UserBusinessAnalysisYesterday struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` CollectDataset int `xorm:"NOT NULL DEFAULT 0"` diff --git a/public/home/search.js b/public/home/search.js index c55d1807c..2fac95358 100644 --- a/public/home/search.js +++ b/public/home/search.js @@ -810,14 +810,7 @@ var repoAndOrgEN={ function page(current){ currentPage=current; - startIndex = currentPage -1; - if(startIndex < 1){ - startIndex = 1; - } - endIndex = currentPage + 2; - if(endIndex >= totalPage){ - endIndex = totalPage; - } + doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel); } @@ -888,9 +881,14 @@ function getYPosition(e){ var html =""; console.log("currentPage=" + currentPage); console.log("privateTotal=" + privateTotal); - // if(totalPage==0){ - // return; - // } + startIndex = currentPage -1; + if(startIndex < 1){ + startIndex = 1; + } + endIndex = currentPage + 2; + if(endIndex >= totalPage){ + endIndex = totalPage; + } html += "" + getLabel(isZh,"search_input_total") + " " + totalNum + " " + getLabel(isZh,"search_srtip") + "" if(currentPage > 1){ html += "" + getLabel(isZh,"search_home_page") + ""; From 395c4a9bf9302a4dd0cbff52beddf1058e8ff256 Mon Sep 17 00:00:00 2001 From: zouap Date: Thu, 21 Apr 2022 16:08:30 +0800 Subject: [PATCH 10/63] =?UTF-8?q?=E6=8F=90=E4=BA=A4=E4=BB=A3=E7=A0=81?= =?UTF-8?q?=E3=80=82=E8=A7=A3=E5=86=B3Bug?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: zouap --- models/user_business_analysis.go | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index 316fec78b..48b9205c6 100644 
--- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -852,7 +852,12 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) - result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) + codeLine := float64(dateRecord.CommitCodeSize) + limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000) + if codeLine >= limitCodeLine { + codeLine = limitCodeLine + } + result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01) result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) @@ -881,12 +886,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64 result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) - codeLine := float64(dateRecord.CommitCodeSize) / 1000 - limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 100) + codeLine := float64(dateRecord.CommitCodeSize) + limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000) if codeLine >= limitCodeLine { codeLine = limitCodeLine } - result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) + result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01) result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) From 20eb12740f4198c1fa1d3db3aaf78893f0930bdf Mon Sep 17 00:00:00 2001 From: chenyifan01 Date: Thu, 21 Apr 2022 16:25:57 +0800 Subject: [PATCH 11/63] #1821 update --- models/issue.go | 32 ++++++++++++++++++++++++++++++++ models/issue_comment.go | 2 ++ routers/repo/issue.go | 25 ++++++++++++++++++++++++- routers/routes/routes.go | 1 + services/issue/content.go | 9 +++++++++ 5 files changed, 68 insertions(+), 1 deletion(-) diff --git a/models/issue.go b/models/issue.go index 19f00d5f3..3a7097977 100755 --- a/models/issue.go +++ b/models/issue.go @@ -775,6 +775,38 @@ func (issue *Issue) ChangeContent(doer *User, content string) (err error) { return sess.Commit() } +// ChangeRef changes issue ref, as the given user. 
+func (issue *Issue) ChangeRef(doer *User, newRef string) (err error) { + oldRef := issue.Ref + issue.Ref = newRef + + sess := x.NewSession() + defer sess.Close() + if err = sess.Begin(); err != nil { + return err + } + + if err = updateIssueCols(sess, issue, "ref"); err != nil { + sess.Rollback() + return fmt.Errorf("UpdateIssueCols: %v", err) + } + + var opts = &CreateCommentOptions{ + Type: CommentTypeRef, + Doer: doer, + Repo: issue.Repo, + Issue: issue, + OldRef: oldRef, + NewRef: newRef, + } + if _, err = createComment(sess, opts); err != nil { + sess.Rollback() + return err + } + + return sess.Commit() +} + // GetTasks returns the amount of tasks in the issues content func (issue *Issue) GetTasks() int { return len(issueTasksPat.FindAllStringIndex(issue.Content, -1)) diff --git a/models/issue_comment.go b/models/issue_comment.go index 60d38452c..ec9441bde 100755 --- a/models/issue_comment.go +++ b/models/issue_comment.go @@ -48,6 +48,8 @@ const ( CommentTypePullRef // Labels changed CommentTypeLabel + // Ref changed + CommentTypeRef // Milestone changed CommentTypeMilestone // Assignees changed diff --git a/routers/repo/issue.go b/routers/repo/issue.go index 42a6b9609..875c02024 100755 --- a/routers/repo/issue.go +++ b/routers/repo/issue.go @@ -432,7 +432,7 @@ func RetrieveRepoMetas(ctx *context.Context, repo *models.Repository, isPull boo return nil } - brs, _, err := ctx.Repo.GitRepo.GetBranches(0,0) + brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 0) if err != nil { ctx.ServerError("GetBranches", err) return nil @@ -1302,6 +1302,29 @@ func UpdateIssueContent(ctx *context.Context) { }) } +// UpdateIssueRef change issue's code reference +func UpdateIssueRef(ctx *context.Context) { + issue := GetActionIssue(ctx) + if ctx.Written() { + return + } + + if !ctx.IsSigned || (ctx.User.ID != issue.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) { + ctx.Error(403) + return + } + + ref := ctx.Query("ref") + if err := issue_service.ChangeRef(issue, ctx.User, ref); err != nil { + ctx.ServerError("ChangeRef", err) + return + } + + ctx.JSON(200, map[string]interface{}{ + "ref": issue.Ref, + }) +} + // UpdateIssueMilestone change issue's milestone func UpdateIssueMilestone(ctx *context.Context) { issues := getActionIssues(ctx) diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 9df429e8b..7af42867c 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -868,6 +868,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Group("/:index", func() { m.Post("/title", repo.UpdateIssueTitle) m.Post("/content", repo.UpdateIssueContent) + m.Post("/ref", repo.UpdateIssueRef) m.Post("/watch", repo.IssueWatch) m.Group("/dependency", func() { m.Post("/add", repo.AddDependency) diff --git a/services/issue/content.go b/services/issue/content.go index 1081e30b5..387930449 100644 --- a/services/issue/content.go +++ b/services/issue/content.go @@ -21,3 +21,12 @@ func ChangeContent(issue *models.Issue, doer *models.User, content string) (err return nil } + +// ChangeRef changes issue ref, as the given user. 
+func ChangeRef(issue *models.Issue, doer *models.User, ref string) (err error) { + if err := issue.ChangeRef(doer, ref); err != nil { + return err + } + + return nil +} From ee0f763ef6498b5ec45082bc2f0b77e7cb5c9a92 Mon Sep 17 00:00:00 2001 From: chenyifan01 Date: Thu, 21 Apr 2022 16:41:32 +0800 Subject: [PATCH 12/63] #1860 update limit size default value --- modules/repository/hooks.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/repository/hooks.go b/modules/repository/hooks.go index d9766c4ed..2b29b7fe0 100644 --- a/modules/repository/hooks.go +++ b/modules/repository/hooks.go @@ -36,7 +36,7 @@ func getHookTemplates() (hookNames, hookTpls, giteaHookTpls, sizeLimitTpls []str fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf), } sizeLimitTpls = []string{ - fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"100\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"10240\" \nreadonly CHECK_FLAG_ON=0\n\n\nstatus=\"$EXIT_SUCCESS\"\n\nfunction readINI()\n{\n FILENAME='%s'; SECTION=$1; KEY=$2\n RESULT=`awk -F '=' '/\\['$SECTION'\\]/{a=1}a==1&&$1~/'$KEY'/{print $2;exit}' $FILENAME`\n echo $RESULT\n}\n\n# skip this hook entirely if shell check is not open\ncheck_flag=$(readINI 'repository.upload' 'SHELL_FLAG')\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get the maximum filesize configured for this repository or the default\nfunction get_file_maxsize() {\n local value;\n value=$(readINI 'repository.upload' 'FILE_MAX_SIZE')\n if [[ \"$?\" != $EXIT_SUCCESS ]] || [[ -z \"$value\" ]]; then\n echo \"$DEFAULT_FILE_MAXSIZE_MB\"\n return \"$EXIT_SUCCESS\"\n fi\n echo \"$value\"\n return \"$EXIT_SUCCESS\"\n}\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"$(get_file_maxsize)\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. 
are referenced by\n# another branch or tag).\n\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" --tags=\\* | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)')\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\n# rewrite IFS to seperate line in $files\nIFS=$'\\n'\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n unset IFS\n temp_array=(${file})\n # add all commit files size\n push_size=`expr $push_size + ${temp_array[2]}`\n if [[ ${temp_array[2]} -gt $maxsize ]]; then\n\t if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\n\t fi\n\t echo -e \"\\033[31m- ${temp_array[3]} \\033[0m (ref: ${refname}) \"\n fi\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\nfunction get_repo_maxsize() {\n local value;\n value=$(readINI 'repository' 'REPO_MAX_SIZE')\n if [[ \"$?\" != $EXIT_SUCCESS ]] || [[ -z \"$value\" ]]; then\n echo \"$DEFAULT_FILE_MAXSIZE\"\n return \"$EXIT_SUCCESS\"\n fi\n echo \"$value\"\n return \"$EXIT_SUCCESS\"\n}\n\n\nsizelimit_mb=\"$(get_repo_maxsize)\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nstr=`du -sb .`\narr=($str)\nreposize_b=${arr[0]}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS\n", setting.ScriptType, setting.CustomConf), + fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=0\n\n\nstatus=\"$EXIT_SUCCESS\"\n\nfunction readINI()\n{\n FILENAME='%s'; SECTION=$1; KEY=$2\n RESULT=`awk -F '=' '/\\['$SECTION'\\]/{a=1}a==1&&$1~/'$KEY'/{print $2;exit}' $FILENAME`\n echo $RESULT\n}\n\n# skip this hook entirely if shell check is not open\ncheck_flag=$(readINI 'repository.upload' 'SHELL_FLAG')\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get the maximum filesize configured for this repository or the default\nfunction get_file_maxsize() {\n local value;\n value=$(readINI 'repository.upload' 'FILE_MAX_SIZE')\n if [[ \"$?\" != $EXIT_SUCCESS ]] || [[ -z \"$value\" ]]; then\n echo \"$DEFAULT_FILE_MAXSIZE_MB\"\n return \"$EXIT_SUCCESS\"\n fi\n echo \"$value\"\n return \"$EXIT_SUCCESS\"\n}\n\n# get maximum filesize (from 
repository-specific config)\nmaxsize_mb=\"$(get_file_maxsize)\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. are referenced by\n# another branch or tag).\n\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" --tags=\\* | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)')\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\n# rewrite IFS to seperate line in $files\nIFS=$'\\n'\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n unset IFS\n temp_array=(${file})\n # add all commit files size\n push_size=`expr $push_size + ${temp_array[2]}`\n if [[ ${temp_array[2]} -gt $maxsize ]]; then\n\t if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\n\t fi\n\t echo -e \"\\033[31m- ${temp_array[3]} \\033[0m (ref: ${refname}) \"\n fi\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\nfunction get_repo_maxsize() {\n local value;\n value=$(readINI 'repository' 'REPO_MAX_SIZE')\n if [[ \"$?\" != $EXIT_SUCCESS ]] || [[ -z \"$value\" ]]; then\n echo \"$DEFAULT_FILE_MAXSIZE\"\n return \"$EXIT_SUCCESS\"\n fi\n echo \"$value\"\n return \"$EXIT_SUCCESS\"\n}\n\n\nsizelimit_mb=\"$(get_repo_maxsize)\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nstr=`du -sb .`\narr=($str)\nreposize_b=${arr[0]}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS\n", setting.ScriptType, setting.CustomConf), fmt.Sprintf(""), fmt.Sprintf(""), } From 96ddf38afd49150454fc6306cbe6884003dd2175 Mon Sep 17 00:00:00 2001 From: liuzx Date: Fri, 22 Apr 2022 09:57:13 +0800 Subject: [PATCH 13/63] fix-1931 --- routers/repo/modelarts.go | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index 318726e8e..33e380fec 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -1475,9 +1475,9 @@ func paramCheckCreateTrainJob(form auth.CreateModelArtsTrainJobForm) error { return errors.New("启动文件必须是python文件") } - if form.WorkServerNumber > 25 || form.WorkServerNumber < 1 { - log.Error("the WorkServerNumber(%d) must be 
in (1,25)", form.WorkServerNumber) - return errors.New("计算节点数必须在1-25之间") + if form.WorkServerNumber > 2 || form.WorkServerNumber < 1 { + log.Error("the WorkServerNumber(%d) must be in (1,2)", form.WorkServerNumber) + return errors.New("计算节点数必须在1-2之间") } if form.BranchName == "" { log.Error("the branch must not be null!", form.BranchName) @@ -1493,9 +1493,9 @@ func paramCheckCreateInferenceJob(form auth.CreateModelArtsInferenceJobForm) err return errors.New("启动文件必须是python文件") } - if form.WorkServerNumber > 25 || form.WorkServerNumber < 1 { - log.Error("the WorkServerNumber(%d) must be in (1,25)", form.WorkServerNumber) - return errors.New("计算节点数必须在1-25之间") + if form.WorkServerNumber > 2 || form.WorkServerNumber < 1 { + log.Error("the WorkServerNumber(%d) must be in (1,2)", form.WorkServerNumber) + return errors.New("计算节点数必须在1-2之间") } if form.ModelName == "" { From 02226743123ee84a5c2dde730540c7b1ea93df43 Mon Sep 17 00:00:00 2001 From: zouap Date: Fri, 22 Apr 2022 10:25:39 +0800 Subject: [PATCH 14/63] =?UTF-8?q?=E7=94=A8=E6=88=B7=E5=88=86=E6=9E=90?= =?UTF-8?q?=E5=A2=9E=E5=8A=A0=E7=9B=B8=E5=85=B3=E5=B1=9E=E6=80=A7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: zouap --- models/user_business_analysis.go | 37 +++- options/locale/locale_en-US.ini | 10 +- options/locale/locale_zh-CN.ini | 10 +- routers/repo/user_data_analysis.go | 273 ++++++++++++++++--------- web_src/js/components/UserAnalysis.vue | 55 ++++- 5 files changed, 281 insertions(+), 104 deletions(-) diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index 48b9205c6..2d7592baf 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -815,7 +815,7 @@ func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, en userMetrics["TotalActivateRegistUser"] = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) + 1 } - if dateRecord.UserIndex > 0 || dateRecord.LoginCount > 0 { + if getUserActivate(dateRecord) > 0 { userMetrics["HasActivityUser"] = getMapKeyStringValue("HasActivityUser", userMetrics) + 1 } @@ -865,6 +865,34 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) + result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1) + result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1) + result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2) + result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1) + result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1) + result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2) + + return result +} + +func getUserActivate(dateRecord UserBusinessAnalysis) int { + var result int + result += dateRecord.CodeMergeCount + result += dateRecord.CommitCount + result += dateRecord.IssueCount + result += dateRecord.CommentCount + result += dateRecord.FocusRepoCount + result += dateRecord.StarRepoCount + result += dateRecord.SolveIssueCount + result += dateRecord.EncyclopediasCount + result += dateRecord.CreateRepoCount + result += dateRecord.CloudBrainTaskNum + result += 
dateRecord.CommitModelCount + result += dateRecord.CommitDatasetNum + result += dateRecord.FocusOtherUser + result += dateRecord.CollectDataset + result += dateRecord.CollectImage + result += dateRecord.CommitCodeSize return result } @@ -899,6 +927,13 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64 result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) + result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1) + result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1) + result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2) + result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1) + result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1) + result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2) + return result } diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index f53bea7b1..c96e6af3f 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -507,8 +507,16 @@ static.CloudBrainTaskNum=CloudBrain Task Count static.CloudBrainRunTime=CloudBrain Run Time static.CommitDatasetNum=Commit Dataset Count static.CommitModelCount=Commit Model Count -static.UserIndex=User Index +static.UserIndex=Normalized user index +static.UserIndexPrimitive=User Index static.countdate=Count Date +static.FocusOtherUser=Focus Other User Count +static.CollectDataset=Collect Dataset Count +static.CollectedDataset=Collected Dataset Count +static.RecommendDataset=Recommended Dataset Count +static.CollectImage=Collect Image Count +static.CollectedImage=Collected Image Count +static.RecommendImage=Recommended Image Count static.all=All static.public.user_business_analysis_current_month=Current_Month static.public.user_business_analysis_current_week=Current_Week diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index c82347d5e..27ba96c51 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -512,8 +512,16 @@ static.CloudBrainTaskNum=云脑任务数 static.CloudBrainRunTime=云脑运行时间(小时) static.CommitDatasetNum=上传(提交)数据集文件数 static.CommitModelCount=提交模型数 -static.UserIndex=用户指数 +static.UserIndex=归一化用户指数 +static.UserIndexPrimitive=用户指数 static.countdate=系统统计时间 +static.FocusOtherUser=关注他人数 +static.CollectDataset=收藏数据集 +static.CollectedDataset=被收藏数据集 +static.RecommendDataset=被推荐数据集数 +static.CollectImage=收藏镜像数 +static.CollectedImage=被收藏镜像数 +static.RecommendImage=被推荐镜像数 static.all=所有 static.public.user_business_analysis_current_month=本月 static.public.user_business_analysis_current_week=本周 diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 995465b09..bfc85bb80 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -19,6 +19,125 @@ const ( PAGE_SIZE = 2000 ) +func getExcelHeader(ctx *context.Context) map[string]string { + excelHeader := make([]string, 0) + excelHeader = append(excelHeader, ctx.Tr("user.static.id")) + excelHeader = append(excelHeader, ctx.Tr("user.static.name")) + excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndex")) + excelHeader = append(excelHeader, 
ctx.Tr("user.static.UserIndexPrimitive")) + excelHeader = append(excelHeader, ctx.Tr("user.static.codemergecount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.commitcount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.issuecount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.commentcount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.focusrepocount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.starrepocount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.logincount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.watchedcount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.commitcodesize")) + excelHeader = append(excelHeader, ctx.Tr("user.static.solveissuecount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.encyclopediascount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.createrepocount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.openiindex")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainTaskNum")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainRunTime")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CommitDatasetNum")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CommitModelCount")) + + excelHeader = append(excelHeader, ctx.Tr("user.static.FocusOtherUser")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectDataset")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedDataset")) + excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendDataset")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectImage")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedImage")) + excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendImage")) + + excelHeader = append(excelHeader, ctx.Tr("user.static.registdate")) + excelHeader = append(excelHeader, ctx.Tr("user.static.countdate")) + + excelHeaderMap := make(map[string]string, 0) + var i, tmp byte + tmp = 'A' + i = 0 + for j, value := range excelHeader { + excelColumn := string(tmp+i) + fmt.Sprint(j+1) + log.Info("excelColumn=" + excelColumn) + excelHeaderMap[excelColumn] = value + i++ + } + return excelHeaderMap +} + +func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysisAll) { + rows := fmt.Sprint(row) + var tmp byte + tmp = 'A' + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.ID) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.Name) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CodeMergeCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CommitCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.IssueCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CommentCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.FocusRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.StarRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.LoginCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.WatchedCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, 
string(tmp)+rows, userRecord.CommitCodeSize) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.SolveIssueCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.EncyclopediasCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CreateRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CloudBrainTaskNum) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CommitDatasetNum) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CommitModelCount) + tmp = tmp + 1 + + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.FocusOtherUser) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CollectDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CollectedDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.RecommendDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CollectImage) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CollectedImage) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.RecommendImage) + tmp = tmp + 1 + + formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, formatTime[0:len(formatTime)-3]) + formatTime = userRecord.DataDate + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, string(tmp)+rows, formatTime) + +} + func queryUserDataPage(ctx *context.Context, tableName string, queryObj interface{}) { page := ctx.QueryInt("page") if page <= 0 { @@ -37,30 +156,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac sheetName := ctx.Tr("user.static.sheetname") index := xlsx.NewSheet(sheetName) xlsx.DeleteSheet("Sheet1") - dataHeader := map[string]string{ - "A1": ctx.Tr("user.static.id"), - "B1": ctx.Tr("user.static.name"), - "C1": ctx.Tr("user.static.UserIndex"), - "D1": ctx.Tr("user.static.codemergecount"), - "E1": ctx.Tr("user.static.commitcount"), - "F1": ctx.Tr("user.static.issuecount"), - "G1": ctx.Tr("user.static.commentcount"), - "H1": ctx.Tr("user.static.focusrepocount"), - "I1": ctx.Tr("user.static.starrepocount"), - "J1": ctx.Tr("user.static.logincount"), - "K1": ctx.Tr("user.static.watchedcount"), - "L1": ctx.Tr("user.static.commitcodesize"), - "M1": ctx.Tr("user.static.solveissuecount"), - "N1": ctx.Tr("user.static.encyclopediascount"), - "O1": ctx.Tr("user.static.createrepocount"), - "P1": ctx.Tr("user.static.openiindex"), - "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), - "R1": ctx.Tr("user.static.CloudBrainRunTime"), - "S1": ctx.Tr("user.static.CommitDatasetNum"), - "T1": ctx.Tr("user.static.CommitModelCount"), - "U1": ctx.Tr("user.static.registdate"), - "V1": ctx.Tr("user.static.countdate"), - } + dataHeader := getExcelHeader(ctx) for k, v := range dataHeader { //设置单元格的值 xlsx.SetCellValue(sheetName, k, v) @@ -74,31 +170,32 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac log.Info("return count=" + fmt.Sprint(count)) for _, userRecord := range re { row++ - rows := fmt.Sprint(row) - xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) - 
xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) - xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) - xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) - xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) - xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) - xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) - xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) - xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) - xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) - xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) - xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) - xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) - xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) - xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) - xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) - xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) - xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) - xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) - formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) - formatTime = userRecord.DataDate - xlsx.SetCellValue(sheetName, "V"+rows, formatTime) + writeExcel(row, xlsx, sheetName, userRecord) + // rows := fmt.Sprint(row) + // xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) + // xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) + // xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + // xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) + // xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) + // xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) + // xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) + // xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) + // xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) + // xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) + // xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) + // xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) + // xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) + // xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) + // xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) + // xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) + // xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) + // xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) + // xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) + // xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) + // formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") + // xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) + // formatTime = userRecord.DataDate + // xlsx.SetCellValue(sheetName, "V"+rows, formatTime) } indexTotal += PAGE_SIZE @@ -236,62 +333,40 @@ func QueryUserStaticDataPage(ctx *context.Context) { sheetName := ctx.Tr("user.static.sheetname") index := xlsx.NewSheet(sheetName) 
xlsx.DeleteSheet("Sheet1") - dataHeader := map[string]string{ - "A1": ctx.Tr("user.static.id"), - "B1": ctx.Tr("user.static.name"), - "C1": ctx.Tr("user.static.UserIndex"), - "D1": ctx.Tr("user.static.codemergecount"), - "E1": ctx.Tr("user.static.commitcount"), - "F1": ctx.Tr("user.static.issuecount"), - "G1": ctx.Tr("user.static.commentcount"), - "H1": ctx.Tr("user.static.focusrepocount"), - "I1": ctx.Tr("user.static.starrepocount"), - "J1": ctx.Tr("user.static.logincount"), - "K1": ctx.Tr("user.static.watchedcount"), - "L1": ctx.Tr("user.static.commitcodesize"), - "M1": ctx.Tr("user.static.solveissuecount"), - "N1": ctx.Tr("user.static.encyclopediascount"), - "O1": ctx.Tr("user.static.createrepocount"), - "P1": ctx.Tr("user.static.openiindex"), - "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), - "R1": ctx.Tr("user.static.CloudBrainRunTime"), - "S1": ctx.Tr("user.static.CommitDatasetNum"), - "T1": ctx.Tr("user.static.CommitModelCount"), - "U1": ctx.Tr("user.static.registdate"), - "V1": ctx.Tr("user.static.countdate"), - } + + dataHeader := getExcelHeader(ctx) for k, v := range dataHeader { //设置单元格的值 xlsx.SetCellValue(sheetName, k, v) } for i, userRecord := range re { - rows := fmt.Sprint(i + 2) - - xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) - xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) - xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) - xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) - xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) - xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) - xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) - xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) - xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) - xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) - xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) - xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) - xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) - xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) - xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) - xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) - xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) - xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) - xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) - formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) - formatTime = userRecord.DataDate - xlsx.SetCellValue(sheetName, "V"+rows, formatTime) + row := i + 2 + writeExcel(row, xlsx, sheetName, userRecord) + // xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) + // xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) + // xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + // xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) + // xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) + // xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) + // xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) + // xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) + // xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) + // 
xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) + // xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) + // xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) + // xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) + // xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) + // xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) + // xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) + // xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) + // xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) + // xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) + // xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) + // formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") + // xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) + // formatTime = userRecord.DataDate + // xlsx.SetCellValue(sheetName, "V"+rows, formatTime) } //设置默认打开的表单 diff --git a/web_src/js/components/UserAnalysis.vue b/web_src/js/components/UserAnalysis.vue index 117984f95..c0e8c7411 100755 --- a/web_src/js/components/UserAnalysis.vue +++ b/web_src/js/components/UserAnalysis.vue @@ -64,13 +64,22 @@ + + + + + + + + + + + + + + + + + Date: Fri, 22 Apr 2022 11:45:12 +0800 Subject: [PATCH 15/63] =?UTF-8?q?=E7=94=A8=E6=88=B7=E5=88=86=E6=9E=90?= =?UTF-8?q?=E5=A2=9E=E5=8A=A0=E7=9B=B8=E5=85=B3=E5=B1=9E=E6=80=A7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: zouap --- routers/repo/user_data_analysis.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index bfc85bb80..63774877f 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -58,9 +58,9 @@ func getExcelHeader(ctx *context.Context) map[string]string { var i, tmp byte tmp = 'A' i = 0 - for j, value := range excelHeader { - excelColumn := string(tmp+i) + fmt.Sprint(j+1) - log.Info("excelColumn=" + excelColumn) + for _, value := range excelHeader { + excelColumn := string(tmp+i) + fmt.Sprint(1) + //log.Info("excelColumn=" + excelColumn) excelHeaderMap[excelColumn] = value i++ } From ef4fc4ad84fad74d314ed63868eefdff6a4f0769 Mon Sep 17 00:00:00 2001 From: zhoupzh Date: Fri, 22 Apr 2022 17:02:57 +0800 Subject: [PATCH 16/63] fix issue --- templates/repo/modelarts/trainjob/show.tmpl | 81 +++++++++++++-------- web_src/less/openi.less | 17 ++++- 2 files changed, 66 insertions(+), 32 deletions(-) diff --git a/templates/repo/modelarts/trainjob/show.tmpl b/templates/repo/modelarts/trainjob/show.tmpl index 88cabb4ab..92f98939e 100755 --- a/templates/repo/modelarts/trainjob/show.tmpl +++ b/templates/repo/modelarts/trainjob/show.tmpl @@ -249,7 +249,7 @@ td, th {
@@ -422,10 +422,10 @@ td, th {
- + - +
-{{template "base/footer" .}} \ No newline at end of file +{{template "base/footer" .}} diff --git a/templates/explore/datasets.tmpl b/templates/explore/datasets.tmpl index 3ffb0ec73..94a843d74 100644 --- a/templates/explore/datasets.tmpl +++ b/templates/explore/datasets.tmpl @@ -145,8 +145,7 @@
- {{.Repo.OwnerName}} / {{.Repo.Alias}}{{if .Recommend}}{{end}} - + {{.Repo.OwnerName}} / {{.Repo.Alias}} {{if $.IsSigned}}
@@ -163,7 +162,7 @@ {{end}}
-
{{.Title}}
+
{{.Title}}{{if .Recommend}}{{end}}
{{if or (.Category) (.Task) (.License)}}
{{if .Category}} From 6b06f4ce3207cdc139ab2b015e21fb842155402b Mon Sep 17 00:00:00 2001 From: zouap Date: Fri, 22 Apr 2022 17:48:34 +0800 Subject: [PATCH 18/63] =?UTF-8?q?=E7=94=A8=E6=88=B7=E5=88=86=E6=9E=90?= =?UTF-8?q?=E5=A2=9E=E5=8A=A0=E7=9B=B8=E5=85=B3=E5=B1=9E=E6=80=A7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: zouap --- routers/repo/user_data_analysis.go | 83 ++++++++++++++++-------------- 1 file changed, 43 insertions(+), 40 deletions(-) diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 63774877f..6d43a815a 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -55,12 +55,11 @@ func getExcelHeader(ctx *context.Context) map[string]string { excelHeader = append(excelHeader, ctx.Tr("user.static.countdate")) excelHeaderMap := make(map[string]string, 0) - var i, tmp byte - tmp = 'A' - i = 0 + + i := 0 for _, value := range excelHeader { - excelColumn := string(tmp+i) + fmt.Sprint(1) - //log.Info("excelColumn=" + excelColumn) + excelColumn := getColumn(i) + fmt.Sprint(1) + log.Info("excelColumn=" + excelColumn) excelHeaderMap[excelColumn] = value i++ } @@ -69,73 +68,77 @@ func getExcelHeader(ctx *context.Context) map[string]string { func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysisAll) { rows := fmt.Sprint(row) - var tmp byte - tmp = 'A' - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.ID) + tmp := 0 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.Name) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive)) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive)) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CodeMergeCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CodeMergeCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CommitCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.IssueCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.IssueCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CommentCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommentCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.FocusRepoCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusRepoCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.StarRepoCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.StarRepoCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.LoginCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.WatchedCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.WatchedCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, 
userRecord.CommitCodeSize) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCodeSize) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.SolveIssueCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SolveIssueCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.EncyclopediasCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.EncyclopediasCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CreateRepoCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CreateRepoCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CloudBrainTaskNum) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CloudBrainTaskNum) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CommitDatasetNum) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitDatasetNum) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CommitModelCount) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.FocusOtherUser) - tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CollectDataset) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CollectedDataset) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.RecommendDataset) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedDataset) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CollectImage) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendDataset) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.CollectedImage) - tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, userRecord.RecommendImage) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectImage) tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedImage) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage) + tmp = tmp + 1 formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, formatTime[0:len(formatTime)-3]) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3]) formatTime = userRecord.DataDate tmp = tmp + 1 - xlsx.SetCellValue(sheetName, string(tmp)+rows, formatTime) - + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime) +} +func getColumn(tmp int) string { + if tmp < 26 { + return fmt.Sprint('A' + tmp) + } else { + return "A" + fmt.Sprint('A'+(tmp-26)) + } } func queryUserDataPage(ctx *context.Context, tableName string, queryObj interface{}) { From 48229a6e7d9214e255b32eb2143f151b864d3a7d Mon Sep 17 00:00:00 2001 From: zouap Date: 
Fri, 22 Apr 2022 17:52:16 +0800 Subject: [PATCH 19/63] =?UTF-8?q?=E7=94=A8=E6=88=B7=E5=88=86=E6=9E=90?= =?UTF-8?q?=E5=A2=9E=E5=8A=A0=E7=9B=B8=E5=85=B3=E5=B1=9E=E6=80=A7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: zouap --- routers/repo/user_data_analysis.go | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 6d43a815a..065991631 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -55,8 +55,8 @@ func getExcelHeader(ctx *context.Context) map[string]string { excelHeader = append(excelHeader, ctx.Tr("user.static.countdate")) excelHeaderMap := make(map[string]string, 0) - - i := 0 + var i byte + i = 0 for _, value := range excelHeader { excelColumn := getColumn(i) + fmt.Sprint(1) log.Info("excelColumn=" + excelColumn) @@ -68,7 +68,8 @@ func getExcelHeader(ctx *context.Context) map[string]string { func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysisAll) { rows := fmt.Sprint(row) - tmp := 0 + var tmp byte + tmp = 0 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID) tmp = tmp + 1 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name) @@ -133,11 +134,13 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode tmp = tmp + 1 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime) } -func getColumn(tmp int) string { +func getColumn(tmp byte) string { + var tmpA byte + tmpA = 'A' if tmp < 26 { - return fmt.Sprint('A' + tmp) + return string(tmpA + tmp) } else { - return "A" + fmt.Sprint('A'+(tmp-26)) + return "A" + string(tmpA+(tmp-26)) } } From b3f193d0c25027e0af7986bef37ae1aba0b5c645 Mon Sep 17 00:00:00 2001 From: zouap Date: Fri, 22 Apr 2022 17:55:42 +0800 Subject: [PATCH 20/63] =?UTF-8?q?=E7=94=A8=E6=88=B7=E5=88=86=E6=9E=90?= =?UTF-8?q?=E5=A2=9E=E5=8A=A0=E7=9B=B8=E5=85=B3=E5=B1=9E=E6=80=A7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: zouap --- routers/repo/user_data_analysis.go | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 065991631..8dff133cf 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -112,7 +112,6 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode tmp = tmp + 1 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount) tmp = tmp + 1 - xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser) tmp = tmp + 1 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset) @@ -124,11 +123,10 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectImage) tmp = tmp + 1 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedImage) - + tmp = tmp + 1 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage) tmp = tmp + 1 formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - tmp = tmp + 1 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3]) formatTime = userRecord.DataDate tmp = tmp + 1 From adcb6af50adbb56e48d4bc7212ddf52c5754a121 Mon Sep 17 00:00:00 2001 From: zouap Date: Sun, 24 Apr 2022 09:50:19 +0800 Subject: [PATCH 21/63] 
=?UTF-8?q?=E6=8F=90=E4=BA=A4=E4=BB=A3=E7=A0=81?= =?UTF-8?q?=E3=80=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: zouap --- routers/repo/user_data_analysis.go | 52 ++---------------------------- 1 file changed, 2 insertions(+), 50 deletions(-) diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 8dff133cf..2280e8288 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -128,8 +128,9 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode tmp = tmp + 1 formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3]) - formatTime = userRecord.DataDate tmp = tmp + 1 + + formatTime = userRecord.DataDate xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime) } func getColumn(tmp byte) string { @@ -175,31 +176,6 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac for _, userRecord := range re { row++ writeExcel(row, xlsx, sheetName, userRecord) - // rows := fmt.Sprint(row) - // xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) - // xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) - // xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) - // xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) - // xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) - // xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) - // xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) - // xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) - // xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) - // xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) - // xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) - // xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) - // xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) - // xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) - // xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) - // xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - // xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) - // xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) - // xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) - // xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) - // formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - // xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) - // formatTime = userRecord.DataDate - // xlsx.SetCellValue(sheetName, "V"+rows, formatTime) } indexTotal += PAGE_SIZE @@ -347,30 +323,6 @@ func QueryUserStaticDataPage(ctx *context.Context) { for i, userRecord := range re { row := i + 2 writeExcel(row, xlsx, sheetName, userRecord) - // xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) - // xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) - // xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) - // xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) - // xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) - // xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) - // xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) - // 
xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) - // xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) - // xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) - // xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) - // xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) - // xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) - // xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) - // xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) - // xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - // xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) - // xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) - // xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) - // xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) - // formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - // xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) - // formatTime = userRecord.DataDate - // xlsx.SetCellValue(sheetName, "V"+rows, formatTime) } //设置默认打开的表单 From 74fe35720148914b79364ac5ecc93c838114ce12 Mon Sep 17 00:00:00 2001 From: zhoupzh Date: Sun, 24 Apr 2022 11:15:45 +0800 Subject: [PATCH 22/63] fix issue --- options/locale/locale_en-US.ini | 1 + options/locale/locale_zh-CN.ini | 1 + templates/admin/dataset/list.tmpl | 10 +++- templates/admin/dataset/search.tmpl | 27 ++++++----- templates/explore/datasets.tmpl | 15 +++--- web_src/js/index.js | 74 ++++++++++++++++++++++++----- 6 files changed, 96 insertions(+), 32 deletions(-) diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index f53bea7b1..8b78d369a 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -2512,6 +2512,7 @@ datasets.name=name datasets.private=Private datasets.recommend=Set recommend datasets.unrecommend=Set unrecommend +datasets.only_recommend = Only show platform recommendations cloudbrain.all_task_types=All Task Types cloudbrain.all_computing_resources=All Computing Resources diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index c82347d5e..ee57fd613 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -2522,6 +2522,7 @@ datasets.name=名称 datasets.private=私有 datasets.recommend=设为推荐 datasets.unrecommend=取消推荐 +datasets.only_recommend = 仅显示平台推荐 cloudbrain.all_task_types=全部任务类型 cloudbrain.all_computing_resources=全部计算资源 diff --git a/templates/admin/dataset/list.tmpl b/templates/admin/dataset/list.tmpl index ca1ca27af..9e4e72b68 100644 --- a/templates/admin/dataset/list.tmpl +++ b/templates/admin/dataset/list.tmpl @@ -4,7 +4,6 @@
{{template "base/alert" .}}

{{.i18n.Tr "admin.datasets.dataset_manage_panel"}} ({{.i18n.Tr "admin.total" .Total}}) @@ -12,6 +11,15 @@
{{template "admin/dataset/search" .}}
+
+
+
+ + +
+
+
+
diff --git a/templates/admin/dataset/search.tmpl b/templates/admin/dataset/search.tmpl index 5d01d836c..749065401 100644 --- a/templates/admin/dataset/search.tmpl +++ b/templates/admin/dataset/search.tmpl @@ -6,18 +6,18 @@ @@ -27,3 +27,6 @@ + \ No newline at end of file diff --git a/templates/explore/datasets.tmpl b/templates/explore/datasets.tmpl index 94a843d74..7b2577900 100644 --- a/templates/explore/datasets.tmpl +++ b/templates/explore/datasets.tmpl @@ -121,12 +121,12 @@ @@ -140,6 +140,7 @@ {{end}}
+ 仅显示平台推荐
{{range $k, $v :=.Datasets}}
@@ -162,7 +163,7 @@ {{end}}
-
{{.Title}}{{if .Recommend}}{{end}}
+
{{.Title}}{{if .Recommend}}{{end}}
{{if or (.Category) (.Task) (.License)}}
{{if .Category}} diff --git a/web_src/js/index.js b/web_src/js/index.js index c558e7624..ecd637ad0 100755 --- a/web_src/js/index.js +++ b/web_src/js/index.js @@ -3700,6 +3700,37 @@ function initVueEditAbout() { } function initVueDataset() { + if($('#dataset_check').length){ + if(location.search.indexOf('recommend=true')!==-1){ + $('#dataset_check').checkbox('set checked') + }else{ + $('#dataset_check').checkbox('set unchecked') + } + $('#dataset_check').checkbox({ + onChecked: function() { + if(location.search){ + const params = new URLSearchParams(location.search) + if(params.has('recommend')){ + params.delete('recommend') + location.href = AppSubUrl + location.pathname + '?' + params.toString() + '&recommend=true' + }else{ + location.href = `${window.config.AppSubUrl}/admin/datasets${location.search}&recommend=true` + } + }else{ + location.href = `${window.config.AppSubUrl}/admin/datasets?recommend=true` + } + }, + onUnchecked: function() { + if(location.search=='?recommend=true'){ + location.href = AppSubUrl + location.pathname + }else{ + const params = new URLSearchParams(location.search) + params.delete('recommend') + location.href = AppSubUrl + location.pathname + '?' + params.toString() + } + }, + }) + } $('.set_dataset').on('click', function(){ const $this = $(this); let link = $this.data('url') @@ -3783,24 +3814,13 @@ function initVueDataset() { if(document.getElementById('dataset-file-desc')){ dataset_file_desc = document.getElementById('dataset-file-desc').value } - - // getEditInit(){ - // if($('#dataset-edit-value')){ - // $this = $('#dataset-edit-value') - // this.ruleForm.title = $this.data('edit-title') || '' - // this.ruleForm.description = $this.data('edit-description') || '' - // this.ruleForm.category = $this.data('edit-category') || '' - // this.ruleForm.task = $this.data('edit-task') || '' - // this.ruleForm.license = $this.data('edit-license') || '' - // this.ruleForm.id = $this.data('edit-id')|| '' - // } - // }, new Vue({ delimiters: ['${', '}'], el, data: { suburl: AppSubUrl, url:'', + checked:false, type:0, desc:'', descfile:'', @@ -3897,6 +3917,12 @@ function initVueDataset() { this.getCurrentRepoDataset(this.repolink,this.cloudbrainType) } + const params = new URLSearchParams(location.search) + if (params.has('recommend') && params.get('recommend')=='true'){ + this.checked = true + }else{ + this.checked = false + } }, created(){ if(document.getElementById('postPath')){ @@ -3937,6 +3963,30 @@ function initVueDataset() { } }, + handleCheckedChange(val){ + if(val){ + if(location.search){ + const params = new URLSearchParams(location.search) + if(params.has('recommend')){ + params.delete('recommend') + let search = params.toString() + location.href = `${AppSubUrl}/explore/datasets?${search}&recommend=${val}` + }else{ + location.href = `${AppSubUrl}/explore/datasets${location.search}&recommend=${val}` + } + }else{ + location.href = `${AppSubUrl}/explore/datasets?recommend=${val}` + } + }else{ + if(location.search=='?recommend=true'){ + location.href = AppSubUrl + location.pathname + }else{ + const params = new URLSearchParams(location.search) + params.delete('recommend') + location.href = AppSubUrl + location.pathname + '?' 
+ params.toString() + } + } + }, createDataset(formName){ let _this = this this.$refs[formName].validate((valid)=>{ From 8d5b6b93d1fc41650226dbc4c58c7dbe474e03d9 Mon Sep 17 00:00:00 2001 From: zhoupzh Date: Sun, 24 Apr 2022 11:16:29 +0800 Subject: [PATCH 23/63] fix issue --- templates/admin/dataset/search.tmpl | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/templates/admin/dataset/search.tmpl b/templates/admin/dataset/search.tmpl index 749065401..6b42fb6cf 100644 --- a/templates/admin/dataset/search.tmpl +++ b/templates/admin/dataset/search.tmpl @@ -26,7 +26,4 @@
- - \ No newline at end of file + \ No newline at end of file From c16f77e6a337994e32e593c1567e2bbd5c12c0d8 Mon Sep 17 00:00:00 2001 From: wangjr Date: Sun, 24 Apr 2022 11:19:23 +0800 Subject: [PATCH 24/63] =?UTF-8?q?=E5=88=86=E6=94=AF=E5=8F=AF=E7=BC=96?= =?UTF-8?q?=E8=BE=91?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../repo/issue/branch_selector_field.tmpl | 4 +- .../repo/issue/view_content/sidebar.tmpl | 14 +++-- web_src/js/index.js | 60 +++++++++++++------ 3 files changed, 54 insertions(+), 24 deletions(-) diff --git a/templates/repo/issue/branch_selector_field.tmpl b/templates/repo/issue/branch_selector_field.tmpl index 4f80c13e5..265c14765 100644 --- a/templates/repo/issue/branch_selector_field.tmpl +++ b/templates/repo/issue/branch_selector_field.tmpl @@ -14,12 +14,12 @@
- + {{svg "octicon-git-branch" 16}} {{.i18n.Tr "repo.branches"}} - + {{.i18n.Tr "repo.tags"}} diff --git a/templates/repo/issue/view_content/sidebar.tmpl b/templates/repo/issue/view_content/sidebar.tmpl index c954ce0d6..a08656702 100644 --- a/templates/repo/issue/view_content/sidebar.tmpl +++ b/templates/repo/issue/view_content/sidebar.tmpl @@ -8,21 +8,24 @@ {{if .Issue.Ref}}{{$.RefEndName}}{{else}}{{.i18n.Tr "repo.issues.no_ref"}}{{end}}
-
{{scope.row.numStars}}
- - - + 设为推荐 复制地址
diff --git a/web_src/js/features/images.js b/web_src/js/features/images.js index a6631f69c..0cafb3901 100644 --- a/web_src/js/features/images.js +++ b/web_src/js/features/images.js @@ -165,12 +165,13 @@ export default async function initImage(){ return false }) $('#cancel_submit_image').click(()=>{ + console.log(pageform) if(link.includes('cloudbrain')){ let repoLink = link.split('cloudbrain')[0] location.href = `${window.config.AppSubUrl}${repoLink}debugjob?debugListType=all` }else if(pageform=='imageSquare'){ location.href = `${window.config.AppSubUrl}/explore/images?type=myimage` - }else if(pageform=='imageAdmin'){ + }else if(pageform){ location.href = `${window.config.AppSubUrl}/admin/images` } }) From adb78aa4773d2ba3ee643ad44f8e705c3bee0332 Mon Sep 17 00:00:00 2001 From: chenyifan01 Date: Sun, 24 Apr 2022 14:28:05 +0800 Subject: [PATCH 26/63] #1860 fix bug --- models/helper_environment.go | 22 +++++++++++++--------- modules/repository/hooks.go | 2 +- modules/setting/repository.go | 3 +++ modules/ssh/ssh.go | 26 ++++++++++++++++++++++++++ routers/repo/http.go | 4 ++++ 5 files changed, 47 insertions(+), 10 deletions(-) diff --git a/models/helper_environment.go b/models/helper_environment.go index bc9d4c8fc..7248f6f61 100644 --- a/models/helper_environment.go +++ b/models/helper_environment.go @@ -12,15 +12,19 @@ import ( // env keys for git hooks need const ( - EnvRepoName = "GITEA_REPO_NAME" - EnvRepoUsername = "GITEA_REPO_USER_NAME" - EnvRepoIsWiki = "GITEA_REPO_IS_WIKI" - EnvPusherName = "GITEA_PUSHER_NAME" - EnvPusherEmail = "GITEA_PUSHER_EMAIL" - EnvPusherID = "GITEA_PUSHER_ID" - EnvKeyID = "GITEA_KEY_ID" - EnvIsDeployKey = "GITEA_IS_DEPLOY_KEY" - EnvIsInternal = "GITEA_INTERNAL_PUSH" + EnvRepoName = "GITEA_REPO_NAME" + EnvRepoUsername = "GITEA_REPO_USER_NAME" + EnvRepoIsWiki = "GITEA_REPO_IS_WIKI" + EnvPusherName = "GITEA_PUSHER_NAME" + EnvPusherEmail = "GITEA_PUSHER_EMAIL" + EnvPusherID = "GITEA_PUSHER_ID" + EnvKeyID = "GITEA_KEY_ID" + EnvIsDeployKey = "GITEA_IS_DEPLOY_KEY" + EnvIsInternal = "GITEA_INTERNAL_PUSH" + EnvRepoSize = "REPO_CURRENT_SIZE" + EnvRepoMaxFileSize = "REPO_MAX_FILE_SIZE" + EnvRepoMaxSize = "REPO_MAX_SIZE" + EnvPushSizeCheckFlag = "PUSH_SIZE_CHECK_FLAG" ) // InternalPushingEnvironment returns an os environment to switch off hooks on push diff --git a/modules/repository/hooks.go b/modules/repository/hooks.go index 2b29b7fe0..7bcc5b550 100644 --- a/modules/repository/hooks.go +++ b/modules/repository/hooks.go @@ -36,7 +36,7 @@ func getHookTemplates() (hookNames, hookTpls, giteaHookTpls, sizeLimitTpls []str fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf), } sizeLimitTpls = []string{ - fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=0\n\n\nstatus=\"$EXIT_SUCCESS\"\n\nfunction readINI()\n{\n FILENAME='%s'; SECTION=$1; KEY=$2\n RESULT=`awk -F '=' '/\\['$SECTION'\\]/{a=1}a==1&&$1~/'$KEY'/{print $2;exit}' $FILENAME`\n echo $RESULT\n}\n\n# skip this hook entirely if shell check is not open\ncheck_flag=$(readINI 'repository.upload' 'SHELL_FLAG')\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size 
limit\n#######################################\n\n# get the maximum filesize configured for this repository or the default\nfunction get_file_maxsize() {\n local value;\n value=$(readINI 'repository.upload' 'FILE_MAX_SIZE')\n if [[ \"$?\" != $EXIT_SUCCESS ]] || [[ -z \"$value\" ]]; then\n echo \"$DEFAULT_FILE_MAXSIZE_MB\"\n return \"$EXIT_SUCCESS\"\n fi\n echo \"$value\"\n return \"$EXIT_SUCCESS\"\n}\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"$(get_file_maxsize)\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. are referenced by\n# another branch or tag).\n\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" --tags=\\* | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)')\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\n# rewrite IFS to seperate line in $files\nIFS=$'\\n'\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n unset IFS\n temp_array=(${file})\n # add all commit files size\n push_size=`expr $push_size + ${temp_array[2]}`\n if [[ ${temp_array[2]} -gt $maxsize ]]; then\n\t if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\n\t fi\n\t echo -e \"\\033[31m- ${temp_array[3]} \\033[0m (ref: ${refname}) \"\n fi\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\nfunction get_repo_maxsize() {\n local value;\n value=$(readINI 'repository' 'REPO_MAX_SIZE')\n if [[ \"$?\" != $EXIT_SUCCESS ]] || [[ -z \"$value\" ]]; then\n echo \"$DEFAULT_FILE_MAXSIZE\"\n return \"$EXIT_SUCCESS\"\n fi\n echo \"$value\"\n return \"$EXIT_SUCCESS\"\n}\n\n\nsizelimit_mb=\"$(get_repo_maxsize)\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nstr=`du -sb .`\narr=($str)\nreposize_b=${arr[0]}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS\n", setting.ScriptType, setting.CustomConf), + fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly 
EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=1\n\n\nstatus=\"$EXIT_SUCCESS\"\n\n# skip this hook entirely if shell check is not open\ncheck_flag=${PUSH_SIZE_CHECK_FLAG}\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"${REPO_MAX_FILE_SIZE}\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. are referenced by\n# another branch or tag).\n\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" --tags=\\* | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)')\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\n# rewrite IFS to seperate line in $files\nIFS=$'\\n'\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n unset IFS\n temp_array=(${file})\n # add all commit files size\n push_size=`expr $push_size + ${temp_array[2]}`\n if [[ ${temp_array[2]} -gt $maxsize ]]; then\n\t if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\n\t fi\n\t echo -e \"\\033[31m- ${temp_array[3]} \\033[0m (ref: ${refname}) \"\n fi\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\n\nsizelimit_mb=\"${REPO_MAX_SIZE}\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nreposize_b=${REPO_CURRENT_SIZE}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS\n", setting.ScriptType, setting.CustomConf), fmt.Sprintf(""), fmt.Sprintf(""), } diff --git a/modules/setting/repository.go b/modules/setting/repository.go index dceb48f16..ee4f8b379 100644 --- a/modules/setting/repository.go +++ b/modules/setting/repository.go @@ -56,6 +56,7 @@ var ( FileMaxSize int64 MaxFiles int TotalMaxSize int64 + ShellFlag int } `ini:"-"` // Repository local settings @@ -125,6 +126,7 @@ var ( FileMaxSize int64 MaxFiles int TotalMaxSize int64 + ShellFlag int }{ Enabled: true, TempPath: "data/tmp/uploads", @@ -132,6 +134,7 @@ var ( FileMaxSize: 30, MaxFiles: 10, TotalMaxSize: 
1024, + ShellFlag: 0, }, // Repository local settings diff --git a/modules/ssh/ssh.go b/modules/ssh/ssh.go index e7a694683..ac590a057 100644 --- a/modules/ssh/ssh.go +++ b/modules/ssh/ssh.go @@ -69,8 +69,17 @@ func sessionHandler(session ssh.Session) { os.Environ(), "SSH_ORIGINAL_COMMAND="+command, "SKIP_MINWINSVC=1", + models.EnvRepoMaxFileSize+"="+fmt.Sprint(setting.Repository.Upload.FileMaxSize), + models.EnvRepoMaxSize+"="+fmt.Sprint(setting.Repository.RepoMaxSize), + models.EnvPushSizeCheckFlag+"="+fmt.Sprint(setting.Repository.Upload.ShellFlag), ) + if strings.HasPrefix(command, "git-receive-pack") { + repo := getRepoFromCommandStr(command) + if repo != nil { + cmd.Env = append(cmd.Env, models.EnvRepoSize+"="+fmt.Sprint(repo.Size)) + } + } stdout, err := cmd.StdoutPipe() if err != nil { log.Error("SSH: StdoutPipe: %v", err) @@ -131,6 +140,23 @@ func sessionHandler(session ssh.Session) { } } +func getRepoFromCommandStr(command string) *models.Repository { + repoPath := strings.TrimPrefix(command, "git-receive-pack '") + repoPath = strings.TrimSuffix(repoPath, ".git'") + if repoPath != "" { + nameArray := strings.Split(repoPath, "/") + if len(nameArray) >= 2 { + ownerName := nameArray[0] + repoName := nameArray[1] + if repo, err := models.GetRepositoryByOwnerAndName(ownerName, repoName); err == nil { + return repo + } + } + } + return nil + +} + func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool { if ctx.User() != setting.SSH.BuiltinServerUser { return false diff --git a/routers/repo/http.go b/routers/repo/http.go index 87406a2c3..d96fea82e 100644 --- a/routers/repo/http.go +++ b/routers/repo/http.go @@ -256,6 +256,10 @@ func HTTP(ctx *context.Context) { models.EnvPusherName + "=" + authUser.Name, models.EnvPusherID + fmt.Sprintf("=%d", authUser.ID), models.EnvIsDeployKey + "=false", + models.EnvRepoSize + "=" + fmt.Sprint(repo.Size), + models.EnvRepoMaxFileSize + "=" + fmt.Sprint(setting.Repository.Upload.FileMaxSize), + models.EnvRepoMaxSize + "=" + fmt.Sprint(setting.Repository.RepoMaxSize), + models.EnvPushSizeCheckFlag + "=" + fmt.Sprint(setting.Repository.Upload.ShellFlag), } if !authUser.KeepEmailPrivate { environ = append(environ, models.EnvPusherEmail+"="+authUser.Email) From dff00b1796f9260e1698f8e6747893af6093ed1b Mon Sep 17 00:00:00 2001 From: wangjr Date: Sun, 24 Apr 2022 14:47:04 +0800 Subject: [PATCH 27/63] =?UTF-8?q?=E6=8F=90=E4=BA=A4=E4=BB=A3=E7=A0=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- web_src/js/index.js | 46 +++++++++++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 16 deletions(-) diff --git a/web_src/js/index.js b/web_src/js/index.js index edde96054..679409b90 100755 --- a/web_src/js/index.js +++ b/web_src/js/index.js @@ -231,7 +231,7 @@ function initLabelEdit() { }); } -function updateIssuesMeta(url, action, issueIds, elementId, isAdd,ref_b) { +function updateIssuesMeta_new(url, action, issueIds, elementId, isAdd,ref_b) { return new Promise((resolve) => { $.ajax({ type: 'POST', @@ -249,6 +249,24 @@ function updateIssuesMeta(url, action, issueIds, elementId, isAdd,ref_b) { }); } +function updateIssuesMeta(url, action, issueIds, elementId) { + return new Promise((resolve) => { + $.ajax({ + type: 'POST', + url, + data: { + _csrf: csrf, + action, + issue_ids: issueIds, + id: elementId, + is_add: isAdd, + }, + success: resolve + }); + }); +} + + function initRepoStatusChecker() { const migrating = $('#repo_migrating'); $('#repo_migrating_failed').hide(); @@ -495,7 +513,6 @@ 
function initCommentForm() { label['issue-id'], elementId, label['is-checked'], - '' ); promises.push(promise); }); @@ -531,7 +548,7 @@ function initCommentForm() { } console.log("$(this)",$(this)) console.log("$listMenu",$listMenu) - const isRef = ''; + //const isRef = ''; // if ($listMenu.data('update-url').includes("ref")){ // isRef = $(this).data('data-name'); // } @@ -541,7 +558,6 @@ function initCommentForm() { $listMenu.data('issue-id'), $(this).data('id'), $(this).data('is-checked'), - '' ); $listMenu.data('action', 'update'); // Update to reload the page when we updated items return false; @@ -612,8 +628,8 @@ function initCommentForm() { 'clear', $listMenu.data('issue-id'), '', - '', - '', + '' + ).then(reload); } @@ -670,10 +686,9 @@ function initCommentForm() { let ref = '' if (select_id=='.select-branch'){ ref = $(this).data('name'); - console.log("ref:",ref) } - console.log("ref:",ref) - updateIssuesMeta( + + updateIssuesMeta_new( $menu.data('update-url'), '', $menu.data('issue-id'), @@ -719,8 +734,7 @@ function initCommentForm() { '', $menu.data('issue-id'), $(this).data('id'), - $(this).data('is-checked'), - '' + $(this).data('is-checked') ).then(reload); } @@ -733,7 +747,7 @@ function initCommentForm() { // Milestone and assignee selectItem('.select-milestone', '#milestone_id'); selectItem('.select-assignee', '#assignee_id'); - selectItem('.select-branch', ''); + //selectItem('.select-branch', ''); } function initInstall() { @@ -836,9 +850,9 @@ function initIssueComments() { const issueId = $(this).data('issue-id'); const id = $(this).data('id'); const isChecked = $(this).data('is-checked'); - const ref = $(this).data('name'); + //const ref = $(this).data('name'); event.preventDefault(); - updateIssuesMeta(url, '', issueId, id, isChecked,ref).then(reload); + updateIssuesMeta(url, '', issueId, id, isChecked).then(reload); }); $(document).on('click', (event) => { @@ -2926,12 +2940,12 @@ $(document).ready(async () => { .get() .join(); console.log("this:",this) - + const {url} = this.dataset; if (elementId === '0' && url.substr(-9) === '/assignee') { elementId = ''; action = 'clear'; } - updateIssuesMeta(url, action, issueIDs, elementId, '','').then(() => { + updateIssuesMeta(url, action, issueIDs, elementId, '').then(() => { // NOTICE: This reset of checkbox state targets Firefox caching behaviour, as the checkboxes stay checked after reload if (action === 'close' || action === 'open') { // uncheck all checkboxes From 20c24e8be5cf1ccf5ec8e3f9afc15fd8635b58d2 Mon Sep 17 00:00:00 2001 From: wangjr Date: Sun, 24 Apr 2022 15:01:52 +0800 Subject: [PATCH 28/63] =?UTF-8?q?=E5=8F=96=E6=B6=88=E6=B3=A8=E9=87=8A?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- web_src/js/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web_src/js/index.js b/web_src/js/index.js index 679409b90..99fa6fbbd 100755 --- a/web_src/js/index.js +++ b/web_src/js/index.js @@ -747,7 +747,7 @@ function initCommentForm() { // Milestone and assignee selectItem('.select-milestone', '#milestone_id'); selectItem('.select-assignee', '#assignee_id'); - //selectItem('.select-branch', ''); + selectItem('.select-branch', ''); } function initInstall() { From e3be711d0720101d0c2c14eb4b95df2fc3e7ec5b Mon Sep 17 00:00:00 2001 From: zhoupzh Date: Sun, 24 Apr 2022 15:08:08 +0800 Subject: [PATCH 29/63] fix issue --- templates/repo/cloudbrain/show.tmpl | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git 
a/templates/repo/cloudbrain/show.tmpl b/templates/repo/cloudbrain/show.tmpl index 5fabaa937..932a588c3 100755 --- a/templates/repo/cloudbrain/show.tmpl +++ b/templates/repo/cloudbrain/show.tmpl @@ -56,7 +56,7 @@ margin:10px 5px ; } .tab_2_content { - min-height: 380px; + min-height: 420px; margin-left: 10px; } .ac-grid { @@ -289,6 +289,16 @@ td, th {
+
+ + +
+ {{$.i18n.Tr "repo.modelarts.code_version"}} + +
+ {{.BranchName}} +
+
{{$.i18n.Tr "cloudbrain.gpu_type"}} From 5002a893121aa194344f991c57454e7b19b8fb67 Mon Sep 17 00:00:00 2001 From: ychao_1983 Date: Sun, 24 Apr 2022 16:43:49 +0800 Subject: [PATCH 30/63] =?UTF-8?q?=E6=8F=90=E4=BA=A4=E4=BB=A3=E7=A0=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- models/dataset.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/models/dataset.go b/models/dataset.go index e841261c7..d3a142742 100755 --- a/models/dataset.go +++ b/models/dataset.go @@ -155,10 +155,6 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { if opts.RepoID > 0 { cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID}) } - if opts.RecommendOnly { - cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly}) - } - if opts.IncludePublic { cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) cond = cond.And(builder.Eq{"attachment.is_private": false}) @@ -197,6 +193,10 @@ func generateFilterCond(opts *SearchDatasetOptions, cond builder.Cond) builder.C cond = cond.And(builder.Eq{"dataset.license": opts.License}) } + if opts.RecommendOnly { + cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly}) + } + return cond } From 00b57718d11ec82df5508dc7a8a343d877633a5e Mon Sep 17 00:00:00 2001 From: wangjr Date: Sun, 24 Apr 2022 16:53:41 +0800 Subject: [PATCH 31/63] =?UTF-8?q?=E4=BF=AE=E6=94=B9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- web_src/js/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web_src/js/index.js b/web_src/js/index.js index 99fa6fbbd..25185b4bf 100755 --- a/web_src/js/index.js +++ b/web_src/js/index.js @@ -249,7 +249,7 @@ function updateIssuesMeta_new(url, action, issueIds, elementId, isAdd,ref_b) { }); } -function updateIssuesMeta(url, action, issueIds, elementId) { +function updateIssuesMeta(url, action, issueIds, elementId,isAdd) { return new Promise((resolve) => { $.ajax({ type: 'POST', From 5d9f9b3e571d2ba43733e47316b56d95cd56f50e Mon Sep 17 00:00:00 2001 From: wangjr Date: Sun, 24 Apr 2022 17:26:39 +0800 Subject: [PATCH 32/63] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E4=BB=A3=E7=A0=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- web_src/js/index.js | 36 ++++++------------------------------ 1 file changed, 6 insertions(+), 30 deletions(-) diff --git a/web_src/js/index.js b/web_src/js/index.js index 25185b4bf..09652f57b 100755 --- a/web_src/js/index.js +++ b/web_src/js/index.js @@ -231,24 +231,6 @@ function initLabelEdit() { }); } -function updateIssuesMeta_new(url, action, issueIds, elementId, isAdd,ref_b) { - return new Promise((resolve) => { - $.ajax({ - type: 'POST', - url, - data: { - _csrf: csrf, - action, - issue_ids: issueIds, - id: elementId, - is_add: isAdd, - ref: ref_b, - }, - success: resolve - }); - }); -} - function updateIssuesMeta(url, action, issueIds, elementId,isAdd) { return new Promise((resolve) => { $.ajax({ @@ -546,12 +528,7 @@ function initCommentForm() { .removeClass('invisible'); $(this).data('is-checked', 'add'); } - console.log("$(this)",$(this)) - console.log("$listMenu",$listMenu) - //const isRef = ''; - // if ($listMenu.data('update-url').includes("ref")){ - // isRef = $(this).data('data-name'); - // } + updateIssuesMeta( $listMenu.data('update-url'), '', @@ -683,18 +660,17 @@ function initCommentForm() { $(this).addClass('selected active'); if (hasUpdateAction) { - let ref = '' - if 
(select_id=='.select-branch'){ - ref = $(this).data('name'); - } + //let ref = '' + //if (select_id=='.select-branch'){ + // ref = $(this).data('name'); + // } - updateIssuesMeta_new( + updateIssuesMeta( $menu.data('update-url'), '', $menu.data('issue-id'), $(this).data('id'), $(this).data('is-checked'), - ref ).then(reload); } switch (input_id) { From 8cb22839149a4e8f2fe7897a0f04548f47aed0ba Mon Sep 17 00:00:00 2001 From: chenyifan01 Date: Sun, 24 Apr 2022 17:38:50 +0800 Subject: [PATCH 33/63] #1821 update --- routers/repo/issue.go | 10 ++++++++-- routers/routes/routes.go | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/routers/repo/issue.go b/routers/repo/issue.go index 875c02024..d28936594 100755 --- a/routers/repo/issue.go +++ b/routers/repo/issue.go @@ -1304,17 +1304,23 @@ func UpdateIssueContent(ctx *context.Context) { // UpdateIssueRef change issue's code reference func UpdateIssueRef(ctx *context.Context) { - issue := GetActionIssue(ctx) + issues := getActionIssues(ctx) if ctx.Written() { return } + issue := issues[0] + if issue == nil { + log.Error("UpdateIssueRef param error ") + return + } + if !ctx.IsSigned || (ctx.User.ID != issue.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) { ctx.Error(403) return } - ref := ctx.Query("ref") + ref := ctx.Query("id") if err := issue_service.ChangeRef(issue, ctx.User, ref); err != nil { ctx.ServerError("ChangeRef", err) return diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 7af42867c..bb57637d7 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -868,7 +868,6 @@ func RegisterRoutes(m *macaron.Macaron) { m.Group("/:index", func() { m.Post("/title", repo.UpdateIssueTitle) m.Post("/content", repo.UpdateIssueContent) - m.Post("/ref", repo.UpdateIssueRef) m.Post("/watch", repo.IssueWatch) m.Group("/dependency", func() { m.Post("/add", repo.AddDependency) @@ -891,6 +890,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/labels", reqRepoIssuesOrPullsWriter, repo.UpdateIssueLabel) m.Post("/milestone", reqRepoIssuesOrPullsWriter, repo.UpdateIssueMilestone) m.Post("/assignee", reqRepoIssuesOrPullsWriter, repo.UpdateIssueAssignee) + m.Post("/ref", reqRepoIssuesOrPullsWriter, repo.UpdateIssueRef) m.Post("/request_review", reqRepoIssuesOrPullsReader, repo.UpdatePullReviewRequest) m.Post("/status", reqRepoIssuesOrPullsWriter, repo.UpdateIssueStatus) m.Post("/resolve_conversation", reqRepoIssuesOrPullsReader, repo.UpdateResolveConversation) From 82c591c4a8a1e60dbe9dbde0ef0e47d13151d74c Mon Sep 17 00:00:00 2001 From: chenyifan01 Date: Sun, 24 Apr 2022 18:02:05 +0800 Subject: [PATCH 34/63] #1982 fix bug --- routers/repo/modelarts.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index b713f385f..5e9217b4b 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -51,6 +51,9 @@ const ( func DebugJobIndex(ctx *context.Context) { listType := ctx.Query("debugListType") + if listType == "" { + listType = models.AllResource + } ctx.Data["ListType"] = listType MustEnableCloudbrain(ctx) repo := ctx.Repo.Repository From fac91851f95edc0f1617a7c434b253864e8289d5 Mon Sep 17 00:00:00 2001 From: zhoupzh Date: Sun, 24 Apr 2022 18:04:07 +0800 Subject: [PATCH 35/63] fix issue --- .../cloudbrain/{images.html => images.tmpl} | 0 templates/repo/modelarts/trainjob/show.tmpl | 30 ++++++++++++------- web_src/js/components/images/adminImages.vue | 2 +- 3 files changed, 21 insertions(+), 11 deletions(-) rename 
templates/admin/cloudbrain/{images.html => images.tmpl} (100%) diff --git a/templates/admin/cloudbrain/images.html b/templates/admin/cloudbrain/images.tmpl similarity index 100% rename from templates/admin/cloudbrain/images.html rename to templates/admin/cloudbrain/images.tmpl diff --git a/templates/repo/modelarts/trainjob/show.tmpl b/templates/repo/modelarts/trainjob/show.tmpl index 92f98939e..263abb05c 100755 --- a/templates/repo/modelarts/trainjob/show.tmpl +++ b/templates/repo/modelarts/trainjob/show.tmpl @@ -844,7 +844,8 @@ td, th { let scrollHeight = container.scrollHeight let clientHeight = container.clientHeight let scrollLeft = container.scrollLeft - if((parseInt(scrollTop) + clientHeight == scrollHeight || parseInt(scrollTop) + clientHeight +1 == scrollHeight || parseInt(scrollTop) + clientHeight - 1 == scrollHeight) && (scrollLeft===0)){ + console.log(parseInt(scrollTop),clientHeight,scrollHeight) + if(((parseInt(scrollTop) + clientHeight == scrollHeight || parseInt(scrollTop) + clientHeight +1 == scrollHeight || parseInt(scrollTop) + clientHeight - 1 == scrollHeight)) && parseInt(scrollTop)!==0){ let end_line = $(`#log${version_name} input[name=end_line]`).val() $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${end_line}&lines=50&order=desc`, (data) => { if (data.Lines == 0){ @@ -868,7 +869,7 @@ td, th { }); } - if(scrollTop == 1 && scrollLeft==0){ + if([1,2,3,4,5].includes(scrollTop) && scrollLeft==0){ let start_line = $(`#log${version_name} input[name=start_line]`).val() $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${start_line}&lines=50&order=asc`, (data) => { @@ -897,7 +898,6 @@ td, th { const dist = Math.ceil(needScrollTop / 10); _currentY += dist; //移动一个十分之一 - console.log(_currentY, targetY) dom.scrollTo(currentX || 0, _currentY); // 如果移动幅度小于十个像素,直接移动,否则递归调用,实现动画效果 if (needScrollTop > 10 || needScrollTop < -10) { @@ -909,24 +909,34 @@ td, th { } $('.log_top').click(function(){ - let logContentDom = document.querySelector('.log') - if(!logContentDom) - return - let version_name = $('.log_top').data('version') + // let logContentDom = document.querySelector('.log') + // if(!logContentDom) + // return + // let version_name = $('.log_top').data('version') + let version_name = $(this).data('version') + let logContentDom = document.querySelector(`#log${version_name}`) + $(`#log_file${version_name}`).siblings('pre').remove() $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=asc`, (data) => { $(`#log${version_name} input[name=end_line]`).val(data.EndLine) //如果变动就改变所对应的值 $(`#log${version_name} input[name=start_line]`).val(data.StartLine) $(`#log${version_name}`).prepend('
' + data.Content)
+            $(`.message${version_name} #header`).text('您已翻阅至日志顶部')
+            $(`.message${version_name}`).css('display', 'block')
+            setTimeout(function(){
+                $(`.message${version_name}`).css('display', 'none')
+            }, 1000)
             scrollAnimation(logContentDom, logContentDom.scrollTop, 0);
         })
 
     })
-    $('.log_bottom').click(function(){
+    $('.log_bottom').click(function(e){
         
-        let logContentDom = document.querySelector('.log')
-        let version_name = $('.log_bottom').data('version')
+        console.log($(this).data('version'))
+        let version_name = $(this).data('version')
+        let logContentDom = document.querySelector(`#log${version_name}`)
+        console.log(version_name)
         console.log($(`#log${version_name}`).siblings('pre'))
         $(`#log_file${version_name}`).siblings('pre').remove()
         $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=desc`, (data) => {
diff --git a/web_src/js/components/images/adminImages.vue b/web_src/js/components/images/adminImages.vue
index 3b372114c..417d4e530 100644
--- a/web_src/js/components/images/adminImages.vue
+++ b/web_src/js/components/images/adminImages.vue
@@ -22,7 +22,7 @@
                         
                             全部
                             公开
-                            私有
+                            私有
                         
                          
             

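A note on the log paging introduced in the show.tmpl change above: the page keeps hidden start_line / end_line inputs per version tab and requests the next 50 log lines whenever the user scrolls past either edge of the rendered log, paging forwards with order=desc from end_line and backwards with order=asc from start_line. The following is a minimal Go model of that bookkeeping, a sketch only; the field and parameter names are taken from the markup above, not from a published API, and the example version name is made up.

    package main

    import "fmt"

    // logWindow mirrors the two hidden inputs kept per version tab in show.tmpl.
    type logWindow struct {
        StartLine string // filled from data.StartLine in the AJAX response
        EndLine   string // filled from data.EndLine
    }

    // nextQuery picks base_line and order for the next request: scrolling past the top
    // pages backwards (asc from StartLine), past the bottom pages forwards (desc from EndLine).
    func (w logWindow) nextQuery(scrollUp bool) (baseLine, order string) {
        if scrollUp {
            return w.StartLine, "asc"
        }
        return w.EndLine, "desc"
    }

    func main() {
        w := logWindow{StartLine: "100", EndLine: "150"}
        base, order := w.nextQuery(false)
        fmt.Printf("?version_name=V0001&base_line=%s&lines=50&order=%s\n", base, order)
    }
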
From 016fcf5f1e5a194a44387a510b67beda8485a771 Mon Sep 17 00:00:00 2001
From: zhoupzh 
Date: Sun, 24 Apr 2022 18:06:47 +0800
Subject: [PATCH 36/63] fix issue

---
 templates/repo/modelarts/trainjob/show.tmpl | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/templates/repo/modelarts/trainjob/show.tmpl b/templates/repo/modelarts/trainjob/show.tmpl
index 263abb05c..c0a37e157 100755
--- a/templates/repo/modelarts/trainjob/show.tmpl
+++ b/templates/repo/modelarts/trainjob/show.tmpl
@@ -844,7 +844,6 @@ td, th {
         let scrollHeight = container.scrollHeight
         let clientHeight = container.clientHeight
         let scrollLeft = container.scrollLeft
-        console.log(parseInt(scrollTop),clientHeight,scrollHeight)
         if(((parseInt(scrollTop)  + clientHeight == scrollHeight || parseInt(scrollTop)  + clientHeight +1 == scrollHeight || parseInt(scrollTop)  + clientHeight - 1 == scrollHeight)) && parseInt(scrollTop)!==0){
             let end_line = $(`#log${version_name} input[name=end_line]`).val()
             $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${end_line}&lines=50&order=desc`, (data) => {
@@ -873,7 +872,6 @@ td, th {
             
             let start_line = $(`#log${version_name} input[name=start_line]`).val()
             $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${start_line}&lines=50&order=asc`, (data) => {
-                console.log("11111")
                 if (data.Lines == 0){
                     $(`.message${version_name} #header`).text('您已翻阅至日志顶部')
                     $(`.message${version_name}`).css('display', 'block')
@@ -932,12 +930,8 @@ td, th {
 
     })
     $('.log_bottom').click(function(e){
-        
-        console.log($(this).data('version'))
         let version_name = $(this).data('version')
         let logContentDom = document.querySelector(`#log${version_name}`)
-        console.log(version_name)
-        console.log($(`#log${version_name}`).siblings('pre'))
         $(`#log_file${version_name}`).siblings('pre').remove()
         $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=desc`, (data) => {
             

From 551104e47f705611f64cdf7afedf73c022607975 Mon Sep 17 00:00:00 2001
From: chenyifan01 
Date: Mon, 25 Apr 2022 09:24:30 +0800
Subject: [PATCH 37/63] #1821 fix bug

---
 models/issue_comment.go | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/models/issue_comment.go b/models/issue_comment.go
index ec9441bde..8197eba85 100755
--- a/models/issue_comment.go
+++ b/models/issue_comment.go
@@ -48,8 +48,6 @@ const (
 	CommentTypePullRef
 	// Labels changed
 	CommentTypeLabel
-	// Ref changed
-	CommentTypeRef
 	// Milestone changed
 	CommentTypeMilestone
 	// Assignees changed
@@ -92,6 +90,8 @@ const (
 	CommentTypeReviewRequest
 	// merge pull request
 	CommentTypeMergePull
+	// Ref changed
+	CommentTypeRef
 )
 
 // CommentTag defines comment tag type

From 4e0b06a512e5dd3ca3654715809d6dd99fc20ed3 Mon Sep 17 00:00:00 2001
From: lewis <747342561@qq.com>
Date: Mon, 25 Apr 2022 09:54:01 +0800
Subject: [PATCH 38/63] no duration limit for train job

---
 routers/repo/cloudbrain.go | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go
index 4ef205af2..41826d0c5 100755
--- a/routers/repo/cloudbrain.go
+++ b/routers/repo/cloudbrain.go
@@ -40,13 +40,11 @@ const (
 	tplCloudBrainBenchmarkNew   base.TplName = "repo/cloudbrain/benchmark/new"
 	tplCloudBrainBenchmarkShow  base.TplName = "repo/cloudbrain/benchmark/show"
 
-	tplCloudBrainImageSubmit    base.TplName = "repo/cloudbrain/image/submit"
-	tplCloudBrainImageEdit      base.TplName = "repo/cloudbrain/image/edit"
-
+	tplCloudBrainImageSubmit base.TplName = "repo/cloudbrain/image/submit"
+	tplCloudBrainImageEdit   base.TplName = "repo/cloudbrain/image/edit"
 
 	tplCloudBrainTrainJobNew  base.TplName = "repo/cloudbrain/trainjob/new"
 	tplCloudBrainTrainJobShow base.TplName = "repo/cloudbrain/trainjob/show"
-
 )
 
 var (
@@ -1402,11 +1400,11 @@ func SyncCloudbrainStatus() {
 						maxDuration = setting.MaxDuration
 					}
 
-					if task.Duration >= maxDuration {
-						log.Info("begin to stop job(%s), because of the duration", task.JobName)
+					if task.Duration >= maxDuration && task.JobType != string(models.JobTypeTrain) {
+						log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName)
 						err = cloudbrain.StopJob(task.JobID)
 						if err != nil {
-							log.Error("StopJob(%s) failed:%v", task.JobName, err)
+							log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err)
 							continue
 						}
 						task.Status = string(models.JobStopped)
@@ -1416,7 +1414,8 @@ func SyncCloudbrainStatus() {
 						task.ComputeAndSetDuration()
 						err = models.UpdateJob(task)
 						if err != nil {
-							log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
+							log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err)
+							continue
 						}
 					}
 				}
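The revised SyncCloudbrainStatus above still stops a job once it exceeds maxDuration, but training jobs are now exempt from that limit, and a failed UpdateJob now skips to the next task instead of falling through. A rough sketch of the guard condition only, with simplified stand-in types because the models package is not shown here and the JobTypeTrain value is assumed:

package main

import "fmt"

// task holds just the fields the duration check reads; the real struct lives
// in the models package.
type task struct {
	DisplayJobName string
	JobType        string
	Duration       int64 // elapsed seconds
}

// jobTypeTrain is an assumed stand-in for models.JobTypeTrain.
const jobTypeTrain = "TRAIN"

// shouldStopForDuration mirrors the patched condition: overlong jobs are
// stopped unless they are training jobs, which may keep running.
func shouldStopForDuration(t task, maxDuration int64) bool {
	return t.Duration >= maxDuration && t.JobType != jobTypeTrain
}

func main() {
	fmt.Println(shouldStopForDuration(task{DisplayJobName: "debug-1", JobType: "DEBUG", Duration: 7200}, 3600))      // true
	fmt.Println(shouldStopForDuration(task{DisplayJobName: "train-1", JobType: jobTypeTrain, Duration: 7200}, 3600)) // false
}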

From 19d616a03a9ee4d278e2ce8291c86825a00a98a6 Mon Sep 17 00:00:00 2001
From: liuzx 
Date: Mon, 25 Apr 2022 10:34:32 +0800
Subject: [PATCH 39/63] fix-bug

---
 routers/repo/cloudbrain.go | 1 +
 1 file changed, 1 insertion(+)

diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go
index 4ef205af2..ea761cb83 100755
--- a/routers/repo/cloudbrain.go
+++ b/routers/repo/cloudbrain.go
@@ -601,6 +601,7 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo
 	ctx.Data["dataset_path"] = cloudbrain.DataSetMountPath
 	ctx.Data["model_path"] = cloudbrain.ModelMountPath
 	ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task)
+	ctx.Data["branchName"] = task.BranchName
 	ctx.HTML(200, tpName)
 }
 

From bb048253203907594f1c2a755498e9ad0e5cb18d Mon Sep 17 00:00:00 2001
From: chenyifan01 
Date: Mon, 25 Apr 2022 10:42:17 +0800
Subject: [PATCH 40/63] #1821 add functions for ref handling

---
 modules/templates/helper.go | 31 +++++++++++++++++++++++++++++--
 1 file changed, 29 insertions(+), 2 deletions(-)

diff --git a/modules/templates/helper.go b/modules/templates/helper.go
index 006a1e046..6f0690509 100755
--- a/modules/templates/helper.go
+++ b/modules/templates/helper.go
@@ -40,6 +40,14 @@ import (
 	"github.com/editorconfig/editorconfig-core-go/v2"
 )
 
+const (
+	REF_HEADS_PREFIX = "refs/heads/"
+	REF_TAGS_PREFIX  = "refs/tags/"
+	REF_TYPE_BRANCH  = "branch"
+	REF_TYPE_TAG     = "tag"
+	REF_TYPE_PATTERN = "(refs/heads/|refs/tags/)"
+)
+
 // Used from static.go && dynamic.go
 var mailSubjectSplit = regexp.MustCompile(`(?m)^-{3,}[\s]*$`)
 
@@ -427,6 +435,8 @@ func NewTextFuncMap() []texttmpl.FuncMap {
 			}
 			return float32(n) * 100 / float32(sum)
 		},
+		"GetRefType": GetRefType,
+		"GetRefName": GetRefName,
 	}}
 }
 
@@ -444,10 +454,12 @@ func SafeJS(raw string) template.JS {
 func Str2html(raw string) template.HTML {
 	return template.HTML(markup.Sanitize(raw))
 }
+
 //
-func subOne(length int)int{
-	return length-1
+func subOne(length int) int {
+	return length - 1
 }
+
 // Escape escapes a HTML string
 func Escape(raw string) string {
 	return html.EscapeString(raw)
@@ -758,3 +770,18 @@ func licenses() []string {
 func tasks() []string {
 	return []string{"machine_translation", "question_answering_system", "information_retrieval", "knowledge_graph", "text_annotation", "text_categorization", "emotion_analysis", "language_modeling", "speech_recognition", "automatic_digest", "information_extraction", "description_generation", "image_classification", "face_recognition", "image_search", "target_detection", "image_description_generation", "vehicle_license_plate_recognition", "medical_image_analysis", "unmanned", "unmanned_security", "drone", "vr_ar", "2_d_vision", "2.5_d_vision", "3_d_reconstruction", "image_processing", "video_processing", "visual_input_system", "speech_coding", "speech_enhancement", "speech_synthesis"}
 }
+
+func GetRefType(ref string) string {
+	if strings.HasPrefix(ref, REF_HEADS_PREFIX) {
+		return REF_TYPE_BRANCH
+	}
+	if strings.HasPrefix(ref, REF_TAGS_PREFIX) {
+		return REF_TYPE_TAG
+	}
+	return ""
+}
+
+func GetRefName(ref string) string {
+	reg := regexp.MustCompile(REF_TYPE_PATTERN)
+	return reg.ReplaceAllString(ref, "")
+}
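GetRefType and GetRefName let the issue templates tell whether a stored ref such as refs/heads/master or refs/tags/v1.0 points at a branch or a tag, and render only the short name. A standalone usage sketch: the two helper bodies are copied from the patch, while the package wrapper and main function are added here only for illustration.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

const (
	REF_HEADS_PREFIX = "refs/heads/"
	REF_TAGS_PREFIX  = "refs/tags/"
	REF_TYPE_BRANCH  = "branch"
	REF_TYPE_TAG     = "tag"
	REF_TYPE_PATTERN = "(refs/heads/|refs/tags/)"
)

// GetRefType reports whether a fully qualified ref is a branch or a tag.
func GetRefType(ref string) string {
	if strings.HasPrefix(ref, REF_HEADS_PREFIX) {
		return REF_TYPE_BRANCH
	}
	if strings.HasPrefix(ref, REF_TAGS_PREFIX) {
		return REF_TYPE_TAG
	}
	return ""
}

// GetRefName strips the refs/heads/ or refs/tags/ prefix, leaving the short name.
func GetRefName(ref string) string {
	reg := regexp.MustCompile(REF_TYPE_PATTERN)
	return reg.ReplaceAllString(ref, "")
}

func main() {
	fmt.Println(GetRefType("refs/heads/master"), GetRefName("refs/heads/master")) // branch master
	fmt.Println(GetRefType("refs/tags/v1.0"), GetRefName("refs/tags/v1.0"))       // tag v1.0
}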

From 75863d0eb22b6bd68a327e389db8311b27c57799 Mon Sep 17 00:00:00 2001
From: wangjr 
Date: Mon, 25 Apr 2022 11:01:55 +0800
Subject: [PATCH 41/63] Modify the prompt shown after modification
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 options/locale/locale_en-US.ini               |  8 +++++++
 options/locale/locale_zh-CN.ini               |  8 +++++++
 .../repo/issue/view_content/comments.tmpl     | 23 +++++++++++++++++++
 .../repo/issue/view_content/sidebar.tmpl      |  2 +-
 4 files changed, 40 insertions(+), 1 deletion(-)

diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini
index 14d4e19d1..9e92d6084 100755
--- a/options/locale/locale_en-US.ini
+++ b/options/locale/locale_en-US.ini
@@ -1324,6 +1324,7 @@ issues.new.labels = Labels
 issues.new.add_labels_title = Apply labels
 issues.new.no_label = No Label
 issues.new.clear_labels = Clear labels
+issues.new.clear_branch_tag =  Clear branch or tag
 issues.new.no_items = No items
 issues.new.milestone = Milestone
 issues.new.add_milestone_title = Set milestone
@@ -1353,6 +1354,13 @@ issues.remove_label_at = removed the 
+ {{svg "octicon-milestone" 16}} + + + + + {{.Poster.GetDisplayName}} + {{if .OldRef }} + {{if .NewRef }} + {{$.i18n.Tr "repo.issues.change_branch_tag_at" (.OldRef|Escape) (.NewRef|Escape) $createdStr | Safe}} + {{else}} + {{$.i18n.Tr "repo.issues.remove_branch_at" (.OldRef|Escape) $createdStr | Safe}} + {{end}} + {{else}} + {{if .NewRef}} + {{ $getRefType:= GetRefType .NewRef }} + {{$getRefType}} + {{$.i18n.Tr "repo.issues.add_branch_at" (.NewRef|Escape) $createdStr | Safe}} + {{end}} + {{end}} + +
{{end}} {{end}} diff --git a/templates/repo/issue/view_content/sidebar.tmpl b/templates/repo/issue/view_content/sidebar.tmpl index a08656702..19022020a 100644 --- a/templates/repo/issue/view_content/sidebar.tmpl +++ b/templates/repo/issue/view_content/sidebar.tmpl @@ -14,7 +14,7 @@ -
{{.i18n.Tr "repo.issues.new.clear_labels"}}
+
{{.i18n.Tr "repo.issues.new.clear_branch_tag"}}
From 230fa8bdd27bc49a184e580c12e89373e9d8758e Mon Sep 17 00:00:00 2001 From: zhoupzh Date: Mon, 25 Apr 2022 11:02:32 +0800 Subject: [PATCH 42/63] fix issu --- templates/repo/modelarts/trainjob/show.tmpl | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/templates/repo/modelarts/trainjob/show.tmpl b/templates/repo/modelarts/trainjob/show.tmpl index c0a37e157..36d1529c2 100755 --- a/templates/repo/modelarts/trainjob/show.tmpl +++ b/templates/repo/modelarts/trainjob/show.tmpl @@ -838,13 +838,12 @@ td, th { } function logScroll(version_name) { - let container = document.querySelector(`#log${version_name}`) let scrollTop = container.scrollTop let scrollHeight = container.scrollHeight let clientHeight = container.clientHeight let scrollLeft = container.scrollLeft - if(((parseInt(scrollTop) + clientHeight == scrollHeight || parseInt(scrollTop) + clientHeight +1 == scrollHeight || parseInt(scrollTop) + clientHeight - 1 == scrollHeight)) && parseInt(scrollTop)!==0){ + if(((parseInt(scrollTop) + clientHeight == scrollHeight || parseInt(scrollTop) + clientHeight +1 == scrollHeight || parseInt(scrollTop) + clientHeight - 1 == scrollHeight)) && parseInt(scrollTop)!==0 && scrollLeft==0){ let end_line = $(`#log${version_name} input[name=end_line]`).val() $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${end_line}&lines=50&order=desc`, (data) => { if (data.Lines == 0){ @@ -867,8 +866,7 @@ td, th { console.log(err); }); } - - if([1,2,3,4,5].includes(scrollTop) && scrollLeft==0){ + if([0,1,2,3,4,5,6,7,8,9,10].includes(scrollTop) && scrollLeft==0){ let start_line = $(`#log${version_name} input[name=start_line]`).val() $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${start_line}&lines=50&order=asc`, (data) => { From a35bb8ff8b8dfcf4e79ee8f4f98381cd26c2ed98 Mon Sep 17 00:00:00 2001 From: chenyifan01 Date: Mon, 25 Apr 2022 11:11:53 +0800 Subject: [PATCH 43/63] #1821 fix bug --- modules/templates/helper.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 6f0690509..dbb9354aa 100755 --- a/modules/templates/helper.go +++ b/modules/templates/helper.go @@ -325,6 +325,8 @@ func NewFuncMap() []template.FuncMap { "DatasetPathJoin": func(arr []string, index int, seq string) string { return strings.Join(arr[1:index+1], seq) }, + "GetRefType": GetRefType, + "GetRefName": GetRefName, }} } @@ -435,8 +437,6 @@ func NewTextFuncMap() []texttmpl.FuncMap { } return float32(n) * 100 / float32(sum) }, - "GetRefType": GetRefType, - "GetRefName": GetRefName, }} } From e7965694dd547ab676383afa72604598c6aa8d0c Mon Sep 17 00:00:00 2001 From: chenyifan01 Date: Mon, 25 Apr 2022 11:31:31 +0800 Subject: [PATCH 44/63] #1821 fix bug --- models/issue.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/models/issue.go b/models/issue.go index 3a7097977..3ed49ce42 100755 --- a/models/issue.go +++ b/models/issue.go @@ -779,6 +779,9 @@ func (issue *Issue) ChangeContent(doer *User, content string) (err error) { func (issue *Issue) ChangeRef(doer *User, newRef string) (err error) { oldRef := issue.Ref issue.Ref = newRef + if oldRef == newRef { + return nil + } sess := x.NewSession() defer sess.Close() From 1f4b553fbf81e552d5ad66d6eb10cb872c873579 Mon Sep 17 00:00:00 2001 From: wangjr Date: Mon, 25 Apr 2022 11:31:50 +0800 Subject: [PATCH 45/63] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E8=AF=84=E8=AE=BA?= MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../repo/issue/view_content/comments.tmpl | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/templates/repo/issue/view_content/comments.tmpl b/templates/repo/issue/view_content/comments.tmpl index 34adc8e21..c19eb030e 100644 --- a/templates/repo/issue/view_content/comments.tmpl +++ b/templates/repo/issue/view_content/comments.tmpl @@ -602,17 +602,29 @@ {{.Poster.GetDisplayName}} + + {{ $refOldName:= GetRefName .OldRef }} + {{ $refNewName:= GetRefName .NewRef }} + {{if .OldRef }} {{if .NewRef }} - {{$.i18n.Tr "repo.issues.change_branch_tag_at" (.OldRef|Escape) (.NewRef|Escape) $createdStr | Safe}} + {{$.i18n.Tr "repo.issues.change_branch_tag_at" ($refOldName|Escape) ($refOldName|Escape) $createdStr | Safe}} {{else}} - {{$.i18n.Tr "repo.issues.remove_branch_at" (.OldRef|Escape) $createdStr | Safe}} + {{ $getRefOldType:= GetRefType .OldRef }} + {{ if eq $getRefOldType "branch"}} + {{$.i18n.Tr "repo.issues.remove_branch_at" ($refOldName|Escape) $createdStr | Safe}} + {{else}} + {{$.i18n.Tr "repo.issues.remove_tag_at" ($refOldName|Escape) $createdStr | Safe}} + {{end}} {{end}} {{else}} {{if .NewRef}} - {{ $getRefType:= GetRefType .NewRef }} - {{$getRefType}} - {{$.i18n.Tr "repo.issues.add_branch_at" (.NewRef|Escape) $createdStr | Safe}} + {{ $getRefNewType:= GetRefType .NewRef }} + {{ if eq $getRefNewType "branch"}} + {{$.i18n.Tr "repo.issues.add_branch_at" ($refNewName|Escape) $createdStr | Safe}} + {{else}} + {{$.i18n.Tr "repo.issues.add_tag_at" ($refNewName|Escape) $createdStr | Safe}} + {{end}} {{end}} {{end}} From 74aab140b9f95bf2446e021624bbe6f9a7e92b67 Mon Sep 17 00:00:00 2001 From: wangjr Date: Mon, 25 Apr 2022 14:14:10 +0800 Subject: [PATCH 46/63] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E6=9D=83=E9=99=90?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../repo/issue/view_content/comments.tmpl | 58 +++++++++---------- .../repo/issue/view_content/sidebar.tmpl | 2 +- 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/templates/repo/issue/view_content/comments.tmpl b/templates/repo/issue/view_content/comments.tmpl index c19eb030e..796054005 100644 --- a/templates/repo/issue/view_content/comments.tmpl +++ b/templates/repo/issue/view_content/comments.tmpl @@ -594,40 +594,40 @@ {{end}}
- {{else}} -
- {{svg "octicon-milestone" 16}} - - - - - {{.Poster.GetDisplayName}} + {{else if eq .Type 29}} +
+ {{svg "octicon-git-branch" 16}} + + + + + {{.Poster.GetDisplayName}} - {{ $refOldName:= GetRefName .OldRef }} - {{ $refNewName:= GetRefName .NewRef }} + {{ $refOldName:= GetRefName .OldRef }} + {{ $refNewName:= GetRefName .NewRef }} - {{if .OldRef }} - {{if .NewRef }} - {{$.i18n.Tr "repo.issues.change_branch_tag_at" ($refOldName|Escape) ($refOldName|Escape) $createdStr | Safe}} - {{else}} - {{ $getRefOldType:= GetRefType .OldRef }} - {{ if eq $getRefOldType "branch"}} - {{$.i18n.Tr "repo.issues.remove_branch_at" ($refOldName|Escape) $createdStr | Safe}} + {{if .OldRef }} + {{if .NewRef }} + {{$.i18n.Tr "repo.issues.change_branch_tag_at" ($refOldName|Escape) ($refNewName|Escape) $createdStr | Safe}} {{else}} - {{$.i18n.Tr "repo.issues.remove_tag_at" ($refOldName|Escape) $createdStr | Safe}} + {{ $getRefOldType:= GetRefType .OldRef }} + {{ if eq $getRefOldType "branch"}} + {{$.i18n.Tr "repo.issues.remove_branch_at" ($refOldName|Escape) $createdStr | Safe}} + {{else}} + {{$.i18n.Tr "repo.issues.remove_tag_at" ($refOldName|Escape) $createdStr | Safe}} + {{end}} {{end}} - {{end}} - {{else}} - {{if .NewRef}} - {{ $getRefNewType:= GetRefType .NewRef }} - {{ if eq $getRefNewType "branch"}} - {{$.i18n.Tr "repo.issues.add_branch_at" ($refNewName|Escape) $createdStr | Safe}} - {{else}} - {{$.i18n.Tr "repo.issues.add_tag_at" ($refNewName|Escape) $createdStr | Safe}} + {{else}} + {{if .NewRef}} + {{ $getRefNewType:= GetRefType .NewRef }} + {{ if eq $getRefNewType "branch"}} + {{$.i18n.Tr "repo.issues.add_branch_at" ($refNewName|Escape) $createdStr | Safe}} + {{else}} + {{$.i18n.Tr "repo.issues.add_tag_at" ($refNewName|Escape) $createdStr | Safe}} + {{end}} {{end}} {{end}} - {{end}} - -
+
+
{{end}} {{end}} diff --git a/templates/repo/issue/view_content/sidebar.tmpl b/templates/repo/issue/view_content/sidebar.tmpl index 19022020a..e6a61a567 100644 --- a/templates/repo/issue/view_content/sidebar.tmpl +++ b/templates/repo/issue/view_content/sidebar.tmpl @@ -3,7 +3,7 @@ {{if and (not .Issue.IsPull) (not .PageIsComparePull)}} -