Reviewed-on: https://git.openi.org.cn/OpenI/aiforge/pulls/2163
@@ -1,8 +1,6 @@
 package models
 import (
-    "fmt"
     "code.gitea.io/gitea/modules/log"
     "xorm.io/xorm"
 )
@@ -22,7 +20,6 @@ var customMigrations = []CustomMigration{
 }
 var customMigrationsStatic = []CustomMigrationStatic{
-    {"Delete organization user history data ", deleteNotDisplayUser},
     {"update issue_fixed_rate to 1 if num_issues is 0 ", updateIssueFixedRate},
 }
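For orientation: each entry in this list pairs a description with a migration function that receives the main and the statistics xorm engines, and dropping the first entry removes the now-deleted `deleteNotDisplayUser` step. The sketch below is hypothetical (the real `CustomMigrationStatic` type and the `MigrateCustomStatic` body are not part of this hunk); it only illustrates how the remaining `updateIssueFixedRate` entry would be driven.

```go
// Hypothetical sketch of the custom-migration pattern used above; the real
// CustomMigrationStatic type and runner live elsewhere in this package.
package main

import (
	"log"

	"xorm.io/xorm"
)

type CustomMigrationStatic struct {
	Description string
	Migrate     func(x *xorm.Engine, static *xorm.Engine) error
}

func runCustomMigrationsStatic(x, static *xorm.Engine, migrations []CustomMigrationStatic) {
	for _, m := range migrations {
		log.Println("running custom migration:", m.Description)
		if err := m.Migrate(x, static); err != nil {
			// In this sketch failures are logged and skipped; the real runner
			// may abort instead.
			log.Println("custom migration failed:", m.Description, err)
		}
	}
}

func main() {
	// With a nil list this is a no-op; shown only for the shape of the API.
	runCustomMigrationsStatic(nil, nil, nil)
}
```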
@@ -36,7 +33,6 @@ func MigrateCustom(x *xorm.Engine) {
         }
     }
 }
 func MigrateCustomStatic(x *xorm.Engine, static *xorm.Engine) {
@@ -58,24 +54,6 @@ func syncTopicStruct(x *xorm.Engine) error {
     return err
 }
-func deleteNotDisplayUser(x *xorm.Engine, static *xorm.Engine) error {
-    querySQL := "select id,name from public.user where type=1"
-    rows, err := x.Query(querySQL)
-    if err != nil {
-        log.Info("select db failed,err:", err)
-        return err
-    }
-    for i, userRow := range rows {
-        log.Info("delete zuzi user, i=" + fmt.Sprint(i) + " userName=" + string(userRow["name"]))
-        deleteSql := "delete from user_business_analysis where id=" + string(userRow["id"]) + " and name='" + string(userRow["name"]) + "'"
-        static.Exec(deleteSql)
-    }
-    return nil
-}
 func updateIssueFixedRate(x *xorm.Engine, static *xorm.Engine) error {
     updateSQL := "update repo_statistic set issue_fixed_rate=1.0 where num_issues=0"
     _, err := static.Exec(updateSQL)
@@ -157,6 +157,7 @@ func init() {
         new(UserBusinessAnalysisCurrentMonth),
         new(UserBusinessAnalysisCurrentWeek),
         new(UserBusinessAnalysisYesterday),
+        new(UserBusinessAnalysisLastWeek),
         new(UserLoginLog),
         new(UserMetrics),
         new(UserAnalysisPara),
@@ -211,7 +211,7 @@ func setKeyContributerDict(contributorDistinctDict map[string]int, email string,
     }
 }
-func GetAllUserKPIStats() (map[string]*git.UserKPIStats, error) {
+func GetAllUserKPIStats(startTime time.Time, endTime time.Time) (map[string]*git.UserKPIStats, error) {
     authors := make(map[string]*git.UserKPIStats)
     repositorys, err := GetAllRepositoriesByFilterCols("owner_name", "name")
     if err != nil {
@@ -219,7 +219,7 @@ func GetAllUserKPIStats() (map[string]*git.UserKPIStats, error) {
     }
     for _, repository := range repositorys {
-        authorsOneRepo, err1 := git.GetUserKPIStats(repository.RepoPath())
+        authorsOneRepo, err1 := git.GetUserKPIStats(repository.RepoPath(), startTime, endTime)
         if err1 != nil {
             log.Warn("get user kpi status err:"+repository.RepoPath(), err1.Error())
             continue
@@ -407,15 +407,146 @@ func QueryUserStaticDataAll(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusi
     return userBusinessAnalysisReturnList, allCount
 }
+func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wikiCountMap map[string]int) ([]*UserBusinessAnalysis, int64) {
+    log.Info("start to count other user info data")
+    sess := x.NewSession()
+    defer sess.Close()
+    currentTimeNow := time.Now()
+    log.Info("current time:" + currentTimeNow.Format("2006-01-02 15:04:05"))
+    start_unix := opts.StartTime
+    end_unix := opts.EndTime
+    CountDate := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 1, 0, 0, currentTimeNow.Location())
+    DataDate := currentTimeNow.Format("2006-01-02 15:04")
+    CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
+    CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
+    IssueCountMap := queryCreateIssue(start_unix, end_unix)
+    CommentCountMap := queryComment(start_unix, end_unix)
+    FocusRepoCountMap := queryWatch(start_unix, end_unix)
+    StarRepoCountMap := queryStar(start_unix, end_unix)
+    WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)
+    StartTime := time.Unix(start_unix, 0)
+    EndTime := time.Unix(end_unix, 0)
+    CommitCodeSizeMap, err := GetAllUserKPIStats(StartTime, EndTime)
+    if err != nil {
+        log.Info("query commit code errr.")
+    } else {
+        log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
+        CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
+        log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
+    }
+    CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
+    SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
+    CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
+    LoginCountMap := queryLoginCount(start_unix, end_unix)
+    OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
+    CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
+    AiModelManageMap := queryUserModel(start_unix, end_unix)
+    CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
+    RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
+    CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
+    RecommendImage := queryRecommedImage(start_unix, end_unix)
+    statictisSess := xStatistic.NewSession()
+    defer statictisSess.Close()
+    cond := "type != 1 and is_active=true"
+    count, err := sess.Where(cond).Count(new(User))
+    ParaWeight := getParaWeight()
+    ResultList := make([]*UserBusinessAnalysis, 0)
+    var indexTotal int64
+    indexTotal = 0
+    for {
+        sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+        userList := make([]*User, 0)
+        sess.Find(&userList)
+        for i, userRecord := range userList {
+            var dateRecord UserBusinessAnalysis
+            dateRecord.ID = userRecord.ID
+            log.Info("i=" + fmt.Sprint(i) + " userName=" + userRecord.Name)
+            dateRecord.CountDate = CountDate.Unix()
+            dateRecord.DataDate = DataDate
+            dateRecord.Email = userRecord.Email
+            dateRecord.RegistDate = userRecord.CreatedUnix
+            dateRecord.Name = userRecord.Name
+            dateRecord.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime())
+            dateRecord.CodeMergeCount = getMapValue(dateRecord.ID, CodeMergeCountMap)
+            dateRecord.CommitCount = getMapValue(dateRecord.ID, CommitCountMap)
+            dateRecord.IssueCount = getMapValue(dateRecord.ID, IssueCountMap)
+            dateRecord.CommentCount = getMapValue(dateRecord.ID, CommentCountMap)
+            dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap)
+            dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap)
+            dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap)
+            dateRecord.FocusOtherUser = getMapValue(dateRecord.ID, WatchOtherMap)
+            if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok {
+                dateRecord.CommitCodeSize = 0
+            } else {
+                dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines)
+            }
+            dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap)
+            dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap)
+            dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap)
+            dateRecord.EncyclopediasCount = getMapKeyStringValue(dateRecord.Name, wikiCountMap)
+            dateRecord.CreateRepoCount = getMapValue(dateRecord.ID, CreateRepoCountMap)
+            dateRecord.LoginCount = getMapValue(dateRecord.ID, LoginCountMap)
+            if _, ok := OpenIIndexMap[dateRecord.ID]; !ok {
+                dateRecord.OpenIIndex = 0
+            } else {
+                dateRecord.OpenIIndex = OpenIIndexMap[dateRecord.ID]
+            }
+            dateRecord.CloudBrainTaskNum = getMapValue(dateRecord.ID, CloudBrainTaskMap)
+            dateRecord.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuDebugJob", CloudBrainTaskItemMap)
+            dateRecord.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuDebugJob", CloudBrainTaskItemMap)
+            dateRecord.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuTrainJob", CloudBrainTaskItemMap)
+            dateRecord.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
+            dateRecord.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
+            dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
+            dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
+            dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap)
+            dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset)
+            dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset)
+            dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset)
+            dateRecord.CollectImage = getMapValue(dateRecord.ID, CollectImage)
+            dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage)
+            dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage)
+            dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight)
+            ResultList = append(ResultList, &dateRecord)
+        }
+        indexTotal += PAGE_SIZE
+        if indexTotal >= count {
+            break
+        }
+    }
+    log.Info("query user define, count=" + fmt.Sprint(len(ResultList)))
+    return ResultList, int64(len(ResultList))
+}
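For orientation: later in this diff the `IsReturnFile` branch of the export handler feeds this function a wiki-count map and the user-selected range. Stripped of the surrounding handler code, that call pattern is the fragment below (not a standalone program; `pageOpts` carries the Unix start/end timestamps, and all names come from this diff):

```go
// Fragment from the handler's point of view; names are taken from this diff.
wikiMap, _ := queryWikiCountMap(startTime, endTime)                     // wiki edits per user name
re, count := models.QueryUserStaticDataForUserDefine(pageOpts, wikiMap) // one row per active user
log.Info("user-defined statistics rows: " + fmt.Sprint(count))
_ = re
```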
 func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusinessAnalysis, int64) {
     log.Info("query startTime =" + fmt.Sprint(opts.StartTime) + " endTime=" + fmt.Sprint(opts.EndTime) + " isAll=" + fmt.Sprint(opts.IsAll))
     statictisSess := xStatistic.NewSession()
     defer statictisSess.Close()
-    currentTimeNow := time.Now()
-    pageStartTime := getLastCountDate()
-    pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()).Unix()
+    //currentTimeNow := time.Now()
+    //pageStartTime := getLastCountDate()
+    //pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()).Unix()
     var cond = builder.NewCond()
     if len(opts.UserName) > 0 {
@@ -424,10 +555,10 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus
         )
     }
     cond = cond.And(
-        builder.Gte{"count_date": pageStartTime},
+        builder.Gte{"count_date": opts.StartTime},
     )
     cond = cond.And(
-        builder.Lte{"count_date": pageEndTime},
+        builder.Lte{"count_date": opts.EndTime},
     )
     count, err := statictisSess.Where(cond).Count(new(UserBusinessAnalysis))
@@ -447,7 +578,7 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus
     }
     userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0)
-    if err := statictisSess.Table("user_business_analysis").Where(cond).OrderBy("id desc").
+    if err := statictisSess.Table("user_business_analysis").Where(cond).OrderBy("count_date,id desc").
         Find(&userBusinessAnalysisList); err != nil {
         return nil, 0
     }
@@ -532,10 +663,8 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
     log.Info("truncate all data from table: " + tableName)
     statictisSess.Exec("TRUNCATE TABLE " + tableName)
-    StartTimeNextDay := pageStartTime.AddDate(0, 0, 1)
-    EndTimeNextDay := pageEndTime.AddDate(0, 0, 1)
-    log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05") + " nextDay:" + StartTimeNextDay.Format("2006-01-02 15:04:05"))
-    log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05") + " nextDay:" + EndTimeNextDay.Format("2006-01-02 15:04:05"))
+    log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05"))
+    log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05"))
     start_unix := pageStartTime.Unix()
     end_unix := pageEndTime.Unix()
@@ -551,7 +680,15 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
     FocusRepoCountMap := queryWatch(start_unix, end_unix)
     StarRepoCountMap := queryStar(start_unix, end_unix)
     WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)
-    CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
+    CommitCodeSizeMap, err := GetAllUserKPIStats(pageStartTime, pageEndTime)
+    if err != nil {
+        log.Info("query commit code errr.")
+    } else {
+        log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
+        CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
+        log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
+    }
+    //CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
     CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
     SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
     CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
@@ -605,7 +742,12 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
         dateRecordAll.FocusOtherUser = getMapValue(dateRecordAll.ID, WatchOtherMap)
         dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap)
         dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap)
-        dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap)
+        if _, ok := CommitCodeSizeMap[dateRecordAll.Email]; !ok {
+            dateRecordAll.CommitCodeSize = 0
+        } else {
+            dateRecordAll.CommitCodeSize = int(CommitCodeSizeMap[dateRecordAll.Email].CommitLines)
+        }
+        //dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap)
         dateRecordAll.CommitDatasetSize = getMapValue(dateRecordAll.ID, CommitDatasetSizeMap)
        dateRecordAll.CommitDatasetNum = getMapValue(dateRecordAll.ID, CommitDatasetNumMap)
         dateRecordAll.SolveIssueCount = getMapValue(dateRecordAll.ID, SolveIssueCountMap)
@@ -626,6 +768,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
         dateRecordAll.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
         dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
         dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
+        dateRecordAll.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
         dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap)
         dateRecordAll.CollectDataset = getMapValue(dateRecordAll.ID, CollectDataset)
         dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset)
@@ -733,7 +876,12 @@ func RefreshUserStaticAllTabel(wikiCountMap map[string]int, userMetrics map[stri
     pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, offset)
     refreshUserStaticTable(wikiCountMap, "user_business_analysis_current_week", pageStartTime, pageEndTime, userMetrics)
+    pageEndTime = pageStartTime
+    pageStartTime = pageStartTime.AddDate(0, 0, -7)
+    refreshUserStaticTable(wikiCountMap, "user_business_analysis_last_week", pageStartTime, pageEndTime, userMetrics)
     pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -30)
+    pageEndTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location())
     refreshUserStaticTable(wikiCountMap, "user_business_analysis_last30_day", pageStartTime, pageEndTime, userMetrics)
     pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -1)
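For readers checking the date arithmetic: the new last-week refresh reuses the just-computed start of the current week as its end and steps back seven days, after which `pageEndTime` is reset to the end of today before the 30-day refresh. A standalone sketch of that window math follows; the Monday-offset computation is an assumption, since the real offset logic lives outside this hunk.

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	now := time.Now()
	// Assumed offset logic: move back to Monday of the current week.
	offset := int(time.Monday - now.Weekday())
	if offset > 0 {
		offset = -6
	}
	currentWeekStart := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, offset)
	// Last-week window, exactly as the added lines above derive it.
	lastWeekEnd := currentWeekStart
	lastWeekStart := currentWeekStart.AddDate(0, 0, -7)
	fmt.Println("last week:", lastWeekStart.Format("2006-01-02"), "to", lastWeekEnd.Format("2006-01-02"))
}
```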
@@ -774,7 +922,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
     StarRepoCountMap := queryStar(start_unix, end_unix)
     WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)
-    CommitCodeSizeMap, err := GetAllUserKPIStats()
+    CommitCodeSizeMap, err := GetAllUserKPIStats(startTime, endTime)
     if err != nil {
         log.Info("query commit code errr.")
     } else {
@@ -878,10 +1026,15 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
             log.Info("has activity." + userRecord.Name)
             addUserToMap(userNewAddActivity, userRecord.CreatedUnix, dateRecord.ID)
         }
-        if userRecord.IsActive {
+        if !userRecord.IsActive {
             continue
         }
-        statictisSess.Delete(&dateRecord)
+        var deleteDateRecord UserBusinessAnalysis
+        deleteDateRecord.ID = userRecord.ID
+        deleteDateRecord.CountDate = CountDate.Unix()
+        statictisSess.Delete(&deleteDateRecord)
         _, err = statictisSess.Insert(&dateRecord)
         if err != nil {
             log.Info("insert daterecord failed." + err.Error())
@@ -1640,7 +1793,7 @@ func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64
     var indexTotal int64
     indexTotal = 0
     for {
-        sess.Select("id,uid,dataset_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+        sess.Select("id,uid,image_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
         imageStarList := make([]*ImageStar, 0)
         sess.Find(&imageStarList)
         log.Info("query imageStarList size=" + fmt.Sprint(len(imageStarList)))
@@ -394,6 +394,72 @@ type UserBusinessAnalysisYesterday struct {
     RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
 }
+type UserBusinessAnalysisLastWeek struct {
+    ID int64 `xorm:"pk"`
+    CountDate int64 `xorm:"pk"`
+    //action :ActionMergePullRequest // 11
+    CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`
+    //action :ActionCommitRepo
+    CommitCount int `xorm:"NOT NULL DEFAULT 0"`
+    //issue // 10
+    IssueCount int `xorm:"NOT NULL DEFAULT 0"`
+    //comment table current date
+    CommentCount int `xorm:"NOT NULL DEFAULT 0"`
+    //watch table current date
+    FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+    //star table current date
+    StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+    //follow table
+    WatchedCount int `xorm:"NOT NULL DEFAULT 0"`
+    // user table
+    GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`
+    //
+    CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`
+    //attachement table
+    CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`
+    //0
+    CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`
+    //issue, issueassignees
+    SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`
+    //baike
+    EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`
+    //user
+    RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`
+    //repo
+    CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+    //login count, from elk
+    LoginCount int `xorm:"NOT NULL DEFAULT 0"`
+    //openi index
+    OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+    //user
+    Email string `xorm:"NOT NULL"`
+    //user
+    Name string `xorm:"NOT NULL"`
+    DataDate string `xorm:"NULL"`
+    CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
+    GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+    NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+    GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+    NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+    NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
+    GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
+    CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
+    CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
+    UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+    UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`
+    UserLocation string `xorm:"NULL"`
+    FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
+    CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
+    CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
+    RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
+    CollectImage int `xorm:"NOT NULL DEFAULT 0"`
+    CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
+    RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
+}
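The new struct is registered in the statistics table list in `init()` earlier in this diff; when the statistics engine syncs its tables, xorm derives the `user_business_analysis_last_week` table from the struct definition and tags. A standalone sketch of that mechanism follows; the SQLite driver, database file name, and trimmed field set are assumptions for illustration, not part of this PR.

```go
package main

import (
	"log"

	_ "github.com/mattn/go-sqlite3" // assumed driver, illustration only
	"xorm.io/xorm"
)

// Trimmed-down copy of the new struct; the real one has many more columns.
type UserBusinessAnalysisLastWeek struct {
	ID             int64 `xorm:"pk"`
	CountDate      int64 `xorm:"pk"`
	CodeMergeCount int   `xorm:"NOT NULL DEFAULT 0"`
}

func main() {
	engine, err := xorm.NewEngine("sqlite3", "./stats.db")
	if err != nil {
		log.Fatal(err)
	}
	// Sync2 creates or updates user_business_analysis_last_week from the
	// struct definition, which is how the init() registration takes effect.
	if err := engine.Sync2(new(UserBusinessAnalysisLastWeek)); err != nil {
		log.Fatal(err)
	}
}
```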
 type UserAnalysisPara struct {
     Key string `xorm:"NOT NULL"`
     Value float64 `xorm:"NOT NULL DEFAULT 0"`
@@ -58,12 +58,11 @@ func SetDevelopAge(repoPath string, stats *RepoKPIStats, fromTime time.Time) err
     return nil
 }
-// get user contribution metrics for one day
-func GetUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) {
-    timeUntil := time.Now()
-    oneDayAgo := timeUntil.AddDate(0, 0, -1)
-    since := oneDayAgo.Format(time.RFC3339)
-    args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--since='%s'", since)}
+func GetUserKPIStats(repoPath string, startTime time.Time, endTime time.Time) (map[string]*UserKPIStats, error) {
+    after := startTime.Format(time.RFC3339)
+    until := endTime.Format(time.RFC3339)
+    args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--after='%s'", after), fmt.Sprintf("--until='%s'", until)}
     stdout, err := NewCommand(args...).RunInDirBytes(repoPath)
     if err != nil {
         return nil, err
@@ -124,6 +123,14 @@ func GetUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) {
 }
+// get user contribution metrics for one day
+func getUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) {
+    timeUntil := time.Now()
+    oneDayAgo := timeUntil.AddDate(0, 0, -1)
+    return GetUserKPIStats(repoPath, oneDayAgo, timeUntil)
+}
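For clarity, the refactored `GetUserKPIStats` now takes an explicit window instead of hard-coding the last 24 hours, and the lowercase wrapper above preserves the old behaviour. A standalone sketch (standard library only, made-up dates) of the git invocation the new argument list produces, using the corrected single-`=` form of `--until`:

```go
package main

import (
	"fmt"
	"strings"
	"time"
)

func main() {
	// Example window; the real values come from the statistics refresh jobs.
	start := time.Date(2022, 3, 7, 0, 0, 0, 0, time.Local)
	end := time.Date(2022, 3, 13, 23, 59, 59, 0, time.Local)
	args := []string{
		"log", "--numstat", "--no-merges", "--branches=*",
		"--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso",
		fmt.Sprintf("--after='%s'", start.Format(time.RFC3339)),
		fmt.Sprintf("--until='%s'", end.Format(time.RFC3339)),
	}
	// This mirrors what NewCommand(args...).RunInDirBytes(repoPath) executes.
	fmt.Println("git " + strings.Join(args, " "))
}
```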
 func SetRepoKPIStats(repoPath string, fromTime time.Time, stats *RepoKPIStats, newContributers map[string]struct{}) error {
     since := fromTime.Format(time.RFC3339)
     args := []string{"log", "--numstat", "--no-merges", "HEAD", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--since='%s'", since)}
@@ -522,6 +522,7 @@ static.RecommendImage=Recommended Image Count
 static.all=All
 static.public.user_business_analysis_current_month=Current_Month
 static.public.user_business_analysis_current_week=Current_Week
+static.public.user_business_analysis_last_week=Last_Week
 static.public.user_business_analysis_current_year=Current_Year
 static.public.user_business_analysis_last30_day=Last_30_day
 static.public.user_business_analysis_last_month=Last_Month
@@ -527,6 +527,7 @@ static.RecommendImage=被推荐镜像数
 static.all=所有
 static.public.user_business_analysis_current_month=本月
 static.public.user_business_analysis_current_week=本周
+static.public.user_business_analysis_last_week=上周
 static.public.user_business_analysis_current_year=今年
 static.public.user_business_analysis_last30_day=近30天
 static.public.user_business_analysis_last_month=上月
@@ -559,10 +559,12 @@ func RegisterRoutes(m *macaron.Macaron) {
     m.Get("/query_metrics_all", operationReq, repo_ext.QueryUserMetricsAll)
     m.Get("/query_user_metrics_page", operationReq, repo_ext.QueryUserMetricDataPage)
+    m.Get("/download_user_define_file", operationReq, repo_ext.DownloadUserDefineFile)
     m.Get("/query_user_rank_list", operationReq, repo_ext.QueryRankingList)
     m.Get("/query_user_static_page", operationReq, repo_ext.QueryUserStaticDataPage)
     m.Get("/query_user_current_month", operationReq, repo_ext.QueryUserStaticCurrentMonth)
     m.Get("/query_user_current_week", operationReq, repo_ext.QueryUserStaticCurrentWeek)
+    m.Get("/query_user_last_week", operationReq, repo_ext.QueryUserStaticLastWeek)
     m.Get("/query_user_current_year", operationReq, repo_ext.QueryUserStaticCurrentYear)
     m.Get("/query_user_last30_day", operationReq, repo_ext.QueryUserStaticLast30Day)
     m.Get("/query_user_last_month", operationReq, repo_ext.QueryUserStaticLastMonth)
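The two new routes above expose the last-week statistics and the file download. A hedged client sketch for the last-week endpoint follows; the host, the session cookie value, and the absence of query parameters are assumptions, while the path and the `operationReq` guard (an authorized operator session) come from this diff.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Assumed host; replace with your deployment's base URL.
	req, _ := http.NewRequest("GET", "https://example-openi-host/api/v1/query_user_last_week", nil)
	// operationReq implies an authorized session; supply whatever credential
	// your deployment uses (session cookie, token, ...).
	req.Header.Set("Cookie", "i_like_gitea=<session>")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(body))
}
```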
@@ -4,6 +4,7 @@ import (
     "fmt"
     "net/http"
     "net/url"
+    "os"
     "time"
     "code.gitea.io/gitea/models"
@@ -16,7 +17,8 @@ import (
 )
 const (
-    PAGE_SIZE = 2000
+    PAGE_SIZE       = 2000
+    Excel_File_Path = "/useranalysis/"
 )
 func getUserMetricsExcelHeader(ctx *context.Context) map[string]string {
@@ -186,6 +188,75 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode
     formatTime = userRecord.DataDate
     xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime)
 }
+func writeExcelPage(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysis) {
+    rows := fmt.Sprint(row)
+    var tmp byte
+    tmp = 0
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive))
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CodeMergeCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.IssueCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommentCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusRepoCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.StarRepoCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.WatchedCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCodeSize)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SolveIssueCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.EncyclopediasCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CreateRepoCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CloudBrainTaskNum)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitDatasetNum)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedDataset)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendDataset)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectImage)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedImage)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage)
+    tmp = tmp + 1
+    formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3])
+    tmp = tmp + 1
+    formatTime = userRecord.DataDate
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime)
+}
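`writeExcelPage` mirrors the existing `writeExcel` but is used for the user-defined export that is now written to disk. For readers unfamiliar with the excelize calls involved, here is a minimal standalone sketch of the same pattern; the import path is an assumption based on the excelize generation this code appears to use (newer releases change some signatures, e.g. `NewSheet` returning an error as well).

```go
package main

import (
	"fmt"

	"github.com/360EntSecGroup-Skylar/excelize/v2" // assumed import path
)

func main() {
	xlsx := excelize.NewFile()
	sheetName := "user statistics"
	index := xlsx.NewSheet(sheetName)
	xlsx.DeleteSheet("Sheet1")

	// Header row, then one data row per record, just as writeExcelPage does
	// column by column via getColumn(tmp)+rows.
	xlsx.SetCellValue(sheetName, "A1", "ID")
	xlsx.SetCellValue(sheetName, "B1", "Name")
	xlsx.SetCellValue(sheetName, "A2", 1)
	xlsx.SetCellValue(sheetName, "B2", "someuser")

	xlsx.SetActiveSheet(index)
	if err := xlsx.SaveAs("./user_statistics.xlsx"); err != nil {
		fmt.Println("write excel error:", err)
	}
}
```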
 func getColumn(tmp byte) string {
     var tmpA byte
     tmpA = 'A'
@@ -330,6 +401,25 @@ func QueryRankingList(ctx *context.Context) {
     ctx.JSON(http.StatusOK, mapInterface)
 }
+func DownloadUserDefineFile(ctx *context.Context) {
+    filename := ctx.Query("filename")
+    length := len(filename)
+    if filename[0:1] == "\"" {
+        filename = filename[1 : length-1]
+    }
+    allFilename := setting.AppDataPath + Excel_File_Path + filename
+    log.Info("allFilename=" + allFilename)
+    _, err := os.Stat(allFilename)
+    if err != nil { // file does not exist
+        log.Info("file not exist.")
+        ctx.JSON(http.StatusOK, "File Not Exist.")
+    } else {
+        ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+filename)
+        ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
+        ctx.ServeFile(allFilename, filename)
+    }
+}
 func QueryUserMetricsCurrentMonth(ctx *context.Context) {
     currentTimeNow := time.Now()
@@ -365,6 +455,10 @@ func QueryUserMetricsCurrentWeek(ctx *context.Context) {
 func QueryUserStaticCurrentWeek(ctx *context.Context) {
     queryUserDataPage(ctx, "public.user_business_analysis_current_week", new(models.UserBusinessAnalysisCurrentWeek))
 }
+func QueryUserStaticLastWeek(ctx *context.Context) {
+    queryUserDataPage(ctx, "public.user_business_analysis_last_week", new(models.UserBusinessAnalysisLastWeek))
+}
 func QueryUserMetricsCurrentYear(ctx *context.Context) {
     currentTimeNow := time.Now()
     pageStartTime := time.Date(currentTimeNow.Year(), 1, 1, 0, 0, 0, 0, currentTimeNow.Location())
@@ -457,7 +551,7 @@ func QueryUserStaticDataPage(ctx *context.Context) {
         startDate = settingStartTime.Format("2006-01-02")
     }
     endTime, _ = time.ParseInLocation("2006-01-02", endDate, time.Local)
-    endTime = endTime.AddDate(0, 0, 1)
+    //endTime = endTime.AddDate(0, 0, 1)
     endTime = time.Date(endTime.Year(), endTime.Month(), endTime.Day(), 23, 59, 59, 0, startTime.Location())
     isAll = false
@@ -481,36 +575,14 @@ func QueryUserStaticDataPage(ctx *context.Context) {
     }
     if IsReturnFile {
-        re, count := models.QueryUserStaticDataAll(pageOpts)
-        log.Info("return count=" + fmt.Sprint(count))
-        //writer exec file.
-        xlsx := excelize.NewFile()
+        //re, count := models.QueryUserStaticDataAll(pageOpts)
+        wikiMap, _ := queryWikiCountMap(startTime, endTime)
+        re, count := models.QueryUserStaticDataForUserDefine(pageOpts, wikiMap)
         sheetName := ctx.Tr("user.static.sheetname")
-        index := xlsx.NewSheet(sheetName)
-        xlsx.DeleteSheet("Sheet1")
-        dataHeader := getExcelHeader(ctx)
-        for k, v := range dataHeader {
-            // set the cell value
-            xlsx.SetCellValue(sheetName, k, v)
-        }
-        for i, userRecord := range re {
-            row := i + 2
-            writeExcel(row, xlsx, sheetName, userRecord)
-        }
-        // set the default active sheet
-        xlsx.SetActiveSheet(index)
-        filename := sheetName + "_" + ctx.Tr("user.static.all") + ".xlsx"
-        ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(filename))
-        ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
-        if _, err := xlsx.WriteTo(ctx.Resp); err != nil {
-            log.Info("writer exel error." + err.Error())
-        }
+        filename := sheetName + "_" + startDate + "_" + endDate + ".xlsx"
+        os.Remove(setting.AppDataPath + Excel_File_Path + filename)
+        go writeFileToDisk(ctx, count, re, filename)
+        ctx.JSON(http.StatusOK, setting.AppURL+"api/v1/download_user_define_file?filename="+filename)
     } else {
         mapInterface := make(map[string]interface{})
         re, count := models.QueryUserStaticDataPage(pageOpts)
@@ -520,22 +592,47 @@ func QueryUserStaticDataPage(ctx *context.Context) {
     }
 }
-func TimingCountDataByDateAndReCount(date string, isReCount bool) {
+func writeFileToDisk(ctx *context.Context, count int64, re []*models.UserBusinessAnalysis, filename string) {
+    log.Info("return count=" + fmt.Sprint(count))
+    //writer exec file.
+    xlsx := excelize.NewFile()
+    sheetName := ctx.Tr("user.static.sheetname")
+    index := xlsx.NewSheet(sheetName)
+    xlsx.DeleteSheet("Sheet1")
+    dataHeader := getExcelHeader(ctx)
+    for k, v := range dataHeader {
+        // set the cell value
+        xlsx.SetCellValue(sheetName, k, v)
+    }
-    t, _ := time.Parse("2006-01-02", date)
-    startTime := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
+    for i, userRecord := range re {
+        row := i + 2
+        writeExcelPage(row, xlsx, sheetName, userRecord)
+    }
-    endTime := time.Date(t.Year(), t.Month(), t.Day(), 23, 59, 59, 0, t.Location())
+    // set the default active sheet
+    xlsx.SetActiveSheet(index)
-    //query wiki data
-    log.Info("start to time count data")
+    //ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(filename))
+    //ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
+    filename = setting.AppDataPath + Excel_File_Path + filename
+    os.Mkdir(setting.AppDataPath+Excel_File_Path, 0755)
+    if err := xlsx.SaveAs(filename); err != nil {
+        log.Info("writer exel error." + err.Error())
+    } else {
+        log.Info("write to file succeed, filepath=" + filename)
+    }
+}
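Design note: the export no longer streams the workbook in the HTTP response. `QueryUserStaticDataPage` writes it under `AppDataPath + "/useranalysis/"` in a goroutine and immediately returns the download URL, which the client fetches from the new `download_user_define_file` route once the file exists. A hedged client-side sketch of that two-step flow follows; the host, credentials, and the query-parameter names for the date range are assumptions, only the two routes and the `"File Not Exist."` response come from this diff.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"strings"
	"time"
)

func get(url string) (string, error) {
	resp, err := http.Get(url)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	return string(body), err
}

func main() {
	base := "https://example-openi-host/api/v1" // assumed host
	// Step 1: request the export; the handler answers right away with the
	// download URL while writeFileToDisk saves the workbook in the background.
	// The parameter names below are assumptions.
	raw, err := get(base + "/query_user_static_page?IsReturnFile=true&startDate=2022-03-07&endDate=2022-03-13")
	if err != nil {
		panic(err)
	}
	downloadURL := strings.Trim(strings.TrimSpace(raw), `"`) // response body is a JSON string
	// Step 2: give the background writer a moment, then fetch the file from
	// the new download_user_define_file route.
	time.Sleep(5 * time.Second)
	data, err := get(downloadURL)
	if err != nil || data == `"File Not Exist."` {
		fmt.Println("export not ready yet; retry later")
		return
	}
	if err := os.WriteFile("user_statistics.xlsx", []byte(data), 0o644); err != nil {
		panic(err)
	}
	fmt.Println("saved user_statistics.xlsx")
}
```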
+func queryWikiCountMap(startTime time.Time, endTime time.Time) (map[string]int, error) {
     wikiMap := make(map[string]int)
     warnEmailMessage := "用户统计信息入库失败,请尽快定位。"
     repoList, err := models.GetAllRepositories()
     if err != nil {
         log.Error("query repo error." + err.Error())
         mailer.SendWarnNotifyMail(setting.Warn_Notify_Mails, warnEmailMessage)
-        return
+        return nil, err
     }
     log.Info("start to query wiki data")
     for _, repoRecord := range repoList {
@@ -543,7 +640,7 @@ func TimingCountDataByDateAndReCount(date string, isReCount bool) {
         time, err := git.GetLatestCommitTime(wikiPath)
         if err == nil {
             log.Info("last commit time:" + time.Format("2006-01-02 15:04:05") + " wikiPath=" + wikiPath)
-            if time.After(startTime) {
+            if time.After(startTime) && time.Before(endTime) {
                 wikiRepo, _, err := FindWikiRepoCommitByWikiPath(wikiPath)
                 if err != nil {
                     log.Error("wiki not exist. wikiPath=" + wikiPath)
@@ -568,14 +665,26 @@ func TimingCountDataByDateAndReCount(date string, isReCount bool) {
             }
         }
     }
+    return wikiMap, nil
+}
+func TimingCountDataByDateAndReCount(date string, isReCount bool) {
+    t, _ := time.Parse("2006-01-02", date)
+    startTime := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
+    endTime := time.Date(t.Year(), t.Month(), t.Day(), 23, 59, 59, 0, t.Location())
+    warnEmailMessage := "用户统计信息入库失败,请尽快定位。"
+    //query wiki data
+    log.Info("start to time count data")
+    wikiMap, err := queryWikiCountMap(startTime, endTime)
     //other user info data
     err = models.CounDataByDateAndReCount(wikiMap, startTime, endTime, isReCount)
     if err != nil {
         log.Error("count user info error." + err.Error())
         mailer.SendWarnNotifyMail(setting.Warn_Notify_Mails, warnEmailMessage)
     }
-    log.Info("start to count all user info data")
-    //models.RefreshUserStaticAllTabel(wikiMap)
     log.Info("end to count all user info data")
 }
@@ -6,6 +6,7 @@
 <div style="margin-top: 20px;">
     <span class="sta_iterm">统计周期:</span>
     <button type="button" class='btnFirst' id ="yesterday_usr" v-bind:class="{colorChange:1==dynamic}" @click="resetPage(),getUserList('yesterday_usr',1)">昨天</button>
+    <button type="button" class='btn' id = "last_week_usr" v-bind:class="{colorChange:8==dynamic}" @click="resetPage(),getUserList('last_week_usr',8)">上周</button>
     <button type="button" class='btn' id = "current_week_usr" v-bind:class="{colorChange:2==dynamic}" @click="resetPage(),getUserList('current_week_usr',2)">本周</button>
     <button type="button" class='btn' id = "current_month_usr" v-bind:class="{colorChange:3==dynamic}" @click="resetPage(),getUserList('current_month_usr',3)">本月</button>
     <button type="button" class='btn' id = "last_month_usr" v-bind:class="{colorChange:4==dynamic}" @click="resetPage(),getUserList('last_month_usr',4)">上月</button>
@@ -375,6 +376,11 @@
             this.dataUrl = '../../api/v1/query_user_current_week';
             break
         }
+        case "last_week_usr":{
+            this.value_time=[]
+            this.dataUrl = '../../api/v1/query_user_last_week';
+            break
+        }
         case "current_month_usr":{
             this.value_time=[]
             this.dataUrl = '../../api/v1/query_user_current_month';