| @@ -29,51 +29,22 @@ | |||
| } | |||
| .rotation3D__item .scale{ position: absolute; top: 0; width: 100%; height: 100%; } | |||
| .rotation3D__item .cont{ position: relative; z-index: 2; } | |||
| .rotation3D__item .cont .iconfont { font-size: 28px; margin-top: 30px; margin-bottom: 96px; display: block; } | |||
| .rotation3D__item .cont .iconfont { font-size: 28px; margin-top: 30px; margin-bottom: 96px; display: block; height: 35px;} | |||
| .rotation3D__item .cont p{ color: #101010; } | |||
| .itemList .rotation3D__item .cont p::after{ | |||
| font-size: 12px; | |||
| content: ''; | |||
| position: absolute; | |||
| left: 0; | |||
| right: 0; | |||
| margin-top: 60px; | |||
| color: #101010; | |||
| } | |||
| .itemList .rotation3D__item:nth-child(1) .cont p::after{ | |||
| content: "鹏城云脑一号"; | |||
| } | |||
| .itemList .rotation3D__item:nth-child(2) .cont p::after{ | |||
| content: "鹏城云脑二号"; | |||
| } | |||
| .itemList .rotation3D__item:nth-child(3) .cont p::after{ | |||
| content: "北大人工智能集群系统"; | |||
| } | |||
.itemList .rotation3D__item:nth-child(4) .cont p::after{
content: "合肥类脑智能开放平台";
}
.itemList .rotation3D__item:nth-child(5) .cont p::after{
content: "武汉人工智能计算中心";
}
.itemList .rotation3D__item:nth-child(6) .cont p::after{
content: "西安未来人工智能计算中心";
}
.itemList .rotation3D__item:nth-child(7) .cont p::after{
content: "更多接入中…";
}
.itemList .rotation3D__item:nth-child(8) .cont p::after{
content: "中原人工智能计算中心";
}
.itemList .rotation3D__item:nth-child(9) .cont p::after{
content: "成都人工智能计算中心";
}
.itemList .rotation3D__item:nth-child(10) .cont p::after{
content: "横琴先进智能计算中心";
}
.itemList .rotation3D__item:nth-child(11) .cont p::after{
content: "国家超级计算济南中心";
}
.lineList .rotation3D__line:nth-child(5n+0) .dot{
}
.lineList .rotation3D__line:nth-child(5n+1) .dot{
animation-delay: 1s;
}
.lineList .rotation3D__line:nth-child(5n+2) .dot{
animation-delay: 3s;
}
.lineList .rotation3D__line:nth-child(5n+3) .dot{
animation-delay: 2s;
}
.lineList .rotation3D__line:nth-child(5n+4) .dot{
animation-delay: 4s;
}
| .rotation3D__item.blue{ color: #01e9fc; } | |||
| .rotation3D__item.green{ color: #b4b3ca; } | |||
| .rotation3D__item.yellow{ color: #ffd200; } | |||
| @@ -90,14 +61,17 @@ | |||
| ---------------------------*/ | |||
| .rotation3D__line{ | |||
| position: absolute; left: 50%; top: 50%; | |||
| display: block; width: 1px; height: 50%; | |||
| display: block; | |||
| width: 30px; | |||
| height: 50%; | |||
| padding-top: 60px; color: #fff; font-size: 50px; | |||
| /*background: #fff;*/ | |||
/* set the transform origin at the center */
| transform-origin: 50% 0; | |||
| transform-style: preserve-3d; | |||
| } | |||
| .rotation3D__line .pos{ position: absolute; top: 0; } | |||
| overflow: hidden; | |||
| } | |||
| .rotation3D__line .pos{ position: absolute; top: 0; left: 15px;} | |||
| .rotation3D__line svg { position: absolute; top: 0; } | |||
| .rotation3D__line svg path { | |||
| stroke: #fff; fill: none; | |||
| @@ -139,8 +113,10 @@ | |||
| position: absolute; | |||
| font-size: 12px; | |||
| color: #888; | |||
| transform: rotate(180deg)scale(0.80); | |||
| } | |||
| transform:scale(0.80); | |||
| transform-origin:left; | |||
| white-space: nowrap; | |||
| } | |||
/* colors */
| .rotation3D__line.blue { color: #07b2f9; } | |||
| @@ -134,7 +134,7 @@ type Cloudbrain struct { | |||
| CanDebug bool `xorm:"-"` | |||
| CanDel bool `xorm:"-"` | |||
| CanModify bool `xorm:"-"` | |||
| Type int | |||
| Type int `xorm:"INDEX"` | |||
| BenchmarkTypeID int | |||
| BenchmarkChildTypeID int | |||
| @@ -2019,3 +2019,30 @@ func GetDatasetInfo(uuidStr string) (map[string]DatasetInfo, string, error) { | |||
| return datasetInfos, datasetNames, nil | |||
| } | |||
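// GetNewestJobsByAiCenter returns the newest (max id) C2Net job id for each non-empty ai_center.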
| func GetNewestJobsByAiCenter() ([]int64, error) { | |||
| ids := make([]int64, 0) | |||
| return ids, x. | |||
| Select("max(id) as id"). | |||
| Where("type=? and ai_center!='' and ai_center is not null", TypeC2Net). | |||
| GroupBy("ai_center"). | |||
| Table(Cloudbrain{}). | |||
| Find(&ids) | |||
| } | |||
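// GetNewestJobsByType returns the newest (max id) job id for each of TypeCloudBrainOne and TypeCloudBrainTwo.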
| func GetNewestJobsByType() ([]int64, error) { | |||
| ids := make([]int64, 0) | |||
| return ids, x. | |||
| Select("max(id) as id"). | |||
| In("type", TypeCloudBrainOne, TypeCloudBrainTwo). | |||
| GroupBy("type"). | |||
| Table(Cloudbrain{}). | |||
| Find(&ids) | |||
| } | |||
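// GetCloudbrainByIDs loads the Cloudbrain records matching the given ids.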
| func GetCloudbrainByIDs(ids []int64) ([]*Cloudbrain, error) { | |||
| cloudbrains := make([]*Cloudbrain, 0) | |||
| return cloudbrains, x. | |||
| In("id", ids). | |||
| Find(&cloudbrains) | |||
| } | |||
| @@ -211,6 +211,42 @@ func setKeyContributerDict(contributorDistinctDict map[string]int, email string, | |||
| } | |||
| } | |||
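// GetAllUserPublicRepoKPIStats aggregates per-user commit KPI stats (commit count and commit lines) over all public repositories in the given time range; private repositories are skipped.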
| func GetAllUserPublicRepoKPIStats(startTime time.Time, endTime time.Time) (map[string]*git.UserKPIStats, error) { | |||
| authors := make(map[string]*git.UserKPIStats) | |||
| repositorys, err := GetAllRepositoriesByFilterCols("owner_name", "name", "is_private") | |||
| if err != nil { | |||
| return nil, err | |||
| } | |||
| for _, repository := range repositorys { | |||
| if repository.IsPrivate { | |||
| continue | |||
| } | |||
| authorsOneRepo, err1 := git.GetUserKPIStats(repository.RepoPath(), startTime, endTime) | |||
| if err1 != nil { | |||
| log.Warn("get user kpi status err:"+repository.RepoPath(), err1.Error()) | |||
| continue | |||
| } | |||
| for key, value := range authorsOneRepo { | |||
| if _, ok := authors[key]; !ok { | |||
| authors[key] = &git.UserKPIStats{ | |||
| Name: value.Name, | |||
| Email: value.Email, | |||
| Commits: 0, | |||
| CommitLines: 0, | |||
| } | |||
| } | |||
| authors[key].Commits += value.Commits | |||
| authors[key].CommitLines += value.CommitLines | |||
| } | |||
| } | |||
| return authors, nil | |||
| } | |||
| func GetAllUserKPIStats(startTime time.Time, endTime time.Time) (map[string]*git.UserKPIStats, error) { | |||
| authors := make(map[string]*git.UserKPIStats) | |||
| repositorys, err := GetAllRepositoriesByFilterCols("owner_name", "name") | |||
| @@ -0,0 +1,437 @@ | |||
| package models | |||
| import ( | |||
| "fmt" | |||
| "time" | |||
| "code.gitea.io/gitea/modules/log" | |||
| "code.gitea.io/gitea/modules/timeutil" | |||
| ) | |||
| type UserBusinessAnalysisForActivity struct { | |||
| ID int64 `xorm:"pk"` | |||
| CountDate int64 `xorm:"pk"` | |||
| //action :ActionMergePullRequest // 11 | |||
| CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"` | |||
| //action :ActionCommitRepo | |||
| CommitCount int `xorm:"NOT NULL DEFAULT 0"` | |||
| //issue // 10 | |||
| IssueCount int `xorm:"NOT NULL DEFAULT 0"` | |||
| //comment table current date | |||
| CommentCount int `xorm:"NOT NULL DEFAULT 0"` | |||
| //follow table | |||
| WatchedCount int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"` | |||
| //issue, issueassigees | |||
| SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"` | |||
| //use | |||
| RegistDate timeutil.TimeStamp `xorm:"NOT NULL"` | |||
| //user | |||
| Email string `xorm:"NOT NULL"` | |||
| Phone string `xorm:"NULL"` | |||
| //user | |||
| Name string `xorm:"NOT NULL"` | |||
| DataDate string `xorm:"NULL"` | |||
| CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` | |||
| //0 | |||
| CommitModelCount int `xorm:"NOT NULL DEFAULT 0"` | |||
| } | |||
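// QueryDataForActivity builds one activity record per active user, counting merges, commits, issues, comments, dataset uploads and models in public repositories between startTime and endTime.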
| func QueryDataForActivity(startTime time.Time, endTime time.Time) []*UserBusinessAnalysisForActivity { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| result := make([]*UserBusinessAnalysisForActivity, 0) | |||
| publicRepo := queryPublicRepo() | |||
| start_unix := startTime.Unix() | |||
| end_unix := endTime.Unix() | |||
| CodeMergeCountMap := queryPullRequestPublic(start_unix, end_unix, publicRepo) | |||
| CommitCodeSizeMap, err := GetAllUserPublicRepoKPIStats(startTime, endTime) | |||
| if err != nil { | |||
| log.Info("error,info=" + err.Error()) | |||
| } | |||
| CommitCountMap := queryCommitActionPublic(start_unix, end_unix, 5, publicRepo) | |||
| IssueCountMap, publicRepoIssueIdMap := queryCreateIssuePublic(start_unix, end_unix, publicRepo) | |||
| SolveIssueCountMap := querySolveIssuePublic(start_unix, end_unix, publicRepoIssueIdMap) | |||
| WatchedCountMap, _ := queryFollow(start_unix, end_unix) | |||
| CommentCountMap := queryCommentPublic(start_unix, end_unix, publicRepoIssueIdMap) | |||
| PublicDataSet := queryAllPublicDataSet(publicRepo) | |||
| DatasetFileNums := queryPublicDatasetFileNums(start_unix, end_unix, PublicDataSet) | |||
| AiModelManageMap := queryUserModelPublic(start_unix, end_unix, publicRepo) | |||
| cond := "type != 1 and is_active=true" | |||
| count, err := sess.Where(cond).Count(new(User)) | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| userList := make([]*User, 0) | |||
| sess.Find(&userList) | |||
| for i, userRecord := range userList { | |||
| var dateRecord UserBusinessAnalysisForActivity | |||
| dateRecord.ID = userRecord.ID | |||
| log.Info("i=" + fmt.Sprint(i) + " userName=" + userRecord.Name) | |||
| dateRecord.Email = userRecord.Email | |||
| dateRecord.Phone = userRecord.PhoneNumber | |||
| dateRecord.RegistDate = userRecord.CreatedUnix | |||
| dateRecord.Name = userRecord.Name | |||
| dateRecord.CodeMergeCount = getMapValue(dateRecord.ID, CodeMergeCountMap) | |||
| dateRecord.CommitCount = getMapValue(dateRecord.ID, CommitCountMap) | |||
| dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap) | |||
| dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, DatasetFileNums) | |||
| dateRecord.IssueCount = getMapValue(dateRecord.ID, IssueCountMap) | |||
| dateRecord.CommentCount = getMapValue(dateRecord.ID, CommentCountMap) | |||
| if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok { | |||
| dateRecord.CommitCodeSize = 0 | |||
| } else { | |||
| dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines) | |||
| } | |||
| dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap) | |||
| dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap) | |||
| result = append(result, &dateRecord) | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return result | |||
| } | |||
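// querySolveIssuePublic counts, per assignee, issues closed in the time range whose ids are in the given public-repository issue set.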
| func querySolveIssuePublic(start_unix int64, end_unix int64, publicRepoIssueIdMap map[int64]int) map[int64]int { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| resultMap := make(map[int64]int) | |||
| cond := "issue.is_closed=true and issue.closed_unix>=" + fmt.Sprint(start_unix) + " and issue.closed_unix<=" + fmt.Sprint(end_unix) | |||
| count, err := sess.Table("issue_assignees").Join("inner", "issue", "issue.id=issue_assignees.issue_id").Where(cond).Count(new(IssueAssignees)) | |||
| if err != nil { | |||
| log.Info("query issue error. return.") | |||
| return resultMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| issueAssigneesList := make([]*IssueAssignees, 0) | |||
| sess.Select("issue_assignees.*").Table("issue_assignees"). | |||
| Join("inner", "issue", "issue.id=issue_assignees.issue_id"). | |||
| Where(cond).OrderBy("issue_assignees.id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| sess.Find(&issueAssigneesList) | |||
| log.Info("query IssueAssignees size=" + fmt.Sprint(len(issueAssigneesList))) | |||
| for _, issueAssigneesRecord := range issueAssigneesList { | |||
| if isPublicRepo(issueAssigneesRecord.IssueID, publicRepoIssueIdMap) { | |||
| if _, ok := resultMap[issueAssigneesRecord.AssigneeID]; !ok { | |||
| resultMap[issueAssigneesRecord.AssigneeID] = 1 | |||
| } else { | |||
| resultMap[issueAssigneesRecord.AssigneeID] += 1 | |||
| } | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return resultMap | |||
| } | |||
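// queryPublicRepo returns the ids of all public repositories as a set.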
| func queryPublicRepo() map[int64]int { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| resultMap := make(map[int64]int) | |||
| count, err := sess.Table("repository").Count(new(Repository)) | |||
| if err != nil { | |||
| log.Info("query Repository error. return.") | |||
| return resultMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| repositoryList := make([]*Repository, 0) | |||
| sess.Select("*").Table("repository").OrderBy("id desc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| sess.Find(&repositoryList) | |||
| log.Info("query repo size=" + fmt.Sprint(len(repositoryList))) | |||
| for _, repositoryRecord := range repositoryList { | |||
| if repositoryRecord.IsPrivate { | |||
| continue | |||
| } | |||
| if _, ok := resultMap[repositoryRecord.ID]; !ok { | |||
| resultMap[repositoryRecord.ID] = 1 | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return resultMap | |||
| } | |||
| func isPublicRepo(repoId int64, publicAllRepo map[int64]int) bool { | |||
| if _, ok := publicAllRepo[repoId]; !ok { | |||
| return false | |||
| } | |||
| return true | |||
| } | |||
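// queryPullRequestPublic counts, per poster, pull requests of public repositories merged in the time range.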
| func queryPullRequestPublic(start_unix int64, end_unix int64, publicAllRepo map[int64]int) map[int64]int { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| resultMap := make(map[int64]int) | |||
| cond := "pull_request.merged_unix>=" + fmt.Sprint(start_unix) + " and pull_request.merged_unix<=" + fmt.Sprint(end_unix) | |||
| count, err := sess.Table("issue").Join("inner", "pull_request", "issue.id=pull_request.issue_id").Where(cond).Count(new(Issue)) | |||
| if err != nil { | |||
| log.Info("query issue error. return.") | |||
| return resultMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| issueList := make([]*Issue, 0) | |||
| sess.Select("issue.*").Table("issue").Join("inner", "pull_request", "issue.id=pull_request.issue_id").Where(cond).OrderBy("issue.id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| sess.Find(&issueList) | |||
| log.Info("query issue(PR) size=" + fmt.Sprint(len(issueList))) | |||
| for _, issueRecord := range issueList { | |||
| if isPublicRepo(issueRecord.RepoID, publicAllRepo) { | |||
| if _, ok := resultMap[issueRecord.PosterID]; !ok { | |||
| resultMap[issueRecord.PosterID] = 1 | |||
| } else { | |||
| resultMap[issueRecord.PosterID] += 1 | |||
| } | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return resultMap | |||
| } | |||
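// queryCommitActionPublic counts, per user, actions of the given op_type (the caller passes 5, i.e. commit actions) on public repositories in the time range.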
| func queryCommitActionPublic(start_unix int64, end_unix int64, actionType int64, publicAllRepo map[int64]int) map[int64]int { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| resultMap := make(map[int64]int) | |||
| cond := "user_id=act_user_id and op_type=" + fmt.Sprint(actionType) + " and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
| count, err := sess.Where(cond).Count(new(Action)) | |||
| if err != nil { | |||
| log.Info("query action error. return.") | |||
| return resultMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,user_id,op_type,act_user_id,repo_id").Table("action").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| actionList := make([]*Action, 0) | |||
| sess.Find(&actionList) | |||
| log.Info("query action size=" + fmt.Sprint(len(actionList))) | |||
| for _, actionRecord := range actionList { | |||
| if isPublicRepo(actionRecord.RepoID, publicAllRepo) { | |||
| if _, ok := resultMap[actionRecord.UserID]; !ok { | |||
| resultMap[actionRecord.UserID] = 1 | |||
| } else { | |||
| resultMap[actionRecord.UserID] += 1 | |||
| } | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return resultMap | |||
| } | |||
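// queryCreateIssuePublic counts, per poster, issues created in public repositories in the time range and also returns the set of those issue ids.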
| func queryCreateIssuePublic(start_unix int64, end_unix int64, publicAllRepo map[int64]int) (map[int64]int, map[int64]int) { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| resultMap := make(map[int64]int) | |||
| publicRepoIssueIdMap := make(map[int64]int) | |||
| cond := "is_pull=false and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
| count, err := sess.Where(cond).Count(new(Issue)) | |||
| if err != nil { | |||
| log.Info("query Issue error. return.") | |||
| return resultMap, publicRepoIssueIdMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,poster_id,repo_id").Table("issue").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| issueList := make([]*Issue, 0) | |||
| sess.Find(&issueList) | |||
| log.Info("query issue size=" + fmt.Sprint(len(issueList))) | |||
| for _, issueRecord := range issueList { | |||
| if isPublicRepo(issueRecord.RepoID, publicAllRepo) { | |||
| if _, ok := resultMap[issueRecord.PosterID]; !ok { | |||
| resultMap[issueRecord.PosterID] = 1 | |||
| } else { | |||
| resultMap[issueRecord.PosterID] += 1 | |||
| } | |||
| publicRepoIssueIdMap[issueRecord.ID] = 1 | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return resultMap, publicRepoIssueIdMap | |||
| } | |||
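// queryCommentPublic counts, per poster, comments created in the time range on issues belonging to public repositories.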
| func queryCommentPublic(start_unix int64, end_unix int64, publicRepoIssueIdMap map[int64]int) map[int64]int { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| cond := "created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
| resultMap := make(map[int64]int) | |||
| count, err := sess.Where(cond).Count(new(Comment)) | |||
| if err != nil { | |||
| log.Info("query Comment error. return.") | |||
| return resultMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,type,poster_id").Table("comment").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| commentList := make([]*Comment, 0) | |||
| sess.Find(&commentList) | |||
| log.Info("query Comment size=" + fmt.Sprint(len(commentList))) | |||
| for _, commentRecord := range commentList { | |||
| if isPublicRepo(commentRecord.IssueID, publicRepoIssueIdMap) { | |||
| if _, ok := resultMap[commentRecord.PosterID]; !ok { | |||
| resultMap[commentRecord.PosterID] = 1 | |||
| } else { | |||
| resultMap[commentRecord.PosterID] += 1 | |||
| } | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return resultMap | |||
| } | |||
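// queryAllPublicDataSet returns the ids of datasets that belong to public repositories.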
| func queryAllPublicDataSet(publicAllRepo map[int64]int) map[int64]int { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| publicDataSetIdMap := make(map[int64]int) | |||
| count, err := sess.Count(new(Dataset)) | |||
| if err != nil { | |||
| log.Info("query dataset error. return.") | |||
| return publicDataSetIdMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,user_id,repo_id").Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| datasetList := make([]*Dataset, 0) | |||
| sess.Find(&datasetList) | |||
| log.Info("query datasetList size=" + fmt.Sprint(len(datasetList))) | |||
| for _, datasetRecord := range datasetList { | |||
| if isPublicRepo(datasetRecord.RepoID, publicAllRepo) { | |||
| publicDataSetIdMap[datasetRecord.ID] = 1 | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return publicDataSetIdMap | |||
| } | |||
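// queryPublicDatasetFileNums counts, per uploader, attachments added to public datasets in the time range.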
| func queryPublicDatasetFileNums(start_unix int64, end_unix int64, publicDataSetIdMap map[int64]int) map[int64]int { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| resultNumMap := make(map[int64]int) | |||
| cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
| count, err := sess.Where(cond).Count(new(Attachment)) | |||
| if err != nil { | |||
| log.Info("query attachment error. return.") | |||
| return resultNumMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,uploader_id,size,dataset_id").Table("attachment").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| attachmentList := make([]*Attachment, 0) | |||
| sess.Find(&attachmentList) | |||
| log.Info("query Attachment size=" + fmt.Sprint(len(attachmentList))) | |||
| for _, attachRecord := range attachmentList { | |||
| if isPublicRepo(attachRecord.DatasetID, publicDataSetIdMap) { | |||
| if _, ok := resultNumMap[attachRecord.UploaderID]; !ok { | |||
| resultNumMap[attachRecord.UploaderID] = 1 | |||
| } else { | |||
| resultNumMap[attachRecord.UploaderID] += 1 | |||
| } | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return resultNumMap | |||
| } | |||
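// queryUserModelPublic counts, per user, models created in public repositories in the time range.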
| func queryUserModelPublic(start_unix int64, end_unix int64, publicAllRepo map[int64]int) map[int64]int { | |||
| sess := x.NewSession() | |||
| defer sess.Close() | |||
| resultMap := make(map[int64]int) | |||
| cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
| count, err := sess.Where(cond).Count(new(AiModelManage)) | |||
| if err != nil { | |||
| log.Info("query AiModelManage error. return.") | |||
| return resultMap | |||
| } | |||
| var indexTotal int64 | |||
| indexTotal = 0 | |||
| for { | |||
| sess.Select("id,user_id,repo_id").Table("ai_model_manage").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
| aiModelList := make([]*AiModelManage, 0) | |||
| sess.Find(&aiModelList) | |||
| log.Info("query AiModelManage size=" + fmt.Sprint(len(aiModelList))) | |||
| for _, aiModelRecord := range aiModelList { | |||
| if isPublicRepo(aiModelRecord.RepoId, publicAllRepo) { | |||
| if _, ok := resultMap[aiModelRecord.UserId]; !ok { | |||
| resultMap[aiModelRecord.UserId] = 1 | |||
| } else { | |||
| resultMap[aiModelRecord.UserId] += 1 | |||
| } | |||
| } | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| if indexTotal >= count { | |||
| break | |||
| } | |||
| } | |||
| return resultMap | |||
| } | |||
| @@ -105,6 +105,8 @@ type UserBusinessAnalysisAll struct { | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| Phone string `xorm:"NULL"` | |||
| } | |||
| type UserBusinessAnalysis struct { | |||
| @@ -192,6 +194,8 @@ type UserBusinessAnalysis struct { | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| Phone string `xorm:"NULL"` | |||
| } | |||
| type UserBusinessAnalysisQueryOptions struct { | |||
| @@ -475,6 +479,7 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi | |||
| dateRecord.CountDate = CountDate.Unix() | |||
| dateRecord.DataDate = DataDate | |||
| dateRecord.Email = userRecord.Email | |||
| dateRecord.Phone = userRecord.PhoneNumber | |||
| dateRecord.RegistDate = userRecord.CreatedUnix | |||
| dateRecord.Name = userRecord.Name | |||
| dateRecord.UserLocation = userRecord.Location | |||
| @@ -728,6 +733,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||
| var dateRecordAll UserBusinessAnalysisAll | |||
| dateRecordAll.ID = userRecord.ID | |||
| dateRecordAll.Email = userRecord.Email | |||
| dateRecordAll.Phone = userRecord.PhoneNumber | |||
| dateRecordAll.RegistDate = userRecord.CreatedUnix | |||
| dateRecordAll.Name = userRecord.Name | |||
| dateRecordAll.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime()) | |||
| @@ -839,7 +845,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static | |||
| insertBatchSql := "INSERT INTO public." + tableName + | |||
| "(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " + | |||
| "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive) " + | |||
| "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive,phone) " + | |||
| "VALUES" | |||
| for i, record := range dateRecords { | |||
| @@ -848,7 +854,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static | |||
| ", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) + | |||
| ", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) + | |||
| ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "'," + | |||
| fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ")" | |||
| fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ",'" + record.Phone + "')" | |||
| if i < (len(dateRecords) - 1) { | |||
| insertBatchSql += "," | |||
| } | |||
| @@ -973,6 +979,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||
| dateRecord.CountDate = CountDate.Unix() | |||
| dateRecord.Email = userRecord.Email | |||
| dateRecord.Phone = userRecord.PhoneNumber | |||
| dateRecord.RegistDate = userRecord.CreatedUnix | |||
| dateRecord.Name = userRecord.Name | |||
| dateRecord.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime()) | |||
| @@ -1028,12 +1035,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||
| setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord) | |||
| if getUserActivate(dateRecord) > 0 { | |||
| log.Info("has activity." + userRecord.Name) | |||
| addUserToMap(userNewAddActivity, userRecord.CreatedUnix, dateRecord.ID) | |||
| addUserToMap(userNewAddActivity, userRecord.CreatedUnix, dateRecord.ID, currentTimeNow) | |||
| } | |||
| if userRecord.IsActive { | |||
| addUserToMap(userAcitvateJsonMap, userRecord.CreatedUnix, dateRecord.ID) | |||
| addUserToMap(userAcitvateJsonMap, userRecord.CreatedUnix, dateRecord.ID, currentTimeNow) | |||
| } | |||
| addUserToMap(userCurrentDayRegistMap, userRecord.CreatedUnix, dateRecord.ID) | |||
| addUserToMap(userCurrentDayRegistMap, userRecord.CreatedUnix, dateRecord.ID, currentTimeNow) | |||
| } | |||
| indexTotal += PAGE_SIZE | |||
| @@ -1056,7 +1063,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||
| useMetrics.NotActivateRegistUser = getMapKeyStringValue("NotActivateRegistUser", userMetrics) | |||
| useMetrics.TotalActivateRegistUser = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) | |||
| useMetrics.TotalHasActivityUser = getMapKeyStringValue("TotalHasActivityUser", userMetrics) | |||
| useMetrics.CurrentDayRegistUser = getMapKeyStringValue("CurrentDayRegistUser", userMetrics) | |||
| count, err = sess.Where("type=0").Count(new(User)) | |||
| if err != nil { | |||
| log.Info("query user error. return.") | |||
| @@ -1124,8 +1131,9 @@ func setUniqueUserId(jsonString string, value map[int64]int64) (string, int) { | |||
| return userIdArray, len(value) | |||
| } | |||
| func addUserToMap(currentUserActivity map[int64]map[int64]int64, registDate timeutil.TimeStamp, userId int64) { | |||
| CountDateTime := time.Date(registDate.Year(), registDate.AsTime().Month(), registDate.AsTime().Day(), 0, 1, 0, 0, registDate.AsTime().Location()) | |||
| func addUserToMap(currentUserActivity map[int64]map[int64]int64, registDate timeutil.TimeStamp, userId int64, currentTimeNow time.Time) { | |||
| registTime := registDate.AsTimeInLocation(currentTimeNow.Location()) | |||
| CountDateTime := time.Date(registTime.Year(), registTime.Month(), registTime.Day(), 0, 1, 0, 0, currentTimeNow.Location()) | |||
| CountDate := CountDateTime.Unix() | |||
| if _, ok := currentUserActivity[CountDate]; !ok { | |||
| userIdMap := make(map[int64]int64, 0) | |||
| @@ -1149,6 +1157,7 @@ func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, en | |||
| } else { | |||
| userMetrics["NotActivateRegistUser"] = getMapKeyStringValue("NotActivateRegistUser", userMetrics) + 1 | |||
| } | |||
| userMetrics["CurrentDayRegistUser"] = getMapKeyStringValue("CurrentDayRegistUser", userMetrics) + 1 | |||
| } | |||
| if user.IsActive { | |||
| userMetrics["TotalActivateRegistUser"] = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) + 1 | |||
| @@ -65,6 +65,8 @@ type UserBusinessAnalysisCurrentYear struct { | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| Phone string `xorm:"NULL"` | |||
| } | |||
| type UserBusinessAnalysisLast30Day struct { | |||
| @@ -130,6 +132,8 @@ type UserBusinessAnalysisLast30Day struct { | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| Phone string `xorm:"NULL"` | |||
| } | |||
| type UserBusinessAnalysisLastMonth struct { | |||
| @@ -195,6 +199,8 @@ type UserBusinessAnalysisLastMonth struct { | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| Phone string `xorm:"NULL"` | |||
| } | |||
| type UserBusinessAnalysisCurrentMonth struct { | |||
| @@ -260,6 +266,8 @@ type UserBusinessAnalysisCurrentMonth struct { | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| Phone string `xorm:"NULL"` | |||
| } | |||
| type UserBusinessAnalysisCurrentWeek struct { | |||
| @@ -326,6 +334,8 @@ type UserBusinessAnalysisCurrentWeek struct { | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| Phone string `xorm:"NULL"` | |||
| } | |||
| type UserBusinessAnalysisYesterday struct { | |||
| @@ -392,6 +402,8 @@ type UserBusinessAnalysisYesterday struct { | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| Phone string `xorm:"NULL"` | |||
| } | |||
| type UserBusinessAnalysisLastWeek struct { | |||
| @@ -458,6 +470,8 @@ type UserBusinessAnalysisLastWeek struct { | |||
| CollectImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| CollectedImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| RecommendImage int `xorm:"NOT NULL DEFAULT 0"` | |||
| Phone string `xorm:"NULL"` | |||
| } | |||
| type UserAnalysisPara struct { | |||
| @@ -17,7 +17,7 @@ import ( | |||
| ) | |||
| const ( | |||
| Command = `pip3 install jupyterlab==2.2.5 -i https://pypi.tuna.tsinghua.edu.cn/simple;service ssh stop;jupyter lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --LabApp.token="" --LabApp.allow_origin="self https://cloudbrain.pcl.ac.cn"` | |||
| //Command = `pip3 install jupyterlab==2.2.5 -i https://pypi.tuna.tsinghua.edu.cn/simple;service ssh stop;jupyter lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --LabApp.token="" --LabApp.allow_origin="self https://cloudbrain.pcl.ac.cn"` | |||
| //CommandBenchmark = `echo "start benchmark";python /code/test.py;echo "end benchmark"` | |||
| CommandBenchmark = `echo "start benchmark";cd /benchmark && bash run_bk.sh;echo "end benchmark"` | |||
| CodeMountPath = "/code" | |||
| @@ -71,6 +71,11 @@ type GenerateCloudBrainTaskReq struct { | |||
| ResourceSpecId int | |||
| } | |||
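// GetCloudbrainDebugCommand builds the JupyterLab startup command used for cloudbrain debug tasks, applying the configured idle-cull settings (CullIdleTimeout and CullInterval).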
| func GetCloudbrainDebugCommand() string { | |||
| var command = `pip3 install jupyterlab==3 -i https://pypi.tuna.tsinghua.edu.cn/simple;service ssh stop;/usr/local/bin/python /usr/local/bin/jupyter-lab --ServerApp.shutdown_no_activity_timeout=` + setting.CullIdleTimeout + ` --TerminalManager.cull_inactive_timeout=` + setting.CullIdleTimeout + ` --TerminalManager.cull_interval=` + setting.CullInterval + ` --MappingKernelManager.cull_idle_timeout=` + setting.CullIdleTimeout + ` --MappingKernelManager.cull_interval=` + setting.CullInterval + ` --MappingKernelManager.cull_connected=True --MappingKernelManager.cull_busy=True --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --ServerApp.token="" --ServerApp.allow_origin="self https://cloudbrain.pcl.ac.cn" ` | |||
| return command | |||
| } | |||
| func isAdminOrOwnerOrJobCreater(ctx *context.Context, job *models.Cloudbrain, err error) bool { | |||
| if !ctx.IsSigned { | |||
| return false | |||
| @@ -507,7 +512,7 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e | |||
| GPUNumber: resourceSpec.GpuNum, | |||
| MemoryMB: resourceSpec.MemMiB, | |||
| ShmMB: resourceSpec.ShareMemMiB, | |||
| Command: Command, | |||
| Command: GetCloudbrainDebugCommand(),//Command, | |||
| NeedIBDevice: false, | |||
| IsMainRole: false, | |||
| UseNNI: false, | |||
| @@ -7,6 +7,7 @@ package setting | |||
| import ( | |||
| "encoding/base64" | |||
| "encoding/json" | |||
| "fmt" | |||
| "io" | |||
| "io/ioutil" | |||
| @@ -64,7 +65,16 @@ const ( | |||
| ReCaptcha = "recaptcha" | |||
| ) | |||
| // settings | |||
| type C2NetSequenceInfo struct { | |||
| ID int `json:"id"` | |||
| Name string `json:"name"` | |||
| Content string `json:"content"` | |||
| } | |||
| type C2NetSqInfos struct { | |||
| C2NetSqInfo []*C2NetSequenceInfo `json:"sequence"` | |||
| } | |||
| var ( | |||
| // AppVer settings | |||
| AppVer string | |||
| @@ -467,6 +477,8 @@ var ( | |||
| TrainGpuTypes string | |||
| TrainResourceSpecs string | |||
| MaxDatasetNum int | |||
| CullIdleTimeout string | |||
| CullInterval string | |||
| //benchmark config | |||
| IsBenchmarkEnabled bool | |||
| @@ -532,13 +544,16 @@ var ( | |||
| //grampus config | |||
| Grampus = struct { | |||
| Env string | |||
| Host string | |||
| UserName string | |||
| Password string | |||
| SpecialPools string | |||
| Env string | |||
| Host string | |||
| UserName string | |||
| Password string | |||
| SpecialPools string | |||
| C2NetSequence string | |||
| }{} | |||
| C2NetInfos *C2NetSqInfos | |||
| //elk config | |||
| ElkUrl string | |||
| ElkUser string | |||
| @@ -1315,6 +1330,8 @@ func NewContext() { | |||
| SpecialPools = sec.Key("SPECIAL_POOL").MustString("") | |||
| MaxDatasetNum = sec.Key("MAX_DATASET_NUM").MustInt(5) | |||
| CullIdleTimeout = sec.Key("CULL_IDLE_TIMEOUT").MustString("900") | |||
| CullInterval = sec.Key("CULL_INTERVAL").MustString("60") | |||
| sec = Cfg.Section("benchmark") | |||
| IsBenchmarkEnabled = sec.Key("ENABLED").MustBool(false) | |||
| @@ -1419,7 +1436,12 @@ func GetGrampusConfig() { | |||
| Grampus.UserName = sec.Key("USERNAME").MustString("") | |||
| Grampus.Password = sec.Key("PASSWORD").MustString("") | |||
| Grampus.SpecialPools = sec.Key("SPECIAL_POOL").MustString("") | |||
Grampus.C2NetSequence = sec.Key("C2NET_SEQUENCE").MustString("{\"sequence\":[{\"id\":1,\"name\":\"cloudbrain_one\",\"content\":\"鹏城云脑一号\"},{\"id\":2,\"name\":\"cloudbrain_two\",\"content\":\"鹏城云脑二号\"},{\"id\":3,\"name\":\"beida\",\"content\":\"北大人工智能集群系统\"},{\"id\":4,\"name\":\"hefei\",\"content\":\"合肥类脑智能开放平台\"},{\"id\":5,\"name\":\"wuhan\",\"content\":\"武汉人工智能计算中心\"},{\"id\":6,\"name\":\"xian\",\"content\":\"西安未来人工智能计算中心\"},{\"id\":7,\"name\":\"pclcci\",\"content\":\"鹏城云计算所\"},{\"id\":8,\"name\":\"xuchang\",\"content\":\"中原人工智能计算中心\"},{\"id\":9,\"name\":\"chengdu\",\"content\":\"成都人工智能计算中心\"},{\"id\":10,\"name\":\"more\",\"content\":\"横琴先进智能计算中心\"},{\"id\":11,\"name\":\"more\",\"content\":\"国家超级计算济南中心\"}]}")
| if Grampus.C2NetSequence != "" { | |||
| if err := json.Unmarshal([]byte(Grampus.C2NetSequence), &C2NetInfos); err != nil { | |||
| log.Error("Unmarshal(C2NetSequence) failed:%v", err) | |||
| } | |||
| } | |||
| } | |||
| func SetRadarMapConfig() { | |||
| @@ -559,6 +559,7 @@ static.CollectImage=Collect Image Count | |||
| static.CollectedImage=Collected Image Count | |||
| static.RecommendImage=Recommended Image Count | |||
| static.email=Email | |||
| static.phone=Phone | |||
| static.location=Location | |||
| static.all=All | |||
| static.public.user_business_analysis_current_month=Current_Month | |||
| @@ -564,6 +564,7 @@ static.CollectImage=收藏镜像数 | |||
| static.CollectedImage=被收藏镜像数 | |||
| static.RecommendImage=被推荐镜像数 | |||
| static.email=Email | |||
| static.phone=电话 | |||
| static.location=所在地区 | |||
| static.all=所有 | |||
| static.public.user_business_analysis_current_month=本月 | |||
| @@ -119,7 +119,6 @@ document.onreadystatechange = function () { | |||
| continue; | |||
| } | |||
| } | |||
| refresh3DInfo(record); | |||
| var recordPrefix = getMsg(record); | |||
| if(record.OpType == "6" || record.OpType == "10" || record.OpType == "12" || record.OpType == "13"){ | |||
| html += recordPrefix + actionName; | |||
| @@ -208,29 +207,6 @@ function getTaskLink(record){ | |||
| return re; | |||
| } | |||
| function refresh3DInfo(record){ | |||
| if(record.OpType == "25" || record.OpType == "29" || record.OpType == "31"){ | |||
| //cloudbrain one | |||
| var lines = $('.rotation3D__line'); | |||
| var span = $('.rotation3D__line').find("span")[0]; | |||
| //console.log(span); | |||
| span.innerText =record.RefName; | |||
| //$('.rotation3D__line').find("span").eq(0).text(record.RefName) | |||
| //console.log("cloudbrain one line length=" + lines.length); | |||
| //lines[0].find("span").text(record.RefName); | |||
| }else if(record.OpType == "26" || record.OpType == "27" || record.OpType == "28"){ | |||
| //cloudbrain two | |||
| var lines = $('.rotation3D__line'); | |||
| //console.log("cloudbrain two line length=" + lines.length); | |||
| var span = $('.rotation3D__line').find("span")[1]; | |||
| //console.log(span); | |||
| if(span != null){ | |||
| span.innerText =record.RefName; | |||
| } | |||
| } | |||
| } | |||
| function getMsg(record){ | |||
| var html =""; | |||
| html += "<div class=\"swiper-slide item\">"; | |||
| @@ -570,6 +570,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Get("/query_user_last_month", operationReq, repo_ext.QueryUserStaticLastMonth) | |||
| m.Get("/query_user_yesterday", operationReq, repo_ext.QueryUserStaticYesterday) | |||
| m.Get("/query_user_all", operationReq, repo_ext.QueryUserStaticAll) | |||
| m.Get("/query_user_activity", operationReq, repo_ext.QueryUserActivity) | |||
| //cloudbrain board | |||
| m.Group("/cloudbrainboard", func() { | |||
| m.Get("/downloadAll", repo.DownloadCloudBrainBoard) | |||
| @@ -1056,6 +1057,8 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
| m.Post("/prd/event", authentication.AcceptWechatEvent) | |||
| }) | |||
| m.Get("/wechat/material", authentication.GetMaterial) | |||
| m.Get("/cloudbrain/get_newest_job", repo.GetNewestJobs) | |||
| m.Get("/cloudbrain/get_center_info", repo.GetAICenterInfo) | |||
| }, securityHeaders(), context.APIContexter(), sudo()) | |||
| } | |||
| @@ -6,6 +6,7 @@ | |||
| package repo | |||
| import ( | |||
| "code.gitea.io/gitea/modules/setting" | |||
| "encoding/json" | |||
| "net/http" | |||
| "sort" | |||
| @@ -207,3 +208,92 @@ func CloudBrainModelList(ctx *context.APIContext) { | |||
| "PageIsCloudBrain": true, | |||
| }) | |||
| } | |||
| type JobInfo struct { | |||
| JobName string `json:"job_name"` | |||
| AiCenterId int `json:"ai_center_id"` | |||
| } | |||
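// GetNewestJobs returns the newest job of each AI center (C2Net centers plus cloudbrain one/two), mapped to the center ids configured in C2NET_SEQUENCE.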
| func GetNewestJobs(ctx *context.APIContext) { | |||
| idsC2Net, err := models.GetNewestJobsByAiCenter() | |||
| if err != nil { | |||
| log.Error("GetNewestJobsByAiCenter(%s) failed:%v", err.Error()) | |||
| return | |||
| } | |||
| idsCloudbrain, err := models.GetNewestJobsByType() | |||
| if err != nil { | |||
| log.Error("GetNewestJobsByType(%s) failed:%v", err.Error()) | |||
| return | |||
| } | |||
ids := make([]int64, 0, len(idsC2Net)+len(idsCloudbrain))
ids = append(ids, idsC2Net...)
ids = append(ids, idsCloudbrain...)
| jobs, err := models.GetCloudbrainByIDs(ids) | |||
| if err != nil { | |||
| log.Error("GetCloudbrainByIDs(%s) failed:%v", err.Error()) | |||
| return | |||
| } | |||
| jobInfos := make([]JobInfo, 0) | |||
| for _, job := range jobs { | |||
| var id int | |||
| var content string | |||
| switch job.Type { | |||
| case models.TypeCloudBrainOne: | |||
| id, content = getAICenterID("cloudbrain_one") | |||
| if content == "" { | |||
| log.Error("job(%s) has no match config info", job.DisplayJobName) | |||
| continue | |||
| } | |||
| case models.TypeCloudBrainTwo: | |||
| id, content = getAICenterID("cloudbrain_two") | |||
| if content == "" { | |||
| log.Error("job(%s) has no match config info", job.DisplayJobName) | |||
| continue | |||
| } | |||
| case models.TypeC2Net: | |||
| centerInfo := strings.Split(job.AiCenter, "+") | |||
| if len(centerInfo) != 2 { | |||
| log.Error("job(%s):ai_center(%s) is wrong", job.DisplayJobName, job.AiCenter) | |||
| continue | |||
| } | |||
| id, content = getAICenterID(centerInfo[0]) | |||
| if content == "" { | |||
| log.Error("job(%s) has no match config info", job.DisplayJobName) | |||
| continue | |||
| } | |||
| default: | |||
| log.Error("no match info") | |||
| continue | |||
| } | |||
| jobInfos = append(jobInfos, JobInfo{ | |||
| JobName: job.DisplayJobName, | |||
| AiCenterId: id, | |||
| }) | |||
| } | |||
| ctx.JSON(http.StatusOK, jobInfos) | |||
| } | |||
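// GetAICenterInfo returns the AI center list configured via C2NET_SEQUENCE.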
| func GetAICenterInfo(ctx *context.APIContext) { | |||
| if setting.C2NetInfos == nil { | |||
| log.Error("C2NET_SEQUENCE is incorrect") | |||
| return | |||
| } | |||
| ctx.JSON(http.StatusOK, setting.C2NetInfos.C2NetSqInfo) | |||
| } | |||
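// getAICenterID looks up a configured AI center by name and returns its id and display content; (0, "") means no match.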
| func getAICenterID(name string) (int, string) { | |||
| for _, info := range setting.C2NetInfos.C2NetSqInfo { | |||
| if name == info.Name { | |||
| return info.ID, info.Content | |||
| } | |||
| } | |||
| return 0, "" | |||
| } | |||
| @@ -136,7 +136,7 @@ func cloudBrainNewDataPrepare(ctx *context.Context) error { | |||
| } | |||
| ctx.Data["attachments"] = attachs | |||
| ctx.Data["command"] = cloudbrain.Command | |||
| ctx.Data["command"] = cloudbrain.GetCloudbrainDebugCommand() | |||
| ctx.Data["code_path"] = cloudbrain.CodeMountPath | |||
| ctx.Data["dataset_path"] = cloudbrain.DataSetMountPath | |||
| ctx.Data["model_path"] = cloudbrain.ModelMountPath | |||
| @@ -315,7 +315,7 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { | |||
| return | |||
| } | |||
| command := cloudbrain.Command | |||
| command := cloudbrain.GetCloudbrainDebugCommand() | |||
| if jobType == string(models.JobTypeTrain) { | |||
| tpl = tplCloudBrainTrainJobNew | |||
| commandTrain, err := getTrainJobCommand(form) | |||
| @@ -2186,7 +2186,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm) | |||
| repo := ctx.Repo.Repository | |||
| tpl := tplCloudBrainBenchmarkNew | |||
| command := cloudbrain.Command | |||
| command := cloudbrain.GetCloudbrainDebugCommand() | |||
| tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName) | |||
| if err == nil { | |||
| @@ -112,6 +112,7 @@ func getExcelHeader(ctx *context.Context) map[string]string { | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendImage")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.email")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.phone")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.location")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.registdate")) | |||
| @@ -193,6 +194,9 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Email) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Phone) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.UserLocation) | |||
| tmp = tmp + 1 | |||
| @@ -268,6 +272,9 @@ func writeExcelPage(row int, xlsx *excelize.File, sheetName string, userRecord * | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Email) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Phone) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.UserLocation) | |||
| tmp = tmp + 1 | |||
| @@ -601,7 +608,7 @@ func QueryUserStaticDataPage(ctx *context.Context) { | |||
| filename := sheetName + "_" + startDate + "_" + endDate + ".xlsx" | |||
| os.Remove(setting.AppDataPath + Excel_File_Path + filename) | |||
| go writeFileToDisk(ctx, count, re, filename) | |||
| ctx.JSON(http.StatusOK, ctx.Tr("user.static.downloadinfo")+setting.AppURL+"api/v1/download_user_define_file?filename="+filename) | |||
| ctx.JSON(http.StatusOK, ctx.Tr("user.static.downloadinfo")+"/api/v1/download_user_define_file?filename="+filename) | |||
| } else { | |||
| mapInterface := make(map[string]interface{}) | |||
| re, count := models.QueryUserStaticDataPage(pageOpts) | |||
| @@ -721,3 +728,114 @@ func TimingCountData() { | |||
| startTime := currentTimeNow.AddDate(0, 0, -1).Format("2006-01-02") | |||
| TimingCountDataByDateAndReCount(startTime, false) | |||
| } | |||
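// QueryUserActivity exports per-user public activity between beginTime and endTime to an Excel file and responds with its download path.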
| func QueryUserActivity(ctx *context.Context) { | |||
| startDate := ctx.Query("beginTime") | |||
| endDate := ctx.Query("endTime") | |||
| t, _ := time.Parse("2006-01-02", startDate) | |||
| startTime := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location()) | |||
| startTime = startTime.UTC() | |||
| t, _ = time.Parse("2006-01-02", endDate) | |||
| endTime := time.Date(t.Year(), t.Month(), t.Day(), 23, 59, 59, 0, t.Location()) | |||
| endTime = endTime.UTC() | |||
| sheetName := ctx.Tr("user.static.sheetname") | |||
| filename := sheetName + "_" + startDate + "_" + endDate + ".xlsx" | |||
| filePath := setting.AppDataPath + Excel_File_Path + filename | |||
| os.Remove(setting.AppDataPath + Excel_File_Path + filename) | |||
| go writeUserActivityToExcel(startTime, endTime, filePath, ctx) | |||
| ctx.JSON(http.StatusOK, ctx.Tr("user.static.downloadinfo")+"/api/v1/download_user_define_file?filename="+filename) | |||
| } | |||
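// writeUserActivityToExcel writes the activity records to an xlsx file (one row per user) under the Excel_File_Path directory.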
| func writeUserActivityToExcel(startTime time.Time, endTime time.Time, filePath string, ctx *context.Context) { | |||
| re := models.QueryDataForActivity(startTime, endTime) | |||
| log.Info("return count=" + fmt.Sprint(len(re))) | |||
| //writer exec file. | |||
| xlsx := excelize.NewFile() | |||
| sheetName := ctx.Tr("user.static.sheetname") | |||
| index := xlsx.NewSheet(sheetName) | |||
| xlsx.DeleteSheet("Sheet1") | |||
| excelHeader := make([]string, 0) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.id")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.name")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.codemergecount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.commitcount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.issuecount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.commentcount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.watchedcount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.commitcodesize")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.solveissuecount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CommitDatasetNum")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.CommitModelCount")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.email")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.phone")) | |||
| excelHeader = append(excelHeader, ctx.Tr("user.static.registdate")) | |||
| excelHeaderMap := make(map[string]string, 0) | |||
| var j byte | |||
| j = 0 | |||
| for _, value := range excelHeader { | |||
| excelColumn := getColumn(j) + fmt.Sprint(1) | |||
| log.Info("excelColumn=" + excelColumn) | |||
| excelHeaderMap[excelColumn] = value | |||
| j++ | |||
| } | |||
| for k, v := range excelHeaderMap { | |||
//set the cell value
| xlsx.SetCellValue(sheetName, k, v) | |||
| } | |||
| for i, userRecord := range re { | |||
| row := i + 2 | |||
| rows := fmt.Sprint(row) | |||
| var tmp byte | |||
| tmp = 0 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CodeMergeCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.IssueCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommentCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.WatchedCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCodeSize) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SolveIssueCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitDatasetNum) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Email) | |||
| tmp = tmp + 1 | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Phone) | |||
| tmp = tmp + 1 | |||
| formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") | |||
| xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3]) | |||
| tmp = tmp + 1 | |||
| } | |||
//set the default active sheet
| xlsx.SetActiveSheet(index) | |||
| os.Mkdir(setting.AppDataPath+Excel_File_Path, 0755) | |||
| if err := xlsx.SaveAs(filePath); err != nil { | |||
| log.Info("writer exel error." + err.Error()) | |||
| } else { | |||
| log.Info("write to file succeed, filepath=" + filePath) | |||
| } | |||
| } | |||
| @@ -51,23 +51,63 @@ | |||
| <script src="/rotation3D/vue-2.6.10.min.js"></script> | |||
| <script src="/rotation3D/rotation3D.js?v={{MD5 AppVer}}"></script> | |||
| <script> | |||
| var jobTask={}; | |||
| function queryAiCenterInfo(){ | |||
| $.ajax({ | |||
| type:"GET", | |||
| url:"/api/v1/cloudbrain/get_newest_job", | |||
| headers: { | |||
| authorization:token, | |||
| }, | |||
| dataType:"json", | |||
| async:false, | |||
| success:function(json){ | |||
| for(var i=0;i < json.length;i++){ | |||
| jobTask[json[i].ai_center_id] =json[i].job_name; | |||
| } | |||
| }, | |||
| error:function(response) { | |||
| console.log("query task info error."); | |||
| } | |||
| }); | |||
| $.ajax({ | |||
| type:"GET", | |||
| url:"/api/v1/cloudbrain/get_center_info", | |||
| headers: { | |||
| authorization:token, | |||
| }, | |||
| dataType:"json", | |||
| async:false, | |||
| success:function(json){ | |||
| displayAiCenterInfo(json); | |||
| }, | |||
| error:function(response) { | |||
| } | |||
| }); | |||
| } | |||
| function displayAiCenterInfo(json){ | |||
| for(var i=0;i<json.length;i++){ | |||
| var tmp ={}; | |||
| tmp["name"]=json[i].name; | |||
| if(jobTask[json[i].id] != null){ | |||
| tmp["type"]="blue"; | |||
| }else{ | |||
| tmp["type"]="green"; | |||
| } | |||
| tmp["icon"]=""; | |||
| tmp["content"]=json[i].content; | |||
| serverItemList.push(tmp); | |||
| } | |||
| } | |||
| var serverItemList=[]; | |||
| queryAiCenterInfo(); | |||
| var app = new Vue({ | |||
| el: "#app", | |||
//item types: blue, green, yellow
| data: { | |||
| itemList: [ | |||
| { name:'鹏城云脑一号', type:'blue', icon:'', }, | |||
| { name:'鹏城云脑二号', type:'blue', icon:'', }, | |||
| { name:'北大人工智能集群系统', type:'green', icon:'', }, | |||
| { name:'合肥类脑智能开放平台', type:'green', icon:'', }, | |||
| { name:'武汉人工智能计算中心', type:'green', icon:'', }, | |||
| { name:'西安未来人工智能计算中心', type:'green', icon:'', }, | |||
| { name:'……', type:'yellow', icon:'', }, | |||
| { name:'中原人工智能计算中心', type:'green', icon:'', }, | |||
| { name:'成都人工智能计算中心', type:'green', icon:'', }, | |||
| { name:'横琴先进智能计算中心', type:'green', icon:'', }, | |||
| { name:'国家超级计算济南中心', type:'green', icon:'', }, | |||
| ], | |||
| itemList:serverItemList, | |||
| }, | |||
| mounted: function () { | |||
| new Rotation3D({ | |||
| @@ -84,6 +124,19 @@ | |||
| }, | |||
| methods: {}, | |||
| }); | |||
| $(document).ready(function(){ | |||
| var pArrays=$('.itemList').find("p"); | |||
| for(var i=0;i<pArrays.length;i++){ | |||
| var p = pArrays[i]; | |||
| p.innerText=serverItemList[i].content; | |||
| } | |||
| var lines=$('.lineList').find("span"); | |||
| for(var i=0; i< lines.length;i++){ | |||
| if(jobTask[i+1] != null){ | |||
| lines[i].innerText = jobTask[i+1]; | |||
| } | |||
| } | |||
| }); | |||
| </script> | |||
| {{end}} | |||
| @@ -94,12 +94,6 @@ | |||
| </div> | |||
| <div id="app" v-cloak> | |||
| <!--数据 | |||
| <div class="aiData"> | |||
| <p>完成AI任务<br><strong id="completed_task">1716</strong></p> | |||
| <p>运行AI任务<br><strong id="running_task">120</strong></p> | |||
| <p>等待AI任务<br><strong id="wait_task">80</strong></p> | |||
| </div>--> | |||
<!-- base -->
| <div class="rotation3D-baseMap"></div> | |||
<!-- rotating 3D -->
| @@ -129,13 +123,13 @@ | |||
| <svg width="10" height="400"> | |||
| <path id="path2" d="M0 400, 0 0" stroke-dasharray="5,10"/> | |||
| </svg> | |||
| <div class="dot dot2"><i class="el-icon-close"></i></div> | |||
| <div class="dot dot2"><i class="el-icon-close"></i><span></span></div> | |||
| </div> | |||
| <div v-if="item.type=='green'" class="pos"> | |||
| <svg width="50" height="400"> | |||
| <path id="path1" d="M0 400, 0 0" stroke-dasharray="5,10"/> | |||
| </svg> | |||
| <div class="dot dot1 ri-arrow-left-s-line"></div> | |||
| <div class="dot dot1 ri-arrow-left-s-line"><span></span></div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| @@ -226,8 +226,11 @@ | |||
| width="120px" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column prop="BindPhone" label="是否手机验证" width="120px" align="center"> | |||
| <template slot-scope="scope"> {{scope.row.BindPhone ? '是' : '否'}} </template> | |||
| <el-table-column | |||
| prop="Phone" | |||
| label="手机" | |||
| width="120px" | |||
| align="center"> | |||
| </el-table-column> | |||
| <el-table-column | |||
| prop="RegistDate" | |||