@@ -55,21 +55,21 @@ const (
// repository. It implemented interface base.Actioner so that can be
// used in template render.
type Action struct {
ID int64 `xorm:"pk autoincr"`
UserID int64 `xorm:"INDEX"` // Receiver user id.
OpType ActionType
ActUserID int64 `xorm:"INDEX"` // Action user id.
ActUser *User `xorm:"-"`
RepoID int64 `xorm:"INDEX"`
Repo *Repository `xorm:"-"`
CommentID int64 `xorm:"INDEX"`
Comment *Comment `xorm:"-"`
IsDeleted bool `xorm:"INDEX NOT NULL DEFAULT false"`
RefName string
IsPrivate bool `xorm:"INDEX NOT NULL DEFAULT false"`
IsTransformed bool `xorm:"INDEX NOT NULL DEFAULT false"`
Content string `xorm:"TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
ID int64 `xorm:"pk autoincr"`
UserID int64 `xorm:"INDEX"` // Receiver user id.
OpType ActionType
ActUserID int64 `xorm:"INDEX"` // Action user id.
ActUser *User `xorm:"-"`
RepoID int64 `xorm:"INDEX"`
Repo *Repository `xorm:"-"`
CommentID int64 `xorm:"INDEX"`
Comment *Comment `xorm:"-"`
IsDeleted bool `xorm:"INDEX NOT NULL DEFAULT false"`
RefName string
IsPrivate bool `xorm:"INDEX NOT NULL DEFAULT false"`
IsTransformed bool `xorm:"INDEX NOT NULL DEFAULT false"`
Content string `xorm:"TEXT"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
}
// GetOpType gets the ActionType of this action.
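
The xorm struct tags on Action above drive the table mapping: pk autoincr marks the primary key, INDEX adds a secondary index, "-" keeps a field out of the table, and created auto-fills the timestamp on insert. A minimal sketch of registering such a tagged model against an engine, assuming the standard xorm API (driver, DSN and function name here are illustrative, not part of this change):

import (
    "log"

    _ "github.com/lib/pq"
    "xorm.io/xorm"
)

func initEngine(dsn string) *xorm.Engine {
    engine, err := xorm.NewEngine("postgres", dsn) // driver and DSN are placeholders
    if err != nil {
        log.Fatal(err)
    }
    // Sync2 creates or alters the table from the tagged struct fields.
    if err := engine.Sync2(new(Action)); err != nil {
        log.Fatal(err)
    }
    return engine
}
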
@@ -46,7 +46,7 @@ type Attachment struct {
type AttachmentUsername struct {
Attachment `xorm:"extends"`
Name string
Name string
}
func (a *Attachment) AfterUpdate() {
@@ -359,7 +359,7 @@ func GetAllPublicAttachments() ([]*AttachmentUsername, error) {
func getAllPublicAttachments(e Engine) ([]*AttachmentUsername, error) {
attachments := make([]*AttachmentUsername, 0, 10)
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id " +
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id "+
"= `user`.id").Where("decompress_state= ? and is_private= ?", DecompressStateDone, false).Find(&attachments); err != nil {
return nil, err
}
@@ -377,7 +377,7 @@ func GetPrivateAttachments(username string) ([]*AttachmentUsername, error) {
func getPrivateAttachments(e Engine, userID int64) ([]*AttachmentUsername, error) {
attachments := make([]*AttachmentUsername, 0, 10)
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id " +
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id "+
"= `user`.id").Where("decompress_state= ? and uploader_id= ?", DecompressStateDone, userID).Find(&attachments); err != nil {
return nil, err
}
@@ -401,11 +401,11 @@ func GetAllUserAttachments(userID int64) ([]*AttachmentUsername, error) {
return append(attachsPub, attachsPri...), nil
}
*/
*/
func getAllUserAttachments(e Engine, userID int64) ([]*AttachmentUsername, error) {
attachments := make([]*AttachmentUsername, 0, 10)
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id " +
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id "+
"= `user`.id").Where("decompress_state= ? and (uploader_id= ? or is_private = ?)", DecompressStateDone, userID, false).Find(&attachments); err != nil {
return nil, err
}
@@ -7,24 +7,26 @@ import (
)
type BlockChainCommitStatus int
const (
BlockChainCommitInit BlockChainCommitStatus = iota
BlockChainCommitSuccess
BlockChainCommitFailed
)
type BlockChain struct {
ID int64 `xorm:"pk autoincr"`
CommitID string `xorm:"INDEX NOT NULL"`
Contributor string `xorm:"INDEX NOT NULL"`
ContractAddress string `xorm:"INDEX NOT NULL"`
Status BlockChainCommitStatus `xorm:"INDEX NOT NULL DEFAULT 0"`
Amount int64 `xorm:"INDEX"`
UserID int64 `xorm:"INDEX"`
RepoID int64 `xorm:"INDEX"`
TransactionHash string `xorm:"INDEX"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
DeletedAt time.Time `xorm:"deleted"`
ID int64 `xorm:"pk autoincr"`
CommitID string `xorm:"INDEX NOT NULL"`
Contributor string `xorm:"INDEX NOT NULL"`
ContractAddress string `xorm:"INDEX NOT NULL"`
Status BlockChainCommitStatus `xorm:"INDEX NOT NULL DEFAULT 0"`
Amount int64 `xorm:"INDEX"`
UserID int64 `xorm:"INDEX"`
RepoID int64 `xorm:"INDEX"`
TransactionHash string `xorm:"INDEX"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
DeletedAt time.Time `xorm:"deleted"`
User *User `xorm:"-"`
Repo *Repository `xorm:"-"`
@@ -22,25 +22,25 @@ const (
JobFailed CloudbrainStatus = "FAILED"
JobRunning CloudbrainStatus = "RUNNING"
JobTypeDebug JobType = "DEBUG"
JobTypeBenchmark JobType = "BENCHMARK"
JobTypeDebug JobType = "DEBUG"
JobTypeBenchmark JobType = "BENCHMARK"
)
type Cloudbrain struct {
ID int64 `xorm:"pk autoincr"`
JobID string `xorm:"INDEX NOT NULL"`
JobType string `xorm:"INDEX NOT NULL DEFAULT 'DEBUG'"`
JobName string `xorm:"INDEX"`
Status string `xorm:"INDEX"`
UserID int64 `xorm:"INDEX"`
RepoID int64 `xorm:"INDEX"`
SubTaskName string `xorm:"INDEX"`
JobType string `xorm:"INDEX NOT NULL DEFAULT 'DEBUG'"`
JobName string `xorm:"INDEX"`
Status string `xorm:"INDEX"`
UserID int64 `xorm:"INDEX"`
RepoID int64 `xorm:"INDEX"`
SubTaskName string `xorm:"INDEX"`
ContainerID string
ContainerIp string
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
DeletedAt time.Time `xorm:"deleted"`
CanDebug bool `xorm:"-"`
DeletedAt time.Time `xorm:"deleted"`
CanDebug bool `xorm:"-"`
User *User `xorm:"-"`
Repo *Repository `xorm:"-"`
@@ -64,17 +64,17 @@ type TaskRole struct {
Command string `json:"command"`
NeedIBDevice bool `json:"needIBDevice"`
IsMainRole bool `json:"isMainRole"`
UseNNI bool `json:"useNNI"`
UseNNI bool `json:"useNNI"`
}
type StHostPath struct {
Path string `json:"path"`
MountPath string `json:"mountPath"`
ReadOnly bool `json:"readOnly"`
Path string `json:"path"`
MountPath string `json:"mountPath"`
ReadOnly bool `json:"readOnly"`
}
type Volume struct {
HostPath StHostPath `json:"hostPath"`
HostPath StHostPath `json:"hostPath"`
}
type CreateJobParams struct {
@@ -87,9 +87,9 @@ type CreateJobParams struct {
}
type CreateJobResult struct {
Code string `json:"code"`
Msg string `json:"msg"`
Payload map[string]interface{} `json:"payload"`
Code string `json:"code"`
Msg string `json:"msg"`
Payload map[string]interface{} `json:"payload"`
}
type GetJobResult struct {
@@ -99,8 +99,8 @@ type GetJobResult struct {
}
type GetImagesResult struct {
Code string `json:"code"`
Msg string `json:"msg"`
Code string `json:"code"`
Msg string `json:"msg"`
Payload map[string]*ImageInfo `json:"payload"`
}
@@ -131,24 +131,23 @@ type TaskPod struct {
ExitCode int `json:"exitCode"`
ExitDiagnostics string `json:"exitDiagnostics"`
RetriedCount int `json:"retriedCount"`
StartTime string
FinishedTime string
StartTime string
FinishedTime string
} `json:"taskStatuses"`
}
type TaskInfo struct {
Username string `json:"username"`
TaskName string `json:"task_name"`
CodeName string `json:"code_name"`
Username string `json:"username"`
TaskName string `json:"task_name"`
CodeName string `json:"code_name"`
}
func ConvertToTaskPod(input map[string]interface{}) (TaskPod, error) {
data, _ := json.Marshal(input)
var taskPod TaskPod
err := json.Unmarshal(data, &taskPod)
taskPod.TaskStatuses[0].StartTime = time.Unix(taskPod.TaskStatuses[0].StartAt.Unix() + 8*3600, 0).UTC().Format("2006-01-02 15:04:05")
taskPod.TaskStatuses[0].FinishedTime = time.Unix(taskPod.TaskStatuses[0].FinishedAt.Unix() + 8*3600, 0).UTC().Format("2006-01-02 15:04:05")
taskPod.TaskStatuses[0].StartTime = time.Unix(taskPod.TaskStatuses[0].StartAt.Unix()+8*3600, 0).UTC().Format("2006-01-02 15:04:05")
taskPod.TaskStatuses[0].FinishedTime = time.Unix(taskPod.TaskStatuses[0].FinishedAt.Unix()+8*3600, 0).UTC().Format("2006-01-02 15:04:05")
return taskPod, err
}
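
ConvertToTaskPod above shifts StartAt/FinishedAt by 8*3600 seconds before formatting, i.e. the timestamps are rendered in UTC+8. Assuming StartAt and FinishedAt are time.Time values, the same rendering can be sketched with an explicit fixed zone (the zone name is illustrative):

cst := time.FixedZone("UTC+8", 8*3600)
// Same wall-clock result as time.Unix(t.Unix()+8*3600, 0).UTC().Format(...)
taskPod.TaskStatuses[0].StartTime = taskPod.TaskStatuses[0].StartAt.In(cst).Format("2006-01-02 15:04:05")
taskPod.TaskStatuses[0].FinishedTime = taskPod.TaskStatuses[0].FinishedAt.In(cst).Format("2006-01-02 15:04:05")
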
@@ -173,8 +172,8 @@ type JobResultPayload struct {
AppExitDiagnostics string `json:"appExitDiagnostics"`
AppExitType interface{} `json:"appExitType"`
VirtualCluster string `json:"virtualCluster"`
StartTime string
EndTime string
StartTime string
EndTime string
} `json:"jobStatus"`
TaskRoles map[string]interface{} `json:"taskRoles"`
Resource struct {
@@ -220,42 +219,42 @@ func ConvertToJobResultPayload(input map[string]interface{}) (JobResultPayload,
type ImagesResultPayload struct {
Images []struct {
ID int `json:"id"`
Name string `json:"name"`
Place string `json:"place"`
Description string `json:"description"`
Provider string `json:"provider"`
Createtime string `json:"createtime"`
Remark string `json:"remark"`
ID int `json:"id"`
Name string `json:"name"`
Place string `json:"place"`
Description string `json:"description"`
Provider string `json:"provider"`
Createtime string `json:"createtime"`
Remark string `json:"remark"`
} `json:"taskStatuses"`
}
type ImageInfo struct {
ID int `json:"id"`
Name string `json:"name"`
Place string `json:"place"`
Description string `json:"description"`
Provider string `json:"provider"`
Createtime string `json:"createtime"`
Remark string `json:"remark"`
PlaceView string
ID int `json:"id"`
Name string `json:"name"`
Place string `json:"place"`
Description string `json:"description"`
Provider string `json:"provider"`
Createtime string `json:"createtime"`
Remark string `json:"remark"`
PlaceView string
}
type CommitImageParams struct {
Ip string `json:"ip"`
TaskContainerId string `json:"taskContainerId"`
ImageTag string `json:"imageTag"`
ImageDescription string `json:"imageDescription"`
Ip string `json:"ip"`
TaskContainerId string `json:"taskContainerId"`
ImageTag string `json:"imageTag"`
ImageDescription string `json:"imageDescription"`
}
type CommitImageResult struct {
Code string `json:"code"`
Msg string `json:"msg"`
Payload map[string]interface{} `json:"payload"`
Code string `json:"code"`
Msg string `json:"msg"`
Payload map[string]interface{} `json:"payload"`
}
type StopJobResult struct {
Code string `json:"code"`
Msg string `json:"msg"`
Code string `json:"code"`
Msg string `json:"msg"`
}
func Cloudbrains(opts *CloudbrainsOptions) ([]*Cloudbrain, int64, error) {
@@ -32,25 +32,25 @@ type LoginType int
// Note: new type must append to the end of list to maintain compatibility.
const (
LoginNoType LoginType = iota
LoginPlain // 1
LoginLDAP // 2
LoginSMTP // 3
LoginPAM // 4
LoginDLDAP // 5
LoginOAuth2 // 6
LoginSSPI // 7
LoginCloudBrain // 8
LoginNoType LoginType = iota
LoginPlain // 1
LoginLDAP // 2
LoginSMTP // 3
LoginPAM // 4
LoginDLDAP // 5
LoginOAuth2 // 6
LoginSSPI // 7
LoginCloudBrain // 8
)
// LoginNames contains the name of LoginType values.
var LoginNames = map[LoginType]string{
LoginLDAP: "LDAP (via BindDN)",
LoginDLDAP: "LDAP (simple auth)", // Via direct bind
LoginSMTP: "SMTP",
LoginPAM: "PAM",
LoginOAuth2: "OAuth2",
LoginSSPI: "SPNEGO with SSPI",
LoginLDAP: "LDAP (via BindDN)",
LoginDLDAP: "LDAP (simple auth)", // Via direct bind
LoginSMTP: "SMTP",
LoginPAM: "PAM",
LoginOAuth2: "OAuth2",
LoginSSPI: "SPNEGO with SSPI",
LoginCloudBrain: "Cloud Brain",
}
@@ -849,14 +849,14 @@ func LoginViaCloudBrain(user *User, login, password string, source *LoginSource)
}
user = &User{
LowerName: strings.ToLower(login),
Name: login,
Email: cloudBrainUser.Email,
LoginType: source.Type,
LoginSource: source.ID,
LoginName: login,
IsActive: true,
Token: token,
LowerName: strings.ToLower(login),
Name: login,
Email: cloudBrainUser.Email,
LoginType: source.Type,
LoginSource: source.ID,
LoginName: login,
IsActive: true,
Token: token,
}
err = CreateUser(user)
@@ -144,7 +144,7 @@ const (
)
const (
RepoBlockChainInit RepoBlockChainStatus = iota
RepoBlockChainInit RepoBlockChainStatus = iota
RepoBlockChainSuccess
RepoBlockChainFailed
)
@@ -204,9 +204,9 @@ type Repository struct {
Avatar string `xorm:"VARCHAR(64)"`
//blockchain
ContractAddress string `xorm:"INDEX"`
Balance int64 `xorm:"NOT NULL DEFAULT 0"`
BlockChainStatus RepoBlockChainStatus `xorm:"NOT NULL DEFAULT 0"`
ContractAddress string `xorm:"INDEX"`
Balance int64 `xorm:"NOT NULL DEFAULT 0"`
BlockChainStatus RepoBlockChainStatus `xorm:"NOT NULL DEFAULT 0"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
@@ -1408,7 +1408,7 @@ func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err e
repo.Description = repo.Description[:255]
}
*/
*/
if len(repo.Website) > 255 {
repo.Website = repo.Website[:255]
}
@@ -171,11 +171,11 @@ type User struct {
Theme string `xorm:"NOT NULL DEFAULT ''"`
//CloudBrain
Token string `xorm:"VARCHAR(1024)"`
Token string `xorm:"VARCHAR(1024)"`
//BlockChain
PublicKey string `xorm`
PrivateKey string `xorm`
PublicKey string `xorm`
PrivateKey string `xorm`
}
// SearchOrganizationsOptions options to filter organizations
@@ -7,16 +7,16 @@ import (
// CreateDatasetForm form for dataset page
type CreateCloudBrainForm struct {
JobName string `form:"job_name" binding:"Required"`
Image string `form:"image" binding:"Required"`
Command string `form:"command" binding:"Required"`
JobName string `form:"job_name" binding:"Required"`
Image string `form:"image" binding:"Required"`
Command string `form:"command" binding:"Required"`
Attachment string `form:"attachment" binding:"Required"`
JobType string `form:"job_type" binding:"Required"`
JobType string `form:"job_type" binding:"Required"`
}
type CommitImageCloudBrainForm struct {
Description string `form:"description" binding:"Required"`
Tag string `form:"tag" binding:"Required"`
Tag string `form:"tag" binding:"Required"`
}
func (f *CreateCloudBrainForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
@@ -12,7 +12,7 @@ import (
)
const (
UrlToken = "/rest-server/api/v1/token/"
UrlToken = "/rest-server/api/v1/token/"
UrlGetUserInfo = "/rest-server/api/v1/user/"
TokenTypeBear = "Bearer "
@@ -21,29 +21,29 @@ const (
)
type RespAuth struct {
AccessToken string `json:"access_token"`
RefreshToken string `json:"refresh_token"`
TokenType string `json:"token_type"`
ExpiresIn int `json:"expires_in"`
Error string `json:"error"`
AccessToken string `json:"access_token"`
RefreshToken string `json:"refresh_token"`
TokenType string `json:"token_type"`
ExpiresIn int `json:"expires_in"`
Error string `json:"error"`
ErrorDescription string `json:"error_description"`
}
type RespToken struct {
Code string `json:"code"`
Message string `json:"msg"`
Code string `json:"code"`
Message string `json:"msg"`
Payload PayloadToken `json:"payload"`
}
type PayloadToken struct {
Username string `json:"username"`
Token string `json:"token"`
IsAdmin bool `json:"admin"`
Token string `json:"token"`
IsAdmin bool `json:"admin"`
}
type RespUserInfo struct {
Code string `json:"code"`
Message string `json:"msg"`
Code string `json:"code"`
Message string `json:"msg"`
Payload PayloadUserInfo `json:"payload"`
}
@@ -57,13 +57,13 @@ type StUserInfo struct {
type CloudBrainUser struct {
UserName string `json:"username"`
Email string `json:"email"`
Email string `json:"email"`
}
func UserValidate(username string, password string) (string, error) {
values := map[string]string{"username": username, "password": password}
jsonValue, _ := json.Marshal(values)
resp, err := http.Post(setting.RestServerHost + UrlToken,
resp, err := http.Post(setting.RestServerHost+UrlToken,
"application/json",
bytes.NewBuffer(jsonValue))
if err != nil {
@@ -73,7 +73,7 @@ func UserValidate(username string, password string) (string, error) {
defer resp.Body.Close()
body,err := ioutil.ReadAll(resp.Body)
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
log.Error("read resp body failed:" + err.Error())
return "", err
@@ -98,14 +98,14 @@ func GetUserInfo(username string, token string) (*CloudBrainUser, error) {
user := &CloudBrainUser{}
client := &http.Client{}
reqHttp,err := http.NewRequest("GET", setting.RestServerHost + UrlGetUserInfo + username, strings.NewReader(""))
reqHttp, err := http.NewRequest("GET", setting.RestServerHost+UrlGetUserInfo+username, strings.NewReader(""))
if err != nil {
log.Error("new req failed:", err.Error())
return nil, err
}
reqHttp.Header.Set("Authorization", TokenTypeBear + token)
resp,err := client.Do(reqHttp)
reqHttp.Header.Set("Authorization", TokenTypeBear+token)
resp, err := client.Do(reqHttp)
if err != nil {
log.Error("req rest-server failed:", err.Error())
return nil, err
@@ -113,7 +113,7 @@ func GetUserInfo(username string, token string) (*CloudBrainUser, error) {
defer resp.Body.Close()
body,err := ioutil.ReadAll(resp.Body)
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
log.Error("read resp body failed:", err.Error())
return nil, err
@@ -1,14 +1,12 @@
package blockchain
const (
Command = `pip3 install jupyterlab==2.2.5 -i https://pypi.tuna.tsinghua.edu.cn/simple;service ssh stop;jupyter lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --LabApp.token="" --LabApp.allow_origin="self https://cloudbrain.pcl.ac.cn"`
CodeMountPath = "/code"
DataSetMountPath = "/dataset"
ModelMountPath = "/model"
Command = `pip3 install jupyterlab==2.2.5 -i https://pypi.tuna.tsinghua.edu.cn/simple;service ssh stop;jupyter lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --LabApp.token="" --LabApp.allow_origin="self https://cloudbrain.pcl.ac.cn"`
CodeMountPath = "/code"
DataSetMountPath = "/dataset"
ModelMountPath = "/model"
BenchMarkMountPath = "/benchmark"
TaskInfoName = "/taskInfo"
TaskInfoName = "/taskInfo"
SubTaskName = "task1"
)
@@ -13,9 +13,9 @@ var (
const (
UrlCreateAccount = "createAccount"
UrlGetBalance = "getBalance"
UrlNewRepo = "newRepo"
UrlContribute = "contribute"
UrlGetBalance = "getBalance"
UrlNewRepo = "newRepo"
UrlContribute = "contribute"
ActionCommit = "commit"
@@ -23,27 +23,27 @@ const (
)
type CreateAccountResult struct {
Code int `json:"code"`
Msg string `json:"message"`
Payload map[string]interface{} `json:"data"`
Code int `json:"code"`
Msg string `json:"message"`
Payload map[string]interface{} `json:"data"`
}
type GetBalanceResult struct {
Code int `json:"code"`
Msg string `json:"message"`
Payload map[string]interface{} `json:"data"`
Code int `json:"code"`
Msg string `json:"message"`
Payload map[string]interface{} `json:"data"`
}
type NewRepoResult struct {
Code int `json:"code"`
Msg string `json:"message"`
Code int `json:"code"`
Msg string `json:"message"`
//Data string `json:"data"`
}
type ContributeResult struct {
Code int `json:"code"`
Msg string `json:"message"`
Payload map[string]interface{} `json:"data"`
Code int `json:"code"`
Msg string `json:"message"`
Payload map[string]interface{} `json:"data"`
}
func getRestyClient() *resty.Client {
@@ -80,9 +80,9 @@ func NewRepo(repoID, publicKey, repoName string) (*NewRepoResult, error) {
res, err := client.R().
SetHeader("Accept", "application/json").
SetQueryParams(map[string]string{
"repoId" : repoID,
"creator" : publicKey,
"repoName" : repoName,
"repoId": repoID,
"creator": publicKey,
"repoName": repoName,
}).
SetResult(&result).
Get(setting.BlockChainHost + UrlNewRepo)
@@ -105,8 +105,8 @@ func GetBalance(contractAddress, contributor string) (*GetBalanceResult, error)
res, err := client.R().
SetHeader("Accept", "application/json").
SetQueryParams(map[string]string{
"contractAddress" : contractAddress,
"contributor" : contributor,
"contractAddress": contractAddress,
"contributor": contributor,
}).
SetResult(&result).
Get(setting.BlockChainHost + UrlGetBalance)
@@ -129,11 +129,11 @@ func Contribute(contractAddress, contributor, action, commitId string, codeLine
res, err := client.R().
SetHeader("Accept", "application/json").
SetQueryParams(map[string]string{
"contractAddress" : contractAddress,
"contributor" : contributor,
"action" : action,
"commitId": commitId,
"amount": string(codeLine),
"contractAddress": contractAddress,
"contributor": contributor,
"action": action,
"commitId": commitId,
"amount": string(codeLine),
}).
SetResult(&result).
Get(setting.BlockChainHost + UrlContribute)
@@ -10,12 +10,12 @@ import (
)
const (
Command = `pip3 install jupyterlab==2.2.5 -i https://pypi.tuna.tsinghua.edu.cn/simple;service ssh stop;jupyter lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --LabApp.token="" --LabApp.allow_origin="self https://cloudbrain.pcl.ac.cn"`
CodeMountPath = "/code"
DataSetMountPath = "/dataset"
ModelMountPath = "/model"
Command = `pip3 install jupyterlab==2.2.5 -i https://pypi.tuna.tsinghua.edu.cn/simple;service ssh stop;jupyter lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --LabApp.token="" --LabApp.allow_origin="self https://cloudbrain.pcl.ac.cn"`
CodeMountPath = "/code"
DataSetMountPath = "/dataset"
ModelMountPath = "/model"
BenchMarkMountPath = "/benchmark"
TaskInfoName = "/taskInfo"
TaskInfoName = "/taskInfo"
SubTaskName = "task1"
@@ -46,36 +46,36 @@ func GenerateTask(ctx *context.Context, jobName, image, command, uuid, codePath,
Command: command,
NeedIBDevice: false,
IsMainRole: false,
UseNNI: false,
UseNNI: false,
},
},
Volumes: []models.Volume{
{
HostPath: models.StHostPath{
Path: codePath,
MountPath: CodeMountPath,
ReadOnly: false,
Path: codePath,
MountPath: CodeMountPath,
ReadOnly: false,
},
},
{
HostPath: models.StHostPath{
Path: dataActualPath,
MountPath: DataSetMountPath,
ReadOnly: true,
Path: dataActualPath,
MountPath: DataSetMountPath,
ReadOnly: true,
},
},
{
HostPath: models.StHostPath{
Path: modelPath,
MountPath: ModelMountPath,
ReadOnly: false,
Path: modelPath,
MountPath: ModelMountPath,
ReadOnly: false,
},
},
{
HostPath: models.StHostPath{
Path: benchmarkPath,
MountPath: BenchMarkMountPath,
ReadOnly: true,
Path: benchmarkPath,
MountPath: BenchMarkMountPath,
ReadOnly: true,
},
},
},
@@ -91,13 +91,13 @@ func GenerateTask(ctx *context.Context, jobName, image, command, uuid, codePath,
var jobID = jobResult.Payload["jobId"].(string)
err = models.CreateCloudbrain(&models.Cloudbrain{
Status: string(models.JobWaiting),
UserID: ctx.User.ID,
RepoID: ctx.Repo.Repository.ID,
JobID: jobID,
JobName: jobName,
Status: string(models.JobWaiting),
UserID: ctx.User.ID,
RepoID: ctx.Repo.Repository.ID,
JobID: jobID,
JobName: jobName,
SubTaskName: SubTaskName,
JobType: jobType,
JobType: jobType,
})
if err != nil {
@@ -21,10 +21,10 @@ import (
// ToggleOptions contains required or check options
type ToggleOptions struct {
SignInRequired bool
SignOutRequired bool
AdminRequired bool
DisableCSRF bool
SignInRequired bool
SignOutRequired bool
AdminRequired bool
DisableCSRF bool
BasicAuthRequired bool
}
@@ -149,7 +149,7 @@ func basicAuth(ctx *Context) bool {
var siteAuth = base64.StdEncoding.EncodeToString([]byte(setting.CBAuthUser + ":" + setting.CBAuthPassword))
auth := ctx.Req.Header.Get("Authorization")
if !marc_auth.SecureCompare(auth, "Basic " + siteAuth) {
if !marc_auth.SecureCompare(auth, "Basic "+siteAuth) {
return false
}
@@ -158,6 +158,6 @@ func basicAuth(ctx *Context) bool {
}
func basicUnauthorized(res http.ResponseWriter) {
res.Header().Set("WWW-Authenticate", "Basic realm=\"" + marc_auth.BasicRealm + "\"")
res.Header().Set("WWW-Authenticate", "Basic realm=\""+marc_auth.BasicRealm+"\"")
http.Error(res, "Not Authorized", http.StatusUnauthorized)
}
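
The basicAuth check above compares the raw Authorization header against a precomputed "Basic " + base64(user:password) string built from setting.CBAuthUser and setting.CBAuthPassword. A minimal sketch of a client calling an endpoint guarded by this check (the URL is a placeholder):

req, err := http.NewRequest(http.MethodGet, "http://localhost:3000/some/protected/path", nil)
if err != nil {
    return err
}
// SetBasicAuth produces exactly the "Basic <base64(user:pass)>" header the server compares against.
req.SetBasicAuth(setting.CBAuthUser, setting.CBAuthPassword)
resp, err := http.DefaultClient.Do(req)
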
@@ -27,9 +27,11 @@ type ErrorResponse struct {
func (e ErrorResponse) Error() string {
return e.Message
}
const (
reportIssue = "Please report this issue at https://github.com/minio/minio/issues."
)
// httpRespToErrorResponse returns a new encoded ErrorResponse
// structure as error.
func httpRespToErrorResponse(resp *http.Response, bucketName, objectName string) error {
@@ -122,6 +124,7 @@ func ToErrorResponse(err error) ErrorResponse {
return ErrorResponse{}
}
}
// ErrInvalidArgument - Invalid argument response.
func ErrInvalidArgument(message string) error {
return ErrorResponse{
@@ -38,7 +38,6 @@ func (c Client) ListObjectParts(bucketName, objectName, uploadID string) (partsI
return partsInfo, nil
}
// listObjectPartsQuery (List Parts query)
// - lists some or all (up to 1000) parts that have been uploaded
// for a specific multipart upload
@@ -68,4 +68,4 @@ type ListObjectPartsResult struct {
ObjectParts []ObjectPart `xml:"Part"`
EncodingType string
}
}
@@ -142,7 +142,6 @@ func (r *lockedRandSource) Seed(seed int64) {
r.lk.Unlock()
}
// Different types of url lookup supported by the server.Initialized to BucketLookupAuto
const (
BucketLookupAuto BucketLookupType = iota
@@ -904,8 +903,7 @@ func (c Client) newRequest(method string, metadata requestMetadata) (req *http.R
return req, nil
}
func (c Client) GenUploadPartSignedUrl(uploadID string, bucketName string, objectName string, partNumber int, size int64, expires time.Duration, bucketLocation string) (string, error){
func (c Client) GenUploadPartSignedUrl(uploadID string, bucketName string, objectName string, partNumber int, size int64, expires time.Duration, bucketLocation string) (string, error) {
signedUrl := ""
// Input validation.
@@ -939,17 +937,17 @@ func (c Client) GenUploadPartSignedUrl(uploadID string, bucketName string, objec
customHeader := make(http.Header)
reqMetadata := requestMetadata{
presignURL: true,
bucketName: bucketName,
objectName: objectName,
queryValues: urlValues,
customHeader: customHeader,
presignURL: true,
bucketName: bucketName,
objectName: objectName,
queryValues: urlValues,
customHeader: customHeader,
//contentBody: reader,
contentLength: size,
contentLength: size,
//contentMD5Base64: md5Base64,
//contentSHA256Hex: sha256Hex,
expires: int64(expires/time.Second),
bucketLocation: bucketLocation,
expires: int64(expires / time.Second),
bucketLocation: bucketLocation,
}
req, err := c.newRequest("PUT", reqMetadata)
@@ -959,10 +957,9 @@ func (c Client) GenUploadPartSignedUrl(uploadID string, bucketName string, objec
}
signedUrl = req.URL.String()
return signedUrl,nil
return signedUrl, nil
}
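
GenUploadPartSignedUrl above assembles a presigned PUT request for a single multipart part and returns req.URL.String(). A sketch of how a caller might consume such a URL; the bucket, object, region and part data are placeholders:

signedUrl, err := client.GenUploadPartSignedUrl(uploadID, "my-bucket", "my-object", 1, int64(len(part)), time.Hour, "us-east-1")
if err != nil {
    return err
}
req, err := http.NewRequest(http.MethodPut, signedUrl, bytes.NewReader(part))
if err != nil {
    return err
}
req.ContentLength = int64(len(part))
// The uploaded part's ETag comes back in the response headers and is needed to complete the multipart upload.
resp, err := http.DefaultClient.Do(req)
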
// executeMethod - instantiates a given method, and retries the
// request upon any error up to maxRetries attempts in a binomially
// delayed manner using a standard back off algorithm.
@@ -8,10 +8,12 @@ import (
// StringMap represents map with custom UnmarshalXML
type StringMap map[string]string
// CommonPrefix container for prefix response.
type CommonPrefix struct {
Prefix string
}
// ObjectInfo container for object metadata.
type ObjectInfo struct {
// An ETag is optionally set to md5sum of an object. In case of multipart objects,
@@ -44,6 +46,7 @@ type ObjectInfo struct {
// Error
Err error `json:"-"`
}
// ListBucketResult container for listObjects response.
type ListBucketResult struct {
// A response can contain CommonPrefixes only if you have
@@ -427,26 +427,26 @@ var (
ResultBackend string
//decompress config
DecompressAddress string
AuthUser string
AuthPassword string
DecompressAddress string
AuthUser string
AuthPassword string
//cloudbrain config
CBAuthUser string
CBAuthPassword string
RestServerHost string
JobPath string
JobType string
DebugServerHost string
CBAuthUser string
CBAuthPassword string
RestServerHost string
JobPath string
JobType string
DebugServerHost string
//benchmark config
IsBenchmarkEnabled bool
BenchmarkCode string
BenchmarkServerHost string
IsBenchmarkEnabled bool
BenchmarkCode string
BenchmarkServerHost string
//blockchain config
BlockChainHost string
CommitValidDate string
BlockChainHost string
CommitValidDate string
)
// DateLang transforms standard language locale name to corresponding value in datetime plugin.
@@ -150,7 +150,7 @@ func CompleteMultiPartUpload(uuid string, uploadID string) (string, error) {
for _, partInfo := range partInfos {
complMultipartUpload.Parts = append(complMultipartUpload.Parts, miniov6.CompletePart{
PartNumber: partInfo.PartNumber,
ETag: partInfo.ETag,
ETag: partInfo.ETag,
})
}
@@ -14,7 +14,7 @@ const (
)
func SendDecompressTask(ctx context.Context, uuid string) error {
args := []tasks.Arg{{Name: "uuid", Type: "string", Value: uuid},{}}
args := []tasks.Arg{{Name: "uuid", Type: "string", Value: uuid}, {}}
task, err := tasks.NewSignature(DecompressTaskName, args)
if err != nil {
log.Error("NewSignature failed:", err.Error())
@@ -30,10 +30,10 @@ const (
)
type CloudBrainDataset struct {
UUID string `json:"id"`
Name string `json:"name"`
Path string `json:"place"`
UserName string `json:"provider"`
UUID string `json:"id"`
Name string `json:"name"`
Path string `json:"place"`
UserName string `json:"provider"`
CreateTime string `json:"created_at"`
}
@@ -402,13 +402,13 @@ func GetSuccessChunks(ctx *context.Context) {
if attach == nil {
ctx.JSON(200, map[string]string{
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
"uploadID": fileChunk.UploadID,
"chunks": string(chunks),
"attachID": "0",
"datasetID": "0",
"fileName": "",
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
"uploadID": fileChunk.UploadID,
"chunks": string(chunks),
"attachID": "0",
"datasetID": "0",
"fileName": "",
"datasetName": "",
})
return
@@ -421,13 +421,13 @@ func GetSuccessChunks(ctx *context.Context) {
}
ctx.JSON(200, map[string]string{
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
"uploadID": fileChunk.UploadID,
"chunks": string(chunks),
"attachID": strconv.Itoa(int(attachID)),
"datasetID": strconv.Itoa(int(attach.DatasetID)),
"fileName": attach.Name,
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
"uploadID": fileChunk.UploadID,
"chunks": string(chunks),
"attachID": strconv.Itoa(int(attachID)),
"datasetID": strconv.Itoa(int(attach.DatasetID)),
"fileName": attach.Name,
"datasetName": dataset.Title,
})
@@ -624,13 +624,13 @@ func HandleUnDecompressAttachment() {
return
}
func QueryAllPublicDataset(ctx *context.Context){
func QueryAllPublicDataset(ctx *context.Context) {
attachs, err := models.GetAllPublicAttachments()
if err != nil {
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
"error_msg": err.Error(),
"data": "",
})
return
}
@@ -638,14 +638,14 @@ func QueryAllPublicDataset(ctx *context.Context){
queryDatasets(ctx, attachs)
}
func QueryPrivateDataset(ctx *context.Context){
func QueryPrivateDataset(ctx *context.Context) {
username := ctx.Params(":username")
attachs, err := models.GetPrivateAttachments(username)
if err != nil {
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
"error_msg": err.Error(),
"data": "",
})
return
}
@@ -663,14 +663,14 @@ func queryDatasets(ctx *context.Context, attachs []*models.AttachmentUsername) {
log.Info("dataset is null")
ctx.JSON(200, map[string]string{
"result_code": "0",
"error_msg": "",
"data": "",
"error_msg": "",
"data": "",
})
return
}
for _, attch := range attachs {
has,err := storage.Attachments.HasObject(models.AttachmentRelativePath(attch.UUID))
has, err := storage.Attachments.HasObject(models.AttachmentRelativePath(attch.UUID))
if err != nil || !has {
continue
}
@@ -686,21 +686,21 @@ func queryDatasets(ctx *context.Context, attachs []*models.AttachmentUsername) {
attch.CreatedUnix.Format("2006-01-02 03:04:05 PM")})
}
data,err := json.Marshal(datasets)
data, err := json.Marshal(datasets)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"error_msg": "",
"data": string(data),
"error_msg": "",
"data": string(data),
})
return
}
@@ -12,13 +12,13 @@ import (
)
type BlockChainInitNotify struct {
RepoId int64 `json:"repoId"`
ContractAddress string `json:"contractAddress"`
RepoId int64 `json:"repoId"`
ContractAddress string `json:"contractAddress"`
}
type BlockChainCommitNotify struct {
CommitID string `json:"commitId"`
TransactionHash string `json:"txHash"`
CommitID string `json:"commitId"`
TransactionHash string `json:"txHash"`
}
func HandleBlockChainInitNotify(ctx *context.Context) {
@@ -30,8 +30,8 @@ func HandleBlockChainInitNotify(ctx *context.Context) {
if err != nil {
log.Error("GetRepositoryByID failed:", err.Error())
ctx.JSON(200, map[string]string{
"code" : "-1",
"message" : "internal error",
"code": "-1",
"message": "internal error",
})
return
}
@@ -39,8 +39,8 @@ func HandleBlockChainInitNotify(ctx *context.Context) {
if repo.BlockChainStatus == models.RepoBlockChainSuccess && len(repo.ContractAddress) != 0 {
log.Error("the repo has been RepoBlockChainSuccess:", req.RepoId)
ctx.JSON(200, map[string]string{
"code" : "-1",
"message" : "the repo has been RepoBlockChainSuccess",
"code": "-1",
"message": "the repo has been RepoBlockChainSuccess",
})
return
}
@@ -51,14 +51,14 @@ func HandleBlockChainInitNotify(ctx *context.Context) {
if err = models.UpdateRepositoryCols(repo, "block_chain_status", "contract_address"); err != nil {
log.Error("UpdateRepositoryCols failed:", err.Error())
ctx.JSON(200, map[string]string{
"code" : "-1",
"message" : "internal error",
"code": "-1",
"message": "internal error",
})
return
}
ctx.JSON(200, map[string]string{
"code": "0",
"code": "0",
"message": "",
})
}
@@ -69,8 +69,8 @@ func HandleBlockChainCommitNotify(ctx *context.Context) {
if err := json.Unmarshal(data, &req); err != nil {
log.Error("json.Unmarshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"code" : "-1",
"message" : "response data error",
"code": "-1",
"message": "response data error",
})
return
}
@@ -79,8 +79,8 @@ func HandleBlockChainCommitNotify(ctx *context.Context) {
if err != nil {
log.Error("GetRepositoryByID failed:", err.Error())
ctx.JSON(200, map[string]string{
"code" : "-1",
"message" : "internal error",
"code": "-1",
"message": "internal error",
})
return
}
@@ -88,8 +88,8 @@ func HandleBlockChainCommitNotify(ctx *context.Context) {
if blockChain.Status == models.BlockChainCommitSuccess {
log.Error("the commit has been BlockChainCommitReady:", blockChain.RepoID)
ctx.JSON(200, map[string]string{
"code" : "-1",
"message" : "the commit has been BlockChainCommitReady",
"code": "-1",
"message": "the commit has been BlockChainCommitReady",
})
return
}
@@ -100,14 +100,14 @@ func HandleBlockChainCommitNotify(ctx *context.Context) {
if err = models.UpdateBlockChainCols(blockChain, "status", "transaction_hash"); err != nil {
log.Error("UpdateBlockChainCols failed:", err.Error())
ctx.JSON(200, map[string]string{
"code" : "-1",
"message" : "internal error",
"code": "-1",
"message": "internal error",
})
return
}
ctx.JSON(200, map[string]string{
"code": "0",
"code": "0",
"message": "",
})
}
@@ -226,13 +226,13 @@ func HandleUnTransformedActions() {
}
blockChain := models.BlockChain{
CommitID : commit.Sha1,
Contributor : user.PublicKey,
ContractAddress : repo.ContractAddress,
Status : models.BlockChainCommitInit,
Amount : 1,
UserID : action.UserID,
RepoID : action.RepoID,
CommitID: commit.Sha1,
Contributor: user.PublicKey,
ContractAddress: repo.ContractAddress,
Status: models.BlockChainCommitInit,
Amount: 1,
UserID: action.UserID,
RepoID: action.RepoID,
}
_, err = models.InsertBlockChain(&blockChain)
if err != nil {
@@ -55,7 +55,7 @@ func CloudBrainIndex(ctx *context.Context) {
timestamp := time.Now().Unix()
for i, task := range ciTasks {
if task.Status == string(models.JobRunning) && (timestamp - int64(task.CreatedUnix) > 30){
if task.Status == string(models.JobRunning) && (timestamp-int64(task.CreatedUnix) > 30) {
ciTasks[i].CanDebug = true
} else {
ciTasks[i].CanDebug = false
@@ -90,9 +90,9 @@ func CloudBrainNew(ctx *context.Context) {
ctx.Data["error"] = err.Error()
}
for i,payload := range result.Payload {
if strings.HasPrefix(result.Payload[i].Place,"192.168") {
result.Payload[i].PlaceView = payload.Place[strings.Index(payload.Place, "/"): len(payload.Place)]
for i, payload := range result.Payload {
if strings.HasPrefix(result.Payload[i].Place, "192.168") {
result.Payload[i].PlaceView = payload.Place[strings.Index(payload.Place, "/"):len(payload.Place)]
} else {
result.Payload[i].PlaceView = payload.Place
}
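
The PlaceView assignment above keeps everything from the first "/" of Place onward; slicing up to len(payload.Place) is the same as omitting the upper bound. A tiny illustration with a made-up value:

place := "192.168.1.10/cloudbrain/images" // sample value, not from the codebase
view := place[strings.Index(place, "/"):] // "/cloudbrain/images", identical to place[idx:len(place)]
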
@@ -205,29 +205,29 @@ func CloudBrainCommitImage(ctx *context.Context, form auth.CommitImageCloudBrain
if err != nil {
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": "GetCloudbrainByJobID failed",
"error_msg": "GetCloudbrainByJobID failed",
})
return
}
err = cloudbrain.CommitImage(jobID, models.CommitImageParams{
Ip: task.ContainerIp,
TaskContainerId: task.ContainerID,
ImageDescription: form.Description,
ImageTag: form.Tag,
Ip: task.ContainerIp,
TaskContainerId: task.ContainerID,
ImageDescription: form.Description,
ImageTag: form.Tag,
})
if err != nil {
log.Error("CommitImage(%s) failed:", task.JobName, err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": "CommitImage failed",
"error_msg": "CommitImage failed",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"error_msg": "",
"error_msg": "",
})
}
@@ -21,15 +21,15 @@ const (
type FileInfo struct {
FileName string `json:"FileName"`
ModTime string `json:"ModTime"`
IsDir bool `json:"IsDir"`
Size int64 `json:"Size"`
IsDir bool `json:"IsDir"`
Size int64 `json:"Size"`
ParenDir string `json:"ParenDir"`
UUID string `json:"UUID"`
}
type RespGetDirs struct {
ResultCode string `json:"resultCode"`
FileInfos string `json:"fileInfos"`
FileInfos string `json:"fileInfos"`
}
func DirIndex(ctx *context.Context) {
@@ -80,7 +80,7 @@ func DirIndex(ctx *context.Context) {
ctx.HTML(200, tplDirIndex)
}
func getDirs(uuid string, parentDir string) (string,error) {
func getDirs(uuid string, parentDir string) (string, error) {
var dirs string
var req string
if parentDir == "" {
@@ -245,7 +245,7 @@ func RegisterRoutes(m *macaron.Macaron) {
ignSignIn := context.Toggle(&context.ToggleOptions{SignInRequired: setting.Service.RequireSignInView})
ignSignInAndCsrf := context.Toggle(&context.ToggleOptions{DisableCSRF: true})
reqSignOut := context.Toggle(&context.ToggleOptions{SignOutRequired: true})
reqBasicAuth := context.Toggle(&context.ToggleOptions{BasicAuthRequired:true})
reqBasicAuth := context.Toggle(&context.ToggleOptions{BasicAuthRequired: true})
bindIgnErr := binding.BindIgnErr
validation.AddBindingRules()