
Commit code

tags/v1.22.11.2^2
ychao_1983 · 2 years ago
commit 8842ba0349
4 changed files with 10 additions and 54 deletions:
  1. modules/structs/cloudbrain.go (+1, -1)
  2. routers/api/v1/api.go (+2, -2)
  3. routers/repo/grampus.go (+1, -48)
  4. services/cloudbrain/cloudbrainTask/train.go (+6, -3)

+1 -1  modules/structs/cloudbrain.go

@@ -32,7 +32,7 @@ type CreateTrainJobOption struct {
 	BootFile         string `json:"boot_file" binding:"Required"`
 	BranchName       string `json:"branch_name" binding:"Required"`
 	Params           string `json:"run_para_list" binding:"Required"`
-	WorkServerNumber int    `json:"work_server_number" binding:"Required"`
+	WorkServerNumber int    `json:"work_server_number"`
 	ModelName        string `json:"model_name"`
 	ModelVersion     string `json:"model_version"`
 	CkptName         string `json:"ckpt_name"`
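
The only functional change in this file drops the Required binding from WorkServerNumber, so a create-train-job request may omit work_server_number and the zero value reaches the handler. A minimal sketch of what that means for a caller, assuming the handler substitutes a default itself (the payload and the fallback of 1 are made up, not taken from this commit):

// Sketch: decoding a payload that omits work_server_number. With the
// Required binding removed, validation no longer rejects the zero value.
package main

import (
	"encoding/json"
	"fmt"
)

type createTrainJobOption struct {
	BootFile         string `json:"boot_file"`
	WorkServerNumber int    `json:"work_server_number"`
}

func main() {
	payload := []byte(`{"boot_file": "train.py"}`)
	var opt createTrainJobOption
	if err := json.Unmarshal(payload, &opt); err != nil {
		panic(err)
	}
	if opt.WorkServerNumber == 0 {
		opt.WorkServerNumber = 1 // hypothetical server-side fallback
	}
	fmt.Println(opt.WorkServerNumber) // prints 1
}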


+2 -2  routers/api/v1/api.go

@@ -1011,11 +1011,11 @@ func RegisterRoutes(m *macaron.Macaron) {
 			m.Group("/train-job", func() {
 				m.Get("/:jobid", reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow)

-				m.Post("/create", reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat, bind(api.CreateTrainJobOption{}), repo.CreateCloudBrain)
+				m.Post("/create", reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat(), bind(api.CreateTrainJobOption{}), repo.CreateCloudBrain)

 			})
 			m.Group("/inference-job", func() {
-				m.Post("/create", reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat, bind(api.CreateTrainJobOption{}), repo.CreateCloudBrainInferenceTask)
+				m.Post("/create", reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat(), bind(api.CreateTrainJobOption{}), repo.CreateCloudBrainInferenceTask)
 				m.Get("/:jobid", reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow)
 			})
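
Both create routes now pass reqWeChat() rather than reqWeChat, which implies reqWeChat is a factory that returns the actual macaron handler instead of being a handler itself; registering the bare function would inject the factory, not the check. A rough, self-contained sketch of that pattern, assuming gopkg.in/macaron.v1 and with the WeChat check replaced by a placeholder header test:

// Sketch of a handler factory in the macaron style: the route table must call
// reqWeChat() so the returned middleware, not the factory itself, joins the chain.
// The header-based check is a placeholder, not the project's real WeChat logic.
package main

import (
	"net/http"

	"gopkg.in/macaron.v1"
)

func reqWeChat() macaron.Handler {
	return func(ctx *macaron.Context) {
		if ctx.Req.Header.Get("X-WeChat-Bound") != "true" { // placeholder check
			ctx.Resp.WriteHeader(http.StatusForbidden)
			ctx.Resp.Write([]byte("bind WeChat first"))
		}
	}
}

func main() {
	m := macaron.Classic()
	// Mirrors the registration above: the middleware runs before the handler.
	m.Post("/train-job/create", reqWeChat(), func() string { return "created" })
	m.Run()
}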



+1 -48  routers/repo/grampus.go

@@ -509,28 +509,6 @@ func GrampusTrainJobVersionCreate(ctx *context.Context, form auth.CreateGrampusT

 }

-func checkSpecialPool(ctx *context.Context, resourceType string) string {
-	grampus.InitSpecialPool()
-	if grampus.SpecialPools != nil {
-		for _, pool := range grampus.SpecialPools.Pools {
-
-			if pool.IsExclusive && pool.Type == resourceType {
-
-				org, _ := models.GetOrgByName(pool.Org)
-				if org != nil {
-					isOrgMember, _ := models.IsOrganizationMember(org.ID, ctx.User.ID)
-					if !isOrgMember {
-						return ctx.Tr("repo.grampus.no_operate_right")
-					}
-				}
-			}
-
-		}
-
-	}
-	return ""
-}
-
 func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrainJobForm) {
 	ctx.Data["IsCreate"] = true
 	grampusTrainJobNpuCreate(ctx, form)
@@ -1003,7 +981,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo
 	if processorType == grampus.ProcessorTypeNPU {
 		//no need to process
 	} else if processorType == grampus.ProcessorTypeGPU {
-		unZipDatasetCommand := generateDatasetUnzipCommand(datasetName)
+		unZipDatasetCommand := cloudbrainTask.GenerateDatasetUnzipCommand(datasetName)
 		commandUnzip := "cd " + workDir + "code;unzip -q master.zip;rm -f master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand
 		command += commandUnzip
 	}
@@ -1077,31 +1055,6 @@ func processPretrainModelParameter(pretrainModelPath string, pretrainModelFileNa
 	return commandDownloadTemp
 }

-func generateDatasetUnzipCommand(datasetName string) string {
-	var unZipDatasetCommand string
-
-	datasetNameArray := strings.Split(datasetName, ";")
-	if len(datasetNameArray) == 1 { // single dataset
-		unZipDatasetCommand = "unzip -q '" + datasetName + "';"
-		if strings.HasSuffix(datasetNameArray[0], ".tar.gz") {
-			unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';"
-		}
-		unZipDatasetCommand += "rm -f '" + datasetName + "';"
-
-	} else { // multiple datasets
-		for _, datasetNameTemp := range datasetNameArray {
-			if strings.HasSuffix(datasetNameTemp, ".tar.gz") {
-				unZipDatasetCommand = unZipDatasetCommand + "tar -zxvf '" + datasetNameTemp + "';"
-			} else {
-				unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';"
-			}
-			unZipDatasetCommand += "rm -f '" + datasetNameTemp + "';"
-		}
-
-	}
-	return unZipDatasetCommand
-}
-
 func downloadZipCode(ctx *context.Context, codePath, branchName string) error {
 	archiveType := git.ZIP
 	archivePath := codePath
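
Net effect in this file: the private checkSpecialPool and generateDatasetUnzipCommand helpers are removed, and the generateCommand path now calls the exported cloudbrainTask.GenerateDatasetUnzipCommand, so the unzip-command logic lives in one place. A standalone sketch of what that shared builder returns, mirroring the body kept in train.go (the dataset names in main are made up):

// Sketch: the unzip-command builder that grampus.go now delegates to
// cloudbrainTask.GenerateDatasetUnzipCommand, reproduced as a local function.
package main

import (
	"fmt"
	"strings"
)

func generateDatasetUnzipCommand(datasetName string) string {
	var cmd string
	names := strings.Split(datasetName, ";")
	if len(names) == 1 { // single dataset: unpack in place, then delete the archive
		cmd = "unzip -q '" + datasetName + "';"
		if strings.HasSuffix(names[0], ".tar.gz") {
			cmd = "tar --strip-components=1 -zxvf '" + datasetName + "';"
		}
		cmd += "rm -f '" + datasetName + "';"
	} else { // multiple datasets: each archive goes into its own directory
		for _, name := range names {
			if strings.HasSuffix(name, ".tar.gz") {
				cmd += "tar -zxvf '" + name + "';"
			} else {
				cmd += "unzip -q '" + name + "' -d './" + strings.TrimSuffix(name, ".zip") + "';"
			}
			cmd += "rm -f '" + name + "';"
		}
	}
	return cmd
}

func main() {
	fmt.Println(generateDatasetUnzipCommand("mnist.zip"))
	// unzip -q 'mnist.zip';rm -f 'mnist.zip';
	fmt.Println(generateDatasetUnzipCommand("mnist.zip;imagenet.tar.gz"))
	// unzip -q 'mnist.zip' -d './mnist';rm -f 'mnist.zip';tar -zxvf 'imagenet.tar.gz';rm -f 'imagenet.tar.gz';
}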


+6 -3  services/cloudbrain/cloudbrainTask/train.go

@@ -889,7 +889,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo
 	if processorType == grampus.ProcessorTypeNPU {
 		//no need to process
 	} else if processorType == grampus.ProcessorTypeGPU {
-		unZipDatasetCommand := generateDatasetUnzipCommand(datasetName)
+		unZipDatasetCommand := GenerateDatasetUnzipCommand(datasetName)
 		commandUnzip := "cd " + workDir + "code;unzip -q master.zip;rm -f master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand
 		command += commandUnzip
 	}
@@ -962,7 +962,7 @@ func processPretrainModelParameter(pretrainModelPath string, pretrainModelFileNa
 	return commandDownloadTemp
 }

-func generateDatasetUnzipCommand(datasetName string) string {
+func GenerateDatasetUnzipCommand(datasetName string) string {
 	var unZipDatasetCommand string

 	datasetNameArray := strings.Split(datasetName, ";")
@@ -971,6 +971,7 @@ func generateDatasetUnzipCommand(datasetName string) string {
 		if strings.HasSuffix(datasetNameArray[0], ".tar.gz") {
 			unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';"
 		}
+		unZipDatasetCommand += "rm -f '" + datasetName + "';"

 	} else { // multiple datasets
 		for _, datasetNameTemp := range datasetNameArray {
@@ -979,6 +980,7 @@ func generateDatasetUnzipCommand(datasetName string) string {
 			} else {
 				unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';"
 			}
+			unZipDatasetCommand += "rm -f '" + datasetNameTemp + "';"
 		}

 	}
@@ -1071,7 +1073,8 @@ func SyncTaskStatus(task *models.Cloudbrain) error {
 	}
 	oldStatus := task.Status
 	task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
-	if task.Status != result.JobInfo.Status || result.JobInfo.Status == models.GrampusStatusRunning {
+
+	if task.Status != oldStatus || task.Status == models.GrampusStatusRunning {
 		task.Duration = result.JobInfo.RunSec
 		if task.Duration < 0 {
 			task.Duration = 0
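
The last hunk changes the guard in SyncTaskStatus: instead of comparing the freshly translated task.Status against the raw Grampus status string, it compares against the value saved in oldStatus, and it keeps refreshing the duration while the translated status is still running. A cut-down sketch of the corrected flow, with the grampus/models types replaced by local stand-ins:

// Sketch of the corrected status-sync branch: refresh the duration when the
// translated status actually changed, or while the job is still running.
// transStatus and the constants below stand in for the grampus/models ones.
package main

import "fmt"

const statusRunning = "RUNNING"

// transStatus mimics grampus.TransTrainJobStatus: map a platform state to a local one.
func transStatus(remote string) string {
	if remote == "JOB_RUNNING" {
		return statusRunning
	}
	return remote
}

type task struct {
	Status   string
	Duration int64
}

func syncTaskStatus(t *task, remoteStatus string, runSec int64) {
	oldStatus := t.Status
	t.Status = transStatus(remoteStatus)
	// The previous code compared t.Status with the untranslated remoteStatus;
	// comparing with oldStatus detects a real transition instead.
	if t.Status != oldStatus || t.Status == statusRunning {
		t.Duration = runSec
		if t.Duration < 0 {
			t.Duration = 0
		}
	}
}

func main() {
	t := &task{Status: statusRunning, Duration: 10}
	syncTaskStatus(t, "JOB_RUNNING", 42)
	fmt.Println(t.Status, t.Duration) // RUNNING 42
}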

