Browse Source

修复本地模型导入的问题。

Signed-off-by: zouap <zouap@pcl.ac.cn>
tags/v1.22.11.2^2
zouap 3 years ago
parent
commit
e9ac3eed2e
4 changed files with 246 additions and 44 deletions
  1. +13
    -0
      models/ai_model_manage.go
  2. +183
    -32
      models/user_business_analysis.go
  3. +16
    -1
      routers/repo/ai_model_manage.go
  4. +34
    -11
      routers/repo/attachment.go

+ 13
- 0
models/ai_model_manage.go View File

@@ -287,6 +287,19 @@ func ModifyModelDescription(id string, description string) error {
log.Info("success to update description from db.re=" + fmt.Sprint((re)))
return nil
}
// ModifyModelLabel updates only the label column of the AiModelManage
// record identified by id. The affected-row count is logged; any DB error
// is returned to the caller unchanged.
func ModifyModelLabel(id string, label string) error {
	var sess *xorm.Session
	sess = x.ID(id)
	defer sess.Close()
	re, err := sess.Cols("label").Update(&AiModelManage{
		Label: label,
	})
	if err != nil {
		return err
	}
	// Fixed: message previously said "description" (copy-paste from
	// ModifyModelDescription) even though this path updates the label.
	log.Info("success to update label from db.re=" + fmt.Sprint(re))
	return nil
}

func ModifyLocalModel(id string, name, label, description string, engine int) error {
var sess *xorm.Session


+ 183
- 32
models/user_business_analysis.go View File

@@ -499,7 +499,7 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi
DataDate := currentTimeNow.Format("2006-01-02 15:04")

CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
CommitCountMap, _ := queryCommitAction(start_unix, end_unix, 5)
IssueCountMap := queryCreateIssue(start_unix, end_unix)

CommentCountMap := queryComment(start_unix, end_unix)
@@ -517,7 +517,7 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi
CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
}
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)
@@ -526,7 +526,7 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

@@ -752,7 +752,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
startTime := currentTimeNow.AddDate(0, 0, -1)

CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
CommitCountMap, mostActiveMap := queryCommitAction(start_unix, end_unix, 5)
IssueCountMap := queryCreateIssue(start_unix, end_unix)

CommentCountMap := queryComment(start_unix, end_unix)
@@ -764,11 +764,11 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
log.Info("query commit code errr.")
} else {
log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
//CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
//log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
}
//CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
CommitDatasetSizeMap, CommitDatasetNumMap, dataSetDownloadMap := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap, DetailInfoMap, MostDownloadMap := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)
@@ -778,7 +778,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
RecommendDataset, CreatedDataset := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

@@ -885,8 +885,32 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
}
if tableName == "user_business_analysis_current_year" {
//年度数据
log.Info(fmt.Sprint(len(DetailInfoMap)))
log.Info(fmt.Sprint(len(MostDownloadMap)))
subTime := time.Now().UTC().Sub(dateRecordAll.RegistDate.AsTime().UTC())
mostActiveDay := ""
if userInfo, ok := mostActiveMap[dateRecordAll.ID]; ok {
mostActiveDay = getMostActiveJson(userInfo)
}
repoInfo := getRepoDetailInfo(DetailInfoMap, dateRecordAll.ID, MostDownloadMap)
dataSetInfo := getDataSetInfo(dateRecordAll.ID, CreatedDataset, dataSetDownloadMap, CommitDatasetNumMap, CollectedDataset)
codeInfo := getCodeInfo(dateRecordAll)
cloudBrainInfo := getCloudBrainInfo(dateRecordAll, CloudBrainTaskItemMap)
re := &UserSummaryCurrentYear{
ID: dateRecordAll.ID,
Name: dateRecordAll.Name,
Email: dateRecordAll.Email,
Phone: dateRecordAll.Phone,
RegistDate: dateRecordAll.RegistDate,
DateCount: int(subTime.Hours()) / 24,
MostActiveDay: mostActiveDay,
RepoInfo: repoInfo,
DataSetInfo: dataSetInfo,
CodeInfo: codeInfo,
CloudBrainInfo: cloudBrainInfo,
}
statictisSess.Insert(re)
//log.Info(fmt.Sprint(len(CommitCountMap)))
//log.Info(fmt.Sprint(len(CommitCodeSizeMap)))

}
}
if len(dateRecordBatch) > 0 {
@@ -895,6 +919,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
if err != nil {
log.Info("insert all data failed." + err.Error())
}

}
indexTotal += PAGE_SIZE
if indexTotal >= count {
@@ -915,6 +940,95 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
}
log.Info("refresh data finished.tableName=" + tableName + " total record:" + fmt.Sprint(insertCount))
}
// getCloudBrainInfo serializes a user's cloud-brain task statistics into a
// JSON object (string keys and string values) for the yearly summary row.
func getCloudBrainInfo(dateRecordAll UserBusinessAnalysisAll, CloudBrainTaskItemMap map[string]int) string {
	// NOTE(review): the "GpuInferenceJob" lookup uses the bare metric name,
	// while other call sites appear to key this map per user — confirm.
	info := map[string]string{
		"create_task_num":    fmt.Sprint(dateRecordAll.CloudBrainTaskNum),
		"debug_task_num":     fmt.Sprint(dateRecordAll.GpuDebugJob + dateRecordAll.NpuDebugJob),
		"train_task_num":     fmt.Sprint(dateRecordAll.GpuTrainJob + dateRecordAll.NpuTrainJob),
		"inference_task_num": fmt.Sprint(dateRecordAll.NpuInferenceJob + CloudBrainTaskItemMap["GpuInferenceJob"]),
		"card_runtime":       fmt.Sprint(dateRecordAll.CloudBrainRunTime),
		// runtime * 5 — pricing constant, TODO confirm the rate.
		"card_runtime_money": fmt.Sprint(dateRecordAll.CloudBrainRunTime * 5),
	}
	jsonBytes, _ := json.Marshal(info)
	return string(jsonBytes)
}

// getCodeInfo serializes a user's commit statistics (commit count and total
// committed code size) into a JSON string for the yearly summary record.
func getCodeInfo(dateRecordAll UserBusinessAnalysisAll) string {
	info := map[string]string{
		"commit_count": fmt.Sprint(dateRecordAll.CommitCount),
		"commit_line":  fmt.Sprint(dateRecordAll.CommitCodeSize),
	}
	jsonBytes, _ := json.Marshal(info)
	return string(jsonBytes)
}

// getDataSetInfo builds a JSON summary of a user's dataset activity.
// Only counters that have an entry for userId appear in the output.
// Output keys: create_count, upload_file_count, download_count,
// cllected_count (spelling preserved — downstream consumers parse it).
func getDataSetInfo(userId int64, CreatedDataset map[int64]int, dataSetDownloadMap map[int64]int, CommitDatasetNumMap map[int64]int, CollectedDataset map[int64]int) string {
	info := make(map[string]string)
	sources := []struct {
		key    string
		counts map[int64]int
	}{
		{"create_count", CreatedDataset},
		{"upload_file_count", CommitDatasetNumMap},
		{"download_count", dataSetDownloadMap},
		{"cllected_count", CollectedDataset},
	}
	for _, src := range sources {
		if v, ok := src.counts[userId]; ok {
			info[src.key] = fmt.Sprint(v)
		}
	}
	jsonBytes, _ := json.Marshal(info)
	return string(jsonBytes)
}

// getRepoDetailInfo builds a JSON summary of one user's repository stats.
// repoDetailInfoMap is keyed by "<userId>_<metric>"; mostDownload maps a
// user id to the name of the most-downloaded repository. Metrics missing
// for this user are simply omitted from the result.
func getRepoDetailInfo(repoDetailInfoMap map[string]int, userId int64, mostDownload map[int64]string) string {
	prefix := fmt.Sprint(userId)
	out := make(map[string]string)
	// copyMetric moves one numeric metric into the output if present.
	copyMetric := func(suffix, outKey string) {
		if v, ok := repoDetailInfoMap[prefix+suffix]; ok {
			out[outKey] = fmt.Sprint(v)
		}
	}
	copyMetric("_total", "repo_total")
	copyMetric("_is_private", "repo_is_private")
	copyMetric("_is_public", "repo_is_public")
	copyMetric("_total_download", "repo_total_download")
	copyMetric("_most_download", "repo_most_download_count")
	if name, ok := mostDownload[userId]; ok {
		out["repo_most_download_name"] = name
	}
	jsonBytes, _ := json.Marshal(out)
	return string(jsonBytes)
}

// getMostActiveJson summarizes a user's activity map as JSON.
// If the special "hour_*" entries are present they are consumed (deleted
// from userInfo — the map IS mutated) and reported as a "before_dawn"
// timestamp "year/month/day hour". The remaining entries are per-day
// activity counts; the maximum is reported as most_active_day /
// most_active_num (empty day and 0 when the map has no entries).
func getMostActiveJson(userInfo map[string]int) string {
	result := make(map[string]string)
	if day, ok := userInfo["hour_day"]; ok {
		hour := userInfo["hour_hour"]
		month := userInfo["hour_month"]
		year := userInfo["hour_year"]
		for _, k := range []string{"hour_day", "hour_hour", "hour_month", "hour_year"} {
			delete(userInfo, k)
		}
		result["before_dawn"] = fmt.Sprint(year) + "/" + fmt.Sprint(month) + "/" + fmt.Sprint(day) + " " + fmt.Sprint(hour)
	}
	bestCount := 0
	bestDay := ""
	for day, count := range userInfo {
		if count > bestCount {
			bestCount = count
			bestDay = day
		}
	}
	result["most_active_day"] = bestDay
	result["most_active_num"] = fmt.Sprint(bestCount)
	jsonBytes, _ := json.Marshal(result)
	return string(jsonBytes)
}

func updateUserIndex(tableName string, statictisSess *xorm.Session, userId int64, userIndex float64) {
updateSql := "UPDATE public." + tableName + " set user_index=" + fmt.Sprint(userIndex*100) + " where id=" + fmt.Sprint(userId)
@@ -1002,7 +1116,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,

DataDate := CountDate.Format("2006-01-02")
CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
CommitCountMap, _ := queryCommitAction(start_unix, end_unix, 5)
IssueCountMap := queryCreateIssue(start_unix, end_unix)

CommentCountMap := queryComment(start_unix, end_unix)
@@ -1015,10 +1129,10 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
log.Info("query commit code errr.")
} else {
//log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
//CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
//log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
}
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)
@@ -1027,7 +1141,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

@@ -1495,7 +1609,7 @@ func queryPullRequest(start_unix int64, end_unix int64) map[int64]int {
return resultMap
}

func queryCommitAction(start_unix int64, end_unix int64, actionType int64) map[int64]int {
func queryCommitAction(start_unix int64, end_unix int64, actionType int64) (map[int64]int, map[int64]map[string]int) {
sess := x.NewSession()
defer sess.Close()
resultMap := make(map[int64]int)
@@ -1505,7 +1619,7 @@ func queryCommitAction(start_unix int64, end_unix int64, actionType int64) map[i
count, err := sess.Where(cond).Count(new(Action))
if err != nil {
log.Info("query action error. return.")
return resultMap
return resultMap, mostActiveMap
}

var indexTotal int64
@@ -1532,6 +1646,17 @@ func queryCommitAction(start_unix int64, end_unix int64, actionType int64) map[i
} else {
mostActiveMap[actionRecord.UserID][key] = getMapKeyStringValue(key, mostActiveMap[actionRecord.UserID]) + 1
}
utcTime := actionRecord.CreatedUnix.AsTime().UTC()
hour := utcTime.Hour()
if hour >= 0 && hour <= 5 {
key = "hour_hour"
if getMapKeyStringValue(key, mostActiveMap[actionRecord.UserID]) < hour {
mostActiveMap[actionRecord.UserID][key] = hour
mostActiveMap[actionRecord.UserID]["hour_day"] = utcTime.Day()
mostActiveMap[actionRecord.UserID]["hour_month"] = int(utcTime.Month())
mostActiveMap[actionRecord.UserID]["hour_year"] = utcTime.Year()
}
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
@@ -1539,7 +1664,7 @@ func queryCommitAction(start_unix int64, end_unix int64, actionType int64) map[i
}
}

return resultMap
return resultMap, mostActiveMap
}
func getDate(createTime timeutil.TimeStamp) string {
return createTime.Format("2006-01-02")
@@ -1732,15 +1857,16 @@ func queryFollow(start_unix int64, end_unix int64) (map[int64]int, map[int64]int
return resultMap, resultFocusedByOtherMap
}

func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int {
func queryRecommedDataSet(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
sess := x.NewSession()
defer sess.Close()
userIdDdatasetMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and recommend=true"
userIdRecommentDatasetMap := make(map[int64]int)
userIdCreateDatasetMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
count, err := sess.Where(cond).Count(new(Dataset))
if err != nil {
log.Info("query recommend dataset error. return.")
return userIdDdatasetMap
return userIdRecommentDatasetMap, userIdCreateDatasetMap
}
var indexTotal int64
indexTotal = 0
@@ -1750,18 +1876,21 @@ func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int {
sess.Find(&datasetList)
log.Info("query datasetList size=" + fmt.Sprint(len(datasetList)))
for _, datasetRecord := range datasetList {
if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok {
userIdDdatasetMap[datasetRecord.UserID] = 1
} else {
userIdDdatasetMap[datasetRecord.UserID] += 1
if datasetRecord.Recommend {
if _, ok := userIdRecommentDatasetMap[datasetRecord.UserID]; !ok {
userIdRecommentDatasetMap[datasetRecord.UserID] = 1
} else {
userIdRecommentDatasetMap[datasetRecord.UserID] += 1
}
}
userIdCreateDatasetMap[datasetRecord.UserID] = getMapValue(datasetRecord.UserID, userIdCreateDatasetMap) + 1
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return userIdDdatasetMap
return userIdRecommentDatasetMap, userIdCreateDatasetMap
}

func queryAllDataSet() (map[int64]int64, map[int64]int64) {
@@ -1940,22 +2069,23 @@ func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64
return imageCollect, imageCollected
}

func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int, map[int64]int) {
sess := x.NewSession()
defer sess.Close()
resultSizeMap := make(map[int64]int)
resultNumMap := make(map[int64]int)
resultDownloadMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)

count, err := sess.Where(cond).Count(new(Attachment))
if err != nil {
log.Info("query attachment error. return.")
return resultSizeMap, resultNumMap
return resultSizeMap, resultNumMap, resultDownloadMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,uploader_id,size").Table("attachment").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
sess.Select("id,uploader_id,size,download_count").Table("attachment").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
attachmentList := make([]*Attachment, 0)
sess.Find(&attachmentList)

@@ -1964,9 +2094,11 @@ func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int6
if _, ok := resultSizeMap[attachRecord.UploaderID]; !ok {
resultSizeMap[attachRecord.UploaderID] = int(attachRecord.Size / (1024 * 1024)) //MB
resultNumMap[attachRecord.UploaderID] = 1
resultDownloadMap[attachRecord.UploaderID] = int(attachRecord.DownloadCount)
} else {
resultSizeMap[attachRecord.UploaderID] += int(attachRecord.Size / (1024 * 1024)) //MB
resultNumMap[attachRecord.UploaderID] += 1
resultDownloadMap[attachRecord.UploaderID] += int(attachRecord.DownloadCount)
}
}

@@ -1976,7 +2108,7 @@ func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int6
}
}

return resultSizeMap, resultNumMap
return resultSizeMap, resultNumMap, resultDownloadMap
}

func queryUserCreateRepo(start_unix int64, end_unix int64) (map[int64]int, map[string]int, map[int64]string) {
@@ -2216,6 +2348,7 @@ func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[s
setMapKey("CloudBrainRunTime", cloudTaskRecord.UserID, int(cloudTaskRecord.Duration), resultItemMap)
}
if cloudTaskRecord.Type == 1 { //npu
setMapKey("CloudBrainTwo", cloudTaskRecord.UserID, 1, resultItemMap)
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("NpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else if cloudTaskRecord.JobType == "INFERENCE" {
@@ -2223,14 +2356,32 @@ func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[s
} else {
setMapKey("NpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
} else { //type=0 gpu
} else if cloudTaskRecord.Type == 0 { //type=0 gpu
setMapKey("CloudBrainOne", cloudTaskRecord.UserID, 1, resultItemMap)
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("GpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else if cloudTaskRecord.JobType == "INFERENCE" {
setMapKey("GpuInferenceJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else if cloudTaskRecord.JobType == "BENCHMARK" {
setMapKey("GpuBenchMarkJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else {
setMapKey("GpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
} else if cloudTaskRecord.Type == 2 {
setMapKey("C2Net", cloudTaskRecord.UserID, 1, resultItemMap)
if cloudTaskRecord.ComputeResource == NPUResource {
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("NpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else {
setMapKey("NpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
} else if cloudTaskRecord.ComputeResource == GPUResource {
if cloudTaskRecord.JobType == "TRAIN" {
setMapKey("GpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap)
} else {
setMapKey("GpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap)
}
}
}
}
indexTotal += PAGE_SIZE


+ 16
- 1
routers/repo/ai_model_manage.go View File

@@ -1083,6 +1083,16 @@ func ModifyModel(id string, description string) error {
return err
}

// ModifyModelLabel updates only the label of the model identified by id,
// logging success or failure. The model-layer error is passed through to
// the caller unchanged.
func ModifyModelLabel(id string, label string) error {
	err := models.ModifyModelLabel(id, label)
	if err == nil {
		log.Info("modify success.")
	} else {
		// Fixed: previously logged "desc=" (copied from the description
		// path) even though the value being set here is the label.
		log.Info("Failed to modify.id=" + id + " label=" + label + " error:" + err.Error())
	}
	return err
}

func ModifyModelInfo(ctx *context.Context) {
log.Info("modify model start.")
id := ctx.Query("id")
@@ -1124,7 +1134,12 @@ func ModifyModelInfo(ctx *context.Context) {
err = models.ModifyLocalModel(id, name, label, description, engine)

} else {
err = ModifyModel(id, description)
_, ok := ctx.Req.Form["label"]
if ok {
err = ModifyModelLabel(id, ctx.Query("label"))
} else {
err = ModifyModel(id, description)
}
}

if err != nil {


+ 34
- 11
routers/repo/attachment.go View File

@@ -675,22 +675,45 @@ func GetSuccessChunks(ctx *context.Context) {
//copy
srcObjectName := fileChunk.ObjectName
destObjectName := getObjectName(fileName, modeluuid)
var isExist bool
if typeCloudBrain == models.TypeCloudBrainOne {
bucketName := setting.Attachment.Minio.Bucket
storage.MinioCopyAFile(bucketName, srcObjectName, bucketName, destObjectName)
if storage.MinioGetFilesSize(bucketName, []string{destObjectName}) > 0 {
isExist = true
} else {
storage.MinioCopyAFile(bucketName, srcObjectName, bucketName, destObjectName)
}
} else {
bucketName := setting.Bucket
storage.ObsCopyFile(bucketName, srcObjectName, bucketName, destObjectName)
if storage.ObsGetFilesSize(bucketName, []string{destObjectName}) > 0 {
isExist = true
} else {
storage.ObsCopyFile(bucketName, srcObjectName, bucketName, destObjectName)
}
}
UpdateModelSize(modeluuid)
ctx.JSON(200, map[string]string{
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
"uploadID": fileChunk.UploadID,
"chunks": string(chunks),
"attachID": strconv.Itoa(int(attachID)),
"fileName": attach.Name,
})
if isExist {
ctx.JSON(200, map[string]string{
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
"uploadID": fileChunk.UploadID,
"chunks": string(chunks),
"attachID": strconv.Itoa(int(attachID)),
"modeluuid": modeluuid,
"fileName": attach.Name,
"modelName": modelname,
})
} else {
UpdateModelSize(modeluuid)
ctx.JSON(200, map[string]string{
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
"uploadID": fileChunk.UploadID,
"chunks": string(chunks),
"attachID": strconv.Itoa(int(attachID)),
"fileName": attach.Name,
})
}

return
} else {
model, err := models.QueryModelById(dbmodeluuid)


Loading…
Cancel
Save