@@ -2618,6 +2618,7 @@ type DatasetInfo struct {
	DataLocalPath string
	Name          string
	FullName      string
	Size          int
}

func GetDatasetInfo(uuidStr string, grampusType ...string) (map[string]DatasetInfo, string, error) {
@@ -2673,6 +2674,7 @@ func GetDatasetInfo(uuidStr string, grampusType ...string) (map[string]DatasetIn
			DataLocalPath: dataLocalPath,
			Name:          fileName,
			FullName:      attach.Name,
			Size:          int(attach.Size),
		}
		if i == 0 {
			datasetNames = attach.Name
@@ -518,6 +518,7 @@ var (
	MaxDatasetNum   int
	CullIdleTimeout string
	CullInterval    string
	DebugAttachSize int

	//benchmark config
	IsBenchmarkEnabled bool
@@ -1497,6 +1498,7 @@ func NewContext() {
	MaxDatasetNum = sec.Key("MAX_DATASET_NUM").MustInt(5)
	CullIdleTimeout = sec.Key("CULL_IDLE_TIMEOUT").MustString("900")
	CullInterval = sec.Key("CULL_INTERVAL").MustString("60")
	DebugAttachSize = sec.Key("DEBUG_ATTACH_SIZE").MustInt(20)

	sec = Cfg.Section("benchmark")
	IsBenchmarkEnabled = sec.Key("ENABLED").MustBool(false)
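The new limit is read in NewContext with a 20 GB default (DEBUG_ATTACH_SIZE, MustInt(20)). A minimal, self-contained sketch of that lookup with go-ini; the enclosing section name is not visible in this hunk, so [cloudbrain] below is an assumption:

```go
package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

func main() {
	// Section name assumed for illustration; the hunk only shows that the key
	// is read from whatever section is current before Cfg.Section("benchmark").
	cfg, err := ini.Load([]byte("[cloudbrain]\nDEBUG_ATTACH_SIZE = 30\n"))
	if err != nil {
		panic(err)
	}

	// MustInt(20) falls back to 20 GB when DEBUG_ATTACH_SIZE is absent.
	debugAttachSize := cfg.Section("cloudbrain").Key("DEBUG_ATTACH_SIZE").MustInt(20)
	fmt.Println(debugAttachSize) // 30
}
```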
@@ -98,6 +98,9 @@ func NewFuncMap() []template.FuncMap {
		"AllowedReactions": func() []string {
			return setting.UI.Reactions
		},
		"DebugAttachSize": func() int {
			return setting.DebugAttachSize * 1000 * 1000 * 1000
		},
		"AvatarLink": models.AvatarLink,
		"Safe":       Safe,
		"SafeJS":     SafeJS,
@@ -154,7 +157,7 @@ func NewFuncMap() []template.FuncMap {
		"EscapePound": func(str string) string {
			return strings.NewReplacer("%", "%25", "#", "%23", " ", "%20", "?", "%3F").Replace(str)
		},
		"IpynbBool":func(str string) bool{
		"IpynbBool": func(str string) bool {
			return strings.Contains(str, ".ipynb")
		},
		"nl2br": func(text string) template.HTML {
@@ -3327,7 +3327,7 @@ Stopped_failed=任务停止失败,请稍后再试。
Stopped_success_update_status_fail=任务停止成功,状态及运行时间更新失败。
load_code_failed=代码加载失败,请确认选择了正确的分支。
error.debug_datasetsize = 数据集大小超过限制('%d'GB)
error.dataset_select = 数据集选择错误:数量超过限制或者有同名数据集
new_train_gpu_tooltips = 训练脚本存储在 <strong style="color:#010101">%s</strong> 中,数据集存储在 <strong style="color:#010101">%s</strong> 中,预训练模型存放在运行参数 <strong style="color:#010101">%s</strong> 中,训练输出请存储在 <strong style="color:#010101">%s</strong> 中以供后续下载。
new_debug_gpu_tooltips = 项目代码存储在 <strong style="color:#010101">%s</strong> 中,数据集存储在 <strong style="color:#010101">%s</strong> 中,选择的模型存储在 <strong style="color:#010101">%s</strong> 中,调试输出请存储在 <strong style="color:#010101">%s</strong> 中以供后续下载。
@@ -285,7 +285,7 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
	}
	var datasetInfos map[string]models.DatasetInfo
	var datasetNames string
	//var
	var attachSize int
	if uuids != "" {
		datasetInfos, datasetNames, err = models.GetDatasetInfo(uuids)
		if err != nil {
@@ -294,6 +294,18 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
			ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
			return
		}
		if jobType == string(models.JobTypeDebug) {
			for _, infos := range datasetInfos {
				attachSize += infos.Size
			}
			if attachSize > setting.DebugAttachSize*1000*1000*1000 {
				log.Error("The dataset size exceeds the limit (%dGB)", setting.DebugAttachSize)
				cloudBrainNewDataPrepare(ctx, jobType)
				ctx.RenderWithErr(ctx.Tr("cloudbrain.error.debug_datasetsize", setting.DebugAttachSize), tpl, &form)
				return
			}
		}
	}

	command := cloudbrain.GetCloudbrainDebugCommand()
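Both cloudBrainCreate above and Notebook2Create below apply the same guard: sum the Size of every selected attachment and reject the debug task once the total passes DEBUG_ATTACH_SIZE, interpreted as decimal gigabytes. A self-contained sketch of that check; the helper name and the local stand-ins for models.DatasetInfo and setting.DebugAttachSize are hypothetical:

```go
package main

import "fmt"

// DatasetInfo mirrors just the fields this check needs from models.DatasetInfo.
type DatasetInfo struct {
	Name string
	Size int // bytes, copied from the attachment record
}

// debugAttachSizeGB stands in for setting.DebugAttachSize (DEBUG_ATTACH_SIZE, in GB).
const debugAttachSizeGB = 20

// exceedsDebugLimit sums the selected attachments and compares the total
// against the limit converted to bytes with the same 1000*1000*1000 factor
// used by the handlers and the template helper.
func exceedsDebugLimit(infos map[string]DatasetInfo) (total int, exceeded bool) {
	for _, info := range infos {
		total += info.Size
	}
	return total, total > debugAttachSizeGB*1000*1000*1000
}

func main() {
	infos := map[string]DatasetInfo{
		"uuid-a": {Name: "train.zip", Size: 15 * 1000 * 1000 * 1000},
		"uuid-b": {Name: "val.zip", Size: 8 * 1000 * 1000 * 1000},
	}
	total, exceeded := exceedsDebugLimit(infos)
	fmt.Printf("total=%d bytes exceeded=%v\n", total, exceeded) // 23 GB > 20 GB -> true
}
```

With the default 20 GB limit, the 23 GB total above is rejected, which is the case the new error.debug_datasetsize message reports.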
@@ -373,6 +373,7 @@ func datasetMultiple(ctx *context.Context, opts *models.SearchDatasetOptions) {
	}
	data, err := json.Marshal(datasets)
	log.Info("datakey: %s", string(data))
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
@@ -218,6 +218,22 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm
			return
		}
	}

	var datasetInfos map[string]models.DatasetInfo
	var attachSize int
	if uuid != "" {
		datasetInfos, _, err = models.GetDatasetInfo(uuid)
		for _, infos := range datasetInfos {
			attachSize += infos.Size
		}
		if attachSize > setting.DebugAttachSize*1000*1000*1000 {
			log.Error("The dataset size exceeds the limit (%dGB)", setting.DebugAttachSize)
			notebookNewDataPrepare(ctx)
			ctx.RenderWithErr(ctx.Tr("cloudbrain.error.debug_datasetsize", setting.DebugAttachSize), tplModelArtsNotebookNew, &form)
			return
		}
	}

	var aiCenterCode = models.AICenterOfCloudBrainTwo
	if setting.ModelartsCD.Enabled {
		aiCenterCode = models.AICenterOfChengdu
@@ -23,7 +23,7 @@
<div class="repository">
{{template "repo/header" .}}
<div class="ui container">
<div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div>
<div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}" data-exceed-size="{{DebugAttachSize}}"></div>
{{if eq .NotStopTaskCount 0}}
{{template "base/alert" .}}
{{end}}
@@ -4,7 +4,7 @@
<div class="repository">
{{template "repo/header" .}}
<div class="ui container">
<div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div>
<div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}" data-exceed-size="{{DebugAttachSize}}"></div>
{{if eq .NotStopTaskCount 0}}
{{template "base/alert" .}}
{{end}}
@@ -3,7 +3,7 @@
<div class="repository">
{{template "repo/header" .}}
<div class="ui container">
<div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div>
<div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}" data-exceed-size="{{DebugAttachSize}}"></div>
{{if eq .NotStopTaskCount 0}}
{{template "base/alert" .}}
{{end}}
@@ -4,7 +4,7 @@
{{template "repo/header" .}}
<div class="ui container">
<div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div>
<div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}" data-exceed-size="{{DebugAttachSize}}"></div>
{{if eq .NotStopTaskCount 0}}
{{template "base/alert" .}}
{{end}}
@@ -176,6 +176,12 @@
>
{{ i18n.dataset_unzip_failed }}
</span>
<span
class="unzip-failed"
v-if="data.Size > exceedSize"
>
{{ i18n.dataset_exceeds_failed }}{{exceedSize/(1000*1000*1000)}}G
</span>
</span>
</span>
</el-tree>
@@ -282,6 +288,12 @@
>
{{ i18n.dataset_unzip_failed }}
</span>
<span
class="unzip-failed"
v-if="data.Size > exceedSize"
>
{{ i18n.dataset_exceeds_failed }}
</span>
</span>
</span>
</el-tree>
@@ -388,6 +400,12 @@
>
{{ i18n.dataset_unzip_failed }}
</span>
<span
class="unzip-failed"
v-if="data.Size > exceedSize"
>
{{ i18n.dataset_exceeds_failed }}
</span>
</span>
</span>
</el-tree>
@@ -494,6 +512,12 @@
>
{{ i18n.dataset_unzip_failed }}
</span>
<span
class="unzip-failed"
v-if="data.Size > exceedSize"
>
{{ i18n.dataset_exceeds_failed }}
</span>
</span>
</span>
</el-tree>
@@ -625,6 +649,7 @@ export default {
paramsFavorite: { page: 1, q: "" },
loadingFavorite: false,
initFavoriteTreeNode: [],
exceedSize: 0,
};
},
methods: {
@@ -883,6 +908,9 @@ export default {
if (curchild.DecompressState !== 1) {
curchild.disabled = true;
}
if (curchild.Size > this.exceedSize && this.exceedSize) {
curchild.disabled = true;
}
curchild.ref = ref;
curchild.label = curchild.Name;
preChild.push(curchild);
@@ -971,6 +999,7 @@ export default {
mounted() {
this.type = $(".cloudbrain-type").data("cloudbrain-type");
this.repoLink = $(".cloudbrain-type").data("repo-link");
this.exceedSize = $(".cloudbrain-type").data("exceed-size");
if ($(".cloudbrain-type").data("dataset-uuid")) {
this.hasSelectDatasetList = $(".cloudbrain-type")
.data("dataset-uuid")
@@ -74,6 +74,7 @@ export const i18nVue = {
dataset_search_placeholder: "搜数据集名称/描述...",
dataset_unziping: "正在解压缩",
dataset_unzip_failed: "解压失败",
dataset_exceeds_failed: "数据集大小超过",
dataset_my_upload: "我上传的",
dataset_current_repo: "本项目",
dataset_public: "公开数据集",
@@ -195,6 +196,7 @@ export const i18nVue = {
dataset_search_placeholder: "Search dataset name/description ...",
dataset_unziping: "Decompressing",
dataset_unzip_failed: "Decompression failed",
dataset_exceeds_failed: "Dataset size exceeds ",
dataset_my_upload: "Upload by me",
dataset_current_repo: "Current Repository",
dataset_public: "Public dataset",