@@ -75,11 +75,11 @@ public class LogAspect { | |||||
private Object combineLogInfo(JoinPoint joinPoint) throws Throwable { | private Object combineLogInfo(JoinPoint joinPoint) throws Throwable { | ||||
Object[] param = joinPoint.getArgs(); | Object[] param = joinPoint.getArgs(); | ||||
LogUtil.info(LogEnum.REST_REQ, "uri:{},input:{},==>begin", joinPoint.getSignature(), param); | |||||
LogUtil.info(LogEnum.LOG_ASPECT, "uri:{},input:{},==>begin", joinPoint.getSignature(), param); | |||||
long start = System.currentTimeMillis(); | long start = System.currentTimeMillis(); | ||||
Object result = ((ProceedingJoinPoint) joinPoint).proceed(); | Object result = ((ProceedingJoinPoint) joinPoint).proceed(); | ||||
long end = System.currentTimeMillis(); | long end = System.currentTimeMillis(); | ||||
LogUtil.info(LogEnum.REST_REQ, "uri:{},output:{},proc_time:{},<==end", joinPoint.getSignature().toString(), | |||||
LogUtil.info(LogEnum.LOG_ASPECT, "uri:{},output:{},proc_time:{},<==end", joinPoint.getSignature().toString(), | |||||
result, end - start); | result, end - start); | ||||
return result; | return result; | ||||
} | } | ||||
@@ -59,8 +59,10 @@ public enum LogEnum { | |||||
//DATA_SEQUENCE | //DATA_SEQUENCE | ||||
DATA_SEQUENCE, | DATA_SEQUENCE, | ||||
//IO UTIL | //IO UTIL | ||||
IO_UTIL; | |||||
IO_UTIL, | |||||
// 日志切面 | |||||
LOG_ASPECT | |||||
; | |||||
/** | /** | ||||
* 判断日志类型不能为空 | * 判断日志类型不能为空 | ||||
* | * | ||||
@@ -55,6 +55,11 @@ public class BaseLogFilter extends AbstractMatcherFilter<ILoggingEvent> { | |||||
return onMismatch; | return onMismatch; | ||||
} | } | ||||
/** | |||||
* 检测日志级别 | |||||
* @param iLoggingEvent 日志事件 | |||||
* @return true 过滤当前级别 false 不过滤当前级别 | |||||
*/ | |||||
protected boolean checkLevel(ILoggingEvent iLoggingEvent) { | protected boolean checkLevel(ILoggingEvent iLoggingEvent) { | ||||
return this.level != null | return this.level != null | ||||
&& iLoggingEvent.getLevel() != null | && iLoggingEvent.getLevel() != null | ||||
@@ -41,7 +41,7 @@ import java.util.Objects; | |||||
import java.util.concurrent.TimeUnit; | import java.util.concurrent.TimeUnit; | ||||
/** | /** | ||||
* @description JWT | |||||
* @description JWT | |||||
* @date 2020-03-14 | * @date 2020-03-14 | ||||
*/ | */ | ||||
@Component | @Component | ||||
@@ -1,28 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.dao; | |||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper; | |||||
import org.dubhe.domain.PtDataset; | |||||
/** | |||||
* @description 数据集 | |||||
* @date 2020-03-30 | |||||
*/ | |||||
public interface PtDatasetMapper extends BaseMapper<PtDataset> { | |||||
} |
@@ -1,29 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.dao; | |||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper; | |||||
import org.dubhe.domain.PtDevEnvs; | |||||
/** | |||||
* @description 开发环境 | |||||
* @date 2020-03-30 | |||||
*/ | |||||
public interface PtDevEnvsMapper extends BaseMapper<PtDevEnvs> { | |||||
} |
@@ -1,28 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.dao; | |||||
import com.baomidou.mybatisplus.core.mapper.BaseMapper; | |||||
import org.dubhe.domain.PtStorage; | |||||
/** | |||||
* @description 存储管理 | |||||
* @date 2020-03-30 | |||||
*/ | |||||
public interface PtStorageMapper extends BaseMapper<PtStorage> { | |||||
} |
@@ -1,71 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.domain; | |||||
import cn.hutool.core.bean.BeanUtil; | |||||
import cn.hutool.core.bean.copier.CopyOptions; | |||||
import com.baomidou.mybatisplus.annotation.IdType; | |||||
import com.baomidou.mybatisplus.annotation.TableField; | |||||
import com.baomidou.mybatisplus.annotation.TableId; | |||||
import com.baomidou.mybatisplus.annotation.TableName; | |||||
import lombok.Data; | |||||
import org.dubhe.base.BaseEntity; | |||||
import org.dubhe.domain.entity.Team; | |||||
import org.dubhe.domain.entity.User; | |||||
import javax.validation.constraints.NotBlank; | |||||
/** | |||||
* @description 数据集 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Data | |||||
@TableName("pt_dataset") | |||||
public class PtDataset extends BaseEntity { | |||||
@TableId(value = "id", type = IdType.AUTO) | |||||
private Long id; | |||||
@TableField(value = "name") | |||||
@NotBlank | |||||
private String name; | |||||
@TableField(value = "remark") | |||||
private String remark; | |||||
@TableField(value = "type") | |||||
@NotBlank | |||||
private String type; | |||||
/** | |||||
* 团队 | |||||
*/ | |||||
@TableField(exist = false) | |||||
private Team team; | |||||
/** | |||||
* 创建用户 | |||||
*/ | |||||
@TableField(exist = false) | |||||
private User createUser; | |||||
public void copy(PtDataset source) { | |||||
BeanUtil.copyProperties(source, this, CopyOptions.create().setIgnoreNullValue(true)); | |||||
} | |||||
public @interface Update { | |||||
} | |||||
} |
@@ -1,114 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.domain; | |||||
import cn.hutool.core.bean.BeanUtil; | |||||
import cn.hutool.core.bean.copier.CopyOptions; | |||||
import com.baomidou.mybatisplus.annotation.IdType; | |||||
import com.baomidou.mybatisplus.annotation.TableField; | |||||
import com.baomidou.mybatisplus.annotation.TableId; | |||||
import com.baomidou.mybatisplus.annotation.TableName; | |||||
import lombok.Data; | |||||
import org.dubhe.base.BaseEntity; | |||||
import org.dubhe.domain.entity.PtImage; | |||||
import org.dubhe.domain.entity.Team; | |||||
import org.dubhe.domain.entity.User; | |||||
import javax.validation.constraints.NotBlank; | |||||
import java.sql.Timestamp; | |||||
/** | |||||
* @description 开发环境 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Data | |||||
@TableName("pt_dev_envs") | |||||
public class PtDevEnvs extends BaseEntity { | |||||
@TableId(value = "id", type = IdType.AUTO) | |||||
private Long id; | |||||
@NotBlank | |||||
private String name; | |||||
@TableField(value = "remark") | |||||
private String remark; | |||||
@TableField(value = "type") | |||||
@NotBlank | |||||
private String type; | |||||
@TableField(value = "pod_num") | |||||
private Integer podNum; | |||||
@TableField(value = "gpu_num") | |||||
private Integer gpuNum; | |||||
@TableField(value = "mem_num") | |||||
private Integer memNum; | |||||
@TableField(value = "cpu_num") | |||||
private Integer cpuNum; | |||||
@TableField(value = "duration") | |||||
private Integer duration; | |||||
@TableField(value = "start_time") | |||||
private Timestamp startTime; | |||||
@TableField(value = "close_time") | |||||
private Timestamp closeTime; | |||||
/** | |||||
* 数据集 | |||||
*/ | |||||
@TableField(exist = false) | |||||
private PtDataset dataset; | |||||
/** | |||||
* 镜像 | |||||
*/ | |||||
@TableField(exist = false) | |||||
private PtImage image; | |||||
/** | |||||
* 存储 | |||||
*/ | |||||
@TableField(exist = false) | |||||
private PtStorage storage; | |||||
/** | |||||
* 团队 | |||||
*/ | |||||
@TableField(exist = false) | |||||
private Team team; | |||||
/** | |||||
* 创建用户 | |||||
*/ | |||||
@TableField(exist = false) | |||||
private User createUser; | |||||
public void copy(PtDevEnvs source) { | |||||
BeanUtil.copyProperties(source, this, CopyOptions.create().setIgnoreNullValue(true)); | |||||
} | |||||
public @interface Update { | |||||
} | |||||
} |
@@ -1,69 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.domain; | |||||
import cn.hutool.core.bean.BeanUtil; | |||||
import cn.hutool.core.bean.copier.CopyOptions; | |||||
import com.baomidou.mybatisplus.annotation.IdType; | |||||
import com.baomidou.mybatisplus.annotation.TableField; | |||||
import com.baomidou.mybatisplus.annotation.TableId; | |||||
import com.baomidou.mybatisplus.annotation.TableName; | |||||
import lombok.Data; | |||||
import org.dubhe.base.BaseEntity; | |||||
import org.dubhe.domain.entity.Team; | |||||
import org.dubhe.domain.entity.User; | |||||
import javax.validation.constraints.NotBlank; | |||||
import javax.validation.constraints.NotNull; | |||||
/** | |||||
* @description 存储 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Data | |||||
@TableName("pt_storage") | |||||
public class PtStorage extends BaseEntity { | |||||
@TableId(value = "id", type = IdType.AUTO) | |||||
@NotNull(groups = {Update.class}) | |||||
private Long id; | |||||
@TableField(value = "name") | |||||
@NotBlank | |||||
private String name; | |||||
@TableField(value = "size") | |||||
private Integer size; | |||||
@TableField(value = "storageclass") | |||||
@NotBlank | |||||
private String storageclass; | |||||
@TableField(exist = false) | |||||
private Team team; | |||||
@TableField(exist = false) | |||||
private User createUser; | |||||
public void copy(PtStorage source) { | |||||
BeanUtil.copyProperties(source, this, CopyOptions.create().setIgnoreNullValue(true)); | |||||
} | |||||
public @interface Update { | |||||
} | |||||
} |
@@ -1,28 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.domain.dto; | |||||
import lombok.Data; | |||||
/** | |||||
* @description 数据集查询条件 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Data | |||||
public class PtDatasetQueryCriteria { | |||||
} |
@@ -1,41 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.domain.dto; | |||||
import lombok.Data; | |||||
import java.io.Serializable; | |||||
import java.sql.Timestamp; | |||||
/** | |||||
* @description 存储 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Data | |||||
public class PtStorageDTO implements Serializable { | |||||
private Long id; | |||||
private String name; | |||||
private Integer size; | |||||
private String storageclass; | |||||
private TeamSmallDTO team; | |||||
private UserSmallDTO createUser; | |||||
private Timestamp createTime; | |||||
private Timestamp updateTime; | |||||
private Boolean deleted; | |||||
} |
@@ -1,28 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.domain.dto; | |||||
import lombok.Data; | |||||
/** | |||||
* @description 存储 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Data | |||||
public class PtStorageQueryCriteria { | |||||
} |
@@ -103,8 +103,7 @@ public class PtTrainJobCreateDTO extends BaseImageDTO { | |||||
private Integer trainType; | private Integer trainType; | ||||
@ApiModelProperty(value = "节点个数", required = true) | @ApiModelProperty(value = "节点个数", required = true) | ||||
@Min(value = TrainUtil.NUMBER_ONE, message = "节点个数在1~8之间") | |||||
@Max(value = TrainUtil.NUMBER_EIGHT, message = "节点个数在1~8之间") | |||||
@Min(value = TrainUtil.NUMBER_ONE, message = "节点个数至少1个") | |||||
@NotNull(message = "节点个数") | @NotNull(message = "节点个数") | ||||
private Integer resourcesPoolNode; | private Integer resourcesPoolNode; | ||||
@@ -1,89 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.rest; | |||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; | |||||
import io.swagger.annotations.Api; | |||||
import io.swagger.annotations.ApiOperation; | |||||
import org.dubhe.base.DataResponseBody; | |||||
import org.dubhe.domain.PtDataset; | |||||
import org.dubhe.domain.dto.PtDatasetQueryCriteria; | |||||
import org.dubhe.service.PtDatasetService; | |||||
import org.springframework.security.access.prepost.PreAuthorize; | |||||
import org.springframework.validation.annotation.Validated; | |||||
import org.springframework.web.bind.annotation.*; | |||||
import springfox.documentation.annotations.ApiIgnore; | |||||
import javax.servlet.http.HttpServletResponse; | |||||
import java.io.IOException; | |||||
/** | |||||
* @description dataset管理 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Api(tags = "dataset管理") | |||||
@ApiIgnore | |||||
@RestController | |||||
@RequestMapping("/api/{version}/pt_dataset") | |||||
public class PtDatasetController { | |||||
private final PtDatasetService ptDatasetService; | |||||
public PtDatasetController(PtDatasetService ptDatasetService) { | |||||
this.ptDatasetService = ptDatasetService; | |||||
} | |||||
@ApiOperation("导出数据") | |||||
@GetMapping(value = "/download") | |||||
@PreAuthorize("@el.check('ptDataset:list')") | |||||
public void download(HttpServletResponse response, PtDatasetQueryCriteria criteria) throws IOException { | |||||
ptDatasetService.download(ptDatasetService.queryAll(criteria), response); | |||||
} | |||||
@GetMapping | |||||
@ApiOperation("查询dataset") | |||||
@PreAuthorize("@el.check('ptDataset:list')") | |||||
public DataResponseBody getPtDatasets(PtDatasetQueryCriteria criteria, Page page) { | |||||
return new DataResponseBody(ptDatasetService.queryAll(criteria, page)); | |||||
} | |||||
@PostMapping | |||||
@ApiOperation("新增dataset") | |||||
@PreAuthorize("@el.check('ptDataset:add')") | |||||
public DataResponseBody create(@Validated @RequestBody PtDataset resources) { | |||||
return new DataResponseBody(ptDatasetService.create(resources)); | |||||
} | |||||
@PutMapping | |||||
@ApiOperation("修改dataset") | |||||
@PreAuthorize("@el.check('ptDataset:edit')") | |||||
public DataResponseBody update(@Validated @RequestBody PtDataset resources) { | |||||
ptDatasetService.update(resources); | |||||
return new DataResponseBody(); | |||||
} | |||||
@ApiOperation("删除dataset") | |||||
@PreAuthorize("@el.check('ptDataset:del')") | |||||
@DeleteMapping | |||||
public DataResponseBody deleteAll(@RequestBody Long[] ids) { | |||||
ptDatasetService.deleteAll(ids); | |||||
return new DataResponseBody(); | |||||
} | |||||
} |
@@ -1,88 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.rest; | |||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; | |||||
import io.swagger.annotations.Api; | |||||
import io.swagger.annotations.ApiOperation; | |||||
import org.dubhe.base.DataResponseBody; | |||||
import org.dubhe.domain.PtDevEnvs; | |||||
import org.dubhe.domain.dto.PtDevEnvsQueryCriteria; | |||||
import org.dubhe.service.PtDevEnvsService; | |||||
import org.springframework.security.access.prepost.PreAuthorize; | |||||
import org.springframework.validation.annotation.Validated; | |||||
import org.springframework.web.bind.annotation.*; | |||||
import springfox.documentation.annotations.ApiIgnore; | |||||
import javax.servlet.http.HttpServletResponse; | |||||
import java.io.IOException; | |||||
/** | |||||
* @description devEnvs管理 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Api(tags = "devEnvs管理") | |||||
@ApiIgnore | |||||
@RestController | |||||
@RequestMapping("/api/{version}/pt_ev_envs") | |||||
public class PtDevEnvsController { | |||||
private final PtDevEnvsService ptDevEnvsService; | |||||
public PtDevEnvsController(PtDevEnvsService ptDevEnvsService) { | |||||
this.ptDevEnvsService = ptDevEnvsService; | |||||
} | |||||
@ApiOperation("导出数据") | |||||
@GetMapping(value = "/download") | |||||
@PreAuthorize("@el.check('ptDevEnvs:list')") | |||||
public void download(HttpServletResponse response, PtDevEnvsQueryCriteria criteria) throws IOException { | |||||
ptDevEnvsService.download(ptDevEnvsService.queryAll(criteria), response); | |||||
} | |||||
@GetMapping | |||||
@ApiOperation("查询devEnvs") | |||||
@PreAuthorize("@el.check('ptDevEnvs:list')") | |||||
public DataResponseBody getPtDevEnvss(PtDevEnvsQueryCriteria criteria, Page page) { | |||||
return new DataResponseBody(ptDevEnvsService.queryAll(criteria, page)); | |||||
} | |||||
@PostMapping | |||||
@ApiOperation("新增devEnvs") | |||||
@PreAuthorize("@el.check('ptDevEnvs:add')") | |||||
public DataResponseBody create(@Validated @RequestBody PtDevEnvs resources) { | |||||
return new DataResponseBody(ptDevEnvsService.create(resources)); | |||||
} | |||||
@PutMapping | |||||
@ApiOperation("修改devEnvs") | |||||
@PreAuthorize("@el.check('ptDevEnvs:edit')") | |||||
public DataResponseBody update(@Validated @RequestBody PtDevEnvs resources) { | |||||
ptDevEnvsService.update(resources); | |||||
return new DataResponseBody(); | |||||
} | |||||
@ApiOperation("删除devEnvs") | |||||
@PreAuthorize("@el.check('ptDevEnvs:del')") | |||||
@DeleteMapping | |||||
public DataResponseBody deleteAll(@RequestBody Long[] ids) { | |||||
ptDevEnvsService.deleteAll(ids); | |||||
return new DataResponseBody(); | |||||
} | |||||
} |
@@ -1,87 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.rest; | |||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; | |||||
import io.swagger.annotations.Api; | |||||
import io.swagger.annotations.ApiOperation; | |||||
import org.dubhe.base.DataResponseBody; | |||||
import org.dubhe.domain.PtStorage; | |||||
import org.dubhe.domain.dto.PtStorageQueryCriteria; | |||||
import org.dubhe.service.PtStorageService; | |||||
import org.springframework.security.access.prepost.PreAuthorize; | |||||
import org.springframework.validation.annotation.Validated; | |||||
import org.springframework.web.bind.annotation.*; | |||||
import springfox.documentation.annotations.ApiIgnore; | |||||
import javax.servlet.http.HttpServletResponse; | |||||
import java.io.IOException; | |||||
/** | |||||
* @description storage管理 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Api(tags = "storage管理") | |||||
@ApiIgnore | |||||
@RestController | |||||
@RequestMapping("/api/{version}/pt_storage") | |||||
public class PtStorageController { | |||||
private final PtStorageService ptStorageService; | |||||
public PtStorageController(PtStorageService ptStorageService) { | |||||
this.ptStorageService = ptStorageService; | |||||
} | |||||
@ApiOperation("导出数据") | |||||
@GetMapping(value = "/download") | |||||
@PreAuthorize("@el.check('ptStorage:list')") | |||||
public void download(HttpServletResponse response, PtStorageQueryCriteria criteria) throws IOException { | |||||
ptStorageService.download(ptStorageService.queryAll(criteria), response); | |||||
} | |||||
@GetMapping | |||||
@ApiOperation("查询storage") | |||||
@PreAuthorize("@el.check('ptStorage:list')") | |||||
public DataResponseBody getPtStorages(PtStorageQueryCriteria criteria, Page page) { | |||||
return new DataResponseBody(ptStorageService.queryAll(criteria, page)); | |||||
} | |||||
@PostMapping | |||||
@ApiOperation("新增storage") | |||||
@PreAuthorize("@el.check('ptStorage:add')") | |||||
public DataResponseBody create(@Validated @RequestBody PtStorage resources) { | |||||
return new DataResponseBody(ptStorageService.create(resources)); | |||||
} | |||||
@PutMapping | |||||
@ApiOperation("修改storage") | |||||
@PreAuthorize("@el.check('ptStorage:edit')") | |||||
public DataResponseBody update(@Validated @RequestBody PtStorage resources) { | |||||
ptStorageService.update(resources); | |||||
return new DataResponseBody(); | |||||
} | |||||
@ApiOperation("删除storage") | |||||
@PreAuthorize("@el.check('ptStorage:del')") | |||||
@DeleteMapping | |||||
public DataResponseBody deleteAll(@RequestBody Long[] ids) { | |||||
ptStorageService.deleteAll(ids); | |||||
return new DataResponseBody(); | |||||
} | |||||
} |
@@ -25,6 +25,8 @@ import org.dubhe.annotation.ApiVersion; | |||||
import org.dubhe.base.DataResponseBody; | import org.dubhe.base.DataResponseBody; | ||||
import org.dubhe.constant.Permissions; | import org.dubhe.constant.Permissions; | ||||
import org.dubhe.domain.dto.*; | import org.dubhe.domain.dto.*; | ||||
import org.dubhe.enums.TrainTypeEnum; | |||||
import org.dubhe.factory.DataResponseFactory; | |||||
import org.dubhe.service.PtTrainJobService; | import org.dubhe.service.PtTrainJobService; | ||||
import org.dubhe.service.PtTrainJobSpecsService; | import org.dubhe.service.PtTrainJobSpecsService; | ||||
import org.springframework.beans.factory.annotation.Autowired; | import org.springframework.beans.factory.annotation.Autowired; | ||||
@@ -94,6 +96,11 @@ public class PtTrainJobController { | |||||
@ApiOperation("创建训练任务") | @ApiOperation("创建训练任务") | ||||
@RequiresPermissions(Permissions.TRAINING_JOB) | @RequiresPermissions(Permissions.TRAINING_JOB) | ||||
public DataResponseBody createTrainJob(@Validated @RequestBody PtTrainJobCreateDTO ptTrainJobCreateDTO) { | public DataResponseBody createTrainJob(@Validated @RequestBody PtTrainJobCreateDTO ptTrainJobCreateDTO) { | ||||
if (TrainTypeEnum.isDistributeTrain(ptTrainJobCreateDTO.getTrainType()) | |||||
&& ptTrainJobCreateDTO.getResourcesPoolNode() < 2) { | |||||
// 分布式训练节点数校验补偿 | |||||
return DataResponseFactory.failed("分布式训练节点个数至少2个"); | |||||
} | |||||
return new DataResponseBody(ptTrainJobService.createTrainJobVersion(ptTrainJobCreateDTO)); | return new DataResponseBody(ptTrainJobService.createTrainJobVersion(ptTrainJobCreateDTO)); | ||||
} | } | ||||
@@ -1,91 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.service; | |||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; | |||||
import org.dubhe.domain.PtDataset; | |||||
import org.dubhe.domain.dto.PtDatasetDTO; | |||||
import org.dubhe.domain.dto.PtDatasetQueryCriteria; | |||||
import javax.servlet.http.HttpServletResponse; | |||||
import java.io.IOException; | |||||
import java.util.List; | |||||
import java.util.Map; | |||||
/**
 * @description Dataset query service: paged/unpaged queries, CRUD and Excel export.
 * @date 2020-03-17
 */
public interface PtDatasetService {
/**
 * Paged dataset query.
 *
 * @param criteria query criteria
 * @param page     pagination parameters
 * @return Map<String, Object> page result (records plus total count)
 */
Map<String, Object> queryAll(PtDatasetQueryCriteria criteria, Page page);
/**
 * Query all datasets without pagination.
 *
 * @param criteria query criteria
 * @return List<PtDatasetDTO> matching datasets as DTOs
 */
List<PtDatasetDTO> queryAll(PtDatasetQueryCriteria criteria);
/**
 * Find a dataset by id.
 *
 * @param id dataset id
 * @return PtDatasetDTO the matching dataset
 */
PtDatasetDTO findById(Long id);
/**
 * Create a dataset.
 *
 * @param resources dataset to persist
 * @return PtDatasetDTO the persisted dataset
 */
PtDatasetDTO create(PtDataset resources);
/**
 * Update an existing dataset.
 *
 * @param resources new field values; id selects the row
 */
void update(PtDataset resources);
/**
 * Delete multiple datasets.
 *
 * @param ids ids of the datasets to delete
 */
void deleteAll(Long[] ids);
/**
 * Export datasets as a downloadable Excel file.
 *
 * @param all      rows to export
 * @param response servlet response the file is written to
 * @throws IOException when writing the response fails
 */
void download(List<PtDatasetDTO> all, HttpServletResponse response) throws IOException;
}
@@ -1,91 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.service; | |||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; | |||||
import org.dubhe.domain.PtDevEnvs; | |||||
import org.dubhe.domain.dto.PtDevEnvsDTO; | |||||
import org.dubhe.domain.dto.PtDevEnvsQueryCriteria; | |||||
import javax.servlet.http.HttpServletResponse; | |||||
import java.io.IOException; | |||||
import java.util.List; | |||||
import java.util.Map; | |||||
/**
 * @description Development-environment query service: paged/unpaged queries, CRUD and Excel export.
 * @date 2020-03-17
 */
public interface PtDevEnvsService {
/**
 * Paged dev-environment query.
 *
 * @param criteria query criteria
 * @param page     pagination parameters
 * @return Map<String, Object> page result (records plus total count)
 */
Map<String, Object> queryAll(PtDevEnvsQueryCriteria criteria, Page page);
/**
 * Query all dev environments without pagination.
 *
 * @param criteria query criteria
 * @return List<PtDevEnvsDTO> matching dev environments as DTOs
 */
List<PtDevEnvsDTO> queryAll(PtDevEnvsQueryCriteria criteria);
/**
 * Find a dev environment by id.
 *
 * @param id dev-environment id
 * @return PtDevEnvsDTO the matching dev environment
 */
PtDevEnvsDTO findById(Long id);
/**
 * Create a dev environment.
 *
 * @param resources dev environment to persist
 * @return PtDevEnvsDTO the persisted dev environment
 */
PtDevEnvsDTO create(PtDevEnvs resources);
/**
 * Update an existing dev environment.
 *
 * @param resources new field values; id selects the row
 */
void update(PtDevEnvs resources);
/**
 * Delete multiple dev environments.
 *
 * @param ids ids of the dev environments to delete
 */
void deleteAll(Long[] ids);
/**
 * Export dev environments as a downloadable Excel file.
 *
 * @param all      rows to export
 * @param response servlet response the file is written to
 * @throws IOException when writing the response fails
 */
void download(List<PtDevEnvsDTO> all, HttpServletResponse response) throws IOException;
}
@@ -1,92 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.service; | |||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; | |||||
import org.dubhe.domain.PtStorage; | |||||
import org.dubhe.domain.dto.PtStorageDTO; | |||||
import org.dubhe.domain.dto.PtStorageQueryCriteria; | |||||
import javax.servlet.http.HttpServletResponse; | |||||
import java.io.IOException; | |||||
import java.util.List; | |||||
import java.util.Map; | |||||
/**
 * @description Storage query service: paged/unpaged queries, CRUD and Excel export.
 * @date 2020-03-17
 */
public interface PtStorageService {
/**
 * Paged storage query.
 *
 * @param criteria query criteria
 * @param page     pagination parameters
 * @return Map<String, Object> page result (records plus total count)
 */
Map<String, Object> queryAll(PtStorageQueryCriteria criteria, Page page);
/**
 * Query all storage records without pagination.
 *
 * @param criteria query criteria
 * @return List<PtStorageDTO> matching storage records as DTOs
 */
List<PtStorageDTO> queryAll(PtStorageQueryCriteria criteria);
/**
 * Find a storage record by id.
 *
 * @param id storage id
 * @return PtStorageDTO the matching storage record
 */
PtStorageDTO findById(Long id);
/**
 * Create a storage record.
 *
 * @param resources storage record to persist
 * @return PtStorageDTO the persisted storage record
 */
PtStorageDTO create(PtStorage resources);
/**
 * Update an existing storage record.
 *
 * @param resources new field values; id selects the row
 */
void update(PtStorage resources);
/**
 * Delete multiple storage records.
 *
 * @param ids ids of the storage records to delete
 */
void deleteAll(Long[] ids);
/**
 * Export storage records as a downloadable Excel file.
 *
 * @param all      rows to export
 * @param response servlet response the file is written to
 * @throws IOException when writing the response fails
 */
void download(List<PtStorageDTO> all, HttpServletResponse response) throws IOException;
}
@@ -49,7 +49,7 @@ public abstract class AbstractPodCallback implements PodCallbackAsyncService { | |||||
Thread.sleep(tryTime * 1000); | Thread.sleep(tryTime * 1000); | ||||
continue; | continue; | ||||
} catch (InterruptedException e) { | } catch (InterruptedException e) { | ||||
LogUtil.error(LogEnum.NOTE_BOOK,"AbstractPodCallback podCallBack InterruptedException", e); | |||||
LogUtil.error(LogEnum.NOTE_BOOK,"AbstractPodCallback podCallBack InterruptedException : {}", e); | |||||
// Restore interrupted state... | // Restore interrupted state... | ||||
Thread.currentThread().interrupt(); | Thread.currentThread().interrupt(); | ||||
} | } | ||||
@@ -61,7 +61,7 @@ public abstract class AbstractPodCallback implements PodCallbackAsyncService { | |||||
} | } | ||||
} | } | ||||
/** | |||||
/** | |||||
* pod 异步回调具体实现处理类 | * pod 异步回调具体实现处理类 | ||||
* @param times 第n次处理 | * @param times 第n次处理 | ||||
* @param k8sPodCallbackCreateDTO k8s回调实体类 | * @param k8sPodCallbackCreateDTO k8s回调实体类 | ||||
@@ -1,33 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.service.convert; | |||||
import org.dubhe.base.BaseConvert; | |||||
import org.dubhe.domain.PtDataset; | |||||
import org.dubhe.domain.dto.PtDatasetDTO; | |||||
import org.mapstruct.Mapper; | |||||
import org.mapstruct.ReportingPolicy; | |||||
/**
 * @description Dataset entity/DTO converter; MapStruct generates the Spring-managed
 *              implementation, silently ignoring unmapped target properties.
 * @date 2020-03-17
 */
@Mapper(componentModel = "spring", unmappedTargetPolicy = ReportingPolicy.IGNORE)
public interface PtDatasetConvert extends BaseConvert<PtDatasetDTO, PtDataset> {
}
@@ -1,34 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.service.convert; | |||||
import org.dubhe.base.BaseConvert; | |||||
import org.dubhe.domain.PtDevEnvs; | |||||
import org.dubhe.domain.dto.PtDevEnvsDTO; | |||||
import org.mapstruct.Mapper; | |||||
import org.mapstruct.ReportingPolicy; | |||||
/**
 * @description Dev-environment entity/DTO converter; MapStruct generates the
 *              Spring-managed implementation, silently ignoring unmapped target properties.
 * @date 2020-03-17
 */
@Mapper(componentModel = "spring", unmappedTargetPolicy = ReportingPolicy.IGNORE)
public interface PtDevEnvsConvert extends BaseConvert<PtDevEnvsDTO, PtDevEnvs> {
}
@@ -1,34 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.service.convert; | |||||
import org.dubhe.base.BaseConvert; | |||||
import org.dubhe.domain.entity.PtImage; | |||||
import org.dubhe.domain.dto.PtImageDTO; | |||||
import org.mapstruct.Mapper; | |||||
import org.mapstruct.ReportingPolicy; | |||||
/**
 * @description Image entity/DTO converter; MapStruct generates the Spring-managed
 *              implementation, silently ignoring unmapped target properties.
 * @date 2020-03-17
 */
@Mapper(componentModel = "spring", unmappedTargetPolicy = ReportingPolicy.IGNORE)
public interface PtImageConvert extends BaseConvert<PtImageDTO, PtImage> {
}
@@ -1,33 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.service.convert; | |||||
import org.dubhe.base.BaseConvert; | |||||
import org.dubhe.domain.PtStorage; | |||||
import org.dubhe.domain.dto.PtStorageDTO; | |||||
import org.mapstruct.Mapper; | |||||
import org.mapstruct.ReportingPolicy; | |||||
/**
 * @description Storage entity/DTO converter; MapStruct generates the Spring-managed
 *              implementation, silently ignoring unmapped target properties.
 * @date 2020-03-17
 */
@Mapper(componentModel = "spring", unmappedTargetPolicy = ReportingPolicy.IGNORE)
public interface PtStorageConvert extends BaseConvert<PtStorageDTO, PtStorage> {
}
@@ -1,34 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.service.convert; | |||||
import org.dubhe.base.BaseConvert; | |||||
import org.dubhe.domain.entity.PtTrainJob; | |||||
import org.dubhe.domain.dto.PtTrainJobDTO; | |||||
import org.mapstruct.Mapper; | |||||
import org.mapstruct.ReportingPolicy; | |||||
/**
 * @description Training-job entity/DTO converter; MapStruct generates the
 *              Spring-managed implementation, silently ignoring unmapped target properties.
 * @date 2020-03-17
 */
@Mapper(componentModel = "spring", unmappedTargetPolicy = ReportingPolicy.IGNORE)
public interface PtTrainJobConvert extends BaseConvert<PtTrainJobDTO, PtTrainJob> {
}
@@ -65,7 +65,7 @@ public class NoteBookAsyncServiceImpl extends AbstractPodCallback implements Pod | |||||
noteBookService.updateById(notebook); | noteBookService.updateById(notebook); | ||||
return true; | return true; | ||||
} catch (Exception e) { | } catch (Exception e) { | ||||
LogUtil.error(LogEnum.NOTE_BOOK, "NoteBook doCallback error!", e); | |||||
LogUtil.error(LogEnum.NOTE_BOOK, "NoteBook doCallback error!{}", e); | |||||
return false; | return false; | ||||
} | } | ||||
} | } | ||||
@@ -222,7 +222,7 @@ public class NoteBookServiceImpl implements NoteBookService { | |||||
return (HttpUtils.isSuccess(result.getCode()) | return (HttpUtils.isSuccess(result.getCode()) | ||||
|| K8sResponseEnum.EXISTS.getCode().equals(result.getCode())); | || K8sResponseEnum.EXISTS.getCode().equals(result.getCode())); | ||||
} catch (Exception e) { | } catch (Exception e) { | ||||
LogUtil.error(LogEnum.NOTE_BOOK, "createNoteBook调用jupyterResourceApi.createWithPvc异常!", e); | |||||
LogUtil.error(LogEnum.NOTE_BOOK, "createNoteBook调用jupyterResourceApi.createWithPvc异常!{}", e); | |||||
noteBook.setK8sStatusCode(BLANK); | noteBook.setK8sStatusCode(BLANK); | ||||
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e)); | noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e)); | ||||
return false; | return false; | ||||
@@ -305,7 +305,7 @@ public class NoteBookServiceImpl implements NoteBookService { | |||||
returnStr = "删除失败"; | returnStr = "删除失败"; | ||||
} | } | ||||
} catch (Exception e) { | } catch (Exception e) { | ||||
LogUtil.error(LogEnum.NOTE_BOOK, "deleteNoteBook调用jupyterResourceApi.delete异常!", e); | |||||
LogUtil.error(LogEnum.NOTE_BOOK, "deleteNoteBook调用jupyterResourceApi.delete异常!{}", e); | |||||
noteBook.setK8sStatusCode(BLANK); | noteBook.setK8sStatusCode(BLANK); | ||||
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e)); | noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e)); | ||||
returnStr = "删除失败"; | returnStr = "删除失败"; | ||||
@@ -392,7 +392,7 @@ public class NoteBookServiceImpl implements NoteBookService { | |||||
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(result)); | noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(result)); | ||||
return HttpUtils.isSuccess(result.getCode()); | return HttpUtils.isSuccess(result.getCode()); | ||||
} catch (Exception e) { | } catch (Exception e) { | ||||
LogUtil.error(LogEnum.NOTE_BOOK, "notebook调用jupyterResourceApi.createWithPvc异常!", e); | |||||
LogUtil.error(LogEnum.NOTE_BOOK, "notebook调用jupyterResourceApi.createWithPvc异常!{}", e); | |||||
noteBook.setK8sStatusCode(BLANK); | noteBook.setK8sStatusCode(BLANK); | ||||
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e)); | noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e)); | ||||
return false; | return false; | ||||
@@ -447,7 +447,7 @@ public class NoteBookServiceImpl implements NoteBookService { | |||||
returnStr = "停止" + NotebookUtil.FAILED; | returnStr = "停止" + NotebookUtil.FAILED; | ||||
} | } | ||||
} catch (Exception e) { | } catch (Exception e) { | ||||
LogUtil.error(LogEnum.NOTE_BOOK, "停止notebook调用jupyterResourceApi.delete异常!", e); | |||||
LogUtil.error(LogEnum.NOTE_BOOK, "停止notebook调用jupyterResourceApi.delete异常!{}", e); | |||||
noteBook.setK8sStatusCode(BLANK); | noteBook.setK8sStatusCode(BLANK); | ||||
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e)); | noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e)); | ||||
returnStr = "停止" + NotebookUtil.FAILED; | returnStr = "停止" + NotebookUtil.FAILED; | ||||
@@ -527,7 +527,7 @@ public class NoteBookServiceImpl implements NoteBookService { | |||||
} | } | ||||
return NoteBookStatusEnum.convert(result.getPhase()); | return NoteBookStatusEnum.convert(result.getPhase()); | ||||
} catch (Exception e) { | } catch (Exception e) { | ||||
LogUtil.error(LogEnum.NOTE_BOOK, "notebook nameSpace:{} resourceName:{} 查询异常!", noteBook.getK8sNamespace(), noteBook.getK8sResourceName(), e); | |||||
LogUtil.error(LogEnum.NOTE_BOOK, "notebook nameSpace:{} resourceName:{} 查询异常!{}", noteBook.getK8sNamespace(), noteBook.getK8sResourceName(), e); | |||||
noteBook.setK8sStatusCode(BLANK); | noteBook.setK8sStatusCode(BLANK); | ||||
noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e)); | noteBook.setK8sStatusInfo(NotebookUtil.getK8sStatusInfo(e)); | ||||
return null; | return null; | ||||
@@ -1,121 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.service.impl; | |||||
import com.baomidou.mybatisplus.core.metadata.IPage; | |||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; | |||||
import org.dubhe.dao.PtDatasetMapper; | |||||
import org.dubhe.domain.PtDataset; | |||||
import org.dubhe.domain.dto.PtDatasetDTO; | |||||
import org.dubhe.domain.dto.PtDatasetQueryCriteria; | |||||
import org.dubhe.service.PtDatasetService; | |||||
import org.dubhe.service.convert.PtDatasetConvert; | |||||
import org.dubhe.utils.FileUtil; | |||||
import org.dubhe.utils.PageUtil; | |||||
import org.dubhe.utils.WrapperHelp; | |||||
import org.springframework.beans.factory.annotation.Autowired; | |||||
import org.springframework.cache.annotation.CacheConfig; | |||||
import org.springframework.cache.annotation.CacheEvict; | |||||
import org.springframework.cache.annotation.Cacheable; | |||||
import org.springframework.stereotype.Service; | |||||
import org.springframework.transaction.annotation.Propagation; | |||||
import org.springframework.transaction.annotation.Transactional; | |||||
import javax.servlet.http.HttpServletResponse; | |||||
import java.io.IOException; | |||||
import java.util.ArrayList; | |||||
import java.util.LinkedHashMap; | |||||
import java.util.List; | |||||
import java.util.Map; | |||||
/** | |||||
* @description 数据集管理 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Service | |||||
@CacheConfig(cacheNames = "ptDataset") | |||||
@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class) | |||||
public class PtDatasetServiceImpl implements PtDatasetService { | |||||
@Autowired | |||||
private PtDatasetMapper ptDatasetMapper; | |||||
@Autowired | |||||
private PtDatasetConvert ptDatasetConvert; | |||||
@Override | |||||
@Cacheable | |||||
public Map<String, Object> queryAll(PtDatasetQueryCriteria criteria, Page page) { | |||||
IPage<PtDataset> ptDatasets = ptDatasetMapper.selectPage(page, WrapperHelp.getWrapper(criteria)); | |||||
return PageUtil.toPage(ptDatasets, ptDatasetConvert::toDto); | |||||
} | |||||
@Override | |||||
@Cacheable | |||||
public List<PtDatasetDTO> queryAll(PtDatasetQueryCriteria criteria) { | |||||
return ptDatasetConvert.toDto(ptDatasetMapper.selectList(WrapperHelp.getWrapper(criteria))); | |||||
} | |||||
@Override | |||||
@Cacheable(key = "#p0") | |||||
public PtDatasetDTO findById(Long id) { | |||||
PtDataset ptDataset = ptDatasetMapper.selectById(id); | |||||
return ptDatasetConvert.toDto(ptDataset); | |||||
} | |||||
@Override | |||||
@CacheEvict(allEntries = true) | |||||
@Transactional(rollbackFor = Exception.class) | |||||
public PtDatasetDTO create(PtDataset resources) { | |||||
ptDatasetMapper.insert(resources); | |||||
return ptDatasetConvert.toDto(resources); | |||||
} | |||||
@Override | |||||
@CacheEvict(allEntries = true) | |||||
@Transactional(rollbackFor = Exception.class) | |||||
public void update(PtDataset resources) { | |||||
PtDataset ptDataset = ptDatasetMapper.selectById(resources.getId()); | |||||
ptDataset.copy(resources); | |||||
ptDatasetMapper.updateById(ptDataset); | |||||
} | |||||
@Override | |||||
@CacheEvict(allEntries = true) | |||||
public void deleteAll(Long[] ids) { | |||||
for (Long id : ids) { | |||||
ptDatasetMapper.deleteById(id); | |||||
} | |||||
} | |||||
@Override | |||||
public void download(List<PtDatasetDTO> all, HttpServletResponse response) throws IOException { | |||||
List<Map<String, Object>> list = new ArrayList<>(); | |||||
for (PtDatasetDTO ptDataset : all) { | |||||
Map<String, Object> map = new LinkedHashMap<>(); | |||||
map.put(" name", ptDataset.getName()); | |||||
map.put(" remark", ptDataset.getRemark()); | |||||
map.put(" type", ptDataset.getType()); | |||||
map.put(" team", ptDataset.getTeam()); | |||||
map.put(" createUser", ptDataset.getCreateUser()); | |||||
map.put(" createTime", ptDataset.getCreateTime()); | |||||
list.add(map); | |||||
} | |||||
FileUtil.downloadExcel(list, response); | |||||
} | |||||
} |
@@ -1,123 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.service.impl; | |||||
import com.baomidou.mybatisplus.core.metadata.IPage; | |||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; | |||||
import org.dubhe.dao.PtDevEnvsMapper; | |||||
import org.dubhe.domain.PtDevEnvs; | |||||
import org.dubhe.domain.dto.PtDevEnvsDTO; | |||||
import org.dubhe.domain.dto.PtDevEnvsQueryCriteria; | |||||
import org.dubhe.service.PtDevEnvsService; | |||||
import org.dubhe.service.convert.PtDevEnvsConvert; | |||||
import org.dubhe.utils.FileUtil; | |||||
import org.dubhe.utils.PageUtil; | |||||
import org.dubhe.utils.WrapperHelp; | |||||
import org.springframework.beans.factory.annotation.Autowired; | |||||
import org.springframework.cache.annotation.CacheConfig; | |||||
import org.springframework.cache.annotation.CacheEvict; | |||||
import org.springframework.cache.annotation.Cacheable; | |||||
import org.springframework.stereotype.Service; | |||||
import org.springframework.transaction.annotation.Propagation; | |||||
import org.springframework.transaction.annotation.Transactional; | |||||
import javax.servlet.http.HttpServletResponse; | |||||
import java.io.IOException; | |||||
import java.util.ArrayList; | |||||
import java.util.LinkedHashMap; | |||||
import java.util.List; | |||||
import java.util.Map; | |||||
/** | |||||
* @description 开发环境 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Service | |||||
@CacheConfig(cacheNames = "ptDevEnvs") | |||||
@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class) | |||||
public class PtDevEnvsServiceImpl implements PtDevEnvsService { | |||||
@Autowired | |||||
private PtDevEnvsMapper ptDevEnvsMapper; | |||||
private PtDevEnvsConvert ptDevEnvsConvert; | |||||
@Override | |||||
@Cacheable | |||||
public Map<String, Object> queryAll(PtDevEnvsQueryCriteria criteria, Page page) { | |||||
IPage<PtDevEnvs> ptDevEnvss = ptDevEnvsMapper.selectPage(page, WrapperHelp.getWrapper(criteria)); | |||||
return PageUtil.toPage(ptDevEnvss, ptDevEnvsConvert::toDto); | |||||
} | |||||
@Override | |||||
@Cacheable | |||||
public List<PtDevEnvsDTO> queryAll(PtDevEnvsQueryCriteria criteria) { | |||||
return ptDevEnvsConvert.toDto(ptDevEnvsMapper.selectList(WrapperHelp.getWrapper(criteria))); | |||||
} | |||||
@Override | |||||
@Cacheable(key = "#p0") | |||||
public PtDevEnvsDTO findById(Long id) { | |||||
PtDevEnvs ptDevEnvs = ptDevEnvsMapper.selectById(id); | |||||
return ptDevEnvsConvert.toDto(ptDevEnvs); | |||||
} | |||||
@Override | |||||
@CacheEvict(allEntries = true) | |||||
@Transactional(rollbackFor = Exception.class) | |||||
public PtDevEnvsDTO create(PtDevEnvs resources) { | |||||
ptDevEnvsMapper.insert(resources); | |||||
return ptDevEnvsConvert.toDto(resources); | |||||
} | |||||
@Override | |||||
@CacheEvict(allEntries = true) | |||||
@Transactional(rollbackFor = Exception.class) | |||||
public void update(PtDevEnvs resources) { | |||||
PtDevEnvs ptDevEnvs = ptDevEnvsMapper.selectById(resources.getId()); | |||||
ptDevEnvs.copy(resources); | |||||
ptDevEnvsMapper.updateById(ptDevEnvs); | |||||
} | |||||
@Override | |||||
@CacheEvict(allEntries = true) | |||||
public void deleteAll(Long[] ids) { | |||||
for (Long id : ids) { | |||||
ptDevEnvsMapper.deleteById(id); | |||||
} | |||||
} | |||||
@Override | |||||
public void download(List<PtDevEnvsDTO> all, HttpServletResponse response) throws IOException { | |||||
List<Map<String, Object>> list = new ArrayList<>(); | |||||
for (PtDevEnvsDTO ptDevEnvs : all) { | |||||
Map<String, Object> map = new LinkedHashMap<>(); | |||||
map.put(" name", ptDevEnvs.getName()); | |||||
map.put(" remark", ptDevEnvs.getRemark()); | |||||
map.put(" podnum", ptDevEnvs.getPodNum()); | |||||
map.put(" gpunum", ptDevEnvs.getGpuNum()); | |||||
map.put(" memnum", ptDevEnvs.getMemNum()); | |||||
map.put(" cpunum", ptDevEnvs.getCpuNum()); | |||||
map.put(" duration", ptDevEnvs.getDuration()); | |||||
map.put(" startTime", ptDevEnvs.getStartTime()); | |||||
map.put(" closeTime", ptDevEnvs.getCloseTime()); | |||||
list.add(map); | |||||
} | |||||
FileUtil.downloadExcel(list, response); | |||||
} | |||||
} |
@@ -121,7 +121,7 @@ public class PtImageServiceImpl implements PtImageService { | |||||
} | } | ||||
ptImages = ptImageMapper.selectPage(page, query); | ptImages = ptImageMapper.selectPage(page, query); | ||||
} catch (Exception e) { | } catch (Exception e) { | ||||
LogUtil.error(LogEnum.BIZ_TRAIN, "User {} query mirror list display exception :{}, request information :{}", e, ptImageQueryDTO); | |||||
LogUtil.error(LogEnum.BIZ_TRAIN, "User {} query mirror list display exception :{}, request information :{}",user.getId() ,e, ptImageQueryDTO); | |||||
throw new BusinessException("查询镜像列表展示异常"); | throw new BusinessException("查询镜像列表展示异常"); | ||||
} | } | ||||
List<PtImageQueryVO> ptImageQueryResult = ptImages.getRecords().stream().map(x -> { | List<PtImageQueryVO> ptImageQueryResult = ptImages.getRecords().stream().map(x -> { | ||||
@@ -1,122 +0,0 @@ | |||||
/** | |||||
* Copyright 2020 Zhejiang Lab. All Rights Reserved. | |||||
* | |||||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||||
* you may not use this file except in compliance with the License. | |||||
* You may obtain a copy of the License at | |||||
* | |||||
* http://www.apache.org/licenses/LICENSE-2.0 | |||||
* | |||||
* Unless required by applicable law or agreed to in writing, software | |||||
* distributed under the License is distributed on an "AS IS" BASIS, | |||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
* See the License for the specific language governing permissions and | |||||
* limitations under the License. | |||||
* ============================================================= | |||||
*/ | |||||
package org.dubhe.service.impl; | |||||
import com.baomidou.mybatisplus.core.metadata.IPage; | |||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; | |||||
import org.dubhe.dao.PtStorageMapper; | |||||
import org.dubhe.domain.PtStorage; | |||||
import org.dubhe.domain.dto.PtStorageDTO; | |||||
import org.dubhe.domain.dto.PtStorageQueryCriteria; | |||||
import org.dubhe.service.PtStorageService; | |||||
import org.dubhe.service.convert.PtStorageConvert; | |||||
import org.dubhe.utils.FileUtil; | |||||
import org.dubhe.utils.PageUtil; | |||||
import org.dubhe.utils.WrapperHelp; | |||||
import org.springframework.beans.factory.annotation.Autowired; | |||||
import org.springframework.cache.annotation.CacheConfig; | |||||
import org.springframework.cache.annotation.CacheEvict; | |||||
import org.springframework.cache.annotation.Cacheable; | |||||
import org.springframework.stereotype.Service; | |||||
import org.springframework.transaction.annotation.Propagation; | |||||
import org.springframework.transaction.annotation.Transactional; | |||||
import javax.servlet.http.HttpServletResponse; | |||||
import java.io.IOException; | |||||
import java.util.ArrayList; | |||||
import java.util.LinkedHashMap; | |||||
import java.util.List; | |||||
import java.util.Map; | |||||
/** | |||||
* @description 存储服务类 | |||||
* @date 2020-03-17 | |||||
*/ | |||||
@Service | |||||
@CacheConfig(cacheNames = "ptStorage") | |||||
@Transactional(propagation = Propagation.SUPPORTS, readOnly = true, rollbackFor = Exception.class) | |||||
public class PtStorageServiceImpl implements PtStorageService { | |||||
@Autowired | |||||
private PtStorageMapper ptStorageMapper; | |||||
@Autowired | |||||
private PtStorageConvert ptStorageConvert; | |||||
@Override | |||||
@Cacheable | |||||
public Map<String, Object> queryAll(PtStorageQueryCriteria criteria, Page page) { | |||||
IPage<PtStorage> ptStorages = ptStorageMapper.selectPage(page, WrapperHelp.getWrapper(criteria)); | |||||
return PageUtil.toPage(ptStorages, ptStorageConvert::toDto); | |||||
} | |||||
@Override | |||||
@Cacheable | |||||
public List<PtStorageDTO> queryAll(PtStorageQueryCriteria criteria) { | |||||
return ptStorageConvert.toDto(ptStorageMapper.selectList(WrapperHelp.getWrapper(criteria))); | |||||
} | |||||
@Override | |||||
@Cacheable(key = "#p0") | |||||
public PtStorageDTO findById(Long id) { | |||||
PtStorage ptStorage = ptStorageMapper.selectById(id); | |||||
return ptStorageConvert.toDto(ptStorage); | |||||
} | |||||
@Override | |||||
@CacheEvict(allEntries = true) | |||||
@Transactional(rollbackFor = Exception.class) | |||||
public PtStorageDTO create(PtStorage resources) { | |||||
ptStorageMapper.insert(resources); | |||||
return ptStorageConvert.toDto(resources); | |||||
} | |||||
@Override | |||||
@CacheEvict(allEntries = true) | |||||
@Transactional(rollbackFor = Exception.class) | |||||
public void update(PtStorage resources) { | |||||
PtStorage ptStorage = ptStorageMapper.selectById(resources.getId()); | |||||
ptStorage.copy(resources); | |||||
ptStorageMapper.updateById(ptStorage); | |||||
} | |||||
@Override | |||||
@CacheEvict(allEntries = true) | |||||
public void deleteAll(Long[] ids) { | |||||
for (Long id : ids) { | |||||
ptStorageMapper.deleteById(id); | |||||
} | |||||
} | |||||
@Override | |||||
public void download(List<PtStorageDTO> all, HttpServletResponse response) throws IOException { | |||||
List<Map<String, Object>> list = new ArrayList<>(); | |||||
for (PtStorageDTO ptStorage : all) { | |||||
Map<String, Object> map = new LinkedHashMap<>(); | |||||
map.put(" name", ptStorage.getName()); | |||||
map.put(" size", ptStorage.getSize()); | |||||
map.put(" storageclass", ptStorage.getStorageclass()); | |||||
map.put(" createUser", ptStorage.getCreateUser()); | |||||
map.put(" createTime", ptStorage.getCreateTime()); | |||||
list.add(map); | |||||
} | |||||
FileUtil.downloadExcel(list, response); | |||||
} | |||||
} |
@@ -8,7 +8,7 @@ spring: | |||||
redis: | redis: | ||||
#数据库索引 | #数据库索引 | ||||
database: 0 | database: 0 | ||||
host: | |||||
host: 127.0.0.1 | |||||
port: 6379 | port: 6379 | ||||
password: | password: | ||||
#连接超时时间 | #连接超时时间 | ||||
@@ -18,8 +18,8 @@ spring: | |||||
db-type: com.alibaba.druid.pool.DruidDataSource | db-type: com.alibaba.druid.pool.DruidDataSource | ||||
driverClassName: net.sf.log4jdbc.sql.jdbcapi.DriverSpy | driverClassName: net.sf.log4jdbc.sql.jdbcapi.DriverSpy | ||||
url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true&useInformationSchema=true | url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true&useInformationSchema=true | ||||
username: | |||||
password: | |||||
username: test | |||||
password: test | |||||
# 初始化配置 | # 初始化配置 | ||||
initial-size: 3 | initial-size: 3 | ||||
@@ -59,8 +59,8 @@ spring: | |||||
type: com.alibaba.druid.pool.DruidDataSource | type: com.alibaba.druid.pool.DruidDataSource | ||||
driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy | driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy | ||||
url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true&useInformationSchema=true | url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true&useInformationSchema=true | ||||
username: | |||||
password: | |||||
username: test | |||||
password: test | |||||
#指定data_dataset表 主键id 生成策略 | #指定data_dataset表 主键id 生成策略 | ||||
sharding: | sharding: | ||||
@@ -86,19 +86,19 @@ k8s: | |||||
# k8s集群配置文件 | # k8s集群配置文件 | ||||
kubeconfig: kubeconfig | kubeconfig: kubeconfig | ||||
# nfs服务暴露的IP地址 如需测试需修改为合适的地址 | # nfs服务暴露的IP地址 如需测试需修改为合适的地址 | ||||
nfs: | |||||
nfs: 127.0.0.1 | |||||
#nfs服务端 共享目录 | #nfs服务端 共享目录 | ||||
nfs-root-path: /nfs/ | nfs-root-path: /nfs/ | ||||
nfs-root-windows-path: "Z:" | nfs-root-windows-path: "Z:" | ||||
# 命名空间关键字 | # 命名空间关键字 | ||||
namespace: namespace | namespace: namespace | ||||
# k8s ingress域名 如需测试需修改为合适的域名 | # k8s ingress域名 如需测试需修改为合适的域名 | ||||
host: | |||||
# k8s ingress-controller 对外port | |||||
port: | |||||
# elasticsearch暴露的服务地址 | |||||
host: notebook.test.com | |||||
# k8s ingress-controller 对外port,获取方式:部署 ingress-controller 后,在k8s master节点执行 kubectl get svc -A | grep 'ingress-nginx-controller' 获取80对应的外部端口 | |||||
port: 33334 | |||||
# elasticsearch暴露的服务地址,获取方式 部署 管理集群日志 后,在k8s msater节点执行 kubectl get svc -A | grep 'elasticsearch' 获取9200对应的外部端口 | |||||
elasticsearch: | elasticsearch: | ||||
hostlist: ${eshostlist::} | |||||
hostlist: ${eshostlist:127.0.0.1:33333} | |||||
# 日志采集配置信息 | # 日志采集配置信息 | ||||
log: | log: | ||||
type: _doc | type: _doc | ||||
@@ -120,9 +120,10 @@ k8s: | |||||
nfs-storage-class-name: zjlab-nfs-storage | nfs-storage-class-name: zjlab-nfs-storage | ||||
#配置harbor | #配置harbor | ||||
harbor: | harbor: | ||||
address: | |||||
username: | |||||
password: | |||||
# habor服务域名,需要将用户持有域名解析到 harbor服务所在服务器ip | |||||
address: harbor.test.com | |||||
username: admin | |||||
password: Harbor12345 | |||||
model-name: train | model-name: train | ||||
# data模块配置 | # data模块配置 | ||||
data: | data: | ||||
@@ -157,8 +158,10 @@ data: | |||||
# minio配置 | # minio配置 | ||||
minio: | minio: | ||||
url: http://127.0.0.1:9000/ | url: http://127.0.0.1:9000/ | ||||
accessKey: | |||||
secretKey: | |||||
# 部署 minio 时指定的 MINIO_ACCESS_KEY | |||||
accessKey: admin | |||||
# 部署 minio 时指定的 MINIO_SECRET_KEY | |||||
secretKey: 123@abc.com | |||||
bucketName: dubhe-dev | bucketName: dubhe-dev | ||||
presignedUrlExpiryTime: 300 | presignedUrlExpiryTime: 300 | ||||
annotation: /annotation/ | annotation: /annotation/ | ||||
@@ -9,13 +9,13 @@ spring: | |||||
repositories: | repositories: | ||||
enabled: false | enabled: false | ||||
# 邮箱配置 | |||||
# 邮箱配置,用于发送用户注册邮箱验证码 | |||||
mail: | mail: | ||||
host: | |||||
host: smtp.163.com | |||||
# 邮件的发送者 163邮箱(开发测试时使用 发送延时 20秒,每天发送量限制 50) | # 邮件的发送者 163邮箱(开发测试时使用 发送延时 20秒,每天发送量限制 50) | ||||
username: | |||||
username: test@163.com | |||||
# SMTP授权密码 | # SMTP授权密码 | ||||
password: | |||||
password: AAAAAAAAAAAAAAAA | |||||
protocol: smtp | protocol: smtp | ||||
properties.mail.smtp.auth: true | properties.mail.smtp.auth: true | ||||
properties.mail.smtp.port: 465 #465或者994 | properties.mail.smtp.port: 465 #465或者994 | ||||
@@ -32,10 +32,10 @@ loginCode: | |||||
height: 28 | height: 28 | ||||
length: 4 | length: 4 | ||||
#密码加密传输,前端公钥加密,后端私钥解密 | |||||
#密码加密传输,前端公钥加密,后端私钥解密,与前端src/settings.js publicKey搭配使用;可使用genKeyPair方法进行生成 | |||||
rsa: | rsa: | ||||
private_key: | |||||
public_key: | |||||
private_key: MIIBUwIBADANBgkqhkiG9w0BAQEFAASCAT0wggE5AgEAAkEA0vfvyTdGJkdbHkB8mp0f3FE0GYP3AYPaJF7jUd1M0XxFSE2ceK3k2kw20YvQ09NJKk+OMjWQl9WitG9pB6tSCQIDAQABAkA2SimBrWC2/wvauBuYqjCFwLvYiRYqZKThUS3MZlebXJiLB+Ue/gUifAAKIg1avttUZsHBHrop4qfJCwAI0+YRAiEA+W3NK/RaXtnRqmoUUkb59zsZUBLpvZgQPfj1MhyHDz0CIQDYhsAhPJ3mgS64NbUZmGWuuNKp5coY2GIj/zYDMJp6vQIgUueLFXv/eZ1ekgz2Oi67MNCk5jeTF2BurZqNLR3MSmUCIFT3Q6uHMtsB9Eha4u7hS31tj1UWE+D+ADzp59MGnoftAiBeHT7gDMuqeJHPL4b+kC+gzV4FGTfhR9q3tTbklZkD2A== | |||||
public_key: MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANL378k3RiZHWx5AfJqdH9xRNBmD9wGD2iRe41HdTNF8RUhNnHit5NpMNtGL0NPTSSpPjjI1kJfVorRvaQerUgkCAwEAAQ== | |||||
#jwt | #jwt | ||||
jwt: | jwt: | ||||
@@ -43,7 +43,7 @@ jwt: | |||||
# 令牌前缀 | # 令牌前缀 | ||||
token-start-with: Bearer | token-start-with: Bearer | ||||
# 必须使用最少88位的Base64对该令牌进行编码 | # 必须使用最少88位的Base64对该令牌进行编码 | ||||
base64-secret: | |||||
base64-secret: ZmQ0ZGI5NjQ0MDQwY2I4MjMxY2Y3ZmI3MjdhN2ZmMjNhODViOTg1ZGE0NTBjMGM4NDA5NzYxMjdjOWMwYWRmZTBlZjlhNGY3ZTg4Y2U3YTE1ODVkZDU5Y2Y3OGYwZWE1NzUzNWQ2YjFjZDc0NGMxZWU2MmQ3MjY1NzJmMTIzNDU= | |||||
# 令牌过期时间 此处单位/毫秒 ,默认24小时,可在此网站生成 https://www.convertworld.com/zh-hans/time/milliseconds.html | # 令牌过期时间 此处单位/毫秒 ,默认24小时,可在此网站生成 https://www.convertworld.com/zh-hans/time/milliseconds.html | ||||
token-validity-in-seconds: 86400000 | token-validity-in-seconds: 86400000 | ||||
# 在线用户key | # 在线用户key | ||||
@@ -56,7 +56,7 @@ swagger: | |||||
enabled: true | enabled: true | ||||
# 后台添加用户的初始密码 | # 后台添加用户的初始密码 | ||||
initial_password: "" | |||||
initial_password: "123456" | |||||
train-job: | train-job: | ||||
namespace: "namespace-" | namespace: "namespace-" | ||||
@@ -123,12 +123,14 @@ logging: | |||||
dubhe-proxy: | dubhe-proxy: | ||||
visual: | visual: | ||||
keyword: visual | keyword: visual | ||||
server: | |||||
port: | |||||
# 可视化服务后端 ip | |||||
server: 127.0.0.1 | |||||
# 可视化服务后端 端口 | |||||
port: 9898 | |||||
refine: | refine: | ||||
keyword: refine | keyword: refine | ||||
server: localhost | server: localhost | ||||
port: | |||||
port: 9797 | |||||
# 延时全局配置 | # 延时全局配置 | ||||
delay: | delay: | ||||
@@ -1,19 +0,0 @@ | |||||
apiVersion: v1 | |||||
clusters: | |||||
- cluster: | |||||
certificate-authority-data: | |||||
server: | |||||
name: kubernetes | |||||
contexts: | |||||
- context: | |||||
cluster: kubernetes | |||||
user: kubernetes-admin | |||||
name: kubernetes-admin@kubernetes | |||||
current-context: kubernetes-admin@kubernetes | |||||
kind: Config | |||||
preferences: {} | |||||
users: | |||||
- name: kubernetes-admin | |||||
user: | |||||
client-certificate-data: | |||||
client-key-data: |
@@ -19,6 +19,7 @@ package org.dubhe; | |||||
import org.apache.shiro.SecurityUtils; | import org.apache.shiro.SecurityUtils; | ||||
import org.apache.shiro.subject.Subject; | import org.apache.shiro.subject.Subject; | ||||
import org.apache.shiro.util.ThreadContext; | |||||
import org.apache.shiro.web.mgt.DefaultWebSecurityManager; | import org.apache.shiro.web.mgt.DefaultWebSecurityManager; | ||||
import org.dubhe.support.login.UsernamePasswordCaptchaToken; | import org.dubhe.support.login.UsernamePasswordCaptchaToken; | ||||
import org.junit.Assert; | import org.junit.Assert; | ||||
@@ -65,8 +66,9 @@ public class BaseTest { | |||||
public void setup() { | public void setup() { | ||||
this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); | this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); | ||||
SecurityUtils.setSecurityManager(defaultWebSecurityManager); | SecurityUtils.setSecurityManager(defaultWebSecurityManager); | ||||
ThreadContext.bind(defaultWebSecurityManager); | |||||
Subject subject = SecurityUtils.getSubject(); | Subject subject = SecurityUtils.getSubject(); | ||||
UsernamePasswordCaptchaToken token = new UsernamePasswordCaptchaToken("admin", "admin"); | |||||
UsernamePasswordCaptchaToken token = new UsernamePasswordCaptchaToken("admin", "123456"); | |||||
token.setRememberMe(true); | token.setRememberMe(true); | ||||
subject.login(token); | subject.login(token); | ||||
} | } | ||||
@@ -37,4 +37,4 @@ public class GlobalStateMachine { | |||||
*/ | */ | ||||
private FileStateMachine fileStateMachine; | private FileStateMachine fileStateMachine; | ||||
} | |||||
} |
@@ -35,7 +35,6 @@ import org.dubhe.base.MagicNumConstant; | |||||
import org.dubhe.constant.NumberConstant; | import org.dubhe.constant.NumberConstant; | ||||
import org.dubhe.data.constant.*; | import org.dubhe.data.constant.*; | ||||
import org.dubhe.data.dao.DatasetMapper; | import org.dubhe.data.dao.DatasetMapper; | ||||
import org.dubhe.data.dao.DatasetVersionFileMapper; | |||||
import org.dubhe.data.dao.DatasetVersionMapper; | import org.dubhe.data.dao.DatasetVersionMapper; | ||||
import org.dubhe.data.dao.TaskMapper; | import org.dubhe.data.dao.TaskMapper; | ||||
import org.dubhe.data.domain.dto.*; | import org.dubhe.data.domain.dto.*; | ||||
@@ -136,12 +135,6 @@ public class DatasetServiceImpl extends ServiceImpl<DatasetMapper, Dataset> impl | |||||
@Autowired | @Autowired | ||||
public FileService fileService; | public FileService fileService; | ||||
/** | |||||
* 数据集转换 | |||||
*/ | |||||
@Autowired | |||||
private DatasetConvert datasetConvert; | |||||
/** | /** | ||||
* 数据集标签服务类 | * 数据集标签服务类 | ||||
*/ | */ | ||||
@@ -201,9 +194,6 @@ public class DatasetServiceImpl extends ServiceImpl<DatasetMapper, Dataset> impl | |||||
@Autowired | @Autowired | ||||
private LabelGroupServiceImpl labelGroupService; | private LabelGroupServiceImpl labelGroupService; | ||||
@Autowired | |||||
private DatasetVersionFileMapper datasetVersionFileMapper; | |||||
/** | /** | ||||
* 检测是否为公共数据集 | * 检测是否为公共数据集 | ||||
* | * | ||||
@@ -434,12 +424,10 @@ public class DatasetServiceImpl extends ServiceImpl<DatasetMapper, Dataset> impl | |||||
Map<String, Long> labelNameMap = labelList.stream().collect(Collectors.toMap(Label::getName, Label::getId)); | Map<String, Long> labelNameMap = labelList.stream().collect(Collectors.toMap(Label::getName, Label::getId)); | ||||
if(!Objects.isNull(labelNameMap.get(label.getName()))){ | if(!Objects.isNull(labelNameMap.get(label.getName()))){ | ||||
datasetLabelService.insert(DatasetLabel.builder().datasetId(datasetId).labelId(labelNameMap.get(label.getName())).build()); | datasetLabelService.insert(DatasetLabel.builder().datasetId(datasetId).labelId(labelNameMap.get(label.getName())).build()); | ||||
datasetGroupLabelService.insert(DatasetGroupLabel.builder().labelGroupId(dataset.getLabelGroupId()).labelId(labelNameMap.get(label.getName())).build()); | |||||
}else { | }else { | ||||
insertLabelData(label,datasetId); | insertLabelData(label,datasetId); | ||||
} | } | ||||
if(!Objects.isNull(dataset.getLabelGroupId()) && COCO_ID.compareTo(dataset.getLabelGroupId()) != 0){ | |||||
datasetGroupLabelService.insert(DatasetGroupLabel.builder().labelGroupId(dataset.getLabelGroupId()).labelId(label.getId()).build()); | |||||
} | |||||
}else { | }else { | ||||
insertLabelData(label,datasetId); | insertLabelData(label,datasetId); | ||||
} | } | ||||
@@ -27,13 +27,11 @@ import org.dubhe.data.constant.ErrorEnum; | |||||
import org.dubhe.data.dao.LabelMapper; | import org.dubhe.data.dao.LabelMapper; | ||||
import org.dubhe.data.domain.dto.LabelCreateDTO; | import org.dubhe.data.domain.dto.LabelCreateDTO; | ||||
import org.dubhe.data.domain.dto.LabelDTO; | import org.dubhe.data.domain.dto.LabelDTO; | ||||
import org.dubhe.data.domain.entity.Dataset; | |||||
import org.dubhe.data.domain.entity.DatasetGroupLabel; | import org.dubhe.data.domain.entity.DatasetGroupLabel; | ||||
import org.dubhe.data.domain.entity.DatasetLabel; | import org.dubhe.data.domain.entity.DatasetLabel; | ||||
import org.dubhe.data.domain.entity.Label; | import org.dubhe.data.domain.entity.Label; | ||||
import org.dubhe.data.service.DatasetGroupLabelService; | import org.dubhe.data.service.DatasetGroupLabelService; | ||||
import org.dubhe.data.service.DatasetLabelService; | import org.dubhe.data.service.DatasetLabelService; | ||||
import org.dubhe.data.service.DatasetService; | |||||
import org.dubhe.data.service.LabelService; | import org.dubhe.data.service.LabelService; | ||||
import org.dubhe.enums.LogEnum; | import org.dubhe.enums.LogEnum; | ||||
import org.dubhe.exception.BusinessException; | import org.dubhe.exception.BusinessException; | ||||
@@ -92,12 +90,15 @@ public class LabelServiceImpl extends ServiceImpl<LabelMapper, Label> implements | |||||
List<Label> labels = getBaseMapper().listLabelByDatasetId(datasetId); | List<Label> labels = getBaseMapper().listLabelByDatasetId(datasetId); | ||||
List<Long> pubLabelIds = getPubLabelIds(); | List<Long> pubLabelIds = getPubLabelIds(); | ||||
if(!CollectionUtils.isEmpty(labels)){ | if(!CollectionUtils.isEmpty(labels)){ | ||||
//根据数据集ID查询标签组ID | |||||
List<LabelDTO> labelDTOS = baseMapper.listByDatesetId(datasetId); | |||||
Map<Long, Long> labelMap = labelDTOS.stream().collect(Collectors.toMap(LabelDTO::getId, LabelDTO::getLabelGroupId)); | |||||
//查询数据集所属标签组下标签 | //查询数据集所属标签组下标签 | ||||
return labels.stream().map(a -> { | return labels.stream().map(a -> { | ||||
LabelDTO dto = new LabelDTO(); | LabelDTO dto = new LabelDTO(); | ||||
dto.setName(a.getName()); | dto.setName(a.getName()); | ||||
dto.setColor(a.getColor()); | dto.setColor(a.getColor()); | ||||
dto.setLabelGroupId(pubLabelIds.contains(a.getId()) ? COCO_ID : null); | |||||
dto.setLabelGroupId(pubLabelIds.contains(a.getId()) ? COCO_ID : labelMap.get(a.getId())); | |||||
dto.setType(a.getType()); | dto.setType(a.getType()); | ||||
dto.setId(a.getId()); | dto.setId(a.getId()); | ||||
return dto; | return dto; | ||||
@@ -403,7 +404,7 @@ public class LabelServiceImpl extends ServiceImpl<LabelMapper, Label> implements | |||||
*/ | */ | ||||
@Override | @Override | ||||
public int selectCount(Long id) { | public int selectCount(Long id) { | ||||
return baseMapper.listByGroupId(id).size(); | |||||
return datasetGroupLabelService.listByGroupId(id).size(); | |||||
} | } | ||||
/** | /** | ||||
@@ -298,7 +298,7 @@ public class HarborApiImpl implements HarborApi { | |||||
//获取harbor中所有项目的名称 | //获取harbor中所有项目的名称 | ||||
Set<String> names = projectIdMap.keySet(); | Set<String> names = projectIdMap.keySet(); | ||||
//判断harbor中是否具有改项目 | //判断harbor中是否具有改项目 | ||||
names.stream().forEach(name->{ | |||||
names.forEach(name->{ | |||||
if(urlSplits[MagicNumConstant.ONE].equals(name)){ | if(urlSplits[MagicNumConstant.ONE].equals(name)){ | ||||
//发送删除请求 | //发送删除请求 | ||||
HttpClientUtils.sendHttpsDelete(tagSearchUrl+dataRep+TAG_SEARCH_PARAMS+SymbolConstant.SLASH+tagUrls[MagicNumConstant.ONE],harborName,harborPassword); | HttpClientUtils.sendHttpsDelete(tagSearchUrl+dataRep+TAG_SEARCH_PARAMS+SymbolConstant.SLASH+tagUrls[MagicNumConstant.ONE],harborName,harborPassword); | ||||
@@ -583,7 +583,7 @@ public class JupyterResourceApiImpl implements JupyterResourceApi { | |||||
if (delayDelete != null && delayDelete > 0){ | if (delayDelete != null && delayDelete > 0){ | ||||
taskYamlBO.append(statefulSet); | taskYamlBO.append(statefulSet); | ||||
} | } | ||||
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml信息为{}", statefulSetName, YamlUtils.dumpAsYaml(statefulSet)); | |||||
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml info is : {}", statefulSetName, YamlUtils.dumpAsYaml(statefulSet)); | |||||
statefulSet = client.apps().statefulSets().create(statefulSet); | statefulSet = client.apps().statefulSets().create(statefulSet); | ||||
LogUtil.info(LogEnum.BIZ_K8S, "{} deployed successfully", statefulSetName); | LogUtil.info(LogEnum.BIZ_K8S, "{} deployed successfully", statefulSetName); | ||||
return statefulSet; | return statefulSet; | ||||
@@ -625,7 +625,7 @@ public class JupyterResourceApiImpl implements JupyterResourceApi { | |||||
if (delayDelete != null && delayDelete > 0){ | if (delayDelete != null && delayDelete > 0){ | ||||
taskYamlBO.append(svc); | taskYamlBO.append(svc); | ||||
} | } | ||||
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml信息为{}", svcName, YamlUtils.dumpAsYaml(svc)); | |||||
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml info is : {}", svcName, YamlUtils.dumpAsYaml(svc)); | |||||
svc = client.services().create(svc); | svc = client.services().create(svc); | ||||
LogUtil.info(LogEnum.BIZ_K8S, "{} deployed successfully", svcName); | LogUtil.info(LogEnum.BIZ_K8S, "{} deployed successfully", svcName); | ||||
return svc; | return svc; | ||||
@@ -674,7 +674,7 @@ public class JupyterResourceApiImpl implements JupyterResourceApi { | |||||
if (delayDelete != null && delayDelete > 0){ | if (delayDelete != null && delayDelete > 0){ | ||||
taskYamlBO.append(ingress); | taskYamlBO.append(ingress); | ||||
} | } | ||||
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml信息为{}", ingressName, YamlUtils.dumpAsYaml(ingress)); | |||||
LogUtil.info(LogEnum.BIZ_K8S, "Ready to deploy {}, yaml info is : {}", ingressName, YamlUtils.dumpAsYaml(ingress)); | |||||
ingress = client.extensions().ingresses().create(ingress); | ingress = client.extensions().ingresses().create(ingress); | ||||
LogUtil.info(LogEnum.BIZ_K8S, "{} deployed successfully", ingressName); | LogUtil.info(LogEnum.BIZ_K8S, "{} deployed successfully", ingressName); | ||||
return ingress; | return ingress; | ||||
@@ -54,7 +54,6 @@ import org.dubhe.k8s.domain.bo.TaskYamlBO; | |||||
import org.dubhe.k8s.domain.entity.K8sTask; | import org.dubhe.k8s.domain.entity.K8sTask; | ||||
import org.dubhe.k8s.domain.resource.BizJob; | import org.dubhe.k8s.domain.resource.BizJob; | ||||
import org.dubhe.k8s.domain.resource.BizPersistentVolumeClaim; | import org.dubhe.k8s.domain.resource.BizPersistentVolumeClaim; | ||||
import org.dubhe.k8s.domain.resource.BizPod; | |||||
import org.dubhe.k8s.domain.vo.PtJupyterJobVO; | import org.dubhe.k8s.domain.vo.PtJupyterJobVO; | ||||
import org.dubhe.k8s.enums.ImagePullPolicyEnum; | import org.dubhe.k8s.enums.ImagePullPolicyEnum; | ||||
import org.dubhe.k8s.enums.K8sKindEnum; | import org.dubhe.k8s.enums.K8sKindEnum; | ||||
@@ -37,23 +37,4 @@ public class K8sTaskBO extends K8sTask { | |||||
*/ | */ | ||||
private Long maxStopUnixTime; | private Long maxStopUnixTime; | ||||
public K8sTaskBO(K8sTask k8sTask){ | |||||
this.setId(k8sTask.getId()); | |||||
this.setNamespace(k8sTask.getNamespace()); | |||||
this.setResourceName(k8sTask.getResourceName()); | |||||
this.setTaskYaml(k8sTask.getTaskYaml()); | |||||
this.setBusiness(k8sTask.getBusiness()); | |||||
this.setApplyUnixTime(k8sTask.getApplyUnixTime()); | |||||
this.setApplyDisplayTime(k8sTask.getApplyDisplayTime()); | |||||
this.setApplyStatus(k8sTask.getApplyStatus()); | |||||
this.setStopUnixTime(k8sTask.getStopUnixTime()); | |||||
this.setStopDisplayTime(k8sTask.getStopDisplayTime()); | |||||
this.setStopStatus(k8sTask.getStopStatus()); | |||||
this.setCreateTime(k8sTask.getCreateTime()); | |||||
this.setCreateUserId(k8sTask.getCreateUserId()); | |||||
this.setUpdateTime(k8sTask.getUpdateTime()); | |||||
this.setUpdateUserId(k8sTask.getUpdateUserId()); | |||||
this.setDeleted(k8sTask.getDeleted()); | |||||
} | |||||
} | } |
@@ -98,4 +98,13 @@ public class K8sTask extends BaseEntity{ | |||||
boolean needDelete = stopUnixTime < time && K8sTaskStatusEnum.UNEXECUTED.getStatus().equals(stopStatus); | boolean needDelete = stopUnixTime < time && K8sTaskStatusEnum.UNEXECUTED.getStatus().equals(stopStatus); | ||||
return needDelete && (needCreate ^ needDelete); | return needDelete && (needCreate ^ needDelete); | ||||
} | } | ||||
/** | |||||
* 判断任务是否已超时 | |||||
* @param time | |||||
* @return | |||||
*/ | |||||
public boolean overtime(Long time){ | |||||
return applyUnixTime < time && K8sTaskStatusEnum.UNEXECUTED.getStatus().equals(applyStatus) && stopUnixTime < time && K8sTaskStatusEnum.UNEXECUTED.getStatus().equals(stopStatus); | |||||
} | |||||
} | } |
@@ -97,7 +97,7 @@ public class PodWatcher implements CommandLineRunner, Watcher<Pod> { | |||||
*/ | */ | ||||
@Override | @Override | ||||
public void onClose(KubernetesClientException cause) { | public void onClose(KubernetesClientException cause) { | ||||
LogUtil.warn(LogEnum.BIZ_K8S, cause.getMessage()); | |||||
LogUtil.warn(LogEnum.BIZ_K8S," onClose=>cause : {}", cause.getMessage()); | |||||
k8sUtils.getClient().pods().inAnyNamespace().watch(this); | k8sUtils.getClient().pods().inAnyNamespace().watch(this); | ||||
} | } | ||||
@@ -55,7 +55,7 @@ public class TrainJobObserver implements Observer { | |||||
boolean trainJobFailed = PodPhaseEnum.FAILED.getPhase().equals(pod.getPhase()) && BizEnum.ALGORITHM.getBizCode().equals(pod.getBusinessLabel()) && SpringContextHolder.getActiveProfile().equals(pod.getLabel(K8sLabelConstants.PLATFORM_RUNTIME_ENV)); | boolean trainJobFailed = PodPhaseEnum.FAILED.getPhase().equals(pod.getPhase()) && BizEnum.ALGORITHM.getBizCode().equals(pod.getBusinessLabel()) && SpringContextHolder.getActiveProfile().equals(pod.getLabel(K8sLabelConstants.PLATFORM_RUNTIME_ENV)); | ||||
if (trainJobFailed){ | if (trainJobFailed){ | ||||
LogUtil.warn(LogEnum.BIZ_K8S,"delete failed train job resourceName {};phase {};podName {}",pod.getLabel(K8sLabelConstants.BASE_TAG_SOURCE),pod.getPhase(),pod.getName()); | LogUtil.warn(LogEnum.BIZ_K8S,"delete failed train job resourceName {};phase {};podName {}",pod.getLabel(K8sLabelConstants.BASE_TAG_SOURCE),pod.getPhase(),pod.getName()); | ||||
//trainJobApi.delete(pod.getNamespace(),pod.getLabel(K8sLabelConstants.BASE_TAG_SOURCE)); | |||||
trainJobApi.delete(pod.getNamespace(),pod.getLabel(K8sLabelConstants.BASE_TAG_SOURCE)); | |||||
} | } | ||||
} | } | ||||
} | } | ||||
@@ -73,8 +73,9 @@ public interface RecycleTaskService { | |||||
* 回收文件资源 | * 回收文件资源 | ||||
* | * | ||||
* @param recycleTask 回收任务 | * @param recycleTask 回收任务 | ||||
* @return String 回收任务失败返回的失败信息 | |||||
*/ | */ | ||||
void deleteFileByCMD(RecycleTask recycleTask); | |||||
String deleteFileByCMD(RecycleTask recycleTask); | |||||
/** | /** | ||||
* 修改回收任务状态 | * 修改回收任务状态 | ||||
@@ -17,10 +17,12 @@ | |||||
package org.dubhe.service.impl; | package org.dubhe.service.impl; | ||||
import cn.hutool.core.date.DateUtil; | import cn.hutool.core.date.DateUtil; | ||||
import cn.hutool.core.util.RandomUtil; | |||||
import cn.hutool.core.util.StrUtil; | import cn.hutool.core.util.StrUtil; | ||||
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; | import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; | ||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; | ||||
import org.dubhe.base.MagicNumConstant; | import org.dubhe.base.MagicNumConstant; | ||||
import org.dubhe.base.ResponseCode; | |||||
import org.dubhe.config.NfsConfig; | import org.dubhe.config.NfsConfig; | ||||
import org.dubhe.config.RecycleConfig; | import org.dubhe.config.RecycleConfig; | ||||
import org.dubhe.constatnts.UserConstant; | import org.dubhe.constatnts.UserConstant; | ||||
@@ -31,7 +33,6 @@ import org.dubhe.domain.dto.UserDTO; | |||||
import org.dubhe.domain.entity.RecycleTask; | import org.dubhe.domain.entity.RecycleTask; | ||||
import org.dubhe.enums.LogEnum; | import org.dubhe.enums.LogEnum; | ||||
import org.dubhe.enums.RecycleStatusEnum; | import org.dubhe.enums.RecycleStatusEnum; | ||||
import org.dubhe.enums.RecycleTypeEnum; | |||||
import org.dubhe.exception.BusinessException; | import org.dubhe.exception.BusinessException; | ||||
import org.dubhe.service.RecycleTaskService; | import org.dubhe.service.RecycleTaskService; | ||||
import org.dubhe.utils.*; | import org.dubhe.utils.*; | ||||
@@ -122,13 +123,6 @@ public class RecycleTaskServiceImpl implements RecycleTaskService { | |||||
recycleTaskCreateDTO.setRecycleDelayDate(recycleConfig.getDate()); | recycleTaskCreateDTO.setRecycleDelayDate(recycleConfig.getDate()); | ||||
} | } | ||||
//如果是删除文件任务,校验根目录及系统环境 | |||||
if (Objects.equals(recycleTaskCreateDTO.getRecycleType(), RecycleTypeEnum.FILE.getCode()) && | |||||
recycleTaskCreateDTO.getRecycleCondition().startsWith(nfsConfig.getRootDir() + nfsConfig.getBucket())) { | |||||
LogUtil.error(LogEnum.GARBAGE_RECYCLE, "User {} created recycle task failed,file sourcePath :{} invalid", currentUser.getUsername(), recycleTaskCreateDTO.getRecycleCondition()); | |||||
throw new BusinessException("创建回收文件任务失败"); | |||||
} | |||||
RecycleTask recycleTask = new RecycleTask(); | RecycleTask recycleTask = new RecycleTask(); | ||||
BeanUtils.copyProperties(recycleTaskCreateDTO, recycleTask); | BeanUtils.copyProperties(recycleTaskCreateDTO, recycleTask); | ||||
@@ -153,11 +147,14 @@ public class RecycleTaskServiceImpl implements RecycleTaskService { | |||||
public void delTempInvalidResources(String sourcePath) { | public void delTempInvalidResources(String sourcePath) { | ||||
UserDTO currentUser = JwtUtils.getCurrentUserDto(); | UserDTO currentUser = JwtUtils.getCurrentUserDto(); | ||||
if (currentUser.getId() != UserConstant.ADMIN_USER_ID) { | if (currentUser.getId() != UserConstant.ADMIN_USER_ID) { | ||||
throw new BusinessException("不支持普通用户操作"); | |||||
throw new BusinessException(ResponseCode.UNAUTHORIZED, "不支持普通用户操作"); | |||||
} | } | ||||
RecycleTask recycleTask = new RecycleTask(); | RecycleTask recycleTask = new RecycleTask(); | ||||
recycleTask.setRecycleCondition(sourcePath); | recycleTask.setRecycleCondition(sourcePath); | ||||
deleteFileByCMD(recycleTask); | |||||
String resMsg = deleteFileByCMD(recycleTask); | |||||
if (StrUtil.isNotEmpty(resMsg)) { | |||||
throw new BusinessException(ResponseCode.ERROR, resMsg); | |||||
} | |||||
} | } | ||||
/** | /** | ||||
@@ -195,34 +192,47 @@ public class RecycleTaskServiceImpl implements RecycleTaskService { | |||||
List<RecycleTask> recycleTaskList = recycleTaskMapper.selectList(new LambdaQueryWrapper<RecycleTask>() | List<RecycleTask> recycleTaskList = recycleTaskMapper.selectList(new LambdaQueryWrapper<RecycleTask>() | ||||
.ne(RecycleTask::getRecycleStatus, RecycleStatusEnum.SUCCEEDED.getCode()) | .ne(RecycleTask::getRecycleStatus, RecycleStatusEnum.SUCCEEDED.getCode()) | ||||
.le(RecycleTask::getRecycleDelayDate, new Date())); | |||||
.le(RecycleTask::getRecycleDelayDate, DateUtil.format(new Date(), "yyyy-MM-dd"))); | |||||
return recycleTaskList; | return recycleTaskList; | ||||
} | } | ||||
/** | /** | ||||
* 回收文件资源 | |||||
* | |||||
* 回收天枢一站式平台中的无效文件资源 | |||||
* 处理方式:获取到回收任务表中的无效文件路径,通过linux命令进行具体删除 | |||||
* 文件路径必须满足格式如:/nfs/当前系统环境/具体删除的文件或文件夹(至少三层目录) | |||||
* @param recycleTask 回收任务 | * @param recycleTask 回收任务 | ||||
* @return String 回收任务失败返回的失败信息 | |||||
*/ | */ | ||||
@Override | @Override | ||||
public void deleteFileByCMD(RecycleTask recycleTask) { | |||||
public String deleteFileByCMD(RecycleTask recycleTask) { | |||||
String sourcePath = nfsUtil.formatPath(recycleTask.getRecycleCondition()); | String sourcePath = nfsUtil.formatPath(recycleTask.getRecycleCondition()); | ||||
//判断该路径是否存在文件或文件夹 | //判断该路径是否存在文件或文件夹 | ||||
String emptyDir = ""; | String emptyDir = ""; | ||||
if (!nfsUtil.fileOrDirIsEmpty(sourcePath) && sourcePath.startsWith(nfsUtil.formatPath(nfsConfig.getRootDir() + nfsConfig.getBucket()))) { | |||||
try { | |||||
sourcePath = sourcePath.endsWith(StrUtil.SLASH) ? sourcePath : sourcePath + StrUtil.SLASH; | |||||
emptyDir = "/tmp/empty_" + recycleTask.getId() + StrUtil.SLASH; | |||||
String errMsg = ""; | |||||
String nfsBucket = nfsUtil.formatPath(nfsConfig.getRootDir() + nfsConfig.getBucket() + StrUtil.SLASH); | |||||
sourcePath = sourcePath.endsWith(StrUtil.SLASH) ? sourcePath : sourcePath + StrUtil.SLASH; | |||||
try { | |||||
//校验回收文件是否存在以及回收文件必须至少在当前环境目录下还有一层目录,如:/nfs/dubhe-test/xxxx/ | |||||
if (!nfsUtil.fileOrDirIsEmpty(sourcePath) | |||||
&& sourcePath.startsWith((nfsBucket)) | |||||
&& sourcePath.length() > nfsBucket.length()) { | |||||
emptyDir = "/tmp/empty_" + (recycleTask.getId() == null ? RandomUtil.randomString(MagicNumConstant.TWO) : recycleTask.getId()) + StrUtil.SLASH; | |||||
LogUtil.info(LogEnum.GARBAGE_RECYCLE, "recycle task sourcePath:{},emptyDir:{}", sourcePath, emptyDir); | LogUtil.info(LogEnum.GARBAGE_RECYCLE, "recycle task sourcePath:{},emptyDir:{}", sourcePath, emptyDir); | ||||
Process process = Runtime.getRuntime().exec(new String[]{"/bin/sh", "-c", String.format(RecycleConfig.DEL_COMMAND, userName, nfsIp, emptyDir, emptyDir, sourcePath, emptyDir, sourcePath)}); | Process process = Runtime.getRuntime().exec(new String[]{"/bin/sh", "-c", String.format(RecycleConfig.DEL_COMMAND, userName, nfsIp, emptyDir, emptyDir, sourcePath, emptyDir, sourcePath)}); | ||||
//资源回收完毕修改回收表状态 | //资源回收完毕修改回收表状态 | ||||
if (recycleTask.getId() != null) { | if (recycleTask.getId() != null) { | ||||
updateRecycleStatus(recycleTask, recycleSourceIsOk(process)); | updateRecycleStatus(recycleTask, recycleSourceIsOk(process)); | ||||
} | } | ||||
} catch (Exception e) { | |||||
LogUtil.error(LogEnum.GARBAGE_RECYCLE, "recycle task id:{} Run failed,fail Exception:{}", recycleTask.getId(), e); | |||||
} else { | |||||
LogUtil.info(LogEnum.GARBAGE_RECYCLE, "recycle task failure!!! sourcePath:{}", sourcePath); | |||||
errMsg = "recycle task failure!!! sourcePath:" + sourcePath; | |||||
} | } | ||||
} catch (Exception e) { | |||||
LogUtil.error(LogEnum.GARBAGE_RECYCLE, "recycle task id:{} Run failed, fail Exception:{}", recycleTask.getId(), e); | |||||
errMsg = "recycle task failure!!! sourcePath:" + sourcePath + "and exception message:" + e.getMessage(); | |||||
} | } | ||||
return errMsg; | |||||
} | } | ||||
/** | /** | ||||
@@ -316,5 +326,4 @@ public class RecycleTaskServiceImpl implements RecycleTaskService { | |||||
} | } | ||||
} | } | ||||
} | } | ||||
} | } |
@@ -19,6 +19,7 @@ package org.dubhe; | |||||
import org.apache.shiro.SecurityUtils; | import org.apache.shiro.SecurityUtils; | ||||
import org.apache.shiro.subject.Subject; | import org.apache.shiro.subject.Subject; | ||||
import org.apache.shiro.util.ThreadContext; | |||||
import org.apache.shiro.web.mgt.DefaultWebSecurityManager; | import org.apache.shiro.web.mgt.DefaultWebSecurityManager; | ||||
import org.dubhe.support.login.UsernamePasswordCaptchaToken; | import org.dubhe.support.login.UsernamePasswordCaptchaToken; | ||||
import org.junit.Assert; | import org.junit.Assert; | ||||
@@ -65,8 +66,9 @@ public class BaseTest { | |||||
public void setup() { | public void setup() { | ||||
this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); | this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build(); | ||||
SecurityUtils.setSecurityManager(defaultWebSecurityManager); | SecurityUtils.setSecurityManager(defaultWebSecurityManager); | ||||
ThreadContext.bind(defaultWebSecurityManager); | |||||
Subject subject = SecurityUtils.getSubject(); | Subject subject = SecurityUtils.getSubject(); | ||||
UsernamePasswordCaptchaToken token = new UsernamePasswordCaptchaToken("admin", "admin"); | |||||
UsernamePasswordCaptchaToken token = new UsernamePasswordCaptchaToken("admin", "123456"); | |||||
token.setRememberMe(true); | token.setRememberMe(true); | ||||
subject.login(token); | subject.login(token); | ||||
} | } | ||||
@@ -116,6 +116,11 @@ public class DelayCudResourceTask { | |||||
k8sTaskService.update(k8sTask); | k8sTaskService.update(k8sTask); | ||||
}); | }); | ||||
} | } | ||||
if (k8sTask.overtime(curUnixTime)){ | |||||
k8sTask.setApplyStatus(K8sTaskStatusEnum.EXECUTED.getStatus()); | |||||
k8sTask.setStopStatus(K8sTaskStatusEnum.EXECUTED.getStatus()); | |||||
k8sTaskService.update(k8sTask); | |||||
} | |||||
} | } | ||||
}catch (Exception e){ | }catch (Exception e){ | ||||
LogUtil.error(LogEnum.BIZ_K8S,"delayCudResource error {}",e); | LogUtil.error(LogEnum.BIZ_K8S,"delayCudResource error {}",e); | ||||
@@ -8,7 +8,7 @@ spring: | |||||
redis: | redis: | ||||
#数据库索引 | #数据库索引 | ||||
database: 0 | database: 0 | ||||
host: | |||||
host: 127.0.0.1 | |||||
port: 6379 | port: 6379 | ||||
password: | password: | ||||
#连接超时时间 | #连接超时时间 | ||||
@@ -17,9 +17,9 @@ spring: | |||||
druid: | druid: | ||||
db-type: com.alibaba.druid.pool.DruidDataSource | db-type: com.alibaba.druid.pool.DruidDataSource | ||||
driverClassName: net.sf.log4jdbc.sql.jdbcapi.DriverSpy | driverClassName: net.sf.log4jdbc.sql.jdbcapi.DriverSpy | ||||
url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true | |||||
username: | |||||
password: | |||||
url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true&useInformationSchema=true | |||||
username: test | |||||
password: test | |||||
# 初始化配置 | # 初始化配置 | ||||
initial-size: 3 | initial-size: 3 | ||||
@@ -59,8 +59,8 @@ spring: | |||||
type: com.alibaba.druid.pool.DruidDataSource | type: com.alibaba.druid.pool.DruidDataSource | ||||
driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy | driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy | ||||
url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true&useInformationSchema=true | url: jdbc:log4jdbc:mysql://127.0.0.1:3306/dubhe-dev?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&allowMultiQueries=true&useInformationSchema=true | ||||
username: | |||||
password: | |||||
username: test | |||||
password: test | |||||
#指定data_dataset表 主键id 生成策略 | #指定data_dataset表 主键id 生成策略 | ||||
sharding: | sharding: | ||||
@@ -83,22 +83,22 @@ spring: | |||||
show: true | show: true | ||||
k8s: | k8s: | ||||
# k8s集群配置文件 | |||||
# k8s集群配置文件,将k8s集群master下$HOME/.kube/config文件 复制到dubhe-task/src/main/resources/下 重命名为 kubeconfig | |||||
kubeconfig: kubeconfig | kubeconfig: kubeconfig | ||||
# nfs服务暴露的IP地址 如需测试需修改为合适的地址 | # nfs服务暴露的IP地址 如需测试需修改为合适的地址 | ||||
nfs: | |||||
nfs: 127.0.0.1 | |||||
#nfs服务端 共享目录 | #nfs服务端 共享目录 | ||||
nfs-root-path: /nfs/ | nfs-root-path: /nfs/ | ||||
nfs-root-windows-path: "Z:" | nfs-root-windows-path: "Z:" | ||||
# 命名空间关键字 | # 命名空间关键字 | ||||
namespace: namespace | namespace: namespace | ||||
# k8s ingress域名 如需测试需修改为合适的域名 | # k8s ingress域名 如需测试需修改为合适的域名 | ||||
host: | |||||
# k8s ingress-controller 对外port | |||||
port: 32493 | |||||
# elasticsearch暴露的服务地址 | |||||
host: notebook.test.com | |||||
# k8s ingress-controller 对外port,获取方式:部署 ingress-controller 后,在k8s master节点执行 kubectl get svc -A | grep 'ingress-nginx-controller' 获取80对应的外部端口 | |||||
port: 33334 | |||||
# elasticsearch暴露的服务地址,获取方式 部署 管理集群日志 后,在k8s msater节点执行 kubectl get svc -A | grep 'elasticsearch' 获取9200对应的外部端口 | |||||
elasticsearch: | elasticsearch: | ||||
hostlist: ${eshostlist::32321} | |||||
hostlist: ${eshostlist:127.0.0.1:33333} | |||||
# 日志采集配置信息 | # 日志采集配置信息 | ||||
log: | log: | ||||
type: _doc | type: _doc | ||||
@@ -116,13 +116,14 @@ k8s: | |||||
# 展示Pod的CPU使用率,Memory使用量,GPU使用率的grafana地址 | # 展示Pod的CPU使用率,Memory使用量,GPU使用率的grafana地址 | ||||
pod: | pod: | ||||
metrics: | metrics: | ||||
grafanaUrl: http://127.0.0.1:30006/d/Sx0_4-WGk/jian-kong-xin-xi?orgId=1&refresh=5s&kiosk&var-pod= | |||||
grafanaUrl: http://127.0.0.1:30006/d/job/monitor?orgId=1&refresh=5s&kiosk&var-pod= | |||||
nfs-storage-class-name: zjlab-nfs-storage | nfs-storage-class-name: zjlab-nfs-storage | ||||
#配置harbor | #配置harbor | ||||
harbor: | harbor: | ||||
address: | |||||
username: | |||||
password: | |||||
# habor服务域名,需要将用户持有域名解析到 harbor服务所在服务器ip | |||||
address: harbor.test.com | |||||
username: admin | |||||
password: Harbor12345 | |||||
model-name: train | model-name: train | ||||
# data模块配置 | # data模块配置 | ||||
data: | data: | ||||
@@ -157,8 +158,10 @@ data: | |||||
# minio配置 | # minio配置 | ||||
minio: | minio: | ||||
url: http://127.0.0.1:9000/ | url: http://127.0.0.1:9000/ | ||||
accessKey: | |||||
secretKey: | |||||
# 部署 minio 时指定的 MINIO_ACCESS_KEY | |||||
accessKey: admin | |||||
# 部署 minio 时指定的 MINIO_SECRET_KEY | |||||
secretKey: 123@abc.com | |||||
bucketName: dubhe-dev | bucketName: dubhe-dev | ||||
presignedUrlExpiryTime: 300 | presignedUrlExpiryTime: 300 | ||||
annotation: /annotation/ | annotation: /annotation/ | ||||
@@ -8,13 +8,14 @@ spring: | |||||
redis: | redis: | ||||
repositories: | repositories: | ||||
enabled: false | enabled: false | ||||
# 邮箱配置 | |||||
# 邮箱配置,用于发送用户注册邮箱验证码 | |||||
mail: | mail: | ||||
host: | |||||
host: smtp.163.com | |||||
# 邮件的发送者 163邮箱(开发测试时使用 发送延时 20秒,每天发送量限制 50) | # 邮件的发送者 163邮箱(开发测试时使用 发送延时 20秒,每天发送量限制 50) | ||||
username: | |||||
username: test@163.com | |||||
# SMTP授权密码 | # SMTP授权密码 | ||||
password: | |||||
password: AAAAAAAAAAAAAAAA | |||||
protocol: smtp | protocol: smtp | ||||
properties.mail.smtp.auth: true | properties.mail.smtp.auth: true | ||||
properties.mail.smtp.port: 465 #465或者994 | properties.mail.smtp.port: 465 #465或者994 | ||||
@@ -31,10 +32,10 @@ loginCode: | |||||
height: 28 | height: 28 | ||||
length: 4 | length: 4 | ||||
#密码加密传输,前端公钥加密,后端私钥解密 | |||||
#密码加密传输,前端公钥加密,后端私钥解密,与前端src/settings.js publicKey搭配使用;可使用genKeyPair方法进行生成 | |||||
rsa: | rsa: | ||||
private_key: | |||||
public_key: | |||||
private_key: MIIBUwIBADANBgkqhkiG9w0BAQEFAASCAT0wggE5AgEAAkEA0vfvyTdGJkdbHkB8mp0f3FE0GYP3AYPaJF7jUd1M0XxFSE2ceK3k2kw20YvQ09NJKk+OMjWQl9WitG9pB6tSCQIDAQABAkA2SimBrWC2/wvauBuYqjCFwLvYiRYqZKThUS3MZlebXJiLB+Ue/gUifAAKIg1avttUZsHBHrop4qfJCwAI0+YRAiEA+W3NK/RaXtnRqmoUUkb59zsZUBLpvZgQPfj1MhyHDz0CIQDYhsAhPJ3mgS64NbUZmGWuuNKp5coY2GIj/zYDMJp6vQIgUueLFXv/eZ1ekgz2Oi67MNCk5jeTF2BurZqNLR3MSmUCIFT3Q6uHMtsB9Eha4u7hS31tj1UWE+D+ADzp59MGnoftAiBeHT7gDMuqeJHPL4b+kC+gzV4FGTfhR9q3tTbklZkD2A== | |||||
public_key: MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANL378k3RiZHWx5AfJqdH9xRNBmD9wGD2iRe41HdTNF8RUhNnHit5NpMNtGL0NPTSSpPjjI1kJfVorRvaQerUgkCAwEAAQ== | |||||
#jwt | #jwt | ||||
jwt: | jwt: | ||||
@@ -42,7 +43,7 @@ jwt: | |||||
# 令牌前缀 | # 令牌前缀 | ||||
token-start-with: Bearer | token-start-with: Bearer | ||||
# 必须使用最少88位的Base64对该令牌进行编码 | # 必须使用最少88位的Base64对该令牌进行编码 | ||||
base64-secret: | |||||
base64-secret: ZmQ0ZGI5NjQ0MDQwY2I4MjMxY2Y3ZmI3MjdhN2ZmMjNhODViOTg1ZGE0NTBjMGM4NDA5NzYxMjdjOWMwYWRmZTBlZjlhNGY3ZTg4Y2U3YTE1ODVkZDU5Y2Y3OGYwZWE1NzUzNWQ2YjFjZDc0NGMxZWU2MmQ3MjY1NzJmMTIzNDU= | |||||
# 令牌过期时间 此处单位/毫秒 ,默认24小时,可在此网站生成 https://www.convertworld.com/zh-hans/time/milliseconds.html | # 令牌过期时间 此处单位/毫秒 ,默认24小时,可在此网站生成 https://www.convertworld.com/zh-hans/time/milliseconds.html | ||||
token-validity-in-seconds: 86400000 | token-validity-in-seconds: 86400000 | ||||
# 在线用户key | # 在线用户key | ||||
@@ -55,7 +56,7 @@ swagger: | |||||
enabled: true | enabled: true | ||||
# 后台添加用户的初始密码 | # 后台添加用户的初始密码 | ||||
initial_password: "" | |||||
initial_password: "123456" | |||||
train-job: | train-job: | ||||
namespace: "namespace-" | namespace: "namespace-" | ||||
@@ -68,15 +69,23 @@ train-job: | |||||
log-path: "log" | log-path: "log" | ||||
load-path: "load" | load-path: "load" | ||||
load-key: "model_load_dir" | load-key: "model_load_dir" | ||||
load-val-dataset-key: "val_data_url" | |||||
visualized-log-path: "visualizedlog" | visualized-log-path: "visualizedlog" | ||||
docker-dataset-path: "/dataset" | docker-dataset-path: "/dataset" | ||||
docker-train-path: "/train" | |||||
docker-train-path: "/workspace" | |||||
docker-model-path: "/modeldir" | |||||
docker-val-dataset-path: "/valdataset" | |||||
docker-out-path: "train_out=${train-job.docker-train-path}/${train-job.out-path}" | docker-out-path: "train_out=${train-job.docker-train-path}/${train-job.out-path}" | ||||
docker-log-path: "train_log=${train-job.docker-train-path}/${train-job.log-path}" | docker-log-path: "train_log=${train-job.docker-train-path}/${train-job.log-path}" | ||||
docker-visualized-log-path: "train_visualized_log=${train-job.docker-train-path}/${train-job.visualized-log-path}" | docker-visualized-log-path: "train_visualized_log=${train-job.docker-train-path}/${train-job.visualized-log-path}" | ||||
docker-dataset: "data_url=${train-job.docker-dataset-path}" | docker-dataset: "data_url=${train-job.docker-dataset-path}" | ||||
eight: "8" | eight: "8" | ||||
plus-eight: "+8" | plus-eight: "+8" | ||||
node-ips: "node_ips" | |||||
node-num: "num_nodes" | |||||
gpu-num-per-node: "gpu_num_per_node" | |||||
minioweb: | minioweb: | ||||
GetToken: | GetToken: | ||||
@@ -88,6 +97,7 @@ minioweb: | |||||
zip: | zip: | ||||
url: minio/zip?token= | url: minio/zip?token= | ||||
train-algorithm: | train-algorithm: | ||||
#是否输出训练结果 | #是否输出训练结果 | ||||
is-train-out: true | is-train-out: true | ||||
@@ -99,12 +109,7 @@ train-algorithm: | |||||
algorithm-source: 1 | algorithm-source: 1 | ||||
#设置fork默认值 | #设置fork默认值 | ||||
fork: false | fork: false | ||||
#上传算法文件路径名 | |||||
upload-algorithm-path: "upload-temp/algorithm-manage" | |||||
docker: | |||||
host: | |||||
port: | |||||
# 配置slq打印日志 | # 配置slq打印日志 | ||||
logging: | logging: | ||||
@@ -118,7 +123,9 @@ logging: | |||||
dubhe-proxy: | dubhe-proxy: | ||||
visual: | visual: | ||||
keyword: visual | keyword: visual | ||||
server: 10.5.18.239 | |||||
# 可视化服务后端 ip | |||||
server: 127.0.0.1 | |||||
# 可视化服务后端 端口 | |||||
port: 9898 | port: 9898 | ||||
refine: | refine: | ||||
keyword: refine | keyword: refine | ||||
@@ -129,4 +136,19 @@ dubhe-proxy: | |||||
delay: | delay: | ||||
notebook: | notebook: | ||||
#模型开发延时关闭时间 | #模型开发延时关闭时间 | ||||
delete: 240 | |||||
delete: 240 | |||||
#垃圾回收默认配置 | |||||
recycle: | |||||
#过期时间设置 | |||||
timeout: | |||||
#用户上传文件至临时路径下后文件最大有效时长,以小时为单位 | |||||
file-valid: 24 | |||||
#用户删除数据后,默认其文件最大有效时长,以天为单位 | |||||
date: 7 | |||||
#用户删除某一算法后,其算法文件最大有效时长,以天为单位 | |||||
algorithm-valid: 3 | |||||
#用户删除某一模型后,其模型文件最大有效时长,以天为单位 | |||||
model-valid: 3 | |||||
#用户删除训练任务后,其训练管理文件最大有效时长,以天为单位 | |||||
train-valid: 3 |
@@ -225,7 +225,7 @@ start transaction; -- 整个存储过程指定为一个事务 | |||||
apply_status tinyint(1) default 0 not null comment '状态(0无需操作,1未创建,2已创建)', | apply_status tinyint(1) default 0 not null comment '状态(0无需操作,1未创建,2已创建)', | ||||
stop_unix_time bigint default 0 not null comment '资源停止unix时间(精确到秒)', | stop_unix_time bigint default 0 not null comment '资源停止unix时间(精确到秒)', | ||||
stop_display_time timestamp null comment '资源停止展示时间', | stop_display_time timestamp null comment '资源停止展示时间', | ||||
stop_status tinyint(1) default 0 not null comment '状态(0无需操作,1已停止,2已创建)', | |||||
stop_status tinyint(1) default 0 not null comment '状态(0无需操作,1未停止,2已停止)', | |||||
create_time timestamp default CURRENT_TIMESTAMP null comment '创建时间', | create_time timestamp default CURRENT_TIMESTAMP null comment '创建时间', | ||||
create_user_id bigint(20) default 0 null comment '创建用户ID', | create_user_id bigint(20) default 0 null comment '创建用户ID', | ||||
update_time timestamp default CURRENT_TIMESTAMP null on update CURRENT_TIMESTAMP comment '更新时间', | update_time timestamp default CURRENT_TIMESTAMP null on update CURRENT_TIMESTAMP comment '更新时间', | ||||