@@ -17,6 +17,11 @@ pipeline { | |||||
steps { | steps { | ||||
sh 'pytest ./tests --durations=0 --html=other.html --self-contained-html -m "not (torch or paddle or paddledist or jittor or torchpaddle or torchjittor)"' | sh 'pytest ./tests --durations=0 --html=other.html --self-contained-html -m "not (torch or paddle or paddledist or jittor or torchpaddle or torchjittor)"' | ||||
} | } | ||||
post { | |||||
always { | |||||
sh 'html_path=/ci/${PJ_NAME}/report-${BUILD_NUMBER}-${GIT_BRANCH#*/}-${GIT_COMMIT} && mkdir -p ${html_path} && mv other.html ${html_path}' | |||||
} | |||||
} | |||||
} | } | ||||
stage('Test Torch-1.11') { | stage('Test Torch-1.11') { | ||||
agent { | agent { | ||||
@@ -26,7 +31,12 @@ pipeline { | |||||
} | } | ||||
} | } | ||||
steps { | steps { | ||||
sh 'pytest ./tests --durations=0 --html=torch-1.11.html --self-contained-html -m torch' | |||||
sh 'pytest ./tests/ --durations=0 --html=torch-1.11.html --self-contained-html -m torch' | |||||
} | |||||
post { | |||||
always { | |||||
sh 'html_path=/ci/${PJ_NAME}/report-${BUILD_NUMBER}-${GIT_BRANCH#*/}-${GIT_COMMIT} && mkdir -p ${html_path} && mv torch-1.11.html ${html_path}' | |||||
} | |||||
} | } | ||||
} | } | ||||
stage('Test Torch-1.6') { | stage('Test Torch-1.6') { | ||||
@@ -39,6 +49,11 @@ pipeline { | |||||
steps { | steps { | ||||
sh 'pytest ./tests/ --durations=0 --html=torch-1.6.html --self-contained-html -m torch' | sh 'pytest ./tests/ --durations=0 --html=torch-1.6.html --self-contained-html -m torch' | ||||
} | } | ||||
post { | |||||
always { | |||||
sh 'html_path=/ci/${PJ_NAME}/report-${BUILD_NUMBER}-${GIT_BRANCH#*/}-${GIT_COMMIT} && mkdir -p ${html_path} && mv torch-1.6.html ${html_path}' | |||||
} | |||||
} | |||||
} | } | ||||
stage('Test Paddle') { | stage('Test Paddle') { | ||||
agent { | agent { | ||||
@@ -54,6 +69,11 @@ pipeline { | |||||
sh 'FASTNLP_BACKEND=paddle pytest ./tests/core/drivers/paddle_driver/test_fleet.py --durations=0 --html=paddle_fleet.html --self-contained-html --co' | sh 'FASTNLP_BACKEND=paddle pytest ./tests/core/drivers/paddle_driver/test_fleet.py --durations=0 --html=paddle_fleet.html --self-contained-html --co' | ||||
sh 'FASTNLP_BACKEND=paddle pytest ./tests/core/controllers/test_trainer_paddle.py --durations=0 --html=paddle_trainer.html --self-contained-html --co' | sh 'FASTNLP_BACKEND=paddle pytest ./tests/core/controllers/test_trainer_paddle.py --durations=0 --html=paddle_trainer.html --self-contained-html --co' | ||||
} | } | ||||
post { | |||||
always { | |||||
sh 'html_path=/ci/${PJ_NAME}/report-${BUILD_NUMBER}-${GIT_BRANCH#*/}-${GIT_COMMIT} && mkdir -p ${html_path} && mv paddle*.html ${html_path}' | |||||
} | |||||
} | |||||
} | } | ||||
// stage('Test Jittor') { | // stage('Test Jittor') { | ||||
// agent { | // agent { | ||||
@@ -70,12 +90,6 @@ pipeline { | |||||
// } | // } | ||||
} | } | ||||
} | } | ||||
stage('Moving Test Results') { | |||||
agent any | |||||
steps { | |||||
sh 'post mv' | |||||
} | |||||
} | |||||
} | } | ||||
post { | post { | ||||
failure { | failure { | ||||
@@ -139,7 +139,7 @@ class TestFdl: | |||||
logger.set_stdout() | logger.set_stdout() | ||||
ds = DataSet({"x": [[1, 2], [2, 3, 4], [4, 5, 6, 7]] * 10, "y": [1, 0, 1] * 10}) | ds = DataSet({"x": [[1, 2], [2, 3, 4], [4, 5, 6, 7]] * 10, "y": [1, 0, 1] * 10}) | ||||
with Capturing() as out: | with Capturing() as out: | ||||
dl = TorchDataLoader(ds, prefetch_factor=3, shuffle=False) | |||||
dl = TorchDataLoader(ds, batch_size=1, prefetch_factor=3, shuffle=False) | |||||
for idx, batch in enumerate(dl): | for idx, batch in enumerate(dl): | ||||
assert len(batch['x'])==1 | assert len(batch['x'])==1 | ||||
assert batch['x'][0].tolist() == ds[idx]['x'] | assert batch['x'][0].tolist() == ds[idx]['x'] | ||||
@@ -154,7 +154,7 @@ class TestFdl: | |||||
logger.set_stdout() | logger.set_stdout() | ||||
ds = DataSet({"x": [[1, 2], [2, 3, 4], [4, 5, 6, 7]] * 10, "y": [1, 0, 1] * 10}) | ds = DataSet({"x": [[1, 2], [2, 3, 4], [4, 5, 6, 7]] * 10, "y": [1, 0, 1] * 10}) | ||||
with Capturing() as out: | with Capturing() as out: | ||||
dl = TorchDataLoader(ds, num_workers=0, prefetch_factor=2, generator=torch.Generator(), shuffle=False) | |||||
dl = TorchDataLoader(ds, batch_size=1, num_workers=0, prefetch_factor=2, generator=torch.Generator(), shuffle=False) | |||||
for idx, batch in enumerate(dl): | for idx, batch in enumerate(dl): | ||||
assert len(batch['x'])==1 | assert len(batch['x'])==1 | ||||
assert batch['x'][0].tolist() == ds[idx]['x'] | assert batch['x'][0].tolist() == ds[idx]['x'] | ||||
@@ -1,6 +1,8 @@ | |||||
import pytest | import pytest | ||||
from pathlib import Path | from pathlib import Path | ||||
from pkg_resources import parse_version | |||||
from fastNLP.core.drivers.torch_driver.single_device import TorchSingleDriver | from fastNLP.core.drivers.torch_driver.single_device import TorchSingleDriver | ||||
from fastNLP.core.samplers import ReproduceBatchSampler, RandomSampler | from fastNLP.core.samplers import ReproduceBatchSampler, RandomSampler | ||||
from tests.helpers.models.torch_model import TorchNormalModel_Classification_1 | from tests.helpers.models.torch_model import TorchNormalModel_Classification_1 | ||||
@@ -9,6 +11,7 @@ from tests.helpers.datasets.paddle_data import PaddleNormalDataset | |||||
from tests.helpers.models.paddle_model import PaddleNormalModel_Classification_1 | from tests.helpers.models.paddle_model import PaddleNormalModel_Classification_1 | ||||
from fastNLP.envs.distributed import rank_zero_rm | from fastNLP.envs.distributed import rank_zero_rm | ||||
from fastNLP.envs.imports import _NEED_IMPORT_PADDLE, _NEED_IMPORT_TORCH | from fastNLP.envs.imports import _NEED_IMPORT_PADDLE, _NEED_IMPORT_TORCH | ||||
if _NEED_IMPORT_TORCH: | if _NEED_IMPORT_TORCH: | ||||
import torch | import torch | ||||
from torch.utils.data import DataLoader, BatchSampler | from torch.utils.data import DataLoader, BatchSampler | ||||
@@ -245,6 +248,9 @@ class TestTorchDriverFunctions: | |||||
""" | """ | ||||
# 先确保不影响运行 | # 先确保不影响运行 | ||||
# TODO:正确性 | # TODO:正确性 | ||||
if parse_version(torch.__version__) < parse_version('1.7'): | |||||
pytest.skip("Skip if torch version is smaller than 1.7, since torch.manual_seed may cause a bug:" | |||||
"Overflow when unpacking long") | |||||
TorchSingleDriver.worker_init_function(0) | TorchSingleDriver.worker_init_function(0) | ||||
@pytest.mark.torch | @pytest.mark.torch | ||||