| @@ -136,7 +136,7 @@ class Expr: | |||||
| if outputs is None: | if outputs is None: | ||||
| return | return | ||||
| current_graph = active_module_tracer().current_scope() | current_graph = active_module_tracer().current_scope() | ||||
| if not isinstance(outputs, collections.Sequence): | |||||
| if not isinstance(outputs, collections.abc.Sequence): | |||||
| outputs = (outputs,) | outputs = (outputs,) | ||||
| for i in outputs: | for i in outputs: | ||||
| assert isinstance(i, RawTensor), "The output must be a Tensor" | assert isinstance(i, RawTensor), "The output must be a Tensor" | ||||
| @@ -82,7 +82,7 @@ class _OprStableOrderHeapq: | |||||
| _used_id_name_pairs = None | _used_id_name_pairs = None | ||||
| def __init__(self, extra_priority): | def __init__(self, extra_priority): | ||||
| assert isinstance(extra_priority, collections.Callable) | |||||
| assert isinstance(extra_priority, collections.abc.Callable) | |||||
| self._list = [] | self._list = [] | ||||
| self._extra_priority = extra_priority | self._extra_priority = extra_priority | ||||
| self._used_id_name_pairs = {} | self._used_id_name_pairs = {} | ||||
| @@ -324,7 +324,7 @@ class Network: | |||||
| if isinstance(modifier, str): | if isinstance(modifier, str): | ||||
| om = modifier | om = modifier | ||||
| modifier = lambda v: "{}.{}".format(om, v) | modifier = lambda v: "{}.{}".format(om, v) | ||||
| assert isinstance(modifier, collections.Callable) | |||||
| assert isinstance(modifier, collections.abc.Callable) | |||||
| for i in self.all_oprs: | for i in self.all_oprs: | ||||
| v0 = i.name | v0 = i.name | ||||
| v1 = modifier(v0) | v1 = modifier(v0) | ||||
| @@ -550,7 +550,7 @@ def as_varnode(obj): | |||||
| return ret | return ret | ||||
| assert isinstance( | assert isinstance( | ||||
| obj, collections.Iterable | |||||
| obj, collections.abc.Iterable | |||||
| ), "{} is not compatible with VarNode".format(obj) | ), "{} is not compatible with VarNode".format(obj) | ||||
| val = list(obj) | val = list(obj) | ||||
| @@ -573,7 +573,7 @@ def as_oprnode(obj): | |||||
| return obj | return obj | ||||
| assert isinstance( | assert isinstance( | ||||
| obj, collections.Iterable | |||||
| obj, collections.abc.Iterable | |||||
| ), "{} is not compatible with OpNode".format(obj) | ), "{} is not compatible with OpNode".format(obj) | ||||
| val = list(obj) | val = list(obj) | ||||
| @@ -619,7 +619,7 @@ class NodeFilter: | |||||
| oprs = get_oprs_seq(node_iter.inputs, False, False) | oprs = get_oprs_seq(node_iter.inputs, False, False) | ||||
| node_iter = itertools.islice(oprs, len(oprs) - 1) | node_iter = itertools.islice(oprs, len(oprs) - 1) | ||||
| assert isinstance(node_iter, collections.Iterable) | |||||
| assert isinstance(node_iter, collections.abc.Iterable) | |||||
| if (not isinstance(node_iter, NodeFilter)) and type( | if (not isinstance(node_iter, NodeFilter)) and type( | ||||
| self | self | ||||
| ) is not NodeFilterCheckType: | ) is not NodeFilterCheckType: | ||||
| @@ -2,3 +2,4 @@ black==19.10b0 | |||||
| isort==4.3.21 | isort==4.3.21 | ||||
| pylint==2.4.3 | pylint==2.4.3 | ||||
| mypy==0.982 | mypy==0.982 | ||||
| typed_ast==1.5.0; python_version > '3.8' | |||||
| @@ -1,4 +1,5 @@ | |||||
| pytest==5.3.0 | |||||
| pytest==5.3.0 ; python_version <= '3.9' | |||||
| pytest==6.2.5 ; python_version > '3.9' | |||||
| pytest-sphinx==0.3.1 | pytest-sphinx==0.3.1 | ||||
| tensorboardX==2.4 | tensorboardX==2.4 | ||||
| protobuf==3.20.0 ; python_version > '3.8' | protobuf==3.20.0 ; python_version > '3.8' | ||||
| @@ -4,6 +4,13 @@ | |||||
| #include "megbrain/imperative/op_def.h" | #include "megbrain/imperative/op_def.h" | ||||
| #include "megbrain/utils/persistent_cache.h" | #include "megbrain/utils/persistent_cache.h" | ||||
| // in python 3.10, ssize_t is not defined on windows | |||||
| // so ssize_t should be defined manually before including pybind headers | |||||
| #if defined(_MSC_VER) | |||||
| #include <BaseTsd.h> | |||||
| typedef SSIZE_T ssize_t; | |||||
| #endif | |||||
| #include <Python.h> | #include <Python.h> | ||||
| #include <iterator> | #include <iterator> | ||||
| #include <string> | #include <string> | ||||
| @@ -7,6 +7,7 @@ | |||||
| # software distributed under the License is distributed on an | # software distributed under the License is distributed on an | ||||
| # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| import math | import math | ||||
| import multiprocessing | |||||
| import os | import os | ||||
| import platform | import platform | ||||
| import time | import time | ||||
| @@ -134,6 +135,10 @@ def test_dataloader_parallel(): | |||||
| platform.system() == "Windows", | platform.system() == "Windows", | ||||
| reason="dataloader do not support parallel on windows", | reason="dataloader do not support parallel on windows", | ||||
| ) | ) | ||||
| @pytest.mark.skipif( | |||||
| multiprocessing.get_start_method() != "fork", | |||||
| reason="the runtime error is only raised when fork", | |||||
| ) | |||||
| def test_dataloader_parallel_timeout(): | def test_dataloader_parallel_timeout(): | ||||
| dataset = init_dataset() | dataset = init_dataset() | ||||
| @@ -161,6 +166,10 @@ def test_dataloader_parallel_timeout(): | |||||
| platform.system() == "Windows", | platform.system() == "Windows", | ||||
| reason="dataloader do not support parallel on windows", | reason="dataloader do not support parallel on windows", | ||||
| ) | ) | ||||
| @pytest.mark.skipif( | |||||
| multiprocessing.get_start_method() != "fork", | |||||
| reason="the runtime error is only raised when fork", | |||||
| ) | |||||
| def test_dataloader_parallel_worker_exception(): | def test_dataloader_parallel_worker_exception(): | ||||
| dataset = init_dataset() | dataset = init_dataset() | ||||
| @@ -287,6 +296,10 @@ def test_prestream_dataloader_multiprocessing(): | |||||
| platform.system() == "Windows", | platform.system() == "Windows", | ||||
| reason="dataloader do not support parallel on windows", | reason="dataloader do not support parallel on windows", | ||||
| ) | ) | ||||
| @pytest.mark.skipif( | |||||
| multiprocessing.get_start_method() != "fork", | |||||
| reason="the runtime error is only raised when fork", | |||||
| ) | |||||
| def test_predataloader_parallel_worker_exception(): | def test_predataloader_parallel_worker_exception(): | ||||
| dataset = MyPreStream(100) | dataset = MyPreStream(100) | ||||
| @@ -8,6 +8,7 @@ | |||||
| # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| import gc | import gc | ||||
| import math | import math | ||||
| import multiprocessing | |||||
| import os | import os | ||||
| import platform | import platform | ||||
| import time | import time | ||||
| @@ -146,6 +147,10 @@ def test_dataloader_parallel(): | |||||
| platform.system() == "Windows", | platform.system() == "Windows", | ||||
| reason="dataloader do not support parallel on windows", | reason="dataloader do not support parallel on windows", | ||||
| ) | ) | ||||
| @pytest.mark.skipif( | |||||
| multiprocessing.get_start_method() != "fork", | |||||
| reason="the runtime error is only raised when fork", | |||||
| ) | |||||
| def test_dataloader_parallel_timeout(): | def test_dataloader_parallel_timeout(): | ||||
| dataset = init_dataset() | dataset = init_dataset() | ||||
| @@ -174,6 +179,10 @@ def test_dataloader_parallel_timeout(): | |||||
| platform.system() == "Windows", | platform.system() == "Windows", | ||||
| reason="dataloader do not support parallel on windows", | reason="dataloader do not support parallel on windows", | ||||
| ) | ) | ||||
| @pytest.mark.skipif( | |||||
| multiprocessing.get_start_method() != "fork", | |||||
| reason="the runtime error is only raised when fork", | |||||
| ) | |||||
| def test_dataloader_parallel_worker_exception(): | def test_dataloader_parallel_worker_exception(): | ||||
| dataset = init_dataset() | dataset = init_dataset() | ||||
| @@ -2,6 +2,13 @@ | |||||
| #include <variant> | #include <variant> | ||||
| // in python 3.10, ssize_t is not defined on windows | |||||
| // so ssize_t should be defined manually before including pybind headers | |||||
| #if defined(_MSC_VER) | |||||
| #include <BaseTsd.h> | |||||
| typedef SSIZE_T ssize_t; | |||||
| #endif | |||||
| #include "megbrain/imperative.h" | #include "megbrain/imperative.h" | ||||
| #include "megbrain/test/helper.h" | #include "megbrain/test/helper.h" | ||||
| @@ -42,7 +42,7 @@ platform=$(uname -m | awk '{print $0}') | |||||
| if [ $platform = 'arm64' ];then | if [ $platform = 'arm64' ];then | ||||
| FULL_PYTHON_VER="3.8.10 3.9.4 3.10.1" | FULL_PYTHON_VER="3.8.10 3.9.4 3.10.1" | ||||
| else | else | ||||
| FULL_PYTHON_VER="3.6.10 3.7.7 3.8.3 3.9.4" | |||||
| FULL_PYTHON_VER="3.6.10 3.7.7 3.8.3 3.9.4 3.10.1" | |||||
| fi | fi | ||||
| if [[ -z ${ALL_PYTHON} ]] | if [[ -z ${ALL_PYTHON} ]] | ||||
| @@ -1,4 +1,4 @@ | |||||
| FROM quay.io/pypa/manylinux2014_x86_64:2020-12-31-56195b3 | |||||
| FROM quay.io/pypa/manylinux2014_x86_64:2021-12-30-cb9fd5b | |||||
| ENV UID=1024 \ | ENV UID=1024 \ | ||||
| PATH=${PATH}:/usr/local/cuda/bin \ | PATH=${PATH}:/usr/local/cuda/bin \ | ||||
| @@ -1,4 +1,4 @@ | |||||
| FROM quay.io/pypa/manylinux2014_aarch64:2020-12-31-56195b3 | |||||
| FROM quay.io/pypa/manylinux2014_aarch64:2021-12-30-cb9fd5b | |||||
| ENV UID=1024 PATH=${PATH}:/usr/local/cuda/bin | ENV UID=1024 PATH=${PATH}:/usr/local/cuda/bin | ||||
| @@ -74,7 +74,7 @@ function patch_elf_depend_lib_megenginelite() { | |||||
| SRC_DIR=$(readlink -f "`dirname $0`/../../../") | SRC_DIR=$(readlink -f "`dirname $0`/../../../") | ||||
| source ${SRC_DIR}/scripts/whl/utils/utils.sh | source ${SRC_DIR}/scripts/whl/utils/utils.sh | ||||
| SUPPORT_ALL_VERSION="36m 37m 38 39" | |||||
| SUPPORT_ALL_VERSION="36m 37m 38 39 310" | |||||
| ALL_PYTHON=${ALL_PYTHON} | ALL_PYTHON=${ALL_PYTHON} | ||||
| if [[ -z ${ALL_PYTHON} ]] | if [[ -z ${ALL_PYTHON} ]] | ||||
| then | then | ||||
| @@ -1,7 +1,5 @@ | |||||
| #!/bin/bash -e | #!/bin/bash -e | ||||
| GET_PIP_URL='https://bootstrap.pypa.io/get-pip.py' | |||||
| GET_PIP_URL_36='https://bootstrap.pypa.io/pip/3.6/get-pip.py' | |||||
| SWIG_URL='https://codeload.github.com/swig/swig/tar.gz/refs/tags/rel-3.0.12' | SWIG_URL='https://codeload.github.com/swig/swig/tar.gz/refs/tags/rel-3.0.12' | ||||
| LLVM_URL='https://github.com/llvm-mirror/llvm/archive/release_60.tar.gz' | LLVM_URL='https://github.com/llvm-mirror/llvm/archive/release_60.tar.gz' | ||||
| CLANG_URL='https://github.com/llvm-mirror/clang/archive/release_60.tar.gz' | CLANG_URL='https://github.com/llvm-mirror/clang/archive/release_60.tar.gz' | ||||
| @@ -17,20 +15,14 @@ yum install -y python3 python3-devel | |||||
| python3 -m pip install cython -i https://mirrors.aliyun.com/pypi/simple | python3 -m pip install cython -i https://mirrors.aliyun.com/pypi/simple | ||||
| python3 -m pip install numpy -i https://mirrors.aliyun.com/pypi/simple | python3 -m pip install numpy -i https://mirrors.aliyun.com/pypi/simple | ||||
| # FIXME: failed when install pip with python3.10 because python3.10 | |||||
| # is not installed on aarch64, so we remove 310 from ALL_PYTHON version now | |||||
| ALL_PYTHON="36m 37m 38 39" | |||||
| numpy_version="1.19.5" | |||||
| ALL_PYTHON="36m 37m 38 39 310" | |||||
| for ver in ${ALL_PYTHON} | for ver in ${ALL_PYTHON} | ||||
| do | do | ||||
| python_ver=`echo $ver | tr -d m` | python_ver=`echo $ver | tr -d m` | ||||
| PIP_URL=${GET_PIP_URL} | |||||
| if [ ${ver} = "36m" ];then | |||||
| PIP_URL=${GET_PIP_URL_36} | |||||
| numpy_version="1.19.5" | |||||
| if [ ${ver} = "310" ];then | |||||
| numpy_version="1.21.6" | |||||
| fi | fi | ||||
| echo "use pip url: ${PIP_URL}" | |||||
| curl ${PIP_URL} | /opt/python/cp${python_ver}-cp${ver}/bin/python - \ | |||||
| --no-cache-dir --only-binary :all: | |||||
| /opt/python/cp${python_ver}-cp${ver}/bin/pip install \ | /opt/python/cp${python_ver}-cp${ver}/bin/pip install \ | ||||
| --no-cache-dir --only-binary :all: numpy==${numpy_version} setuptools==46.1.3 \ | --no-cache-dir --only-binary :all: numpy==${numpy_version} setuptools==46.1.3 \ | ||||
| -i https://mirrors.aliyun.com/pypi/simple | -i https://mirrors.aliyun.com/pypi/simple | ||||
| @@ -78,7 +78,8 @@ function check_build_ninja_python_api() { | |||||
| INCLUDE_KEYWORD="${ver}\\\\include" | INCLUDE_KEYWORD="${ver}\\\\include" | ||||
| PYTHON_API_INCLUDES="3.6.8\\\\include 3.7.7\\\\include 3.8.3\\\\include 3.9.4\\\\include 3.10.1\\\\include" | PYTHON_API_INCLUDES="3.6.8\\\\include 3.7.7\\\\include 3.8.3\\\\include 3.9.4\\\\include 3.10.1\\\\include" | ||||
| elif [[ $OS =~ "Linux" ]]; then | elif [[ $OS =~ "Linux" ]]; then | ||||
| INCLUDE_KEYWORD="include/python3.${ver:1:1}" | |||||
| ver=`echo $ver | tr -d m` | |||||
| INCLUDE_KEYWORD="include/python3.${ver:1}" # like 39/310 | |||||
| info=`command -v termux-info || true` | info=`command -v termux-info || true` | ||||
| if [[ "${info}" =~ "com.termux" ]]; then | if [[ "${info}" =~ "com.termux" ]]; then | ||||
| echo "find termux-info at: ${info}" | echo "find termux-info at: ${info}" | ||||
| @@ -26,7 +26,7 @@ SRC_DIR=$(readlink -f "`dirname $0`/../../../") | |||||
| source ${SRC_DIR}/scripts/whl/utils/utils.sh | source ${SRC_DIR}/scripts/whl/utils/utils.sh | ||||
| ALL_PYTHON=${ALL_PYTHON} | ALL_PYTHON=${ALL_PYTHON} | ||||
| FULL_PYTHON_VER="3.6.8 3.7.7 3.8.3 3.9.4" | |||||
| FULL_PYTHON_VER="3.6.8 3.7.7 3.8.3 3.9.4 3.10.1" | |||||
| if [[ -z ${ALL_PYTHON} ]] | if [[ -z ${ALL_PYTHON} ]] | ||||
| then | then | ||||
| ALL_PYTHON=${FULL_PYTHON_VER} | ALL_PYTHON=${FULL_PYTHON_VER} | ||||