@@ -16,7 +16,7 @@
 using namespace mgb;
 using namespace imperative;

-OptimizedBackwardGraphResult::OptimizedBackwardGraphResult(const EncodedSubraph& src)
+OptimizedBackwardGraphResult::OptimizedBackwardGraphResult(const EncodedSubgraph& src)
         : input_has_grad(src.output_mask) {
     if (src.graph.exprs.size() <= 1) {
         // backward graph only contains a single op
@@ -80,12 +80,12 @@ std::tuple<SmallVector<LogicalTensorDesc>, bool> OpDef::infer_output_attrs_falli
     return def.trait()->infer_output_attrs_fallible(def, inputs);
 }

-EncodedSubraph OpDef::make_backward_graph(
+EncodedSubgraph OpDef::make_backward_graph(
         const OpDef& def,
         const SmallVector<LogicalTensorDesc>& inputs,
         const SmallVector<bool>& input_requires_grad,
         const SmallVector<bool>& output_has_grad) {
-    using BackwardGraphCache = OpMethResultCache<EncodedSubraph, SmallVector<bool>, SmallVector<bool>>;
+    using BackwardGraphCache = OpMethResultCache<EncodedSubgraph, SmallVector<bool>, SmallVector<bool>>;
     thread_local BackwardGraphCache cache;
     decltype(cache)::key_t cache_key{const_cast<OpDef&>(def).shared_from_this(), inputs, {input_requires_grad, output_has_grad}};
     auto iter = cache.find(cache_key);
@@ -100,10 +100,10 @@ std::vector<std::pair<const char*, std::string>> OpDef::props(
     return def.trait()->props(def);
 }

-EncodedSubraph OpDef::make_forward_graph(
+EncodedSubgraph OpDef::make_forward_graph(
         const OpDef& def,
         const SmallVector<LogicalTensorDesc>& inputs){
-    using ForwardGraphCache = OpMethResultCache<EncodedSubraph, SmallVector<bool>, SmallVector<bool>>;
+    using ForwardGraphCache = OpMethResultCache<EncodedSubgraph, SmallVector<bool>, SmallVector<bool>>;
     thread_local ForwardGraphCache cache;
     decltype(cache)::key_t cache_key{const_cast<OpDef&>(def).shared_from_this(), inputs};
     auto iter = cache.find(cache_key);
@@ -182,11 +182,11 @@ OP_TRAIT_REG(Identity, Identity)
 namespace { namespace subgraph {

-EncodedSubraph make_forward_graph(const OpDef& def, SmallVector<LogicalTensorDesc> inputs) {
-    return EncodedSubraph::make(*def.cast_final_safe<SubgraphOp>().graph);
+EncodedSubgraph make_forward_graph(const OpDef& def, SmallVector<LogicalTensorDesc> inputs) {
+    return EncodedSubgraph::make(*def.cast_final_safe<SubgraphOp>().graph);
 }

-EncodedSubraph make_backward_graph(
+EncodedSubgraph make_backward_graph(
         const OpDef& def,
         const SmallVector<LogicalTensorDesc>& inputs,
         const SmallVector<bool>& input_requires_grad,
@@ -199,7 +199,7 @@ EncodedSubraph make_backward_graph(
         }
     }
     auto bgraph = subgraph_detail::make_backward_graph(def, inputs, input_requires_grad, output_has_grad);
-    return EncodedSubraph::make_single(
+    return EncodedSubgraph::make_single(
             SubgraphOp::make(op.name + "Grad",
                     std::make_shared<Subgraph>(bgraph.graph)),
             bgraph.input_mask, bgraph.output_mask);
@@ -430,7 +430,7 @@ std::tuple<SmallVector<MemoryDesc>, SmallVector<MemoryDesc>> infer_output_mem_de
     return {};
 }

-EncodedSubraph make_backward_graph(
+EncodedSubgraph make_backward_graph(
         const OpDef& def,
         const SmallVector<LogicalTensorDesc>& inputs,
         const SmallVector<bool>& input_requires_grad,
@@ -452,7 +452,7 @@ EncodedSubraph make_backward_graph(
             grad_outputs_has_grad, key);
     }
     auto compiled_op = CompiledOp::make(bgraph_op, op.gopt_level);
-    auto encoded_graph = EncodedSubraph::make_single(compiled_op, backward_graph.input_mask, backward_graph.output_mask);
+    auto encoded_graph = EncodedSubgraph::make_single(compiled_op, backward_graph.input_mask, backward_graph.output_mask);
     return encoded_graph;
 }
@@ -669,7 +669,7 @@ struct ProxyGraph::GradGraph {
     cg::VarNode* grad;
 };

-EncodedSubraph
+EncodedSubgraph
 ProxyGraph::make_backward_graph(
         const OpDef& opdef,
         const SmallVector<LogicalTensorDesc>& input_descs,
@@ -704,7 +704,7 @@ ProxyGraph::make_backward_graph(
     }
     auto* gfunc = cg::lookup_grad_func(fwd->dyn_typeinfo());

-    EncodedSubraph result;
+    EncodedSubgraph result;
     auto&& igraph = result.graph;

     size_t nr_backward_graph_inputs = 0;
@@ -40,7 +40,7 @@ public:
             const SmallVector<Tensor*>& outputs,
             const SmallVector<Tensor*>& workspace);

-    EncodedSubraph make_backward_graph(
+    EncodedSubgraph make_backward_graph(
             const OpDef& opdef,
             const SmallVector<LogicalTensorDesc>& input_descs,
             const SmallVector<bool>& input_requires_grad,
@@ -113,7 +113,7 @@ void execute(const OpDef& def,
 //     return graph->infer_output_attrs_fallible(def, inputs);
 // }

-EncodedSubraph
+EncodedSubgraph
 make_backward_graph(const OpDef& def,
                     const SmallVector<LogicalTensorDesc>& inputs,
                     const SmallVector<bool>& input_requires_grad,
@@ -101,7 +101,7 @@ void Subgraph::replace_vars(
     }
 }

-std::string EncodedSubraph::repr() const {
+std::string EncodedSubgraph::repr() const {
     std::string buffer;
     buffer.push_back('|');
     for (size_t i = 0; i < input_mask.size(); ++i) {
@@ -118,7 +118,7 @@ std::string EncodedSubraph::repr() const {
     return buffer;
 }

-size_t EncodedSubraph::hash() const {
+size_t EncodedSubgraph::hash() const {
     return std::hash<std::string>{}(repr());
 }
@@ -76,11 +76,11 @@ SmallVector<TensorPtr> apply_on_physical_tensor(
     return outputs;
 }

-static EncodedSubraph make_backward_graph_from_forward(
+static EncodedSubgraph make_backward_graph_from_forward(
         const SmallVector<LogicalTensorDesc>& inputs,
         const SmallVector<bool>& input_requires_grad,
         const SmallVector<bool>& output_has_grad,
-        EncodedSubraph forward_graph) {
+        EncodedSubgraph forward_graph) {
     using namespace std::placeholders;
     using var_t = Subgraph::var_t;
     using vars_t = Subgraph::vars_t;
@@ -149,7 +149,7 @@ static EncodedSubraph make_backward_graph_from_forward(
     return backward_graph;
 }

-EncodedSubraph make_backward_graph(
+EncodedSubgraph make_backward_graph(
         const OpDef& def,
         const SmallVector<LogicalTensorDesc>& inputs,
         const SmallVector<bool>& input_requires_grad,
@@ -19,7 +19,7 @@ struct OptimizedBackwardGraphResult {
     SmallVector<bool> save_for_backward;
     SmallVector<bool> input_has_grad;

-    OptimizedBackwardGraphResult(const EncodedSubraph& bgraph);
+    OptimizedBackwardGraphResult(const EncodedSubgraph& bgraph);
 };

 } // namespace mgb::imperative
@@ -29,7 +29,7 @@ class Subgraph::Builder {
     using desc_t = TDesc;
     using descs_t = SmallVector<TDesc>;
     using infer_fn_t = std::function<descs_t(op_t, descs_t, size_t)>;
-    using encoded_graph_t = EncodedSubraph;
+    using encoded_graph_t = EncodedSubgraph;
     using var_map_t = std::unordered_map<var_t, var_t>;
     vars_t m_inputs;
     SmallVector<std::pair<var_t, TensorPtr>> m_constants;
@@ -87,7 +87,7 @@ public:
             const SmallVector<TensorPtr>& inputs_tensors,
             const SmallVector<MemoryDesc>& inputs_mems);

-    static EncodedSubraph make_backward_graph(
+    static EncodedSubgraph make_backward_graph(
            const OpDef& def,
            const SmallVector<LogicalTensorDesc>& inputs,
            const SmallVector<bool>& input_requires_grad,
@@ -96,7 +96,7 @@ public:
     static std::vector<std::pair<const char*, std::string>> props(
             const OpDef& def);

-    static EncodedSubraph make_forward_graph(
+    static EncodedSubgraph make_forward_graph(
             const OpDef& def,
             const SmallVector<LogicalTensorDesc>& inputs);
@@ -40,7 +40,7 @@ struct ShapeInfer final : OpDefImplBase<ShapeInfer> {
     std::shared_ptr<OpDef> op;
     SmallVector<CompNode> devices;
     SmallVector<DType> dtypes;
-    EncodedSubraph graph;
+    EncodedSubgraph graph;
     ShapeInfer() = default;
     ShapeInfer(std::shared_ptr<OpDef> op, SmallVector<CompNode> devices,
                SmallVector<DType> dtypes)
@@ -38,7 +38,7 @@ void exec(const OpDef& def,
           const SmallVector<TensorPtr>& inputs,
           const SmallVector<TensorPtr>& outputs);

-EncodedSubraph
+EncodedSubgraph
 make_backward_graph(const OpDef& def,
                     const SmallVector<LogicalTensorDesc>& inputs,
                     const SmallVector<bool>& input_requires_grad,
@@ -96,7 +96,7 @@ struct Subgraph {
     bool operator==(const Subgraph& rhs) const;
 };

-struct EncodedSubraph {
+struct EncodedSubgraph {
     Subgraph graph;
     SmallVector<bool> input_mask;
     SmallVector<bool> output_mask;
@@ -146,8 +146,8 @@ struct EncodedSubraph {
         return decoded_outputs;
     }

-    static EncodedSubraph make(Subgraph graph) {
-        EncodedSubraph result;
+    static EncodedSubgraph make(Subgraph graph) {
+        EncodedSubgraph result;
         result.input_mask = graph.gen_input_mask();
         result.output_mask = graph.gen_output_mask();
         graph.inputs = result.encode_inputs(graph.inputs);
@@ -156,11 +156,11 @@ struct EncodedSubraph {
         return result;
     }

-    static EncodedSubraph make_single(
+    static EncodedSubgraph make_single(
             std::shared_ptr<OpDef> op,
             SmallVector<bool> input_mask,
             SmallVector<bool> output_mask) {
-        EncodedSubraph result;
+        EncodedSubgraph result;
         result.input_mask = input_mask;
         result.output_mask = output_mask;
         Subgraph::var_t last_var = 0;
@@ -24,7 +24,7 @@ apply_on_physical_tensor(const OpDef& def,
 std::tuple<SmallVector<LogicalTensorDesc>, bool> infer_output_attrs_fallible(const OpDef& def,
         const SmallVector<LogicalTensorDesc>& inputs);

-EncodedSubraph
+EncodedSubgraph
 make_backward_graph(const OpDef& def,
                     const SmallVector<LogicalTensorDesc>& inputs,
                     const SmallVector<bool>& input_requires_grad,
@@ -35,7 +35,7 @@ apply_on_var_node(
         const OpDef& def,
         const VarNodeArray& inputs);

-EncodedSubraph make_backward_graph(
+EncodedSubgraph make_backward_graph(
         const OpDef& def,
         const SmallVector<LogicalTensorDesc>& inputs,
         const SmallVector<bool>& input_requires_grad,
@@ -22,7 +22,7 @@ using namespace cg;
 using namespace imperative;

 template <typename T>
-T prepare_backward_graph_inputs(const EncodedSubraph& bg, const T& inputs,
+T prepare_backward_graph_inputs(const EncodedSubgraph& bg, const T& inputs,
                                 const T& outputs, const T& grads) {
     T ret;
     size_t i = 0;
@@ -143,7 +143,7 @@ LiteNetwork mainly provides users with model loading and running functionality. …
 * Example of basic model loading and running on CPU
 ```
 def test_network_basic():
-    source_dir = os.getenv("LITE_TEST_RESOUCE")
+    source_dir = os.getenv("LITE_TEST_RESOURCE")
     input_data_path = os.path.join(source_dir, "input_data.npy")
     # read input to input_data
     input_data = np.load(input_data_path)
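
The hunk above shows only the first lines of `test_network_basic`. For orientation, here is a minimal sketch of the full load-and-run flow it exercises; the megenginelite calls (`LiteNetwork`, `get_io_tensor`, `set_data_by_copy`, `get_output_name`) and the input tensor name `"data"` are assumptions taken from the Lite Python API these tests use, not part of this diff:

```
# Minimal sketch of the flow test_network_basic exercises.
# Assumptions: megenginelite's LiteNetwork API and the input name "data".
import os

import numpy as np
from megenginelite import LiteNetwork

source_dir = os.getenv("LITE_TEST_RESOURCE")
input_data = np.load(os.path.join(source_dir, "input_data.npy"))

network = LiteNetwork()
network.load(os.path.join(source_dir, "shufflenet.mge"))

# copy host data into the named input tensor, then run synchronously
input_tensor = network.get_io_tensor("data")
input_tensor.set_data_by_copy(input_data)
network.forward()
network.wait()

# read the output back to host as a numpy array
output_tensor = network.get_io_tensor(network.get_output_name(0))
print(output_tensor.to_numpy().flatten()[:8])
```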
@@ -176,7 +176,7 @@ def test_network_basic():
 * To use device memory on CUDA as model input, config and IO information must be set when constructing the network
 ```
 def test_network_device_IO():
-    source_dir = os.getenv("LITE_TEST_RESOUCE")
+    source_dir = os.getenv("LITE_TEST_RESOURCE")
     input_data_path = os.path.join(source_dir, "input_data.npy")
     model_path = os.path.join(source_dir, "shufflenet.mge")
     # read input to input_data
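
For the device-IO case the bullet describes, a sketch of the construction step: configure the network for CUDA and mark the `"data"` input as device-resident before loading. The `LiteConfig`/`LiteIO`/`LiteNetworkIO` names follow the Lite Python API, but treat the exact signatures as assumptions:

```
# Sketch: build a CUDA network whose "data" input is fed from device memory.
# Assumptions: megenginelite's LiteConfig/LiteIO/LiteNetworkIO signatures.
import os

from megenginelite import (
    LiteConfig,
    LiteDeviceType,
    LiteIO,
    LiteNetwork,
    LiteNetworkIO,
)

config = LiteConfig(device_type=LiteDeviceType.LITE_CUDA)
ios = LiteNetworkIO()
# is_host=False marks the input as device-side (CUDA) memory
ios.add_input(LiteIO("data", is_host=False))

network = LiteNetwork(config=config, io=ios)
network.load(os.path.join(os.getenv("LITE_TEST_RESOURCE"), "shufflenet.mge"))

# the "data" io tensor now expects CUDA memory to be shared into it
# (e.g. via set_data_by_share) rather than a host numpy copy
```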
@@ -18,7 +18,7 @@ set_log_level(2)


 class TestShuffleNet(unittest.TestCase):
-    source_dir = os.getenv("LITE_TEST_RESOUCE")
+    source_dir = os.getenv("LITE_TEST_RESOURCE")
     input_data_path = os.path.join(source_dir, "input_data.npy")
     correct_data_path = os.path.join(source_dir, "output_data.npy")
     correct_data = np.load(correct_data_path).flatten()
@@ -52,7 +52,7 @@ def test_network_io():


 class TestShuffleNet(unittest.TestCase):
-    source_dir = os.getenv("LITE_TEST_RESOUCE")
+    source_dir = os.getenv("LITE_TEST_RESOURCE")
     input_data_path = os.path.join(source_dir, "input_data.npy")
     correct_data_path = os.path.join(source_dir, "output_data.npy")
     model_path = os.path.join(source_dir, "shufflenet.mge")
@@ -33,7 +33,7 @@ def require_cuda(ngpu=1):


 class TestShuffleNetCuda(unittest.TestCase):
-    source_dir = os.getenv("LITE_TEST_RESOUCE")
+    source_dir = os.getenv("LITE_TEST_RESOURCE")
     input_data_path = os.path.join(source_dir, "input_data.npy")
     correct_data_path = os.path.join(source_dir, "output_data.npy")
     model_path = os.path.join(source_dir, "shufflenet.mge")