change the function in op_teller, test=release/1.7 (#22834)
* change the function in op_teller, test=develop

* correct the commit-id, test=develop
Shixiaowei02 committed Mar 9, 2020
1 parent 7a92e75 commit 2a792de
Showing 4 changed files with 12 additions and 4 deletions.
2 changes: 1 addition & 1 deletion cmake/external/lite.cmake
@@ -44,7 +44,7 @@ if (NOT LITE_SOURCE_DIR OR NOT LITE_BINARY_DIR)
     ${LITE_PROJECT}
     ${EXTERNAL_PROJECT_LOG_ARGS}
     GIT_REPOSITORY "https://github.com/PaddlePaddle/Paddle-Lite.git"
-    GIT_TAG 922ace19a45f30075618f71428523e7a2d5898d6
+    GIT_TAG 0f875ef367bd2dbfa2e557eb2a2fc841bacdf6cf
     PREFIX ${LITE_SOURCES_DIR}
     UPDATE_COMMAND ""
     BUILD_COMMAND ${LITE_BUILD_COMMAND}
2 changes: 1 addition & 1 deletion paddle/fluid/inference/lite/op_teller.cc
@@ -28,7 +28,7 @@ namespace lite {
 struct SimpleOpTeller : public Teller {
   SimpleOpTeller() {
     const std::map<std::string, std::string>& op2path =
-        OpKernelInfoCollector::Global().GetOp2PathDict();
+        paddle::lite::GetOp2PathDict();
     auto is_non_inst = [](const std::string& op) -> bool {
       const std::vector<std::string> ops = {"feed", "fetch", "while"};
       return std::find(ops.begin(), ops.end(), op) != ops.end();
    };
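The only functional change here is the source of the op-to-kernel-path dictionary: the teller now calls the free function paddle::lite::GetOp2PathDict() exported by the Paddle-Lite revision pinned above, instead of going through OpKernelInfoCollector::Global(). Below is a minimal, standalone sketch of the teller pattern this constructor sets up; the STL-only types and the operator() body are assumptions for illustration, not the actual Paddle implementation.

    // Standalone sketch (assumption: the real SimpleOpTeller keeps a set of
    // supported ops and answers membership queries; only the lines shown in
    // the diff above are taken from the source).
    #include <algorithm>
    #include <map>
    #include <set>
    #include <string>
    #include <vector>

    class OpTellerSketch {
     public:
      // op2path plays the role of paddle::lite::GetOp2PathDict(): it maps
      // every op with a registered Lite kernel to its source path.
      explicit OpTellerSketch(const std::map<std::string, std::string>& op2path) {
        auto is_non_inst = [](const std::string& op) -> bool {
          const std::vector<std::string> ops = {"feed", "fetch", "while"};
          return std::find(ops.begin(), ops.end(), op) != ops.end();
        };
        for (const auto& kv : op2path) {
          if (!is_non_inst(kv.first)) supported_.insert(kv.first);
        }
      }
      // An op is "told" runnable on Lite if it has a kernel and is not a
      // framework-only instruction such as feed/fetch/while.
      bool operator()(const std::string& op_type) const {
        return supported_.count(op_type) > 0;
      }

     private:
      std::set<std::string> supported_;
    };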
6 changes: 6 additions & 0 deletions paddle/fluid/inference/lite/tensor_utils.cc
@@ -157,7 +157,10 @@ void TensorCopyAsync(paddle::lite::Tensor* dst, const framework::LoDTensor& src,
   dst->Resize(framework::vectorize(src.dims()));
   const void* src_data = src.data<void>();
   void* dst_data = dst->mutable_data(bytes);
+  VLOG(3) << "[CopyAsync fluid -> lite] Bytes = " << bytes << ", src = " << &src
+          << ", dst = " << dst << ", src_type = " << src.type();
   MemoryCopyAsync(dst_place, dst_data, src_place, src_data, bytes, ctx);
+  VLOG(3) << "[Lite memory size] Bytes = " << dst->memory_size();
 }

 template <>
@@ -172,7 +175,10 @@ void TensorCopyAsync(framework::LoDTensor* dst, const paddle::lite::Tensor& src,
   const void* src_data = src.raw_data();
   // When Lite is ready, the source type needs to be modified here.
   void* dst_data = dst->mutable_data(dst_place, dst->type());
+  VLOG(3) << "[CopyAsync lite -> fluid] Bytes = " << bytes << ", src = " << &src
+          << ", dst = " << dst << ", src_type = " << dst->type();
   MemoryCopyAsync(dst_place, dst_data, src_place, src_data, bytes, ctx);
+  VLOG(3) << "[Lite memory size] Bytes = " << src.memory_size();
 }

 }  // namespace utils
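The additions in this file are pure tracing: a VLOG(3) line before each MemoryCopyAsync call records the byte count, the source and destination addresses, and the source type, and a second line after the copy records the tensor's memory_size(). A minimal standalone sketch of the same pattern using glog directly (std::memcpy stands in for MemoryCopyAsync, and the helper name is made up for illustration):

    #include <cstddef>
    #include <cstring>
    #include <vector>
    #include <glog/logging.h>

    // Log size and endpoints before the copy and the footprint after,
    // mirroring the VLOG(3) lines added above.
    void CopyWithTracing(void* dst, const void* src, std::size_t bytes) {
      VLOG(3) << "[CopyAsync] Bytes = " << bytes << ", src = " << src
              << ", dst = " << dst;
      std::memcpy(dst, src, bytes);  // stand-in for MemoryCopyAsync
      VLOG(3) << "[memory size] Bytes = " << bytes;
    }

    int main(int argc, char** argv) {
      google::InitGoogleLogging(argv[0]);
      std::vector<float> src(16, 1.0f), dst(16);
      // VLOG(3) output only appears when verbose logging is enabled,
      // e.g. GLOG_v=3 ./a.out (or --v=3 when gflags is linked in).
      CopyWithTracing(dst.data(), src.data(), src.size() * sizeof(float));
      return 0;
    }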
6 changes: 4 additions & 2 deletions paddle/fluid/operators/lite/lite_engine_op.h
@@ -77,7 +77,8 @@ class LiteEngineOp : public framework::OperatorBase {
           inference::analysis::GetFromScope<framework::LoDTensor>(scope,
                                                                   in_names_[i]);
       paddle::lite::Tensor *dst_t = engine_->GetInput(i);
-      VLOG(3) << "fluid -> lite: " << in_names_[i];
+      VLOG(3) << "[Copy] fluid -> lite (" << in_names_[i] << " -> "
+              << engine_->GetInputNames()[i] << ")";
       inference::lite::utils::TensorCopyAsync(dst_t, src_t, *ctx);
     }
 #ifdef PADDLE_WITH_CUDA
@@ -94,7 +95,8 @@ class LiteEngineOp : public framework::OperatorBase {
       framework::LoDTensor *dst_t =
           &inference::analysis::GetFromScope<framework::LoDTensor>(
               scope, out_names_[i]);
-      VLOG(3) << "lite -> fluid: " << out_names_[i];
+      VLOG(3) << "[Copy] lite -> fluid (" << out_names_[i] << " -> "
+              << engine_->GetOutputNames()[i] << ")";
       inference::lite::utils::TensorCopyAsync(dst_t, src_t, *ctx);
     }
 #ifdef PADDLE_WITH_CUDA
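The engine op's log lines now print both sides of each copy: the fluid variable name (in_names_[i] / out_names_[i]) and the Lite tensor name at the same index (engine_->GetInputNames()[i] / GetOutputNames()[i]), making it visible which tensors are being paired. A tiny standalone sketch of that index-aligned pairing (the vectors and names are illustrative only, not Paddle API):

    #include <cstddef>
    #include <iostream>
    #include <string>
    #include <vector>

    int main() {
      // Illustrative stand-ins for in_names_ and engine_->GetInputNames();
      // in the operator these are index-aligned, which is what the new log shows.
      std::vector<std::string> fluid_names = {"image", "label"};
      std::vector<std::string> lite_names = {"input_0", "input_1"};
      for (std::size_t i = 0; i < fluid_names.size(); ++i) {
        std::cout << "[Copy] fluid -> lite (" << fluid_names[i] << " -> "
                  << lite_names[i] << ")\n";
      }
      return 0;
    }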
