Skip to content

Commit

Permalink
Revert "support running test_mobile_profiler with buck1/buck2 and OSS (pytorch#89001)"

Browse files Browse the repository at this point in the history

This reverts commit 3b33a27.

Reverted pytorch#89001 on behalf of https://github.com/kit1980 due to Broke trunk / macos-12-py3-x86-64-lite-interpreter / build
  • Loading branch information
pytorchmergebot committed Nov 14, 2022
1 parent 3c3bd55 commit c53a5ac
Show file tree
Hide file tree
Showing 3 changed files with 20 additions and 34 deletions.
1 change: 0 additions & 1 deletion test/cpp/lite_interpreter_runtime/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ target_link_libraries(test_lite_interpreter_runtime PRIVATE torch gtest backend_

if(LINUX)
target_link_libraries(test_lite_interpreter_runtime PRIVATE "-Wl,--no-as-needed,$<TARGET_FILE:backend_with_compiler_runtime>,--as-needed")
target_link_libraries(test_lite_interpreter_runtime PRIVATE stdc++fs)
endif()

if(INSTALL_TEST)
Expand Down
19 changes: 0 additions & 19 deletions test/cpp/lite_interpreter_runtime/resources.h

This file was deleted.

34 changes: 20 additions & 14 deletions test/cpp/lite_interpreter_runtime/test_mobile_profiler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,6 @@

#include <torch/csrc/profiler/events.h>

#include "test/cpp/lite_interpreter_runtime/resources.h"

#ifdef EDGE_PROFILER_USE_KINETO
namespace torch {
namespace jit {
Expand Down Expand Up @@ -44,15 +42,16 @@ bool checkMetaData(
} // namespace

TEST(MobileProfiler, ModuleHierarchy) {
auto testModelFile = torch::testing::getResourcePath(
"test/cpp/lite_interpreter_runtime/to_be_profiled_module.ptl");
std::string filePath(__FILE__);
auto testModelFile = filePath.substr(0, filePath.find_last_of("/\\") + 1);
testModelFile.append("to_be_profiled_module.ptl");

std::vector<IValue> inputs;
inputs.emplace_back(at::rand({64, 64}));
inputs.emplace_back(at::rand({64, 64}));
std::string trace_file_name("/tmp/test_trace.trace");

mobile::Module bc = _load_for_mobile(testModelFile.string());
mobile::Module bc = _load_for_mobile(testModelFile);
{
KinetoEdgeCPUProfiler profiler(
bc,
Expand Down Expand Up @@ -96,15 +95,16 @@ TEST(MobileProfiler, ModuleHierarchy) {
}

TEST(MobileProfiler, Backend) {
auto testModelFile = torch::testing::getResourcePath(
"test/cpp/lite_interpreter_runtime/test_backend_for_profiling.ptl");
std::string filePath(__FILE__);
auto testModelFile = filePath.substr(0, filePath.find_last_of("/\\") + 1);
testModelFile.append("test_backend_for_profiling.ptl");

std::vector<IValue> inputs;
inputs.emplace_back(at::rand({64, 64}));
inputs.emplace_back(at::rand({64, 64}));
std::string trace_file_name("/tmp/test_trace_backend.trace");

mobile::Module bc = _load_for_mobile(testModelFile.string());
mobile::Module bc = _load_for_mobile(testModelFile);
{
KinetoEdgeCPUProfiler profiler(
bc,
Expand All @@ -130,15 +130,16 @@ TEST(MobileProfiler, Backend) {
}

TEST(MobileProfiler, BackendMemoryEvents) {
auto testModelFile = torch::testing::getResourcePath(
"test/cpp/lite_interpreter_runtime/test_backend_for_profiling.ptl");
std::string filePath(__FILE__);
auto testModelFile = filePath.substr(0, filePath.find_last_of("/\\") + 1);
testModelFile.append("test_backend_for_profiling.ptl");

std::vector<IValue> inputs;
inputs.emplace_back(at::rand({64, 64}));
inputs.emplace_back(at::rand({64, 64}));
std::string trace_file_name("/tmp/test_trace_backend_memory.trace");

mobile::Module bc = _load_for_mobile(testModelFile.string());
mobile::Module bc = _load_for_mobile(testModelFile);
{
mobile::KinetoEdgeCPUProfiler profiler(
bc,
Expand All @@ -162,8 +163,13 @@ TEST(MobileProfiler, BackendMemoryEvents) {
}

TEST(MobileProfiler, ProfilerEvent) {
auto testModelFile = torch::testing::getResourcePath(
"test/cpp/lite_interpreter_runtime/test_backend_for_profiling.ptl");
/*
* TODO: Using __FILE__ is unreliable e.g. it fails to resolve correctly when
* using buck2, works ok with buck1
*/
std::string filePath(__FILE__);
auto testModelFile = filePath.substr(0, filePath.find_last_of("/\\") + 1);
testModelFile.append("test_backend_for_profiling.ptl");

std::vector<IValue> inputs;
inputs.emplace_back(at::rand({64, 64}));
Expand All @@ -174,7 +180,7 @@ TEST(MobileProfiler, ProfilerEvent) {
torch::profiler::ProfilerPerfEvents.begin(),
torch::profiler::ProfilerPerfEvents.end());

mobile::Module bc = _load_for_mobile(testModelFile.string());
mobile::Module bc = _load_for_mobile(testModelFile);
{
// Bail if something goes wrong here
try {
Expand Down

0 comments on commit c53a5ac

Please sign in to comment.