Add back test semicolon and format
fpetrini15 committed Aug 25, 2023
1 parent 6604d84 commit f3b7b4b
Showing 2 changed files with 30 additions and 30 deletions.
20 changes: 9 additions & 11 deletions include/triton/core/tritonbackend.h
@@ -1588,11 +1588,11 @@ TRITONBACKEND_ISPEC TRITONSERVER_Error* TRITONBACKEND_ModelBatchFinalize(
/// \param userp The user-specified value associated with the buffer
/// in TRITONSERVER_ResponseAllocatorAllocFn_t.
/// \return a TRITONSERVER_Error indicating success or failure.
-TRITONBACKEND_ISPEC TRITONSERVER_Error* TRITONBACKEND_InferenceResponseOutputByName(
-    TRITONBACKEND_Response* response,
-    const char* name, TRITONSERVER_DataType* datatype,
-    const int64_t** shape, uint64_t* dim_count, const void** base,
-    size_t* byte_size, TRITONSERVER_MemoryType* memory_type,
+TRITONBACKEND_ISPEC TRITONSERVER_Error*
+TRITONBACKEND_InferenceResponseOutputByName(
+    TRITONBACKEND_Response* response, const char* name,
+    TRITONSERVER_DataType* datatype, const int64_t** shape, uint64_t* dim_count,
+    const void** base, size_t* byte_size, TRITONSERVER_MemoryType* memory_type,
     int64_t* memory_type_id, void** userp);
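As an illustrative aside (not part of this commit): a backend holding a populated response could read an output back through the declaration above. In this minimal sketch the helper name, the `response` argument, and the output name "OUTPUT0" are all hypothetical; only the signature added above is assumed.

```cpp
#include "triton/core/tritonbackend.h"

// Hypothetical helper: inspect the completed output named "OUTPUT0".
static void
InspectOutput0(TRITONBACKEND_Response* response)
{
  TRITONSERVER_DataType datatype;
  const int64_t* shape;
  uint64_t dim_count;
  const void* base;
  size_t byte_size;
  TRITONSERVER_MemoryType memory_type;
  int64_t memory_type_id;
  void* userp;

  TRITONSERVER_Error* err = TRITONBACKEND_InferenceResponseOutputByName(
      response, "OUTPUT0", &datatype, &shape, &dim_count, &base, &byte_size,
      &memory_type, &memory_type_id, &userp);
  if (err != nullptr) {
    TRITONSERVER_ErrorDelete(err);  // the caller owns the returned error
    return;
  }
  // On success, `base`/`byte_size` describe the tensor buffer, which stays
  // owned by the response; `shape` points at `dim_count` elements.
}
```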

/// Get all information about an output tensor by index. The tensor data is
@@ -1617,12 +1617,10 @@ TRITONBACKEND_ISPEC TRITONSERVER_Error* TRITONBACKEND_InferenceResponseOutputByName(
/// \param userp The user-specified value associated with the buffer
/// in TRITONSERVER_ResponseAllocatorAllocFn_t.
/// \return a TRITONSERVER_Error indicating success or failure.
-TRITONSERVER_DECLSPEC TRITONSERVER_Error*
-TRITONBACKEND_InferenceResponseOutput(
-    TRITONBACKEND_Response* response,
-    const uint32_t index, const char** name, TRITONSERVER_DataType* datatype,
-    const int64_t** shape, uint64_t* dim_count, const void** base,
-    size_t* byte_size, TRITONSERVER_MemoryType* memory_type,
+TRITONSERVER_DECLSPEC TRITONSERVER_Error* TRITONBACKEND_InferenceResponseOutput(
+    TRITONBACKEND_Response* response, const uint32_t index, const char** name,
+    TRITONSERVER_DataType* datatype, const int64_t** shape, uint64_t* dim_count,
+    const void** base, size_t* byte_size, TRITONSERVER_MemoryType* memory_type,
     int64_t* memory_type_id, void** userp);
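The by-index variant pairs naturally with enumeration. No output-count accessor appears in this diff, so the sketch below (all names hypothetical) simply probes increasing indices and stops at the out-of-bounds TRITONSERVER_ERROR_INVALID_ARG error that the backend_model.cc implementation later in this commit returns.

```cpp
// Hypothetical helper: walk every output of the response by index.
static void
DumpOutputs(TRITONBACKEND_Response* response)
{
  for (uint32_t index = 0;; ++index) {
    const char* name;
    TRITONSERVER_DataType datatype;
    const int64_t* shape;
    uint64_t dim_count;
    const void* base;
    size_t byte_size;
    TRITONSERVER_MemoryType memory_type;
    int64_t memory_type_id;
    void* userp;
    TRITONSERVER_Error* err = TRITONBACKEND_InferenceResponseOutput(
        response, index, &name, &datatype, &shape, &dim_count, &base,
        &byte_size, &memory_type, &memory_type_id, &userp);
    if (err != nullptr) {
      // Out of bounds (or a buffer failure): stop and release the error.
      TRITONSERVER_ErrorDelete(err);
      break;
    }
    // ... use name, datatype, shape[0..dim_count), base, byte_size ...
  }
}
```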

#ifdef __cplusplus
40 changes: 21 additions & 19 deletions src/backend_model.cc
@@ -1753,12 +1753,11 @@ TRITONBACKEND_BackendAttributeSetParallelModelInstanceLoading(
return nullptr;
}

TRITONAPI_DECLSPEC TRITONSERVER_Error*
TRITONBACKEND_InferenceResponseOutputByName(
-    TRITONBACKEND_Response* response,
-    const char* name, TRITONSERVER_DataType* datatype,
-    const int64_t** shape, uint64_t* dim_count, const void** base,
-    size_t* byte_size, TRITONSERVER_MemoryType* memory_type,
+    TRITONBACKEND_Response* response, const char* name,
+    TRITONSERVER_DataType* datatype, const int64_t** shape, uint64_t* dim_count,
+    const void** base, size_t* byte_size, TRITONSERVER_MemoryType* memory_type,
     int64_t* memory_type_id, void** userp)
{
InferenceResponse* tr = reinterpret_cast<InferenceResponse*>(response);
@@ -1773,28 +1772,29 @@ TRITONBACKEND_InferenceResponseOutputByName(
      const std::vector<int64_t>& oshape = outputs[idx].Shape();
      *shape = &oshape[0];
      *dim_count = oshape.size();
-      Status status = outputs[idx].DataBuffer(base, byte_size, memory_type, memory_type_id, userp);
+      Status status = outputs[idx].DataBuffer(
+          base, byte_size, memory_type, memory_type_id, userp);
      if (!status.IsOk()) {
        *base = nullptr;
        *byte_size = 0;
        return TRITONSERVER_ErrorNew(
-            StatusCodeToTritonCode(status.StatusCode()), status.Message().c_str());
+            StatusCodeToTritonCode(status.StatusCode()),
+            status.Message().c_str());
      }
-      return nullptr; // success
+      return nullptr;  // success
    }
  }
  return TRITONSERVER_ErrorNew(
      TRITONSERVER_ERROR_NOT_FOUND,
      ("Output name " + output_name + " not found.").c_str());
}
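Caller-side note (illustrative, not part of the commit): since the lookup above reports a miss as TRITONSERVER_ERROR_NOT_FOUND, a caller can separate a merely absent output from a data-buffer failure by checking the error code. A hypothetical helper, assuming only the function shown in this commit plus the standard TRITONSERVER_Error accessors:

```cpp
// Hypothetical helper: true only when `name` is an output of `response`.
static bool
ResponseHasOutput(TRITONBACKEND_Response* response, const char* name)
{
  TRITONSERVER_DataType datatype;
  const int64_t* shape;
  uint64_t dim_count;
  const void* base;
  size_t byte_size;
  TRITONSERVER_MemoryType memory_type;
  int64_t memory_type_id;
  void* userp;
  TRITONSERVER_Error* err = TRITONBACKEND_InferenceResponseOutputByName(
      response, name, &datatype, &shape, &dim_count, &base, &byte_size,
      &memory_type, &memory_type_id, &userp);
  if (err == nullptr) {
    return true;
  }
  if (TRITONSERVER_ErrorCode(err) == TRITONSERVER_ERROR_NOT_FOUND) {
    TRITONSERVER_ErrorDelete(err);
    return false;  // the output simply is not present
  }
  // Any other code means a real failure (e.g. from DataBuffer).
  TRITONSERVER_ErrorDelete(err);
  return false;
}
```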

TRITONAPI_DECLSPEC TRITONSERVER_Error*
TRITONBACKEND_InferenceResponseOutput(
-    TRITONBACKEND_Response* response, const uint32_t index,
-    const char** name, TRITONSERVER_DataType* datatype, const int64_t** shape,
-    uint64_t* dim_count, const void** base, size_t* byte_size,
-    TRITONSERVER_MemoryType* memory_type, int64_t* memory_type_id, void** userp)
+    TRITONBACKEND_Response* response, const uint32_t index, const char** name,
+    TRITONSERVER_DataType* datatype, const int64_t** shape, uint64_t* dim_count,
+    const void** base, size_t* byte_size, TRITONSERVER_MemoryType* memory_type,
+    int64_t* memory_type_id, void** userp)
{
InferenceResponse* tr = reinterpret_cast<InferenceResponse*>(response);

@@ -1803,19 +1803,21 @@ TRITONBACKEND_InferenceResponseOutput(
    return TRITONSERVER_ErrorNew(
        TRITONSERVER_ERROR_INVALID_ARG,
        ("out of bounds index " + std::to_string(index) +
-        std::string(": response has ") + std::to_string(outputs.size()) +
-        " outputs").c_str());
+         std::string(": response has ") + std::to_string(outputs.size()) +
+         " outputs")
+            .c_str());
  }

const InferenceResponse::Output& output = outputs[index];

-  *name = output.Name().c_str()
+  *name = output.Name().c_str();
*datatype = DataTypeToTriton(output.DType());

const std::vector<int64_t>& oshape = output.Shape();
*shape = &oshape[0];
*dim_count = oshape.size();
-  Status status = output.DataBuffer(base, byte_size, memory_type, memory_type_id, userp);
+  Status status =
+      output.DataBuffer(base, byte_size, memory_type, memory_type_id, userp);
if (!status.IsOk()) {
*base = nullptr;
*byte_size = 0;
