@@ -316,19 +316,19 @@ TritonModel::ResolveBackendConfigs(
   std::map<std::string, std::string> lconfig;
   if (global_itr != backend_cmdline_config_map.end()) {
     // Accumulate all global settings
-    for (auto& setting : global_itr->second){
+    for (auto& setting : global_itr->second) {
       lconfig[setting.first] = setting.second;
     }
   }
   if (specific_itr != backend_cmdline_config_map.end()) {
     // Accumulate backend specific settings and override
-    // global settings with specific configs if needed
-    for (auto& setting : specific_itr->second){
+    // global settings with specific configs if needed
+    for (auto& setting : specific_itr->second) {
       lconfig[setting.first] = setting.second;
     }
-  }
-  for (auto& final_setting : lconfig){
-    config.emplace_back(final_setting);
+  }
+  for (auto& final_setting : lconfig) {
+    config.emplace_back(final_setting);
   }
 
   return Status::Success;
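Note on the hunk above: the loop order is what gives backend-specific settings precedence. Global pairs are written into lconfig first, then backend-specific pairs overwrite any colliding key before the map is copied out into config. A minimal standalone sketch of that merge-override pattern (names here are illustrative; in Triton the config type is a vector of string pairs):

#include <map>
#include <string>
#include <utility>
#include <vector>

using Config = std::vector<std::pair<std::string, std::string>>;

// Merge global settings with backend-specific ones; on a key collision
// the backend-specific value wins because it is inserted last.
Config
MergeConfigs(const Config& global, const Config& specific)
{
  std::map<std::string, std::string> lconfig;
  for (const auto& setting : global) {
    lconfig[setting.first] = setting.second;
  }
  for (const auto& setting : specific) {
    lconfig[setting.first] = setting.second;  // override
  }
  return Config(lconfig.begin(), lconfig.end());
}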
@@ -591,9 +591,11 @@ TritonModel::SetBatchingStrategy(const std::string& batch_libpath)
     TRITONSERVER_Error* err = batcher_init_fn_(
         Batcher(), reinterpret_cast<TRITONBACKEND_Model*>(this));
     if (err) {
-      auto err_message = TRITONSERVER_ErrorMessage(err);
+      auto status = Status(
+          TritonCodeToStatusCode(TRITONSERVER_ErrorCode(err)),
+          TRITONSERVER_ErrorMessage(err));
       TRITONSERVER_ErrorDelete(err);
-      return Status(Status::Code::INVALID_ARG, err_message);
+      return status;
     }
   }
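The change above stops hard-coding Status::Code::INVALID_ARG and instead carries the batcher's real error code across the C boundary via TritonCodeToStatusCode. The general shape of that conversion, as a sketch (assuming Triton's internal Status and TritonCodeToStatusCode helpers; the code and message must be read out before TRITONSERVER_ErrorDelete invalidates them):

// Sketch: turn an owning TRITONSERVER_Error* into a Status and release
// the error object. The Status constructor copies the message, so the
// pointer returned by TRITONSERVER_ErrorMessage may die with the error.
Status
ErrorToStatus(TRITONSERVER_Error* err)
{
  if (err == nullptr) {
    return Status::Success;
  }
  Status status(
      TritonCodeToStatusCode(TRITONSERVER_ErrorCode(err)),
      TRITONSERVER_ErrorMessage(err));
  TRITONSERVER_ErrorDelete(err);
  return status;
}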
@@ -1312,8 +1314,9 @@ TRITONBACKEND_RequestParameter(
     return TRITONSERVER_ErrorNew(
         TRITONSERVER_ERROR_INVALID_ARG,
         ("out of bounds index " + std::to_string(index) +
-         std::string(": request has ") + std::to_string(parameters.size()) +
-         " parameters").c_str());
+         std::string(": request has ") + std::to_string(parameters.size()) +
+         " parameters")
+            .c_str());
   }
 
   const InferenceParameter& param = parameters[index];
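For a backend author, the bounds check above means the index passed to TRITONBACKEND_RequestParameter must stay below the request's parameter count. A hedged usage sketch (assuming the companion TRITONBACKEND_RequestParameterCount entry point declared alongside this one in tritonbackend.h):

#include "triton/core/tritonbackend.h"

// Sketch: enumerate every parameter attached to a request. Keeping the
// index below the reported count avoids the out-of-bounds error above.
TRITONSERVER_Error*
EnumerateRequestParameters(TRITONBACKEND_Request* request)
{
  uint32_t count = 0;
  TRITONSERVER_Error* err =
      TRITONBACKEND_RequestParameterCount(request, &count);
  for (uint32_t i = 0; (err == nullptr) && (i < count); ++i) {
    const char* key = nullptr;
    TRITONSERVER_ParameterType type;
    const void* value = nullptr;
    err = TRITONBACKEND_RequestParameter(request, i, &key, &type, &value);
    // ... inspect key/type/value ...
  }
  return err;  // nullptr on success
}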
@@ -1405,7 +1408,8 @@ TRITONBACKEND_InputBuffer(
   InferenceRequest::Input* ti =
       reinterpret_cast<InferenceRequest::Input*>(input);
   Status status = ti->DataBuffer(
-      index, buffer, reinterpret_cast<size_t*>(buffer_byte_size), memory_type, memory_type_id);
+      index, buffer, reinterpret_cast<size_t*>(buffer_byte_size), memory_type,
+      memory_type_id);
   if (!status.IsOk()) {
     *buffer = nullptr;
     *buffer_byte_size = 0;
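The reinterpret_cast in this hunk exists because the C API reports sizes through uint64_t* while DataBuffer takes size_t*. On the backend side, a typical consumer walks an input's buffers like this (a sketch; buffer_count would come from TRITONBACKEND_InputProperties):

#include "triton/core/tritonbackend.h"

// Sketch: visit each buffer backing one input tensor. A tensor's data
// may be split across several buffers in different memories; the
// preferred memory_type is passed in and the actual one is returned.
TRITONSERVER_Error*
VisitInputBuffers(TRITONBACKEND_Input* input, const uint32_t buffer_count)
{
  for (uint32_t idx = 0; idx < buffer_count; ++idx) {
    const void* buffer = nullptr;
    uint64_t byte_size = 0;
    TRITONSERVER_MemoryType memory_type = TRITONSERVER_MEMORY_CPU;
    int64_t memory_type_id = 0;
    TRITONSERVER_Error* err = TRITONBACKEND_InputBuffer(
        input, idx, &buffer, &byte_size, &memory_type, &memory_type_id);
    if (err != nullptr) {
      return err;
    }
    // ... consume byte_size bytes at buffer ...
  }
  return nullptr;  // success
}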
@@ -1445,10 +1449,11 @@ TRITONBACKEND_InputBufferForHostPolicy(
   Status status =
       (host_policy_name == nullptr)
           ? ti->DataBuffer(
-                index, buffer, reinterpret_cast<size_t*>(buffer_byte_size), memory_type, memory_type_id)
+                index, buffer, reinterpret_cast<size_t*>(buffer_byte_size),
+                memory_type, memory_type_id)
           : ti->DataBufferForHostPolicy(
-                index, buffer, reinterpret_cast<size_t*>(buffer_byte_size), memory_type, memory_type_id,
-                host_policy_name);
+                index, buffer, reinterpret_cast<size_t*>(buffer_byte_size),
+                memory_type, memory_type_id, host_policy_name);
   if (!status.IsOk()) {
     *buffer = nullptr;
     *buffer_byte_size = 0;
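The ternary above is the host-policy fallback: when host_policy_name is nullptr the call reduces to the plain DataBuffer path, otherwise the buffer registered for that policy is returned. A backend can therefore use a single code path and pass nullptr when no policy applies (a sketch, reusing the parameter order shown in this hunk):

// Sketch: one call site for both cases; passing nullptr for the host
// policy name yields the default buffers, mirroring the ternary above.
TRITONSERVER_Error*
FetchFirstBuffer(
    TRITONBACKEND_Input* input, const char* host_policy_name /* or nullptr */,
    const void** buffer, uint64_t* byte_size,
    TRITONSERVER_MemoryType* memory_type, int64_t* memory_type_id)
{
  return TRITONBACKEND_InputBufferForHostPolicy(
      input, host_policy_name, 0 /* index */, buffer, byte_size, memory_type,
      memory_type_id);
}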