Commit

C++ CLI app, added arguments to select a GPU to use
Const-me committed Mar 11, 2023
1 parent f9dcf6b commit 1b2aa95
Showing 3 changed files with 29 additions and 2 deletions.
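With these changes the example app gains two new options: -la / --list-adapters prints the available graphic adapters, and -gpu / --use-gpu picks one of them for inference. A hypothetical invocation (the executable name, adapter string, and audio file below are illustrative, not taken from the commit):

    main.exe --list-adapters
    main.exe -m models/ggml-base.en.bin -gpu "NVIDIA GeForce RTX 3060" -f sample.wav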
6 changes: 4 additions & 2 deletions Examples/main/main.cpp
@@ -10,11 +10,13 @@ using namespace Whisper;

#define STREAM_AUDIO 1

-static HRESULT loadWhisperModel( const wchar_t* path, iModel** pp )
+static HRESULT loadWhisperModel( const wchar_t* path, const std::wstring& gpu, iModel** pp )
{
	using namespace Whisper;
	sModelSetup setup;
	setup.impl = eModelImplementation::GPU;
+	if( !gpu.empty() )
+		setup.adapter = gpu.c_str();
	return Whisper::loadModel( path, setup, nullptr, pp );
}
@@ -199,7 +201,7 @@ int wmain( int argc, wchar_t* argv[] )
	}

	ComLight::CComPtr<iModel> model;
-	HRESULT hr = loadWhisperModel( params.model.c_str(), &model );
+	HRESULT hr = loadWhisperModel( params.model.c_str(), params.gpu, &model );
	if( FAILED( hr ) )
	{
		printError( "failed to load the model", hr );
24 changes: 24 additions & 0 deletions Examples/main/params.cpp
@@ -2,6 +2,7 @@
#include <algorithm>
#include <thread>
#include "miscUtils.h"
+#include "../../Whisper/API/iContext.cl.h"

whisper_params::whisper_params()
{
@@ -27,6 +28,8 @@ void whisper_print_usage( int argc, wchar_t** argv, const whisper_params& params )
	fprintf( stderr, "\n" );
	fprintf( stderr, "options:\n" );
	fprintf( stderr, " -h, --help [default] show this help message and exit\n" );
+	fprintf( stderr, " -la, --list-adapters List graphic adapters and exit\n" );
+	fprintf( stderr, " -gpu, --use-gpu The graphic adapter to use for inference\n" );
	fprintf( stderr, " -t N, --threads N [%-7d] number of threads to use during computation\n", params.n_threads );
	fprintf( stderr, " -p N, --processors N [%-7d] number of processors to use during computation\n", params.n_processors );
	fprintf( stderr, " -ot N, --offset-t N [%-7d] time offset in milliseconds\n", params.offset_t_ms );
@@ -51,6 +54,20 @@ void whisper_print_usage( int argc, wchar_t** argv, const whisper_params& params )
	fprintf( stderr, "\n" );
}

+static void __stdcall pfnListAdapter( const wchar_t* name, void* )
+{
+	wprintf( L"\"%s\"\n", name );
+}
+
+static void listGpus()
+{
+	printf( " Available graphic adapters:\n" );
+	HRESULT hr = Whisper::listGPUs( &pfnListAdapter, nullptr );
+	if( SUCCEEDED( hr ) )
+		return;
+	printError( "Unable to enumerate GPUs", hr );
+}
+
bool whisper_params::parse( int argc, wchar_t* argv[] )
{
	for( int i = 1; i < argc; i++ )
@@ -69,6 +86,12 @@ bool whisper_params::parse( int argc, wchar_t* argv[] )
			return false;
		}

+		if( arg == L"-la" || arg == L"--list-adapters" )
+		{
+			listGpus();
+			return false;
+		}
+
		else if( arg == L"-t" || arg == L"--threads" ) { n_threads = std::stoul( argv[ ++i ] ); }
		else if( arg == L"-p" || arg == L"--processors" ) { n_processors = std::stoul( argv[ ++i ] ); }
		else if( arg == L"-ot" || arg == L"--offset-t" ) { offset_t_ms = std::stoul( argv[ ++i ] ); }
@@ -90,6 +113,7 @@ bool whisper_params::parse( int argc, wchar_t* argv[] )
		else if( arg == L"-l" || arg == L"--language" ) { language = utf8( argv[ ++i ] ); }
		else if( arg == L"-m" || arg == L"--model" ) { model = argv[ ++i ]; }
		else if( arg == L"-f" || arg == L"--file" ) { fname_inp.push_back( argv[ ++i ] ); }
+		else if( arg == L"-gpu" || arg == L"--use-gpu" ) { gpu = argv[ ++i ]; }
		else
		{
			fprintf( stderr, "error: unknown argument: %S\n", arg.c_str() );
1 change: 1 addition & 0 deletions Examples/main/params.h
@@ -28,6 +28,7 @@ struct whisper_params

	std::string language = "en";
	std::wstring model = L"models/ggml-base.en.bin";
+	std::wstring gpu;
	std::vector<std::wstring> fname_inp;

	whisper_params();
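Taken together, the three files wire adapter enumeration (listGPUs) and adapter selection (sModelSetup.adapter) into the example app. The following standalone sketch, which is not part of the commit, shows the same flow in one place. It assumes the Whisper API and ComLight headers that Examples/main already pulls in, and the adapter name passed to loadOnAdapter is a placeholder for a string printed by the enumeration:

#include <string>
#include <cstdio>
// Assumption: the remaining Whisper API / ComLight declarations come from the headers
// Examples/main already includes; iContext.cl.h is the one include this commit adds for listGPUs
#include "../../Whisper/API/iContext.cl.h"

// Callback invoked once per graphic adapter, same signature as in params.cpp above
static void __stdcall printAdapter( const wchar_t* name, void* )
{
	wprintf( L"\"%s\"\n", name );
}

// Load a model on a specific adapter; an empty string keeps the library's default choice
static HRESULT loadOnAdapter( const wchar_t* modelPath, const std::wstring& gpu, Whisper::iModel** pp )
{
	using namespace Whisper;
	sModelSetup setup;
	setup.impl = eModelImplementation::GPU;
	if( !gpu.empty() )
		setup.adapter = gpu.c_str();
	return Whisper::loadModel( modelPath, setup, nullptr, pp );
}

int wmain()
{
	// Print the available adapters, same as the new --list-adapters switch
	Whisper::listGPUs( &printAdapter, nullptr );

	// Load the model on a chosen adapter, same as the new --use-gpu switch;
	// the adapter name below is a placeholder for one of the printed strings
	ComLight::CComPtr<Whisper::iModel> model;
	HRESULT hr = loadOnAdapter( L"models/ggml-base.en.bin", L"<adapter name from the list above>", &model );
	if( FAILED( hr ) )
		fprintf( stderr, "Failed to load the model, HRESULT = 0x%08X\n", (unsigned)hr );
	return FAILED( hr ) ? 1 : 0;
}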
