- WSL2 (any Linux distribution works)
- Nvidia GPU drivers (see the sanity check after this list)
- Micromamba / Miniconda
- tensorflow: 2.11.0
- cudnn: 8.4.1.50
- cudatoolkit: 11.8.0
- tensorrt: 8.5.3.1
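- (Optional) Verify the GPU is visible inside WSL2
A rough sanity check, assuming the Nvidia driver is installed on the Windows side; nvidia-smi run from the WSL2 shell should list your GPU
nvidia-smi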
- Create .mambarc / .condarc in $HOME
Only needed if you don't want to pass '-c conda-forge' to every install command
printf "channels:\n  - conda-forge\n" > $HOME/.mambarc
- Create tf environment & complete conda-forge installs
micromamba create --name tf python=3.10
micromamba activate tf
micromamba install cudatoolkit=11.8.0 cudnn=8.4.1.50
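As an optional sanity check (assuming the conda-forge packages place their shared libraries in $CONDA_PREFIX/lib), the CUDA runtime and cuDNN libraries should now be present
ls $CONDA_PREFIX/lib | grep -E 'libcudart|libcudnn'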
- Set LD_LIBRARY_PATH
The first export applies the change to the current shell, so you don't have to deactivate and re-activate the environment; the activate.d script makes it persistent for future activations
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CONDA_PREFIX/lib/
mkdir -p $CONDA_PREFIX/etc/conda/activate.d
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CONDA_PREFIX/lib/' > $CONDA_PREFIX/etc/conda/activate.d/env_vars.sh
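To confirm the hook works for future activations (optional; deactivating and re-activating re-runs the scripts in activate.d), LD_LIBRARY_PATH should end with $CONDA_PREFIX/lib/
micromamba deactivate && micromamba activate tf
echo $LD_LIBRARY_PATH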
- Pip installations
pip install tensorflow==2.11.0
pip install tensorrt==8.5.3.1
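At this point TensorFlow should already detect the GPU (a standard smoke test; warnings about missing libnvinfer.so.7 are expected until the symlink step below)
python -c "import tensorflow as tf; print(tf.config.list_physical_devices('GPU'))"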
- Symlink tensorrt libs to $CONDA_PREFIX/lib/
cd $CONDA_PREFIX/lib
ln -s $CONDA_PREFIX/lib/python3.10/site-packages/tensorrt/libnvinfer.so.8 libnvinfer.so.7
ln -s $CONDA_PREFIX/lib/python3.10/site-packages/tensorrt/libnvinfer_plugin.so.8 libnvinfer_plugin.so.7
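A final optional check that the symlinks resolve and TensorRT imports cleanly (the pip tensorrt package exposes __version__)
ls -l $CONDA_PREFIX/lib/libnvinfer*.so.7
python -c "import tensorrt; print(tensorrt.__version__)"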