Skip to content

Commit

Permalink
Add MobileOne model. (huggingface#1595)
Browse files Browse the repository at this point in the history
* Add MobileOne model.

* Clippy fixes

* Remove a comment.

---------

Co-authored-by: laurent <[email protected]>
  • Loading branch information
janimo and LaurentMazare committed Jan 16, 2024
1 parent 7e3349d commit 5270224
Show file tree
Hide file tree
Showing 4 changed files with 452 additions and 0 deletions.
22 changes: 22 additions & 0 deletions candle-examples/examples/mobileone/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# candle-mobileone

[MobileOne: An Improved One millisecond Mobile Backbone](https://arxiv.org/abs/2206.04040).

This candle implementation uses a pre-trained MobileOne network for inference. The
classification head has been trained on the ImageNet dataset and returns the
probabilities for the top-5 classes.

## Running an example

```
$ cargo run --example mobileone --release -- --image candle-examples/examples/yolo-v8/assets/bike.jpg --which s2
loaded image Tensor[dims 3, 224, 224; f32]
model built
mountain bike, all-terrain bike, off-roader: 79.33%
bicycle-built-for-two, tandem bicycle, tandem: 15.32%
crash helmet : 2.58%
unicycle, monocycle : 1.70%
alp : 0.21%
```
96 changes: 96 additions & 0 deletions candle-examples/examples/mobileone/main.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
#[cfg(feature = "mkl")]
extern crate intel_mkl_src;

#[cfg(feature = "accelerate")]
extern crate accelerate_src;

use clap::{Parser, ValueEnum};

use candle::{DType, IndexOp, D};
use candle_nn::{Module, VarBuilder};
use candle_transformers::models::mobileone;

// MobileOne variant selector: s0 is the smallest/fastest, s4 the largest.
// Each variant maps to a timm checkpoint on the Hub and a matching
// `mobileone::Config` (see `model_filename` / `config` below).
// NOTE: plain `//` comments are used instead of `///` on purpose — clap's
// ValueEnum derive turns doc comments into --help text, which would change
// the program's CLI output.
#[derive(Clone, Copy, Debug, ValueEnum)]
enum Which {
    S0,
    S1,
    S2,
    S3,
    S4,
}

impl Which {
fn model_filename(&self) -> String {
let name = match self {
Self::S0 => "s0",
Self::S1 => "s1",
Self::S2 => "s2",
Self::S3 => "s3",
Self::S4 => "s4",
};
format!("timm/mobileone_{}.apple_in1k", name)
}

fn config(&self) -> mobileone::Config {
match self {
Self::S0 => mobileone::Config::s0(),
Self::S1 => mobileone::Config::s1(),
Self::S2 => mobileone::Config::s2(),
Self::S3 => mobileone::Config::s3(),
Self::S4 => mobileone::Config::s4(),
}
}
}

// Command-line arguments for the MobileOne classification example.
// NOTE: new comments use `//` rather than `///` so clap's generated --help
// output stays exactly as before (clap lifts doc comments into help text).
#[derive(Parser)]
struct Args {
    // Optional local path to a safetensors weights file; when absent the
    // weights for the selected variant are fetched from the Hub in `main`.
    #[arg(long)]
    model: Option<String>,

    // Path of the image file to classify.
    #[arg(long)]
    image: String,

    /// Run on CPU rather than on GPU.
    #[arg(long)]
    cpu: bool,

    // MobileOne variant (s0..s4); selects both the config and the weights.
    #[arg(value_enum, long, default_value_t=Which::S0)]
    which: Which,
}

/// Classify an image with a pre-trained MobileOne network and print the
/// top-5 ImageNet classes with their probabilities.
pub fn main() -> anyhow::Result<()> {
    let args = Args::parse();

    let device = candle_examples::device(args.cpu)?;

    // Load the input image as a 3x224x224 f32 tensor.
    let image = candle_examples::imagenet::load_image224(args.image)?;
    println!("loaded image {image:?}");

    // An explicit --model path wins; otherwise fetch the variant's
    // checkpoint from the Hugging Face Hub.
    let model_file = match args.model {
        Some(path) => path.into(),
        None => hf_hub::api::sync::Api::new()?
            .model(args.which.model_filename())
            .get("model.safetensors")?,
    };

    let vb = unsafe { VarBuilder::from_mmaped_safetensors(&[model_file], DType::F32, &device)? };
    // 1000 = number of ImageNet-1k classes for the classification head.
    let model = mobileone::mobileone(&args.which.config(), 1000, vb)?;
    println!("model built");

    // Forward pass on a batch of one, then softmax over the class axis.
    let logits = model.forward(&image.unsqueeze(0)?)?;
    let probabilities = candle_nn::ops::softmax(&logits, D::Minus1)?
        .i(0)?
        .to_vec1::<f32>()?;

    // Rank classes by probability, highest first, and report the top five.
    let mut ranked: Vec<(usize, f32)> = probabilities.into_iter().enumerate().collect();
    ranked.sort_by(|a, b| b.1.total_cmp(&a.1));
    for (class_idx, prob) in ranked.into_iter().take(5) {
        println!(
            "{:24}: {:.2}%",
            candle_examples::imagenet::CLASSES[class_idx],
            100. * prob
        );
    }
    Ok(())
}
Loading

0 comments on commit 5270224

Please sign in to comment.