#42 Add FIW with deepface analysis
matheuslevi11 committed Mar 10, 2024
1 parent 238ddac commit c15f580
Showing 4 changed files with 26,178 additions and 0 deletions.
165 changes: 165 additions & 0 deletions notebooks/apply deepface.ipynb
@@ -0,0 +1,165 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from deepface import DeepFace\n",
"import pandas as pd\n",
"import os\n",
"\n",
"\n",
"os.chdir('../../rfiw2021')"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"df = pd.read_csv('Track1/sample0/val.txt', sep=\" \", header=None)\n",
"df.columns = ['index', 'face1_path', 'face2_path', 'kinship', 'label']\n",
"df.drop('index', axis=1, inplace=True) "
]
},
{
"cell_type": "code",
"execution_count": 112,
"metadata": {},
"outputs": [],
"source": [
"from tqdm import tqdm\n",
"\n",
"def analyze_batch(df, b, name):\n",
" base_file = 'Track1/'\n",
" \n",
" age = []\n",
" gender = []\n",
" race = []\n",
"\n",
" for i, data in tqdm(df.iterrows(), total=len(df), desc=f\"Processing batch {b}\"):\n",
"\n",
" obj = DeepFace.analyze(img_path = base_file+data['face'], enforce_detection=False, silent=True,\n",
" actions = ['age', 'gender', 'race']\n",
" )[0]\n",
"\n",
" age.append(obj['age'])\n",
" gender.append(obj['dominant_gender'])\n",
" race.append(obj['dominant_race'])\n",
"\n",
"\n",
" df['age'] = age\n",
" df['gender'] = gender\n",
" df['race'] = race\n",
"\n",
" df.to_csv(f'fiw/{name}-{b}.csv', index=False)"
]
},
{
"cell_type": "code",
"execution_count": 115,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Processing batch 0: 100%|██████████| 128/128 [00:50<00:00, 2.55it/s]\n",
"Processing batch 1: 100%|██████████| 128/128 [00:49<00:00, 2.58it/s]\n",
"Processing batch 2: 100%|██████████| 128/128 [00:51<00:00, 2.49it/s]\n",
"Processing batch 3: 100%|██████████| 128/128 [00:51<00:00, 2.48it/s]\n",
"Processing batch 4: 100%|██████████| 128/128 [00:50<00:00, 2.52it/s]\n",
"Processing batch 5: 100%|██████████| 128/128 [00:48<00:00, 2.63it/s]\n",
"Processing batch 6: 100%|██████████| 128/128 [00:51<00:00, 2.46it/s]\n",
"Processing batch 7: 100%|██████████| 128/128 [00:51<00:00, 2.49it/s]\n",
"Processing batch 8: 100%|██████████| 128/128 [00:48<00:00, 2.62it/s]\n",
"Processing batch 9: 100%|██████████| 128/128 [00:47<00:00, 2.71it/s]\n",
"Processing batch 10: 100%|██████████| 128/128 [00:49<00:00, 2.58it/s]\n",
"Processing batch 11: 100%|██████████| 128/128 [00:49<00:00, 2.61it/s]\n",
"Processing batch 12: 100%|██████████| 128/128 [00:49<00:00, 2.59it/s]\n",
"Processing batch 13: 100%|██████████| 128/128 [00:48<00:00, 2.63it/s]\n",
"Processing batch 14: 100%|██████████| 128/128 [00:51<00:00, 2.47it/s]\n",
"Processing batch 15: 100%|██████████| 128/128 [00:49<00:00, 2.59it/s]\n",
"Processing batch 16: 100%|██████████| 128/128 [00:48<00:00, 2.64it/s]\n",
"Processing batch 17: 100%|██████████| 128/128 [00:47<00:00, 2.69it/s]\n",
"Processing batch 18: 100%|██████████| 128/128 [00:50<00:00, 2.55it/s]\n",
"Processing batch 19: 100%|██████████| 128/128 [00:51<00:00, 2.48it/s]\n",
"Processing batch 20: 100%|██████████| 128/128 [00:49<00:00, 2.61it/s]\n",
"Processing batch 21: 100%|██████████| 128/128 [00:48<00:00, 2.66it/s]\n",
"Processing batch 22: 100%|██████████| 128/128 [00:50<00:00, 2.54it/s]\n",
"Processing batch 23: 100%|██████████| 128/128 [00:50<00:00, 2.55it/s]\n",
"Processing batch 24: 100%|██████████| 128/128 [00:49<00:00, 2.57it/s]\n",
"Processing batch 25: 100%|██████████| 128/128 [00:48<00:00, 2.66it/s]\n",
"Processing batch 26: 100%|██████████| 128/128 [00:51<00:00, 2.50it/s]\n",
"Processing batch 27: 100%|██████████| 128/128 [00:53<00:00, 2.39it/s]\n",
"Processing batch 28: 100%|██████████| 128/128 [00:49<00:00, 2.56it/s]\n",
"Processing batch 29: 100%|██████████| 128/128 [00:50<00:00, 2.55it/s]\n",
"Processing batch 30: 100%|██████████| 128/128 [00:49<00:00, 2.57it/s]\n",
"Processing batch 31: 100%|██████████| 128/128 [00:49<00:00, 2.59it/s]\n",
"Processing batch 32: 100%|██████████| 128/128 [00:52<00:00, 2.45it/s]\n",
"Processing batch 33: 100%|██████████| 128/128 [00:53<00:00, 2.38it/s]\n",
"Processing batch 34: 100%|██████████| 128/128 [00:54<00:00, 2.33it/s]\n",
"Processing batch 35: 100%|██████████| 128/128 [00:55<00:00, 2.32it/s]\n",
"Processing batch 36: 100%|██████████| 128/128 [00:55<00:00, 2.29it/s]\n",
"Processing batch 37: 100%|██████████| 128/128 [00:57<00:00, 2.21it/s]\n",
"Processing batch 38: 100%|██████████| 128/128 [00:59<00:00, 2.15it/s]\n",
"Processing batch 39: 100%|██████████| 128/128 [00:59<00:00, 2.16it/s]\n",
"Processing batch 40: 100%|██████████| 106/106 [00:48<00:00, 2.17it/s]\n"
]
}
],
"source": [
"batch_size = 128\n",
"\n",
"num_batches = len(df) // batch_size + 1 if len(df) % batch_size != 0 else len(df) // batch_size\n",
"\n",
"for i in range(num_batches):\n",
" start_idx = i * batch_size\n",
" end_idx = (i + 1) * batch_size\n",
" batch_df = df.iloc[start_idx:end_idx]\n",
"\n",
" if os.path.exists(f'fiw/test-{i}.csv'):\n",
" continue\n",
"\n",
" analyze_batch(batch_df, i, 'test') "
]
},
{
"cell_type": "code",
"execution_count": 117,
"metadata": {},
"outputs": [],
"source": [
"dfs = []\n",
"for i in range(40):\n",
" dfs.append(pd.read_csv(f'fiw/test-{i}.csv'))\n",
"\n",
"df = pd.concat(dfs).to_csv('fiw/test_with_deepface.csv', index=False)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "research",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.18"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
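Note on the helper's expected input: analyze_batch indexes a single 'face' column, while the val.txt frame built in the second cell exposes face1_path/face2_path pairs. The reshape that bridges the two is not part of this commit; a minimal sketch of one possible version, reusing only the column names from the cells above, could look like this:

import pandas as pd

# Read the same pair list as the notebook (column names follow the second cell).
pairs = pd.read_csv('Track1/sample0/val.txt', sep=' ', header=None,
                    names=['index', 'face1_path', 'face2_path', 'kinship', 'label'])

# Stack both path columns into one 'face' column and drop duplicates,
# matching the field that analyze_batch reads.
faces = (pd.concat([pairs['face1_path'], pairs['face2_path']])
           .drop_duplicates()
           .reset_index(drop=True)
           .to_frame(name='face'))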