From bf20f7294ab75d69dd03383c054a079cb47fc022 Mon Sep 17 00:00:00 2001 From: zyddnys Date: Sun, 11 Oct 2020 17:14:39 -0400 Subject: [PATCH] first commit --- README.md | 11 + RegDanbooru2019_8G.py | 33 + RegNetY-8.0GF_dds_8gpu.yaml | 27 + RegNetY_8G.py | 67 + danbooru_labels.txt | 4096 +++++++++++++++++ pycls/__init__.py | 0 pycls/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 132 bytes pycls/core/__init__.py | 0 .../core/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 137 bytes pycls/core/__pycache__/config.cpython-37.pyc | Bin 0 -> 4257 bytes pycls/core/__pycache__/losses.cpython-37.pyc | Bin 0 -> 755 bytes .../__pycache__/model_builder.cpython-37.pyc | Bin 0 -> 1317 bytes pycls/core/config.py | 410 ++ pycls/core/losses.py | 28 + pycls/core/model_builder.py | 50 + pycls/core/optimizer.py | 79 + pycls/datasets/__init__.py | 0 .../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 141 bytes .../__pycache__/cifar10.cpython-37.pyc | Bin 0 -> 2902 bytes .../__pycache__/imagenet.cpython-37.pyc | Bin 0 -> 3347 bytes .../__pycache__/loader.cpython-37.pyc | Bin 0 -> 2056 bytes .../datasets/__pycache__/paths.cpython-37.pyc | Bin 0 -> 815 bytes .../__pycache__/transforms.cpython-37.pyc | Bin 0 -> 3422 bytes pycls/datasets/cifar10.py | 83 + pycls/datasets/imagenet.py | 108 + pycls/datasets/loader.py | 80 + pycls/datasets/paths.py | 35 + pycls/datasets/transforms.py | 108 + pycls/models/__init__.py | 0 .../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 139 bytes .../models/__pycache__/anynet.cpython-37.pyc | Bin 0 -> 13481 bytes .../models/__pycache__/effnet.cpython-37.pyc | Bin 0 -> 7300 bytes .../models/__pycache__/regnet.cpython-37.pyc | Bin 0 -> 3809 bytes .../models/__pycache__/resnet.cpython-37.pyc | Bin 0 -> 9154 bytes pycls/models/anynet.py | 380 ++ pycls/models/effnet.py | 235 + pycls/models/regnet.py | 86 + pycls/models/resnet.py | 275 ++ pycls/utils/__init__.py | 0 .../utils/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 138 bytes .../__pycache__/benchmark.cpython-37.pyc 
| Bin 0 -> 1877 bytes .../__pycache__/checkpoint.cpython-37.pyc | Bin 0 -> 2798 bytes .../__pycache__/distributed.cpython-37.pyc | Bin 0 -> 1883 bytes .../__pycache__/error_handler.cpython-37.pyc | Bin 0 -> 1945 bytes pycls/utils/__pycache__/io.cpython-37.pyc | Bin 0 -> 2286 bytes .../utils/__pycache__/logging.cpython-37.pyc | Bin 0 -> 3790 bytes pycls/utils/__pycache__/meters.cpython-37.pyc | Bin 0 -> 7415 bytes .../utils/__pycache__/metrics.cpython-37.pyc | Bin 0 -> 3724 bytes .../multiprocessing.cpython-37.pyc | Bin 0 -> 1234 bytes pycls/utils/__pycache__/net.cpython-37.pyc | Bin 0 -> 3408 bytes pycls/utils/__pycache__/timer.cpython-37.pyc | Bin 0 -> 1031 bytes pycls/utils/benchmark.py | 89 + pycls/utils/checkpoint.py | 91 + pycls/utils/distributed.py | 61 + pycls/utils/error_handler.py | 59 + pycls/utils/io.py | 90 + pycls/utils/logging.py | 100 + pycls/utils/lr_policy.py | 47 + pycls/utils/meters.py | 239 + pycls/utils/metrics.py | 104 + pycls/utils/multiprocessing.py | 57 + pycls/utils/net.py | 94 + pycls/utils/plotting.py | 132 + pycls/utils/timer.py | 35 + test.py | 78 + 65 files changed, 7467 insertions(+) create mode 100644 README.md create mode 100644 RegDanbooru2019_8G.py create mode 100644 RegNetY-8.0GF_dds_8gpu.yaml create mode 100644 RegNetY_8G.py create mode 100644 danbooru_labels.txt create mode 100644 pycls/__init__.py create mode 100644 pycls/__pycache__/__init__.cpython-37.pyc create mode 100644 pycls/core/__init__.py create mode 100644 pycls/core/__pycache__/__init__.cpython-37.pyc create mode 100644 pycls/core/__pycache__/config.cpython-37.pyc create mode 100644 pycls/core/__pycache__/losses.cpython-37.pyc create mode 100644 pycls/core/__pycache__/model_builder.cpython-37.pyc create mode 100644 pycls/core/config.py create mode 100644 pycls/core/losses.py create mode 100644 pycls/core/model_builder.py create mode 100644 pycls/core/optimizer.py create mode 100644 pycls/datasets/__init__.py create mode 100644 
pycls/datasets/__pycache__/__init__.cpython-37.pyc create mode 100644 pycls/datasets/__pycache__/cifar10.cpython-37.pyc create mode 100644 pycls/datasets/__pycache__/imagenet.cpython-37.pyc create mode 100644 pycls/datasets/__pycache__/loader.cpython-37.pyc create mode 100644 pycls/datasets/__pycache__/paths.cpython-37.pyc create mode 100644 pycls/datasets/__pycache__/transforms.cpython-37.pyc create mode 100644 pycls/datasets/cifar10.py create mode 100644 pycls/datasets/imagenet.py create mode 100644 pycls/datasets/loader.py create mode 100644 pycls/datasets/paths.py create mode 100644 pycls/datasets/transforms.py create mode 100644 pycls/models/__init__.py create mode 100644 pycls/models/__pycache__/__init__.cpython-37.pyc create mode 100644 pycls/models/__pycache__/anynet.cpython-37.pyc create mode 100644 pycls/models/__pycache__/effnet.cpython-37.pyc create mode 100644 pycls/models/__pycache__/regnet.cpython-37.pyc create mode 100644 pycls/models/__pycache__/resnet.cpython-37.pyc create mode 100644 pycls/models/anynet.py create mode 100644 pycls/models/effnet.py create mode 100644 pycls/models/regnet.py create mode 100644 pycls/models/resnet.py create mode 100644 pycls/utils/__init__.py create mode 100644 pycls/utils/__pycache__/__init__.cpython-37.pyc create mode 100644 pycls/utils/__pycache__/benchmark.cpython-37.pyc create mode 100644 pycls/utils/__pycache__/checkpoint.cpython-37.pyc create mode 100644 pycls/utils/__pycache__/distributed.cpython-37.pyc create mode 100644 pycls/utils/__pycache__/error_handler.cpython-37.pyc create mode 100644 pycls/utils/__pycache__/io.cpython-37.pyc create mode 100644 pycls/utils/__pycache__/logging.cpython-37.pyc create mode 100644 pycls/utils/__pycache__/meters.cpython-37.pyc create mode 100644 pycls/utils/__pycache__/metrics.cpython-37.pyc create mode 100644 pycls/utils/__pycache__/multiprocessing.cpython-37.pyc create mode 100644 pycls/utils/__pycache__/net.cpython-37.pyc create mode 100644 
pycls/utils/__pycache__/timer.cpython-37.pyc create mode 100644 pycls/utils/benchmark.py create mode 100644 pycls/utils/checkpoint.py create mode 100644 pycls/utils/distributed.py create mode 100644 pycls/utils/error_handler.py create mode 100644 pycls/utils/io.py create mode 100644 pycls/utils/logging.py create mode 100644 pycls/utils/lr_policy.py create mode 100644 pycls/utils/meters.py create mode 100644 pycls/utils/metrics.py create mode 100644 pycls/utils/multiprocessing.py create mode 100644 pycls/utils/net.py create mode 100644 pycls/utils/plotting.py create mode 100644 pycls/utils/timer.py create mode 100644 test.py diff --git a/README.md b/README.md new file mode 100644 index 0000000..195a08d --- /dev/null +++ b/README.md @@ -0,0 +1,11 @@ +# Yet another Deep Danbooru project +But based on [RegNetY-8G](https://arxiv.org/abs/2003.13678), relative lightweight, designed to run fast on GPU. \ +Training is done using mixed precision training on a single RTX2080Ti for 3 weeks. \ +Some code are from https://github.com/facebookresearch/pycls +# What do I need? +You need to download [save_4000000.ckpt]() from release and place on the same folder as `test.py`. +# How to use? +`python test.py --model save_4000000.ckpt --image ` +# What to do in the future? +1. 
Quantize to 8 bit + diff --git a/RegDanbooru2019_8G.py b/RegDanbooru2019_8G.py new file mode 100644 index 0000000..d73ac99 --- /dev/null +++ b/RegDanbooru2019_8G.py @@ -0,0 +1,33 @@ + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from RegNetY_8G import build_model + +class RegDanbooru2019(nn.Module) : + def __init__(self) : + super(RegDanbooru2019, self).__init__() + self.backbone = build_model() + num_p = sum(p.numel() for p in self.backbone.parameters() if p.requires_grad) + print( 'Backbone has %d parameters' % num_p ) + self.head_danbooru = nn.Linear(2016, 4096) + + def forward_train_head(self, images) : + """ + images of shape [N, 3, 512, 512] + """ + with torch.no_grad() : + feats = self.backbone(images) + feats = F.adaptive_avg_pool2d(feats, 1).view(-1, 2016) + danbooru_logits = self.head_danbooru(feats) # [N, 4096] + return danbooru_logits + + def forward(self, images) : + """ + images of shape [N, 3, 512, 512] + """ + feats = self.backbone(images) + feats = F.adaptive_avg_pool2d(feats, 1).view(-1, 2016) + danbooru_logits = self.head_danbooru(feats) # [N, 4096] + return danbooru_logits diff --git a/RegNetY-8.0GF_dds_8gpu.yaml b/RegNetY-8.0GF_dds_8gpu.yaml new file mode 100644 index 0000000..912754d --- /dev/null +++ b/RegNetY-8.0GF_dds_8gpu.yaml @@ -0,0 +1,27 @@ +MODEL: + TYPE: regnet + NUM_CLASSES: 1000 +REGNET: + SE_ON: true + DEPTH: 17 + W0: 192 + WA: 76.82 + WM: 2.19 + GROUP_W: 56 +OPTIM: + LR_POLICY: cos + BASE_LR: 0.4 + MAX_EPOCH: 100 + MOMENTUM: 0.9 + WEIGHT_DECAY: 5e-5 + WARMUP_EPOCHS: 5 +TRAIN: + DATASET: imagenet + IM_SIZE: 512 + BATCH_SIZE: 512 +TEST: + DATASET: imagenet + IM_SIZE: 512 + BATCH_SIZE: 400 +NUM_GPUS: 1 +OUT_DIR: . diff --git a/RegNetY_8G.py b/RegNetY_8G.py new file mode 100644 index 0000000..bc78a23 --- /dev/null +++ b/RegNetY_8G.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""Test a trained classification model.""" + +import argparse +import sys + +import numpy as np +import pycls.core.losses as losses +import pycls.core.model_builder as model_builder +import pycls.datasets.loader as loader +import pycls.utils.benchmark as bu +import pycls.utils.checkpoint as cu +import pycls.utils.distributed as du +import pycls.utils.logging as lu +import pycls.utils.metrics as mu +import pycls.utils.multiprocessing as mpu +import pycls.utils.net as nu +import torch +from pycls.core.config import assert_and_infer_cfg, cfg +from pycls.utils.meters import TestMeter + + +logger = lu.get_logger(__name__) + +def log_model_info(model): + """Logs model info""" + logger.info("Model:\n{}".format(model)) + logger.info("Params: {:,}".format(mu.params_count(model))) + logger.info("Flops: {:,}".format(mu.flops_count(model))) + logger.info("Acts: {:,}".format(mu.acts_count(model))) + +def build_model(): + + # Load config options + cfg.merge_from_file('RegNetY-8.0GF_dds_8gpu.yaml') + cfg.merge_from_list([]) + assert_and_infer_cfg() + cfg.freeze() + # Setup logging + lu.setup_logging() + # Show the config + logger.info("Config:\n{}".format(cfg)) + + # Fix the RNG seeds (see RNG comment in core/config.py for discussion) + np.random.seed(cfg.RNG_SEED) + torch.manual_seed(cfg.RNG_SEED) + # Configure the CUDNN backend + torch.backends.cudnn.benchmark = cfg.CUDNN.BENCHMARK + + # Build the model (before the loaders to speed up debugging) + model = model_builder.build_model() + log_model_info(model) + + # Load model weights + #cu.load_checkpoint('RegNetY-8.0GF_dds_8gpu.pyth', model) + logger.info("Loaded model weights from: {}".format('RegNetY-8.0GF_dds_8gpu.pyth')) + + del model.head + + return model + diff --git a/danbooru_labels.txt b/danbooru_labels.txt new file mode 100644 index 0000000..51ec5ff --- /dev/null +++ b/danbooru_labels.txt 
@@ -0,0 +1,4096 @@ +0 1girl +1 solo +2 long_hair +3 breasts +4 blush +5 smile +6 looking_at_viewer +7 short_hair +8 open_mouth +9 multiple_girls +10 blue_eyes +11 blonde_hair +12 brown_hair +13 skirt +14 large_breasts +15 hat +16 thighhighs +17 black_hair +18 red_eyes +19 bangs +20 hair_ornament +21 simple_background +22 ribbon +23 2girls +24 gloves +25 dress +26 bow +27 1boy +28 brown_eyes +29 navel +30 white_background +31 twintails +32 shirt +33 school_uniform +34 underwear +35 long_sleeves +36 eyebrows_visible_through_hair +37 cleavage +38 medium_breasts +39 sitting +40 green_eyes +41 animal_ears +42 monochrome +43 panties +44 very_long_hair +45 nipples +46 blue_hair +47 bare_shoulders +48 jewelry +49 comic +50 purple_eyes +51 hair_ribbon +52 closed_eyes +53 holding +54 black_legwear +55 weapon +56 greyscale +57 hair_between_eyes +58 ponytail +59 standing +60 purple_hair +61 yellow_eyes +62 pink_hair +63 ass +64 swimsuit +65 collarbone +66 hair_bow +67 tail +68 silver_hair +69 flower +70 full_body +71 braid +72 :d +73 ahoge +74 pantyhose +75 hairband +76 closed_mouth +77 boots +78 nude +79 red_hair +80 heart +81 jacket +82 wings +83 green_hair +84 short_sleeves +85 pleated_skirt +86 japanese_clothes +87 one_eye_closed +88 serafuku +89 glasses +90 detached_sleeves +91 food +92 lying +93 upper_body +94 small_breasts +95 sweat +96 bikini +97 male_focus +98 multiple_boys +99 hetero +100 censored +101 barefoot +102 necktie +103 shoes +104 white_legwear +105 sky +106 white_hair +107 sidelocks +108 cowboy_shot +109 frills +110 earrings +111 pussy +112 elbow_gloves +113 penis +114 striped +115 tears +116 open_clothes +117 white_shirt +118 day +119 sword +120 parted_lips +121 3girls +122 shorts +123 fang +124 midriff +125 alternate_costume +126 looking_back +127 solo_focus +128 thighs +129 hairclip +130 outdoors +131 pointy_ears +132 tongue +133 choker +134 cloud +135 cat_ears +136 belt +137 sleeveless +138 multicolored_hair +139 puffy_sleeves +140 cum +141 horns +142 
chibi +143 shiny +144 white_gloves +145 hair_flower +146 fingerless_gloves +147 2boys +148 uniform +149 spread_legs +150 kimono +151 yuri +152 pink_eyes +153 white_panties +154 scarf +155 miniskirt +156 cape +157 artist_name +158 on_back +159 black_gloves +160 teeth +161 star +162 wide_sleeves +163 armpits +164 sex +165 water +166 flat_chest +167 socks +168 sweatdrop +169 :o +170 bra +171 side_ponytail +172 tongue_out +173 character_name +174 bag +175 dark_skin +176 black_eyes +177 hood +178 armor +179 sketch +180 from_behind +181 signature +182 zettai_ryouiki +183 huge_breasts +184 orange_hair +185 necklace +186 covered_nipples +187 twin_braids +188 aqua_eyes +189 apron +190 blunt_bangs +191 grey_hair +192 bunny_ears +193 indoors +194 grin +195 bowtie +196 vest +197 sailor_collar +198 pants +199 lips +200 bracelet +201 black_skirt +202 military +203 hair_over_one_eye +204 feet +205 vaginal +206 two_side_up +207 high_heels +208 dated +209 twitter_username +210 nail_polish +211 tree +212 arms_up +213 collar +214 off_shoulder +215 kneehighs +216 book +217 gun +218 head_tilt +219 mosaic_censoring +220 4koma +221 4girls +222 white_dress +223 collared_shirt +224 sweater +225 maid +226 military_uniform +227 looking_at_another +228 groin +229 arm_up +230 legs +231 cosplay +232 cat_tail +233 shiny_skin +234 open_shirt +235 hug +236 torn_clothes +237 grey_background +238 holding_weapon +239 dutch_angle +240 wrist_cuffs +241 plaid +242 cup +243 hand_on_hip +244 petals +245 one-piece_swimsuit +246 wet +247 mole +248 no_bra +249 kneeling +250 aqua_hair +251 sash +252 hand_up +253 maid_headdress +254 stomach +255 loli +256 v-shaped_eyebrows +257 black_dress +258 puffy_short_sleeves +259 pillow +260 neckerchief +261 from_side +262 black_footwear +263 red_neckwear +264 no_panties +265 english_text +266 6+girls +267 see-through +268 black_panties +269 bed +270 fingernails +271 blue_sky +272 single_braid +273 leotard +274 red_ribbon +275 pubic_hair +276 blue_skirt +277 blood +278 
saliva +279 witch_hat +280 fruit +281 ascot +282 orange_eyes +283 blurry +284 headgear +285 gradient +286 copyright_name +287 sparkle +288 speech_bubble +289 pussy_juice +290 ^_^ +291 bottomless +292 underboob +293 magical_girl +294 alternate_hairstyle +295 headphones +296 headband +297 red_bow +298 siblings +299 dress_shirt +300 sideboob +301 v +302 bodysuit +303 parody +304 pantyshot +305 profile +306 sleeves_past_wrists +307 no_humans +308 eyebrows +309 :3 +310 shiny_hair +311 umbrella +312 striped_legwear +313 areolae +314 double_bun +315 bare_legs +316 uncensored +317 cameltoe +318 window +319 makeup +320 gradient_background +321 hair_tubes +322 holding_hands +323 side-tie_bikini +324 capelet +325 traditional_media +326 leaning_forward +327 bell +328 symbol-shaped_pupils +329 fox_ears +330 chain +331 thigh_boots +332 drill_hair +333 cover +334 cum_in_pussy +335 rose +336 two-tone_hair +337 coat +338 fur_trim +339 topless +340 anus +341 night +342 bare_arms +343 mask +344 short_shorts +345 glowing +346 parted_bangs +347 arm_support +348 wariza +349 mouth_hold +350 hoodie +351 open_jacket +352 fox_tail +353 blush_stickers +354 lingerie +355 school_swimsuit +356 eyelashes +357 strapless +358 bird +359 embarrassed +360 turtleneck +361 bat_wings +362 skirt_lift +363 moon +364 frown +365 transparent_background +366 one_side_up +367 shirt_lift +368 bound +369 looking_to_the_side +370 bed_sheet +371 bdsm +372 black_ribbon +373 eyepatch +374 eating +375 remodel_(kantai_collection) +376 beret +377 plaid_skirt +378 tattoo +379 depth_of_field +380 ocean +381 garter_straps +382 crop_top +383 wavy_mouth +384 leaf +385 grey_eyes +386 back +387 nose_blush +388 animal +389 underwear_only +390 from_above +391 on_side +392 mole_under_eye +393 heterochromia +394 skindentation +395 grabbing +396 stuffed_toy +397 hair_bobbles +398 thigh_gap +399 bandages +400 5girls +401 crossed_arms +402 watermark +403 obi +404 gradient_hair +405 ass_visible_through_thighs +406 arms_behind_back 
+407 covered_navel +408 pokemon_(creature) +409 detached_collar +410 chair +411 shadow +412 wavy_hair +413 blazer +414 hat_ribbon +415 flying_sweatdrops +416 sleeveless_dress +417 undressing +418 cat +419 blue_dress +420 floating_hair +421 sandals +422 low_twintails +423 bar_censor +424 beach +425 red_skirt +426 sleeping +427 neck_ribbon +428 knee_boots +429 looking_away +430 katana +431 bondage +432 cum_on_body +433 sleeveless_shirt +434 fangs +435 crossover +436 chinese_clothes +437 younger +438 polka_dot +439 short_dress +440 outstretched_arms +441 stuffed_animal +442 ;d +443 couple +444 striped_panties +445 toes +446 short_twintails +447 >_< +448 expressionless +449 formal +450 breast_grab +451 thigh_strap +452 multiple_views +453 panty_pull +454 lipstick +455 oral +456 black_bow +457 scrunchie +458 /\/\/\ +459 mob_cap +460 helmet +461 trembling +462 facial_mark +463 from_below +464 scar +465 medium_hair +466 bent_over +467 hair_bun +468 floral_print +469 buttons +470 light_smile +471 face +472 3boys +473 swept_bangs +474 suspenders +475 sunlight +476 cover_page +477 blue_background +478 crown +479 happy +480 leg_up +481 cleavage_cutout +482 antenna_hair +483 wolf_ears +484 facial_hair +485 halterneck +486 sisters +487 hands_up +488 table +489 pov +490 tan +491 backpack +492 phone +493 bug +494 skirt_set +495 heart-shaped_pupils +496 polearm +497 on_bed +498 crossed_legs +499 casual +500 fire +501 soles +502 no_shoes +503 crying +504 squatting +505 tokin_hat +506 pink_panties +507 wind +508 bike_shorts +509 outstretched_arm +510 insect +511 pink_background +512 pointing +513 cherry_blossoms +514 black_headwear +515 staff +516 monster_girl +517 crescent +518 black_jacket +519 genderswap +520 looking_down +521 on_stomach +522 otoko_no_ko +523 red_dress +524 no_pants +525 surprised +526 suit +527 bunnysuit +528 highleg +529 all_fours +530 brown_footwear +531 cardigan +532 side-tie_panties +533 lace +534 denim +535 gauntlets +536 third_eye +537 multiple_tails +538 
shaded_face +539 convenient_censoring +540 loafers +541 covering +542 tiara +543 cum_on_upper_body +544 head_wings +545 knife +546 peaked_cap +547 white_bikini +548 black_bikini +549 checkered +550 cellphone +551 blouse +552 towel +553 instrument +554 grass +555 ground_vehicle +556 bottle +557 breast_press +558 fellatio +559 ring +560 standing_on_one_leg +561 pink_bow +562 child +563 kiss +564 feathers +565 holding_sword +566 multicolored +567 cloudy_sky +568 doujinshi +569 straddling +570 hair_intakes +571 half-closed_eyes +572 breasts_outside +573 black_shirt +574 looking_up +575 shinkaisei-kan +576 tank_top +577 high_ponytail +578 :< +579 sunglasses +580 facial +581 fake_animal_ears +582 light_brown_hair +583 hat_bow +584 nature +585 no_headwear +586 plant +587 goggles +588 girl_on_top +589 ? +590 black_neckwear +591 between_breasts +592 adapted_costume +593 bow_panties +594 sheath +595 musical_note +596 muscle +597 slit_pupils +598 tareme +599 hand_on_own_chest +600 white_headwear +601 demon_girl +602 pale_skin +603 animal_ear_fluff +604 bob_cut +605 butterfly +606 fan +607 front-tie_top +608 frilled_skirt +609 ribbon_trim +610 curtains +611 foreshortening +612 microphone +613 juliet_sleeves +614 letterboxed +615 lavender_hair +616 no_hat +617 wristband +618 mary_janes +619 upskirt +620 candy +621 angry +622 cross +623 bikini_top +624 box +625 t-shirt +626 twin_drills +627 puffy_nipples +628 hands +629 frilled_dress +630 anger_vein +631 testicles +632 wing_collar +633 drooling +634 building +635 clenched_teeth +636 wet_clothes +637 tsurime +638 white_skirt +639 clenched_hand +640 cum_on_breasts +641 star_(sky) +642 blue_bow +643 anal +644 machinery +645 full_moon +646 blue_ribbon +647 brown_legwear +648 animal_print +649 broom +650 horn +651 christmas +652 armband +653 ... 
+654 hair_flaps +655 scenery +656 waist_apron +657 strap_slip +658 streaked_hair +659 dual_persona +660 :p +661 carrying +662 glowing_eyes +663 eyes_visible_through_hair +664 rope +665 jingle_bell +666 white_footwear +667 brooch +668 genderswap_(mtf) +669 breast_hold +670 x_hair_ornament +671 messy_hair +672 skin_tight +673 snow +674 hakama +675 elf +676 corset +677 bunny_tail +678 night_sky +679 border +680 motor_vehicle +681 clothes_writing +682 strapless_dress +683 web_address +684 long_legs +685 semi-rimless_eyewear +686 personification +687 rifle +688 veil +689 china_dress +690 breath +691 breasts_apart +692 kemonomimi_mode +693 green_skirt +694 blurry_background +695 lifted_by_self +696 androgynous +697 abs +698 mecha +699 thick_thighs +700 santa_hat +701 pendant +702 single_thighhigh +703 multiple_penises +704 spiked_hair +705 finger_to_mouth +706 bridal_gauntlets +707 handgun +708 torn_legwear +709 wolf_tail +710 black_hairband +711 pink_dress +712 nontraditional_miko +713 santa_costume +714 sharp_teeth +715 black_bra +716 headset +717 revealing_clothes +718 o_o +719 tentacles +720 sneakers +721 vertical_stripes +722 steam +723 red_flower +724 cross-laced_footwear +725 group_sex +726 thick_eyebrows +727 hair_over_shoulder +728 hood_down +729 virtual_youtuber +730 tanline +731 6+boys +732 fishnets +733 frilled_sleeves +734 blue_sailor_collar +735 close-up +736 panties_under_pantyhose +737 erection +738 half_updo +739 headwear_removed +740 piercing +741 couch +742 licking +743 white_bow +744 gift +745 sailor_dress +746 gem +747 claws +748 red_footwear +749 seiza +750 dress_lift +751 dog_ears +752 paizuri +753 doujin_cover +754 holding_food +755 blue_neckwear +756 dual_wielding +757 white_ribbon +758 lolita_fashion +759 forest +760 side_braid +761 masturbation +762 red-framed_eyewear +763 naughty_face +764 micro_bikini +765 ribbed_sweater +766 sweater_vest +767 lens_flare +768 handjob +769 turret +770 teacup +771 between_legs +772 ball +773 ejaculation +774 
eye_contact +775 gym_uniform +776 =_= +777 motion_lines +778 arms_behind_head +779 garter_belt +780 gen_1_pokemon +781 buckle +782 alcohol +783 minigirl +784 wading +785 cuffs +786 baseball_cap +787 halloween +788 pom_pom_(clothes) +789 after_sex +790 dark_skinned_male +791 folded_ponytail +792 crossdressing +793 v_arms +794 flying +795 wide_hips +796 blue_shirt +797 side_slit +798 mole_under_mouth +799 buruma +800 bespectacled +801 interlocked_fingers +802 desk +803 fish +804 new_year +805 legs_apart +806 fingering +807 camisole +808 jitome +809 index_finger_raised +810 spread_pussy +811 spear +812 low-tied_long_hair +813 cloak +814 bow_(weapon) +815 skull +816 epaulettes +817 armlet +818 yellow_neckwear +819 tray +820 black_background +821 light_particles +822 curvy +823 bandaid +824 bowl +825 innertube +826 hip_focus +827 spot_color +828 demon_tail +829 oekaki +830 purple_dress +831 older +832 starry_sky +833 legs_up +834 hands_together +835 text_focus +836 monster +837 bunny +838 black_wings +839 rain +840 spikes +841 bubble +842 ribbon-trimmed_sleeves +843 card_(medium) +844 rape +845 dakimakura +846 holding_gun +847 clenched_hands +848 4boys +849 empty_eyes +850 hat_removed +851 hair_scrunchie +852 pencil_skirt +853 out_of_frame +854 smoke +855 own_hands_together +856 zipper +857 thong +858 cake +859 yellow_background +860 shield +861 feet_out_of_frame +862 |_| +863 under-rim_eyewear +864 french_braid +865 red_legwear +866 partially_submerged +867 long_fingernails +868 hair_rings +869 twins +870 white_flower +871 teddy_bear +872 dog +873 page_number +874 cum_in_mouth +875 black_shorts +876 sunset +877 contrapposto +878 smartphone +879 spoken_heart +880 blue_legwear +881 ice +882 doggystyle +883 black_choker +884 snake +885 restrained +886 areola_slip +887 sleeves_rolled_up +888 faceless +889 happy_birthday +890 arm_at_side +891 impossible_clothes +892 upside-down +893 pelvic_curtain +894 hands_on_hips +895 high_heel_boots +896 spoken_ellipsis +897 beard +898 
school_bag +899 thighband_pantyhose +900 yukata +901 straight_hair +902 reflection +903 walking +904 track_jacket +905 blue_bikini +906 curly_hair +907 miko +908 tress_ribbon +909 popsicle +910 shawl +911 denim_shorts +912 black_serafuku +913 running +914 ;) +915 plate +916 butt_crack +917 sun_hat +918 blue_jacket +919 paw_pose +920 yellow_bow +921 striped_bikini +922 backlighting +923 short_ponytail +924 white_bra +925 heavy_breathing +926 outstretched_hand +927 black_pants +928 sarashi +929 hand_on_another's_head +930 wince +931 knees_up +932 :q +933 swimsuit_under_clothes +934 shiny_clothes +935 breast_squeeze +936 short_hair_with_long_locks +937 pink_skirt +938 red_shirt +939 arm_behind_back +940 extra_ears +941 cumdrip +942 +_+ +943 long_skirt +944 hooded_jacket +945 red_gloves +946 ghost +947 upper_teeth +948 o-ring +949 drinking_glass +950 demon_wings +951 covering_breasts +952 contemporary +953 geta +954 freckles +955 cowgirl_position +956 doll +957 hairpin +958 top_hat +959 hug_from_behind +960 bloomers +961 eighth_note +962 tabard +963 single_glove +964 aircraft +965 pajamas +966 mini_hat +967 crying_with_eyes_open +968 fishnet_legwear +969 goggles_on_head +970 military_hat +971 jumping +972 blue_panties +973 serious +974 hair_down +975 pose +976 realistic +977 plump +978 chopsticks +979 full-face_blush +980 colored_eyelashes +981 facing_viewer +982 ofuda +983 cannon +984 smirk +985 injury +986 frog_hair_ornament +987 maid_apron +988 portrait +989 pantyshot_(sitting) +990 object_insertion +991 emphasis_lines +992 red_nails +993 tied_hair +994 red_background +995 white_jacket +996 watch +997 pink_flower +998 paws +999 legs_together +1000 valentine +1001 tassel +1002 circlet +1003 everyone +1004 sheathed +1005 angel_wings +1006 spread_arms +1007 purple_legwear +1008 floating +1009 pocket +1010 round_teeth +1011 two-tone_background +1012 brown_gloves +1013 ass_grab +1014 zoom_layer +1015 underwater +1016 panties_around_one_leg +1017 ! 
+1018 pink_nails +1019 nose +1020 multicolored_clothes +1021 hand_between_legs +1022 forehead +1023 pointless_censoring +1024 emblem +1025 clitoris +1026 string_bikini +1027 feathered_wings +1028 holding_book +1029 fur_collar +1030 futanari +1031 wedding_dress +1032 brother_and_sister +1033 frilled_bikini +1034 muneate +1035 white_apron +1036 snowing +1037 gothic_lolita +1038 cigarette +1039 brown_background +1040 cum_on_hair +1041 holding_cup +1042 lollipop +1043 yaoi +1044 camera +1045 chocolate +1046 hand_in_hair +1047 arrow +1048 knees_together_feet_apart +1049 bunny_girl +1050 jeans +1051 anchor +1052 male_pubic_hair +1053 red_bikini +1054 eyeshadow +1055 back-to-back +1056 arm_warmers +1057 frog +1058 competition_swimsuit +1059 photo +1060 lowleg +1061 kita_high_school_uniform +1062 purple_background +1063 demon_horns +1064 :t +1065 naval_uniform +1066 blue_theme +1067 clothed_sex +1068 eyewear_on_head +1069 cropped_legs +1070 antennae +1071 red_rose +1072 bun_cover +1073 stairs +1074 condom +1075 hitodama +1076 sailor_hat +1077 low_ponytail +1078 wand +1079 strawberry +1080 mountain +1081 outline +1082 black-framed_eyewear +1083 meme_attire +1084 ice_cream +1085 adjusting_hair +1086 mittens +1087 bouquet +1088 green_background +1089 bouncing_breasts +1090 white_skin +1091 panties_aside +1092 anklet +1093 city +1094 purple_skirt +1095 pauldrons +1096 grey_skirt +1097 green_dress +1098 light_rays +1099 unzipped +1100 oni +1101 hime_cut +1102 pocky +1103 dragon +1104 arched_back +1105 asymmetrical_hair +1106 pink_legwear +1107 dog_tail +1108 vibrator +1109 mouse_ears +1110 height_difference +1111 cum_on_lower_body +1112 oni_horns +1113 hands_in_pockets +1114 armored_dress +1115 eyeball +1116 rock +1117 hand_in_pocket +1118 faceless_male +1119 beads +1120 mirror +1121 lantern +1122 sign +1123 anchor_symbol +1124 gohei +1125 card +1126 pink_ribbon +1127 poke_ball +1128 nurse_cap +1129 spoon +1130 leg_lift +1131 unbuttoned +1132 white_swimsuit +1133 untied +1134 
waitress +1135 crotch_seam +1136 age_difference +1137 pink_bra +1138 !? +1139 handbag +1140 sun +1141 tentacle_hair +1142 fighting_stance +1143 adjusting_clothes +1144 white_blouse +1145 striped_shirt +1146 folding_fan +1147 name_tag +1148 hair_bell +1149 green_bow +1150 sunflower +1151 nipple_slip +1152 crystal +1153 robe +1154 pink_lips +1155 headpiece +1156 open_book +1157 sportswear +1158 blue_footwear +1159 visor_cap +1160 mustache +1161 blindfold +1162 headdress +1163 apple +1164 knee_up +1165 sitting_on_person +1166 bandana +1167 center_opening +1168 paper +1169 waving +1170 pout +1171 pouch +1172 clock +1173 drinking_straw +1174 hoop_earrings +1175 thought_bubble +1176 halter_top +1177 red_headwear +1178 earmuffs +1179 guitar +1180 black_leotard +1181 silhouette +1182 peach +1183 red_jacket +1184 sleeves_past_fingers +1185 red_scarf +1186 lace-up_boots +1187 argyle +1188 cropped_jacket +1189 battle +1190 dildo +1191 breastplate +1192 wind_lift +1193 tearing_up +1194 petticoat +1195 salute +1196 jack-o'-lantern +1197 shirtless +1198 blanket +1199 clothed_female_nude_male +1200 >:) +1201 hanging_breasts +1202 triangular_headpiece +1203 partially_visible_vulva +1204 pistol +1205 yokozuwari +1206 no_nose +1207 airplane +1208 mature +1209 backless_outfit +1210 alternate_color +1211 music +1212 silent_comic +1213 gag +1214 top-down_bottom-up +1215 shota +1216 fork +1217 robot +1218 basket +1219 shoulder_armor +1220 military_vehicle +1221 covering_mouth +1222 mecha_musume +1223 motion_blur +1224 ninja +1225 chestnut_mouth +1226 halo +1227 highleg_leotard +1228 winter_clothes +1229 toned +1230 sexually_suggestive +1231 borrowed_character +1232 front-tie_bikini +1233 number +1234 fairy_wings +1235 crescent_hair_ornament +1236 bell_collar +1237 grey_legwear +1238 blue_flower +1239 veins +1240 blue_skin +1241 tabi +1242 scythe +1243 wide-eyed +1244 bukkake +1245 sundress +1246 gakuran +1247 leash +1248 fantasy +1249 drink +1250 bad_anatomy +1251 frilled_apron +1252 
alternate_hair_length +1253 hair_tie +1254 labcoat +1255 fence +1256 frilled_shirt_collar +1257 chin_rest +1258 black_nails +1259 petting +1260 reading +1261 onsen +1262 creature +1263 android +1264 flat_cap +1265 directional_arrow +1266 bandeau +1267 kneepits +1268 forehead_mark +1269 highleg_panties +1270 hakama_skirt +1271 style_parody +1272 palm_tree +1273 head_out_of_frame +1274 snake_hair_ornament +1275 door +1276 gangbang +1277 hand_on_headwear +1278 microskirt +1279 missionary +1280 skirt_hold +1281 adjusting_eyewear +1282 bookshelf +1283 silver_eyes +1284 double_v +1285 groping +1286 flag +1287 chinese_text +1288 straw_hat +1289 brown_skirt +1290 bucket +1291 floral_background +1292 arms_at_sides +1293 water_drop +1294 :> +1295 asymmetrical_wings +1296 enmaided +1297 talking +1298 lactation +1299 @_@ +1300 white_hairband +1301 shoulder_bag +1302 covering_crotch +1303 taut_clothes +1304 fox_girl +1305 magic_circle +1306 ahegao +1307 food_on_face +1308 strap_gap +1309 glint +1310 arm_grab +1311 pink_shirt +1312 pantyshot_(standing) +1313 can +1314 cat_girl +1315 science_fiction +1316 character_doll +1317 bow_bra +1318 raised_eyebrows +1319 holding_umbrella +1320 blue_swimsuit +1321 orange_bow +1322 railing +1323 glass +1324 pilot_suit +1325 stuffed_bunny +1326 string +1327 key +1328 wooden_floor +1329 high_collar +1330 tasuki +1331 sack +1332 gap +1333 belt_buckle +1334 animal_hood +1335 yellow_ribbon +1336 parasol +1337 torpedo +1338 nurse +1339 smoking +1340 hair_stick +1341 hand_on_own_cheek +1342 orange_background +1343 striped_bow +1344 garters +1345 tatami +1346 ear_piercing +1347 witch +1348 armored_boots +1349 tate_eboshi +1350 lolita_hairband +1351 asymmetrical_legwear +1352 succubus +1353 grey_shirt +1354 drinking +1355 blue_headwear +1356 ice_wings +1357 beanie +1358 ankle_boots +1359 gift_box +1360 road +1361 leg_hug +1362 high-waist_skirt +1363 logo +1364 size_difference +1365 alternate_breast_size +1366 triangle_mouth +1367 magic +1368 
hands_clasped +1369 beachball +1370 pen +1371 riding +1372 bath +1373 marker_(medium) +1374 hands_on_own_chest +1375 hibiscus +1376 oriental_umbrella +1377 sand +1378 star_hair_ornament +1379 horizon +1380 striped_neckwear +1381 shade +1382 strapless_leotard +1383 gagged +1384 red_hakama +1385 jacket_on_shoulders +1386 bangle +1387 lineart +1388 pink_footwear +1389 vambraces +1390 pumpkin +1391 black_sclera +1392 licking_lips +1393 partly_fingerless_gloves +1394 gen_3_pokemon +1395 domino_mask +1396 dragon_girl +1397 inverted_nipples +1398 5boys +1399 yukkuri_shiteitte_ne +1400 blood_on_face +1401 hair_censor +1402 bridal_veil +1403 saliva_trail +1404 sarong +1405 hand_on_own_face +1406 nosebleed +1407 cheerleader +1408 polka_dot_background +1409 rigging +1410 black_sailor_collar +1411 hair_up +1412 graphite_(medium) +1413 bare_back +1414 holster +1415 mug +1416 flipped_hair +1417 incest +1418 hand_to_own_mouth +1419 cabbie_hat +1420 one-piece_tan +1421 bandaged_arm +1422 shorts_under_skirt +1423 bat +1424 snowflakes +1425 purple_panties +1426 watercraft +1427 2koma +1428 pink_neckwear +1429 lace-trimmed_legwear +1430 purple_ribbon +1431 large_areolae +1432 puffy_long_sleeves +1433 fur +1434 plugsuit +1435 bathing +1436 naked_shirt +1437 wedding_band +1438 bound_wrists +1439 heart_of_string +1440 animal_costume +1441 underbust +1442 lace-trimmed_panties +1443 on_head +1444 torii +1445 biting +1446 drunk +1447 multicolored_eyes +1448 +++ +1449 on_floor +1450 plaid_vest +1451 arm_behind_head +1452 petite +1453 facepaint +1454 print_legwear +1455 indian_style +1456 fairy +1457 white_leotard +1458 cityscape +1459 classroom +1460 bracer +1461 white_scarf +1462 laughing +1463 randoseru +1464 lamp +1465 turtleneck_sweater +1466 vertical-striped_legwear +1467 japanese_armor +1468 open_coat +1469 blue_nails +1470 gen_4_pokemon +1471 mini_crown +1472 dark +1473 pool +1474 wagashi +1475 furry +1476 bench +1477 dragon_horns +1478 ranguage +1479 car +1480 outside_border +1481 
cravat +1482 kicking +1483 gen_5_pokemon +1484 space +1485 ribbon_choker +1486 princess_carry +1487 green_neckwear +1488 open_fly +1489 hood_up +1490 pom_poms +1491 covered_mouth +1492 short_kimono +1493 spoken_question_mark +1494 brown_jacket +1495 purple_bow +1496 candle +1497 sample +1498 gen_2_pokemon +1499 whisker_markings +1500 scabbard +1501 carrot +1502 teapot +1503 tea +1504 pigeon-toed +1505 watercolor_(medium) +1506 cable +1507 overalls +1508 hair_over_eyes +1509 against_wall +1510 alternate_hair_color +1511 white_kimono +1512 leg_garter +1513 star-shaped_pupils +1514 greaves +1515 bright_pupils +1516 sports_bra +1517 blue_shorts +1518 holding_phone +1519 cum_on_clothes +1520 electricity +1521 pink_bikini +1522 hands_on_own_face +1523 alternate_eye_color +1524 assault_rifle +1525 absurdly_long_hair +1526 multiple_persona +1527 raccoon_ears +1528 patreon_username +1529 wristwatch +1530 mermaid +1531 naked_apron +1532 computer +1533 hand_on_another's_shoulder +1534 leaning_back +1535 toeless_legwear +1536 hatsune_miku +1537 hakurei_reimu +1538 kirisame_marisa +1539 remilia_scarlet +1540 izayoi_sakuya +1541 flandre_scarlet +1542 patchouli_knowledge +1543 alice_margatroid +1544 kochiya_sanae +1545 cirno +1546 yakumo_yukari +1547 admiral_(kantai_collection) +1548 konpaku_youmu +1549 artoria_pendragon_(all) +1550 shameimaru_aya +1551 fujiwara_no_mokou +1552 hong_meiling +1553 akemi_homura +1554 reisen_udongein_inaba +1555 komeiji_koishi +1556 kaname_madoka +1557 saigyouji_yuyuko +1558 komeiji_satori +1559 inubashiri_momiji +1560 kaga_(kantai_collection) +1561 yakumo_ran +1562 miki_sayaka +1563 saber +1564 kazami_yuuka +1565 moriya_suwako +1566 rumia +1567 kamishirasawa_keine +1568 konpaku_youmu_(ghost) +1569 chen +1570 reiuji_utsuho +1571 kagamine_rin +1572 sakura_kyouko +1573 tomoe_mami +1574 shimakaze_(kantai_collection) +1575 kawashiro_nitori +1576 hibiki_(kantai_collection) +1577 hinanawi_tenshi +1578 tatara_kogasa +1579 jeanne_d'arc_(fate)_(all) +1580 
kaenbyou_rin +1581 houraisan_kaguya +1582 kongou_(kantai_collection) +1583 koakuma +1584 hijiri_byakuren +1585 inazuma_(kantai_collection) +1586 inaba_tewi +1587 ibuki_suika +1588 akagi_(kantai_collection) +1589 mystia_lorelei +1590 shigure_(kantai_collection) +1591 tenryuu_(kantai_collection) +1592 yasaka_kanako +1593 mash_kyrielight +1594 nazrin +1595 yagokoro_eirin +1596 akiyama_mio +1597 houjuu_nue +1598 ikazuchi_(kantai_collection) +1599 megurine_luka +1600 kagamine_len +1601 souryuu_asuka_langley +1602 mizuhashi_parsee +1603 yuudachi_(kantai_collection) +1604 akatsuki_(kantai_collection) +1605 morichika_rinnosuke +1606 zuikaku_(kantai_collection) +1607 fate_testarossa +1608 nakano_azusa +1609 nagato_yuki +1610 suzumiya_haruhi +1611 hoshiguma_yuugi +1612 nagato_(kantai_collection) +1613 shanghai_doll +1614 hirasawa_yui +1615 fubuki_(kantai_collection) +1616 toyosatomimi_no_miko +1617 kagiyama_hina +1618 kyubey +1619 haruna_(kantai_collection) +1620 hamakaze_(kantai_collection) +1621 wriggle_nightbug +1622 tamamo_(fate)_(all) +1623 kashima_(kantai_collection) +1624 toosaka_rin +1625 onozuka_komachi +1626 ryuujou_(kantai_collection) +1627 takamachi_nanoha +1628 mononobe_no_futo +1629 nero_claudius_(fate)_(all) +1630 tainaka_ritsu +1631 shiki_eiki +1632 daiyousei +1633 toramaru_shou +1634 fujimaru_ritsuka_(female) +1635 northern_ocean_hime +1636 suzuya_(kantai_collection) +1637 nishikino_maki +1638 abigail_williams_(fate/grand_order) +1639 jeanne_d'arc_(alter)_(fate) +1640 houshou_(kantai_collection) +1641 misaka_mikoto +1642 sonoda_umi +1643 nishizumi_miho +1644 yazawa_nico +1645 shibuya_rin +1646 matoi_ryuuko +1647 himekaidou_hatate +1648 atago_(kantai_collection) +1649 toujou_nozomi +1650 rem_(re:zero) +1651 usami_renko +1652 murakumo_(kantai_collection) +1653 ayase_eli +1654 ushio_(kantai_collection) +1655 nagae_iku +1656 link +1657 serval_(kemono_friends) +1658 shoukaku_(kantai_collection) +1659 murasa_minamitsu +1660 imaizumi_kagerou +1661 maribel_hearn 
+1662 yukikaze_(kantai_collection) +1663 ayanami_rei +1664 rensouhou-chan +1665 inkling +1666 amatsukaze_(kantai_collection) +1667 kotobuki_tsumugi +1668 kaku_seiga +1669 kitakami_(kantai_collection) +1670 kumoi_ichirin +1671 illyasviel_von_einzbern +1672 hoshii_miki +1673 kurodani_yamame +1674 hiiragi_kagami +1675 scathach_(fate)_(all) +1676 gilgamesh +1677 pikachu +1678 tatsuta_(kantai_collection) +1679 kaito +1680 ibaraki_kasen +1681 hata_no_kokoro +1682 female_admiral_(kantai_collection) +1683 fujimaru_ritsuka_(male) +1684 tamamo_no_mae_(fate) +1685 kasodani_kyouko +1686 akebono_(kantai_collection) +1687 prinz_eugen_(kantai_collection) +1688 kuujou_joutarou +1689 minami_kotori +1690 soga_no_tojiko +1691 izumi_konata +1692 jeanne_d'arc_(fate) +1693 matou_sakura +1694 okita_souji_(fate)_(all) +1695 yamato_(kantai_collection) +1696 joseph_joestar_(young) +1697 mutsu_(kantai_collection) +1698 yorha_no._2_type_b +1699 miyako_yoshika +1700 amami_haruka +1701 suigintou +1702 wo-class_aircraft_carrier +1703 c.c. 
+1704 kafuu_chino +1705 kijin_seija +1706 aki_minoriko +1707 bismarck_(kantai_collection) +1708 yoko_littner +1709 meiko +1710 kousaka_honoka +1711 kaban_(kemono_friends) +1712 ooi_(kantai_collection) +1713 hieda_no_akyuu +1714 ooyodo_(kantai_collection) +1715 kisaragi_chihaya +1716 sendai_(kantai_collection) +1717 d.va_(overwatch) +1718 shijou_takane +1719 shikinami_asuka_langley +1720 minase_iori +1721 takao_(kantai_collection) +1722 akashi_(kantai_collection) +1723 shiranui_(kantai_collection) +1724 kyon +1725 aki_shizuha +1726 letty_whiterock +1727 hoshizora_rin +1728 futatsuiwa_mamizou +1729 ex-keine +1730 astolfo_(fate) +1731 darjeeling_(girls_und_panzer) +1732 lillie_(pokemon) +1733 hiei_(kantai_collection) +1734 kinomoto_sakura +1735 gumi +1736 asahina_mikuru +1737 archer +1738 ganaha_hibiki +1739 souryuu_(kantai_collection) +1740 ro-500_(kantai_collection) +1741 senketsu +1742 kikuchi_makoto +1743 kirishima_(kantai_collection) +1744 nero_claudius_(fate) +1745 chun-li +1746 sukuna_shinmyoumaru +1747 asashio_(kantai_collection) +1748 sazanami_(kantai_collection) +1749 kiso_(kantai_collection) +1750 nishizumi_maho +1751 kiryuuin_satsuki +1752 verniy_(kantai_collection) +1753 dio_brando +1754 princess_zelda +1755 itsumi_erika +1756 scathach_(fate/grand_order) +1757 yamashiro_(kantai_collection) +1758 caesar_anthonio_zeppeli +1759 zero_two_(darling_in_the_franxx) +1760 asuna_(sao) +1761 sekibanki +1762 emiya_shirou +1763 clownpiece +1764 anchovy_(girls_und_panzer) +1765 takatsuki_yayoi +1766 shimamura_uzuki +1767 super_sonico +1768 akiyama_yukari +1769 musashi_(kantai_collection) +1770 okita_souji_(fate) +1771 barnaby_brooks_jr +1772 samidare_(kantai_collection) +1773 black_rock_shooter_(character) +1774 shuten_douji_(fate/grand_order) +1775 hiryuu_(kantai_collection) +1776 rider +1777 hestia_(danmachi) +1778 seaport_hime +1779 yuubari_(kantai_collection) +1780 iowa_(kantai_collection) +1781 kisume +1782 narukami_yuu +1783 koizumi_hanayo +1784 
naka_(kantai_collection) +1785 tifa_lockhart +1786 kaburagi_t_kotetsu +1787 ultimate_madoka +1788 sanya_v_litvyak +1789 saber_alter +1790 takagaki_kaede +1791 graf_zeppelin_(kantai_collection) +1792 watanabe_you +1793 samus_aran +1794 eila_ilmatar_juutilainen +1795 kasumi_(kantai_collection) +1796 gokou_ruri +1797 bowsette +1798 cu_chulainn_(fate)_(all) +1799 wakasagihime +1800 kanzaki_ranko +1801 i-19_(kantai_collection) +1802 mordred_(fate)_(all) +1803 touko_(pokemon) +1804 kotomine_kirei +1805 sagisawa_fumika +1806 hiiragi_tsukasa +1807 yagami_hayate +1808 nitta_minami +1809 morrigan_aensland +1810 yuzuki_yukari +1811 megumin +1812 nia_teppelin +1813 minamoto_no_raikou_(fate/grand_order) +1814 djeeta_(granblue_fantasy) +1815 fairy_(kantai_collection) +1816 shirasaka_koume +1817 i-58_(kantai_collection) +1818 lunasa_prismriver +1819 shirogane_naoto +1820 hikari_(pokemon) +1821 sheryl_nome +1822 junko_(touhou) +1823 kakyouin_noriaki +1824 mercy_(overwatch) +1825 tsukino_usagi +1826 zuihou_(kantai_collection) +1827 jougasaki_mika +1828 medicine_melancholy +1829 red_(pokemon) +1830 maya_(kantai_collection) +1831 furude_rika +1832 haruka_(pokemon) +1833 midorikawa_nao +1834 lily_white +1835 anastasia_(idolmaster) +1836 mei_(pokemon) +1837 oshino_shinobu +1838 non-human_admiral_(kantai_collection) +1839 kishin_sagume +1840 hagiwara_yukiho +1841 pyonta +1842 kise_yayoi +1843 miura_azusa +1844 tsushima_yoshiko +1845 princess_peach +1846 beatrice +1847 stocking_(psg) +1848 miqo'te +1849 ikari_shinji +1850 kuma_(kantai_collection) +1851 charlotte_(madoka_magica) +1852 kirito +1853 takebe_saori +1854 kousaka_kirino +1855 lelouch_lamperouge +1856 jonathan_joestar +1857 haruno_sakura +1858 hecatia_lapislazuli +1859 mordred_(fate) +1860 akizuki_ritsuko +1861 cammy_white +1862 futaba_anzu +1863 satonaka_chie +1864 mankanshoku_mako +1865 izumi_sagiri +1866 taihou_(kantai_collection) +1867 unzan +1868 z1_leberecht_maass_(kantai_collection) +1869 emiya_kiritsugu +1870 
fusou_(kantai_collection) +1871 z3_max_schultz_(kantai_collection) +1872 bb_(fate)_(all) +1873 i-401_(kantai_collection) +1874 atago_(azur_lane) +1875 waver_velvet +1876 simon +1877 asakura_ryouko +1878 lancer +1879 lancer_(fate/zero) +1880 ichinose_shiki +1881 jougasaki_rika +1882 ryuuguu_rena +1883 murasame_(kantai_collection) +1884 ahri +1885 uzumaki_naruto +1886 shirai_kuroko +1887 mizuki_(pokemon) +1888 aoki_reika +1889 maekawa_miku +1890 producer_(idolmaster) +1891 erica_hartmann +1892 hyuuga_hinata +1893 mikasa_ackerman +1894 holo +1895 hyuuga_(kantai_collection) +1896 miyafuji_yoshika +1897 accelerator +1898 kuujou_jolyne +1899 kallen_stadtfeld +1900 hishikawa_rikka +1901 aisaka_taiga +1902 hk416_(girls_frontline) +1903 giorno_giovanna +1904 kuroki_tomoko +1905 kumano_(kantai_collection) +1906 aoba_(kantai_collection) +1907 unicorn_(azur_lane) +1908 ushiromiya_battler +1909 jintsuu_(kantai_collection) +1910 elizabeth_bathory_(fate)_(all) +1911 sakurauchi_riko +1912 ruby_rose +1913 futami_mami +1914 hino_akane_(smile_precure!) +1915 katyusha_(girls_und_panzer) +1916 motoori_kosuzu +1917 alice_(wonderland) +1918 takarada_rikka +1919 tokitsukaze_(kantai_collection) +1920 tenshi_(angel_beats!) 
+1921 meltryllis +1922 kasumi_(pokemon) +1923 luna_child +1924 jack_the_ripper_(fate/apocrypha) +1925 yamakaze_(kantai_collection) +1926 gertrud_barkhorn +1927 unryuu_(kantai_collection) +1928 ashigara_(kantai_collection) +1929 makise_kurisu +1930 kiyoshimo_(kantai_collection) +1931 warspite_(kantai_collection) +1932 shimada_arisu +1933 louise_francoise_le_blanc_de_la_valliere +1934 isokaze_(kantai_collection) +1935 belfast_(azur_lane) +1936 higashikata_jousuke +1937 kousaka_tamaki +1938 corrin_(fire_emblem) +1939 akitsu_maru_(kantai_collection) +1940 reizei_mako +1941 kashiwazaki_sena +1942 kyonko +1943 mario +1944 urakaze_(kantai_collection) +1945 re-class_battleship +1946 shinku +1947 doremy_sweet +1948 taigei_(kantai_collection) +1949 satsuki_(kantai_collection) +1950 naganami_(kantai_collection) +1951 sunny_milk +1952 ishtar_(fate)_(all) +1953 star_sapphire +1954 mogami_(kantai_collection) +1955 houjou_hibiki +1956 nitocris_(fate/grand_order) +1957 kamijou_touma +1958 uzuki_(kantai_collection) +1959 kay_(girls_und_panzer) +1960 lyrica_prismriver +1961 koshimizu_sachiko +1962 lucina_(fire_emblem) +1963 kagerou_(kantai_collection) +1964 aida_mana +1965 toshinou_kyouko +1966 oboro_(kantai_collection) +1967 satoshi_(pokemon) +1968 merlin_prismriver +1969 futami_ami +1970 ranka_lee +1971 miyamoto_musashi_(fate/grand_order) +1972 shana +1973 u-511_(kantai_collection) +1974 common_raccoon_(kemono_friends) +1975 usami_sumireko +1976 florence_nightingale_(fate/grand_order) +1977 ikamusume +1978 fennec_(kemono_friends) +1979 rider_(fate/zero) +1980 akigumo_(kantai_collection) +1981 robin_(fire_emblem) +1982 weiss_schnee +1983 kurumi_erika +1984 yuuki_makoto +1985 kiyohime_(fate/grand_order) +1986 kirby +1987 hoshizora_miyuki +1988 battleship_hime +1989 amagi_yukiko +1990 suiseiseki +1991 prinz_eugen_(azur_lane) +1992 aori_(splatoon) +1993 raising_heart +1994 nagisa_kaworu +1995 harusame_(kantai_collection) +1996 mutsuki_(kantai_collection) +1997 takami_chika +1998 dizzy 
+1999 nami_(one_piece) +2000 takanashi_rikka +2001 abukuma_(kantai_collection) +2002 boko_(girls_und_panzer) +2003 vita +2004 charlotte_e_yeager +2005 i-8_(kantai_collection) +2006 hotaru_(splatoon) +2007 hatsuyuki_(kantai_collection) +2008 tone_(kantai_collection) +2009 koizumi_itsuki +2010 hoto_cocoa +2011 kujikawa_rise +2012 makinami_mari_illustrious +2013 irisviel_von_einzbern +2014 akaza_akari +2015 jun'you_(kantai_collection) +2016 perrine_h_clostermann +2017 michishio_(kantai_collection) +2018 blue_(pokemon) +2019 shiranui_mai +2020 son_gokuu +2021 noumi_kudryavka +2022 hayami_kanade +2023 haguro_(kantai_collection) +2024 akuma_homura +2025 houjou_satoko +2026 tachibana_arisu +2027 shiratsuyu_(kantai_collection) +2028 chloe_von_einzbern +2029 ise_(kantai_collection) +2030 kaenbyou_rin_(cat) +2031 hanekawa_tsubasa +2032 vivio +2033 jakuzure_nonon +2034 junketsu +2035 lynette_bishop +2036 isuzu_hana +2037 christa_renz +2038 katsuki_yuuri +2039 sailor_moon +2040 wa2000_(girls_frontline) +2041 shinki +2042 hiro_(darling_in_the_franxx) +2043 i-168_(kantai_collection) +2044 narmaya_(granblue_fantasy) +2045 ia_(vocaloid) +2046 corrin_(fire_emblem)_(female) +2047 ryougi_shiki +2048 takara_miyuki +2049 kisaragi_(kantai_collection) +2050 viktor_nikiforov +2051 tippy_(gochiusa) +2052 saratoga_(kantai_collection) +2053 yuzuriha_inori +2054 kirino_ranmaru +2055 oda_nobunaga_(fate)_(all) +2056 honda_mio +2057 sakamoto_mio +2058 uchiha_sasuke +2059 yuki_miku +2060 akizuki_(kantai_collection) +2061 senjougahara_hitagi +2062 panty_(psg) +2063 yang_xiao_long +2064 ereshkigal_(fate/grand_order) +2065 chitanda_eru +2066 kenzaki_makoto +2067 nero_claudius_(swimsuit_caster)_(fate) +2068 last_order +2069 kanna_kamui +2070 bardiche +2071 sonozaki_mion +2072 index +2073 kurosawa_dia +2074 ishtar_(fate/grand_order) +2075 mika_(girls_und_panzer) +2076 kirima_sharo +2077 kunikida_hanamaru +2078 elizabeth_bathory_(fate) +2079 yowane_haku +2080 hatsuzuki_(kantai_collection) +2081 
hanamura_yousuke +2082 signum +2083 johnny_joestar +2084 matsuno_karamatsu +2085 arcueid_brunestud +2086 saten_ruiko +2087 tamamo_no_mae_(swimsuit_lancer)_(fate) +2088 matou_kariya +2089 matsuno_osomatsu +2090 noel_vermillion +2091 hanasaki_tsubomi +2092 matsuno_juushimatsu +2093 ram_(re:zero) +2094 homura_(xenoblade_2) +2095 minamino_kanade +2096 yahagi_(kantai_collection) +2097 matsuno_ichimatsu +2098 takao_(azur_lane) +2099 maru-yu_(kantai_collection) +2100 souseiseki +2101 shoebill_(kemono_friends) +2102 nachi_(kantai_collection) +2103 ump45_(girls_frontline) +2104 otonashi_kotori +2105 emilia_(re:zero) +2106 aegis_(persona) +2107 sendai_hakurei_no_miko +2108 oda_nobunaga_(fate) +2109 gangut_(kantai_collection) +2110 yura_(kantai_collection) +2111 mima +2112 katsushika_hokusai_(fate/grand_order) +2113 female_protagonist_(persona_3) +2114 kurosawa_ruby +2115 blake_belladonna +2116 tama_(kantai_collection) +2117 kagari_atsuko +2118 producer_(idolmaster_cinderella_girls_anime) +2119 saber_lily +2120 eren_yeager +2121 nero_claudius_(bride)_(fate) +2122 makoto_nanaya +2123 matsuura_kanan +2124 neptune_(neptune_series) +2125 jeanne_d'arc_alter_santa_lily +2126 formidable_(azur_lane) +2127 aqua_(konosuba) +2128 ushiromiya_ange +2129 kamikaze_(kantai_collection) +2130 tracer_(overwatch) +2131 choukai_(kantai_collection) +2132 hirasawa_ui +2133 rensouhou-kun +2134 katsuragi_(kantai_collection) +2135 gamagoori_ira +2136 nagatsuki_(kantai_collection) +2137 miyuki_(kantai_collection) +2138 hatsune_miku_(append) +2139 serena_(pokemon) +2140 matsuno_choromatsu +2141 gold_(pokemon) +2142 cure_peace +2143 nonna_(girls_und_panzer) +2144 ayanami_(azur_lane) +2145 t-head_admiral +2146 tamamo_cat_(fate) +2147 tomoe_gozen_(fate/grand_order) +2148 mei_(overwatch) +2149 yorigami_shion +2150 funami_yui +2151 ujimatsu_chiya +2152 hanyuu +2153 mikazuki_munechika +2154 mimura_kanako +2155 mary_(pokemon) +2156 artoria_pendragon_(lancer) +2157 moroboshi_kirari +2158 kasumi_(doa) +2159 
yukine_chris +2160 fou_(fate/grand_order) +2161 akagi_miria +2162 princess_king_boo +2163 suzukaze_(kantai_collection) +2164 miyamoto_frederica +2165 kamui_gakupo +2166 fujibayashi_kyou +2167 uraraka_ochako +2168 yuri_lowell +2169 francesca_lucchini +2170 ymir_(shingeki_no_kyojin) +2171 matsuno_todomatsu +2172 kashuu_kiyomitsu +2173 jeanne_d'arc_(alter_swimsuit_berserker) +2174 lucky_beast_(kemono_friends) +2175 ragna_the_bloodedge +2176 yuugumo_(kantai_collection) +2177 hikari_(xenoblade_2) +2178 shinjou_akane +2179 charlotte_dunois +2180 orange_pekoe_(girls_und_panzer) +2181 makigumo_(kantai_collection) +2182 bb_(fate/extra_ccc) +2183 furukawa_nagisa +2184 gambier_bay_(kantai_collection) +2185 yayoi_(kantai_collection) +2186 tatsumi_kanji +2187 shirayuki_(kantai_collection) +2188 han_juri +2189 tedeza_rize +2190 natsume_rin +2191 i-class_destroyer +2192 pepperoni_(girls_und_panzer) +2193 japanese_crested_ibis_(kemono_friends) +2194 levi_(shingeki_no_kyojin) +2195 kamina +2196 sakura_miku +2197 yamato-no-kami_yasusada +2198 misumi_nagisa +2199 widowmaker_(overwatch) +2200 kamio_misuzu +2201 watatsuki_no_yorihime +2202 kizuna_ai +2203 ookido_green +2204 miyu_edelfelt +2205 kotone_(pokemon) +2206 yuno +2207 northern_white-faced_owl_(kemono_friends) +2208 pola_(kantai_collection) +2209 isuzu_(kantai_collection) +2210 tsukikage_yuri +2211 suiren_(pokemon) +2212 enemy_aircraft_(kantai_collection) +2213 yumemi_riamu +2214 shikinami_(kantai_collection) +2215 mizuno_ami +2216 lilith_aensland +2217 enterprise_(azur_lane) +2218 horikawa_raiko +2219 ibaraki_douji_(fate/grand_order) +2220 hassan_of_serenity_(fate) +2221 uiharu_kazari +2222 mao_(pokemon) +2223 dark_magician_girl +2224 agano_(kantai_collection) +2225 kuroshio_(kantai_collection) +2226 shirona_(pokemon) +2227 caster_(fate/zero) +2228 touya_(pokemon) +2229 byleth_(fire_emblem) +2230 failure_penguin +2231 phosphophyllite +2232 sinon +2233 higashi_setsuna +2234 konjiki_no_yami +2235 sakata_gintoki +2236 
berserker_(fate/zero) +2237 myoudouin_itsuki +2238 kagura_(gintama) +2239 nico_robin +2240 elsa_(frozen) +2241 elin_(tera) +2242 felicia +2243 etna +2244 gran_(granblue_fantasy) +2245 laffey_(azur_lane) +2246 ump9_(girls_frontline) +2247 madotsuki +2248 yoshikawa_chinatsu +2249 teruzuki_(kantai_collection) +2250 aino_minako +2251 nanami_chiaki +2252 akizuki_ryou +2253 tsurumaru_kuninaga +2254 ivan_karelin +2255 edward_elric +2256 leafa +2257 kawakaze_(kantai_collection) +2258 asashimo_(kantai_collection) +2259 honma_meiko +2260 saotome_ranma +2261 kuma_(persona_4) +2262 chibi_usa +2263 alice_margatroid_(pc-98) +2264 haramura_nodoka +2265 kamiya_nao +2266 nagi +2267 taihou_(azur_lane) +2268 lyn_(fire_emblem) +2269 kaguya_luna +2270 cloud_strife +2271 sonozaki_shion +2272 arashio_(kantai_collection) +2273 akagi_(azur_lane) +2274 tina_branford +2275 toosaka_tokiomi +2276 namazuo_toushirou +2277 kishibe_rohan +2278 octoling +2279 eevee +2280 sengoku_nadeko +2281 tachibana_hibiki_(symphogear) +2282 honebami_toushirou +2283 kamoi_(kantai_collection) +2284 houjou_karen +2285 myoukou_(kantai_collection) +2286 cure_marine +2287 gardevoir +2288 ohara_mari +2289 mithra +2290 p-head_producer +2291 ayanami_(kantai_collection) +2292 amamiya_ren +2293 illustrious_(azur_lane) +2294 noire +2295 noshiro_(kantai_collection) +2296 seiran_(touhou) +2297 frederica_bernkastel +2298 nekomusume +2299 libeccio_(kantai_collection) +2300 shiomi_shuuko +2301 altera_(fate) +2302 hayashimo_(kantai_collection) +2303 artoria_pendragon_(swimsuit_rider_alter) +2304 yukishiro_honoka +2305 shokuhou_misaki +2306 tokisaki_kurumi +2307 little_boy_admiral_(kantai_collection) +2308 minna-dietlinde_wilcke +2309 kasugano_sakura +2310 n_(pokemon) +2311 midare_toushirou +2312 asui_tsuyu +2313 camilla_(fire_emblem) +2314 huang_baoling +2315 silica +2316 tatsumaki +2317 hourai_doll +2318 tsukino_mito +2319 midna +2320 hoshino_fumina +2321 kururugi_suzaku +2322 manjuu_(azur_lane) +2323 isonami_(kantai_collection) 
+2324 tsukumo_benben +2325 m1903_springfield_(girls_frontline) +2326 su-san +2327 kaga_(azur_lane) +2328 tokiko_(touhou) +2329 sento_isuzu +2330 ringo_(touhou) +2331 nakano_miku +2332 maebara_keiichi +2333 alisa_ilinichina_amiella +2334 jean_pierre_polnareff +2335 helena_blavatsky_(fate/grand_order) +2336 purple_heart +2337 robin_(fire_emblem)_(female) +2338 ike_(fire_emblem) +2339 joseph_joestar +2340 nu-13 +2341 furutaka_(kantai_collection) +2342 tsuruya +2343 katori_(kantai_collection) +2344 yui_(angel_beats!) +2345 edelgard_von_hresvelg +2346 reiuji_utsuho_(bird) +2347 roll +2348 kirijou_mitsuru +2349 slaine_troyard +2350 yuuri_(pokemon) +2351 hoshi_shouko +2352 hagikaze_(kantai_collection) +2353 ootsuki_yui +2354 ezo_red_fox_(kemono_friends) +2355 maikaze_(kantai_collection) +2356 toono_akiha +2357 oomuro_sakurako +2358 totoki_airi +2359 izumi-no-kami_kanesada +2360 fukuzawa_yumi +2361 uryuu_ryuunosuke +2362 araragi_koyomi +2363 kirigaya_suguha +2364 cagliostro_(granblue_fantasy) +2365 ru-class_battleship +2366 rosehip_(girls_und_panzer) +2367 saniwa_(touken_ranbu) +2368 sakuma_mayu +2369 bayonetta_(character) +2370 okita_souji_(alter)_(fate) +2371 bel_(pokemon) +2372 artoria_pendragon_(lancer_alter) +2373 bb_(swimsuit_mooncancer)_(fate) +2374 shouhou_(kantai_collection) +2375 shidare_hotaru +2376 kinugasa_(kantai_collection) +2377 kirigiri_kyouko +2378 cure_beauty +2379 cure_sunshine +2380 amanogawa_kirara +2381 blanc +2382 yin +2383 riesz +2384 gyro_zeppeli +2385 crystal_(pokemon) +2386 sirius_(azur_lane) +2387 morikubo_nono +2388 osakabe-hime_(fate/grand_order) +2389 rachel_alucard +2390 eurasian_eagle_owl_(kemono_friends) +2391 grey_wolf_(kemono_friends) +2392 nepgear +2393 caster +2394 sanageyama_uzu +2395 ta-class_battleship +2396 hino_rei +2397 little_red_riding_hood_(grimm) +2398 android_18 +2399 subaru_nakajima +2400 lum +2401 hachikuji_mayoi +2402 popuko +2403 komaki_manaka +2404 aerith_gainsborough +2405 umikaze_(kantai_collection) +2406 
momozono_love +2407 kamikita_komari +2408 mysterious_heroine_x_(alter) +2409 enoshima_junko +2410 robin_(fire_emblem)_(male) +2411 rydia +2412 cure_blossom +2413 chikuma_(kantai_collection) +2414 diana_cavendish +2415 chitose_(kantai_collection) +2416 mamiya_(kantai_collection) +2417 silver_fox_(kemono_friends) +2418 oikawa_shizuku +2419 yorha_no._9_type_s +2420 arashi_(kantai_collection) +2421 estellise_sidos_heurassein +2422 jeanne_d'arc_(swimsuit_archer) +2423 kadotani_anzu +2424 ooshio_(kantai_collection) +2425 furutani_himawari +2426 sona_buvelle +2427 yuri_(angel_beats!) +2428 kooh +2429 emperor_penguin_(kemono_friends) +2430 kino_makoto +2431 fuuro_(pokemon) +2432 pharah_(overwatch) +2433 atalanta_(fate) +2434 horikawa_kunihiro +2435 nanasaki_ai +2436 armin_arlert +2437 cure_march +2438 hina_ichigo +2439 karina_lyle +2440 andou_(girls_und_panzer) +2441 kitashirakawa_tamako +2442 ark_royal_(kantai_collection) +2443 yuuki_mikan +2444 white_mage +2445 saotome_alto +2446 manabe_nodoka +2447 konno_junko +2448 toudou_shimako +2449 kirin_(armor) +2450 sailor_mercury +2451 cure_happy +2452 sakurai_momoka +2453 frankenstein's_monster_(fate) +2454 abe_nana +2455 boo +2456 javelin_(azur_lane) +2457 kusanagi_motoko +2458 bruno_buccellati +2459 reisalin_stout +2460 yorigami_jo'on +2461 eve_(elsword) +2462 rosalina +2463 medjed +2464 katsura_hinagiku +2465 marie_antoinette_(fate/grand_order) +2466 kasane_teto +2467 i-26_(kantai_collection) +2468 mikuma_(kantai_collection) +2469 shirayuki_hime +2470 keith_goodman +2471 yuri_plisetsky +2472 shindou_takuto +2473 lusamine_(pokemon) +2474 tooru_(maidragon) +2475 tateyama_ayano +2476 kero +2477 silver_(pokemon) +2478 airfield_hime +2479 byleth_(fire_emblem)_(female) +2480 miyamizu_mitsuha +2481 littorio_(kantai_collection) +2482 cure_melody +2483 anjou_naruko +2484 takeba_yukari +2485 sorceress_(dragon's_crown) +2486 hatsushimo_(kantai_collection) +2487 jaguar_(kemono_friends) +2488 kayneth_el-melloi_archibald +2489 vegeta 
+2490 oreki_houtarou +2491 fumizuki_(kantai_collection) +2492 tomoe_hotaru +2493 ninomiya_asuka +2494 daidouji_tomoyo +2495 oumae_kumiko +2496 kozakura_marry +2497 aragaki_ayase +2498 ibuki_fuuko +2499 iida_(splatoon) +2500 alpaca_suri_(kemono_friends) +2501 monkey_d_luffy +2502 yotsuba_alice +2503 ryuu_(street_fighter) +2504 hatsukaze_(kantai_collection) +2505 darkness_(konosuba) +2506 tamura_yuri +2507 watatsuki_no_toyohime +2508 japan_(hetalia) +2509 tiki_(fire_emblem) +2510 kako_(kantai_collection) +2511 taneshima_popura +2512 kohaku +2513 ushiwakamaru_(fate/grand_order) +2514 kousaka_kyousuke +2515 mizunashi_akari +2516 kamitsure_(pokemon) +2517 king_hassan_(fate/grand_order) +2518 miss_cloud +2519 hayasui_(kantai_collection) +2520 winry_rockbell +2521 ranma-chan +2522 assassin_(fate/zero) +2523 kos-mos +2524 bulma +2525 tada_riina +2526 rurina_(pokemon) +2527 akatsuki_kirika +2528 rowlet +2529 hoshimiya_ichigo +2530 seiren_(suite_precure) +2531 hotarumaru +2532 nishizumi_shiho +2533 kawashima_momo +2534 tokitarou_(fate/grand_order) +2535 elvaan +2536 ib_(ib) +2537 nyarlathotep_(nyaruko-san) +2538 katou_megumi +2539 marth_(fire_emblem) +2540 you_(pokemon) +2541 oktavia_von_seckendorff +2542 oshida_(girls_und_panzer) +2543 sailor_venus +2544 okabe_rintarou +2545 prisma_illya +2546 kurokawa_eren +2547 ciel +2548 ushiromiya_jessica +2549 midoriya_izuku +2550 g11_(girls_frontline) +2551 ichigo_(darling_in_the_franxx) +2552 doujima_nanako +2553 palutena +2554 nibutani_shinka +2555 charizard +2556 shirase_sakuya +2557 mochizuki_(kantai_collection) +2558 maou_(maoyuu) +2559 asagumo_(kantai_collection) +2560 haro +2561 america_(hetalia) +2562 laura_bodewig +2563 miyanaga_saki +2564 momo_velia_deviluke +2565 cure_sunny +2566 passion_lip +2567 pipimi +2568 honolulu_(azur_lane) +2569 ramlethal_valentine +2570 tsukumo_yatsuhashi +2571 amagi_(kantai_collection) +2572 au_ra +2573 commander_(azur_lane) +2574 xp-tan +2575 yuzuhara_konomi +2576 luigi +2577 yuffie_kisaragi 
+2578 united_kingdom_(hetalia) +2579 shimazu_yoshino +2580 lala_satalin_deviluke +2581 ushiromiya_maria +2582 tieria_erde +2583 yagen_toushirou +2584 sakura_chiyo +2585 araragi_karen +2586 kamado_nezuko +2587 inumuta_houka +2588 nowaki_(kantai_collection) +2589 reinforce_zwei +2590 danua +2591 cecilia_alcott +2592 tashkent_(kantai_collection) +2593 jin_kisaragi +2594 sonya_(kill_me_baby) +2595 furudo_erika +2596 aisha_landar +2597 anastasia_(fate/grand_order) +2598 consort_yu_(fate) +2599 assam_(girls_und_panzer) +2600 kisaragi_shintarou +2601 aino_megumi +2602 scathach_(swimsuit_assassin)_(fate) +2603 sailor_mars +2604 kishinami_hakuno_(female) +2605 kuchiki_rukia +2606 yamashiro_(azur_lane) +2607 sieg_(fate/apocrypha) +2608 suzukaze_aoba +2609 meltryllis_(swimsuit_lancer)_(fate) +2610 scathach_skadi_(fate/grand_order) +2611 len +2612 misaka_imouto +2613 mysterious_heroine_xx_(foreigner) +2614 cure_black +2615 carpaccio_(girls_und_panzer) +2616 anna_(frozen) +2617 oshawott +2618 saint_martha +2619 alastor_(shakugan_no_shana) +2620 frisk_(undertale) +2621 mumei_(kabaneri) +2622 roma_(kantai_collection) +2623 anegasaki_nene +2624 nishi_kinuyo +2625 natori_sana +2626 etorofu_(kantai_collection) +2627 suzutsuki_(kantai_collection) +2628 minamoto_sakura +2629 piplup +2630 azura_(fire_emblem) +2631 zara_(kantai_collection) +2632 nenohi_(kantai_collection) +2633 rias_gremory +2634 sakura_futaba +2635 takei_hisa +2636 hex_maniac_(pokemon) +2637 higuchi_kaede +2638 momoe_nagisa +2639 miyako +2640 shirabe_ako +2641 teana_lanster +2642 akitsushima_(kantai_collection) +2643 yuuki_(pokemon) +2644 kino +2645 snivy +2646 tharja +2647 yamagishi_fuuka +2648 ayane_(doa) +2649 lambdadelta +2650 mizuno_ai +2651 komaeda_nagito +2652 takakura_himari +2653 sanzen'in_nagi +2654 clarisse_(granblue_fantasy) +2655 aquila_(kantai_collection) +2656 kousaka_reina +2657 yaoyorozu_momo +2658 cure_moonlight +2659 takoluka +2660 star_platinum +2661 yumehara_nozomi +2662 stakes_of_purgatory +2663 
kinu_(kantai_collection) +2664 yamada_elf +2665 enkidu_(fate/strange_fake) +2666 okazaki_yumemi +2667 sugiura_ayano +2668 zidane_tribal +2669 guido_mista +2670 yuigahama_yui +2671 lulu_(league_of_legends) +2672 takamaki_anne +2673 monokuma +2674 hinata_(angel_beats!) +2675 sakagami_tomoyo +2676 ichigo_hitofuri +2677 ri-class_heavy_cruiser +2678 xuanzang_(fate/grand_order) +2679 kujou_karen +2680 saitama_(one-punch_man) +2681 lucario +2682 jinx_(league_of_legends) +2683 chibi_miku +2684 sucy_manbavaran +2685 sailor_jupiter +2686 nursery_rhyme_(fate/extra) +2687 female_saniwa_(touken_ranbu) +2688 kazanari_tsubasa +2689 yukinoshita_yukino +2690 hatsuharu_(kantai_collection) +2691 kars_(jojo) +2692 angela_balzac +2693 ogata_chieri +2694 female_protagonist_(pokemon_go) +2695 hime_(splatoon) +2696 chi-chi_(dragon_ball) +2697 eureka +2698 oomori_yuuko +2699 medb_(fate)_(all) +2700 nemoto_hina +2701 shantae_(character) +2702 roy_(fire_emblem) +2703 caren_hortensia +2704 garry_(ib) +2705 milla_maxwell +2706 hiyou_(kantai_collection) +2707 rita_mordio +2708 oribe_yasuna +2709 naoe_riki +2710 natsume_(pokemon) +2711 kasumigaoka_utaha +2712 kido_tsubomi +2713 ene_(kagerou_project) +2714 bianca +2715 hammann_(azur_lane) +2716 okazaki_tomoya +2717 prinny +2718 pod_(nier_automata) +2719 ten'ou_haruka +2720 hisui +2721 inami_mahiru +2722 matoi_(pso2) +2723 gokotai +2724 yamabuki_inori +2725 gladio_(pokemon) +2726 ferry_(granblue_fantasy) +2727 lio_fotia +2728 konpaku_youki +2729 kariya_masaki +2730 shima_rin +2731 mohammed_avdol +2732 qbz-95_(girls_frontline) +2733 jervis_(kantai_collection) +2734 regina_(dokidoki!_precure) +2735 slime_(dragon_quest) +2736 me-tan +2737 agrias_oaks +2738 vira_lilie +2739 arthur_pendragon_(fate) +2740 toono_shiki +2741 shokudaikiri_mitsutada +2742 alphonse_elric +2743 godzilla +2744 hino_akane_(idolmaster) +2745 harime_nui +2746 dead_master +2747 kanbaru_suruga +2748 2k-tan +2749 lei_lei +2750 rockman_(character) +2751 tanikaze_(kantai_collection) 
+2752 cure_peach +2753 araragi_tsukihi +2754 leonardo_da_vinci_(fate/grand_order) +2755 kogitsunemaru +2756 commandant_teste_(kantai_collection) +2757 kiriya_aoi +2758 arare_(kantai_collection) +2759 annie_leonhardt +2760 yamada_aoi +2761 maka_albarn +2762 kama_(fate/grand_order) +2763 diego_brando +2764 cure_rhythm +2765 isolated_island_oni +2766 kazama_asuka +2767 enemy_lifebuoy_(kantai_collection) +2768 error_musume +2769 nekomusume_(gegege_no_kitarou_6) +2770 lightning_farron +2771 nitocris_(swimsuit_assassin)_(fate) +2772 manya +2773 cure_white +2774 taokaka +2775 puru-see +2776 igarashi_kyou_(eroe) +2777 ichinose_kotomi +2778 kusugawa_sasara +2779 feldt_grace +2780 cure_heart +2781 setsuna_f_seiei +2782 mitake_ran +2783 touwa_erio +2784 sf-a2_miki +2785 faris_scherwiz +2786 toon_link +2787 tepig +2788 aozaki_aoko +2789 platinum_the_trinity +2790 shihouin_yoruichi +2791 kohinata_miho +2792 musashi_(pokemon) +2793 rena_erindel +2794 reinforce +2795 katsuragi_misato +2796 platelet_(hataraku_saibou) +2797 belldandy +2798 cheria_barnes +2799 lisbeth +2800 aragaki_shinjirou +2801 kira_yoshikage +2802 iori_rinko +2803 kaiou_michiru +2804 nishida_satono +2805 sakawa_(kantai_collection) +2806 lion_(kemono_friends) +2807 yuna_(ff10) +2808 natsuki_subaru +2809 suomi_kp31_(girls_frontline) +2810 moose_(kemono_friends) +2811 chiyoda_(kantai_collection) +2812 takumi_(fire_emblem) +2813 lenna_charlotte_tycoon +2814 kitazawa_shiho +2815 kanaria +2816 fubuki_(one-punch_man) +2817 asbel_lhant +2818 tsukimiya_ayu +2819 yellow_(pokemon) +2820 takanami_(kantai_collection) +2821 fukuji_mihoko +2822 kawashima_ami +2823 yamanaka_ino +2824 igarashi_futaba_(shiromanta) +2825 garnet_til_alexandros_xvii +2826 himejima_akeno +2827 bulbasaur +2828 ara_haan +2829 makoto_(street_fighter) +2830 vert +2831 nikka_edvardine_katajainen +2832 yamagumo_(kantai_collection) +2833 nadia +2834 moogle +2835 euryale +2836 sawamura_spencer_eriri +2837 komano_aun +2838 medb_(fate/grand_order) +2839 
otonashi_(angel_beats!) +2840 inoue_orihime +2841 sesshouin_kiara +2842 afuro_terumi +2843 meer_campbell +2844 lalafell +2845 cleveland_(azur_lane) +2846 haruno_haruka +2847 viera +2848 richelieu_(kantai_collection) +2849 marie_(girls_und_panzer) +2850 oyashio_(kantai_collection) +2851 boudica_(fate/grand_order) +2852 jude_mathis +2853 boa_hancock +2854 nagara_(kantai_collection) +2855 sophie_(tales) +2856 mikazuki_(kantai_collection) +2857 hasegawa_kobato +2858 dekomori_sanae +2859 mary_(ib) +2860 shiina_mayuri +2861 sasaki_chie +2862 kuga_natsuki +2863 klan_klein +2864 mysterious_heroine_x +2865 kawakami_mai +2866 nakano_nino +2867 revy_(black_lagoon) +2868 teireida_mai +2869 bakugou_katsuki +2870 miia_(monster_musume) +2871 boota +2872 hatoba_tsugu_(character) +2873 orihara_izaya +2874 bowser +2875 albedo +2876 st_ar-15_(girls_frontline) +2877 aki_(girls_und_panzer) +2878 asahina_mirai +2879 napoleon_bonaparte_(fate/grand_order) +2880 squirtle +2881 tsunade +2882 fujisaki_chihiro +2883 usada_hikaru +2884 gengar +2885 ichihara_nina +2886 cheren_(pokemon) +2887 akita_neru +2888 kano_shuuya +2889 jill_valentine +2890 matara_okina +2891 sora_(kingdom_hearts) +2892 cure_sword +2893 hikawa_hina +2894 matsumoto_rangiku +2895 shinonome_nano +2896 suzuki_jun +2897 enma_ai +2898 minamoto_no_raikou_(swimsuit_lancer)_(fate) +2899 zeta_(granblue_fantasy) +2900 hirose_kouichi +2901 sonohara_anri +2902 kobayakawa_sae +2903 matsukaze_tenma +2904 hei +2905 rainbow_mika +2906 hoshimiya_kate +2907 nunnally_lamperouge +2908 ibuki_(street_fighter) +2909 saitou_(pokemon) +2910 chrom_(fire_emblem) +2911 kaizuka_inaho +2912 filia_(skullgirls) +2913 sailor_saturn +2914 sion_eltnam_atlasia +2915 kemomimi-chan_(naga_u) +2916 kasuga_ayumu +2917 yuuno_scrya +2918 pit_(kid_icarus) +2919 m4_sopmod_ii_(girls_frontline) +2920 princess_of_moonbrook +2921 luxanna_crownguard +2922 sanada_akihiko +2923 white_rabbit +2924 aono_miki +2925 yumeko +2926 morgiana +2927 shamal +2928 euphemia_li_britannia 
+2929 okita_sougo +2930 sawa_azusa +2931 neo_politan +2932 zero_(rockman) +2933 kneesocks_(psg) +2934 prinz_eugen_(unfading_smile)_(azur_lane) +2935 kobayakawa_rinko +2936 nanao_yuriko +2937 tsukuyomi_shirabe +2938 iris_(pokemon) +2939 caro_ru_lushe +2940 niijima_makoto +2941 lysithea_von_ordelia +2942 kiss-shot_acerola-orion_heart-under-blade +2943 tsukioka_kogane +2944 tsuchinoko_(kemono_friends) +2945 selvaria_bles +2946 tsurumaki_maki +2947 mizutani_eri +2948 graf_eisen +2949 midway_hime +2950 higashikata_jousuke_(jojolion) +2951 small-clawed_otter_(kemono_friends) +2952 elizabeth_bathory_(brave)_(fate) +2953 yuuki_(sao) +2954 natori_(kantai_collection) +2955 takasu_ryuuji +2956 celes_chere +2957 hoshino_ruri +2958 destroyer_hime +2959 super_pochaco +2960 lucy_heartfilia +2961 narancia_ghirga +2962 harukaze_(kantai_collection) +2963 alisa_(girls_und_panzer) +2964 jigglypuff +2965 mikazuki_yozora +2966 sunazuka_akira +2967 cosmog +2968 iroha_(samurai_spirits) +2969 alena_(dq4) +2970 minase_nayuki +2971 fujimura_taiga +2972 saigusa_haruka +2973 kyaru_(princess_connect) +2974 m16a1_(girls_frontline) +2975 minazuki_karen +2976 kotegawa_yui +2977 hinatsuru_ai +2978 kazagumo_(kantai_collection) +2979 reisen +2980 kazami_yuuka_(pc-98) +2981 lyria_(granblue_fantasy) +2982 inuyama_aoi +2983 mimikyu +2984 gasai_yuno +2985 kairi_(kingdom_hearts) +2986 bazett_fraga_mcremitz +2987 m4a1_(girls_frontline) +2988 chen_(cat) +2989 kazemaru_ichirouta +2990 yoshida_yuuko_(machikado_mazoku) +2991 gotland_(kantai_collection) +2992 dante_(devil_may_cry) +2993 hiradaira_chisaki +2994 lavinia_whateley_(fate/grand_order) +2995 sage_(dq3) +2996 brynhildr_(fate) +2997 kagamihara_nadeshiko +2998 raven_(tales) +2999 black_hanekawa +3000 kuroyukihime +3001 violet_evergarden_(character) +3002 koyama_yuzu +3003 magical_ruby +3004 ginga_nakajima +3005 naegi_makoto +3006 bradamante_(fate/grand_order) +3007 kikuzuki_(kantai_collection) +3008 nakoruru +3009 satou_kazuma +3010 
amazon_(dragon's_crown) +3011 akali +3012 katou_asuka +3013 soldier_(dq3) +3014 uzuki_sayaka +3015 ogasawara_sachiko +3016 android_21 +3017 lili_(tekken) +3018 kyouhei_(pokemon) +3019 tsubaki_yayoi +3020 kohinata_miku +3021 kula_diamond +3022 amasawa_yuuko +3023 galko +3024 kamio_reiji_(yua) +3025 brown_bear_(kemono_friends) +3026 hakuryuu_(inazuma_eleven) +3027 shannon +3028 judith +3029 kumada_masaru +3030 sayo_samonji +3031 vampy +3032 kondou_taeko +3033 eva_02 +3034 yorha_type_a_no._2 +3035 zooey_(granblue_fantasy) +3036 berserker +3037 millia_rage +3038 beatrix_(granblue_fantasy) +3039 buront +3040 natalia_(idolmaster) +3041 ingrid +3042 takamori_aiko +3043 okumura_haru +3044 sonic +3045 chi-class_torpedo_cruiser +3046 toga_himiko +3047 sairenji_haruna +3048 tenjou_utena +3049 rabbit_yukine +3050 yamanaka_sawako +3051 roronoa_zoro +3052 kisaragi_momo +3053 seeu +3054 mukai_takumi +3055 cure_lovely +3056 milfeulle_sakuraba +3057 kochou_shinobu +3058 paul_bunyan_(fate/grand_order) +3059 nikaidou_saki +3060 bridget_(guilty_gear) +3061 kurumizawa_satanichia_mcdowell +3062 hinata_hajime +3063 nao_(mabinogi) +3064 nijimura_okuyasu +3065 karna_(fate) +3066 tanned_cirno +3067 kanon_(umineko) +3068 akashi_(azur_lane) +3069 anila_(granblue_fantasy) +3070 asseylum_vers_allusia +3071 maruyama_aya +3072 pannacotta_fugo +3073 kan'u_unchou +3074 elphelt_valentine +3075 leo_(fire_emblem) +3076 anya_alstreim +3077 himekawa_yuki +3078 natsume_kyousuke +3079 graf_zeppelin_(azur_lane) +3080 ak-12_(girls_frontline) +3081 wakaba_(kantai_collection) +3082 princess_daisy +3083 elesis_(elsword) +3084 edmond_dantes_(fate/grand_order) +3085 popplio +3086 sheik +3087 mikisugi_aikurou +3088 takimoto_hifumi +3089 yumi_(senran_kagura) +3090 quetzalcoatl_(maidragon) +3091 nekomiya_hinata +3092 tanamachi_kaoru +3093 artoria_pendragon_(swimsuit_ruler)_(fate) +3094 samson_(skullgirls) +3095 eas +3096 kokkoro_(princess_connect!) 
+3097 hanna-justina_marseille +3098 ellen_baker +3099 jeanne_d'arc_(granblue_fantasy) +3100 baiken +3101 lunamaria_hawke +3102 asakaze_(kantai_collection) +3103 hidaka_ai +3104 maria_cadenzavna_eve +3105 solid_snake +3106 black_heart +3107 monika_(doki_doki_literature_club) +3108 flonne +3109 huang_lingyin +3110 muppo +3111 izayoi_liko +3112 kisaragi_(azur_lane) +3113 kizuna_akari +3114 corrin_(fire_emblem)_(male) +3115 mikko_(girls_und_panzer) +3116 okazaki_ushio +3117 g41_(girls_frontline) +3118 toudou_yurika +3119 doraemon_(character) +3120 kurosaki_ichigo +3121 arle_nadja +3122 korra +3123 shinonono_houki +3124 may_(guilty_gear) +3125 hasumi_souji_(eroe) +3126 kiki +3127 itoshiki_nozomu +3128 ooji_mochizou +3129 eva_beatrice +3130 squall_leonhart +3131 angel_(kof) +3132 i-13_(kantai_collection) +3133 mukaido_manaka +3134 tsurumaki_kokoro +3135 seto_kousuke +3136 cure_princess +3137 koizumi_itsuki_(female) +3138 micaiah_(fire_emblem) +3139 lina_inverse +3140 charmander +3141 roll_caskett +3142 kurugaya_yuiko +3143 suou_pavlichenko +3144 mifune_miyu +3145 kobayakawa_yutaka +3146 riven_(league_of_legends) +3147 asuna_(sao-alo) +3148 sasha_braus +3149 reines_el-melloi_archisorte +3150 jouga_maya +3151 saya +3152 endou_mamoru +3153 semiramis_(fate) +3154 belarus_(hetalia) +3155 sora_ginko +3156 ness +3157 udagawa_tomoe +3158 ange_(princess_principal) +3159 gwendolyn +3160 shirakiin_ririchiyo +3161 murasaki_shikibu_(fate) +3162 abyssal_admiral_(kantai_collection) +3163 jessica_albert +3164 alice_cartelet +3165 tatebayashi_sakurako +3166 bottle_miku +3167 gawain_(fate/extra) +3168 kushieda_minori +3169 yuel_(granblue_fantasy) +3170 shuu_(inazuma_eleven) +3171 ashido_mina +3172 yuudachi_(azur_lane) +3173 lambda-11 +3174 sand_cat_(kemono_friends) +3175 hazama +3176 yumizuka_satsuki +3177 torchic +3178 raichu +3179 eldridge_(azur_lane) +3180 priest_(ragnarok_online) +3181 kagurazaka_asuna +3182 tachibana_taki +3183 kusakabe_misao +3184 tsushima_(kantai_collection) +3185 
chocola_(sayori) +3186 morishima_haruka +3187 senkawa_chihiro +3188 virgilia +3189 temari +3190 chikorita +3191 aoba_moca +3192 st._louis_(azur_lane) +3193 heiwajima_shizuo +3194 raphtalia +3195 uehara_himari +3196 rosa_farrell +3197 soldier:_76_(overwatch) +3198 colorado_(kantai_collection) +3199 fuura_kafuka +3200 meta_knight +3201 sin_sack +3202 hikawa_sayo +3203 evangeline_a_k_mcdowell +3204 isshiki_akane +3205 sasaki_chiho +3206 iori_junpei +3207 princess_of_the_crystal +3208 yuri_sakazaki +3209 iggy_(jojo) +3210 todoroki_shouto +3211 shiro_(dennou_shoujo_youtuber_shiro) +3212 sanji +3213 prussia_(hetalia) +3214 naganohara_mio +3215 natsu_megumi +3216 cure_pine +3217 shampoo_(ranma_1/2) +3218 kokonoe_rin +3219 heshikiri_hasebe +3220 elise_(fire_emblem) +3221 ayatsuji_tsukasa +3222 humboldt_penguin_(kemono_friends) +3223 minase_akiko +3224 erwin_(girls_und_panzer) +3225 golden_snub-nosed_monkey_(kemono_friends) +3226 princess_serenity +3227 plasma-chan_(kantai_collection) +3228 nanachi_(made_in_abyss) +3229 katarina_du_couteau +3230 katagiri_sanae +3231 yae_sakura +3232 kongou_(aoki_hagane_no_arpeggio) +3233 hibari_(senran_kagura) +3234 hummy_(suite_precure) +3235 sakurajima_mai +3236 kiana_kaslana +3237 sugimoto_reimi +3238 stheno +3239 royal_penguin_(kemono_friends) +3240 aircraft_carrier_oni +3241 schwertkreuz +3242 mishaguji +3243 high_priest +3244 tokiha_mai +3245 eva_01 +3246 wild_tiger +3247 tendou_akane +3248 isobe_noriko +3249 shizuka_rin +3250 shirley_fenette +3251 sailor_chibi_moon +3252 arima_senne +3253 sakura_(fire_emblem) +3254 izayoi_aki +3255 z23_(azur_lane) +3256 antonio_lopez +3257 dark_sakura +3258 hacka_doll_3 +3259 busujima_saeko +3260 vampire_(azur_lane) +3261 uni_(neptune_series) +3262 takagi-san +3263 lily_black +3264 russia_(hetalia) +3265 amaterasu +3266 tsurugi_kyousuke +3267 akimoto_komachi +3268 vee_(granblue_fantasy) +3269 lord_el-melloi_ii +3270 nathan_seymour +3271 hilda_valentine_goneril +3272 venera-sama +3273 
noctis_lucis_caelum +3274 poison_(final_fight) +3275 barasuishou +3276 vanilla_(sayori) +3277 ushiromiya_lion +3278 kuriyama_mirai +3279 nakano_yotsuba +3280 kuradoberi_jam +3281 intrepid_(kantai_collection) +3282 roto +3283 heles +3284 ima-no-tsurugi +3285 blue_rose_(tiger_&_bunny) +3286 trish_una +3287 strength_(black_rock_shooter) +3288 kouki_(pokemon) +3289 hatake_kakashi +3290 wang_liu_mei +3291 alvin_(tales) +3292 rex_(xenoblade_2) +3293 erza_scarlet +3294 carmilla_(fate/grand_order) +3295 chocobo +3296 tomori_nao +3297 mogami_shizuka +3298 kaidou_minami +3299 lisa_lisa +3300 cure_diamond +3301 priest_(dq3) +3302 matsudaira_touko +3303 shizuki_hitomi +3304 sombra_(overwatch) +3305 genji_(overwatch) +3306 sazaki_kaoruko +3307 kirito_(sao-ggo) +3308 mint_blancmanche +3309 lily_(vocaloid) +3310 iwasaki_minami +3311 hayasaka_mirei +3312 ms._fortune_(skullgirls) +3313 fujibayashi_ryou +3314 meowth +3315 cure_beat +3316 silva_(granblue_fantasy) +3317 ushiromiya_rosa +3318 totooria_helmold +3319 rotom +3320 johnston_(kantai_collection) +3321 elly +3322 sailor_uranus +3323 tanya_degurechaff +3324 atago_(midsummer_march)_(azur_lane) +3325 naomi_(girls_und_panzer) +3326 konoha_(kagerou_project) +3327 misaka_worst +3328 matsuwa_(kantai_collection) +3329 adachi_tooru +3330 ushiromiya_natsuhi +3331 nana_asta_deviluke +3332 victorica_de_blois +3333 mordred_(swimsuit_rider)_(fate) +3334 komori_kiri +3335 takao_(aoki_hagane_no_arpeggio) +3336 kuroka_(high_school_dxd) +3337 kaburagi_kaede +3338 ichii_yui +3339 arin +3340 arisa_bannings +3341 kasugano_urara_(yes!_precure_5) +3342 yuuki_yuuna +3343 taihou_(forbidden_feast)_(azur_lane) +3344 mizuho_(kantai_collection) +3345 utsugi_yuuki +3346 espeon +3347 tachibana_makoto +3348 matsumae_ohana +3349 sun_wukong +3350 marie_rose +3351 oyama_mahiro +3352 son_gohan +3353 eirika_(fire_emblem) +3354 kitashirakawa_chiyuri +3355 miyamoto_musashi_(swimsuit_berserker)_(fate) +3356 lockon_stratos +3357 koiwai_yotsuba +3358 rossweisse +3359 
niyah +3360 umbreon +3361 tron_bonne +3362 pyrrha_nikos +3363 matoba_risa +3364 hiiragi_yuzu +3365 cure_muse_(yellow) +3366 irako_(kantai_collection) +3367 kirakishou +3368 kiyohime_(swimsuit_lancer)_(fate) +3369 cure_dream +3370 girl_holding_a_cat_(kantai_collection) +3371 madoka_aguri +3372 jibril_(no_game_no_life) +3373 kotonoha_akane +3374 merlin_(fate) +3375 hori_yuuko +3376 ganondorf +3377 female_assassin_(fate/zero) +3378 scheherazade_(fate/grand_order) +3379 kiyama_hiroto +3380 watarase_jun +3381 morpeko +3382 cure_rosetta +3383 ping_hai_(azur_lane) +3384 diamond_(houseki_no_kuni) +3385 robert_eo_speedwagon +3386 xander_(fire_emblem) +3387 i-14_(kantai_collection) +3388 flareon +3389 valentine_(skullgirls) +3390 tsumiki_mikan +3391 litten +3392 ouma_kokichi +3393 hakurei_reimu_(pc-98) +3394 viral +3395 odamaki_sapphire +3396 yui_(sao) +3397 astaroth_(shinrabanshou) +3398 celestia_ludenberck +3399 luke_fon_fabre +3400 soma_peries +3401 ibara_mayaka +3402 minami_kana +3403 tear_grants +3404 caesar_(girls_und_panzer) +3405 candy_(smile_precure!) 
+3406 itou_chika +3407 chara_(undertale) +3408 luciela_r._sourcream +3409 einhart_stratos +3410 tsunashi_hajime +3411 negev_(girls_frontline) +3412 samuel_b._roberts_(kantai_collection) +3413 butz_klauser +3414 natsuki_rin +3415 sakata_kintoki_(fate/grand_order) +3416 komasan +3417 kanzuki_karin +3418 frankenstein's_monster_(swimsuit_saber)_(fate) +3419 tsunashi_kaoru +3420 elizabeth_(persona) +3421 sakaguchi_karina +3422 ookurikara +3423 cure_passion +3424 alice_carroll +3425 african_wild_dog_(kemono_friends) +3426 okinami_(kantai_collection) +3427 uchi_emiri +3428 yukikaze_panettone +3429 hiro +3430 yoshi +3431 luo_tianyi +3432 sakurazaki_setsuna +3433 nijou_noriko +3434 black-tailed_prairie_dog_(kemono_friends) +3435 aioi_yuuko +3436 isabella_valentine +3437 sans +3438 mai_natsume +3439 scanty_(psg) +3440 todoroki_yachiyo +3441 elize_lutus +3442 asia_argento +3443 prosciutto +3444 allelujah_haptism +3445 sailor_neptune +3446 kasuga_maru_(kantai_collection) +3447 pichu +3448 kojirou_(pokemon) +3449 hau_(pokemon) +3450 leone_abbacchio +3451 cure_twinkle +3452 mewtwo +3453 hyuuga_saki +3454 miyazaki_nodoka +3455 ryuuhou_(kantai_collection) +3456 anemone_(eureka_seven) +3457 hyur +3458 kasugano_sora +3459 fujino_shizuru +3460 cure_berry +3461 admiral_graf_spee_(azur_lane) +3462 rebecca_miyamoto +3463 fujiwara_chika +3464 otokura_yuuki +3465 northern_italy_(hetalia) +3466 jibanyan +3467 gokotai's_tigers +3468 siegfried_(fate) +3469 tanigawa_kanna +3470 american_beaver_(kemono_friends) +3471 narusawa_ryouka +3472 lyn_(blade_&_soul) +3473 amamiya_hibiya +3474 matsuda_chiyohiko +3475 asamiya_athena +3476 andira_(granblue_fantasy) +3477 phosphophyllite_(ll) +3478 elf_(dragon's_crown) +3479 jirou_kyouka +3480 ooiwa_wataru +3481 zange +3482 yamanbagiri_kunihiro +3483 zessica_wong +3484 acerola_(pokemon) +3485 chrome_dokuro +3486 arjuna_(fate/grand_order) +3487 yuuki_haru +3488 fubuki_shirou +3489 yokoyama_nao +3490 kitsu_chiri +3491 takano_miyo +3492 doma_umaru +3493 
vaporeon +3494 amane_suzuha +3495 ana_(overwatch) +3496 ibuki_tsubasa +3497 gentoo_penguin_(kemono_friends) +3498 aircraft_carrier_hime +3499 hierophant_green +3500 walpurgisnacht_(madoka_magica) +3501 morgana_(persona_5) +3502 cyndaquil +3503 mochizuki_anna +3504 matsuoka_miu +3505 takozonesu +3506 matsuoka_rin +3507 erika_(pokemon) +3508 futatsuiwa_mamizou_(human) +3509 takane_manaka +3510 caitlyn_(league_of_legends) +3511 eruruw +3512 minazuki_(kantai_collection) +3513 rororina_fryxell +3514 igarashi_kyouko +3515 tenma_gabriel_white +3516 narciso_anasui +3517 kirisaki_chitoge +3518 asuka_(senran_kagura) +3519 minakami_mai +3520 ken_masters +3521 okusawa_misaki +3522 cheshire_cat +3523 3.1-tan +3524 kyon_no_imouto +3525 the_world +3526 lucas +3527 kotonomiya_yuki +3528 satou_shin +3529 wii_fit_trainer +3530 robin_hood_(fate) +3531 nanase_haruka_(free!) +3532 felyne +3533 ikezawa_hanako +3534 mishou_mai +3535 nishizono_mio +3536 shinomiya_kaguya +3537 minato_aqua +3538 lacus_clyne +3539 kitten_(gravity_daze) +3540 cure_honey +3541 tsutsukakushi_tsukiko +3542 amae_koromo +3543 shibuki_ran +3544 ana_coppola +3545 nena_trinity +3546 erina_pendleton +3547 xenovia_quarta +3548 sola-ui_nuada-re_sophia-ri +3549 akamatsu_kaede +3550 kinoshita_hideyoshi +3551 fukuda_(girls_und_panzer) +3552 guts +3553 okumura_rin +3554 heidimarie_w_schnaufer +3555 fran +3556 lancelot_(fate/grand_order) +3557 rory_mercury +3558 shoukaku_(azur_lane) +3559 byleth_(fire_emblem)_(male) +3560 wu_zetian_(fate/grand_order) +3561 ninian_(fire_emblem) +3562 villetta_nu +3563 maeda_toushirou +3564 futaki_kanata +3565 jack_frost +3566 naoi_ayato +3567 hange_zoe +3568 santa_claus +3569 cheese-kun +3570 kanno_naoe +3571 pixiv-tan +3572 soraka +3573 hinamori_amu +3574 riza_hawkeye +3575 elizabeth_liones +3576 mudkip +3577 sakamoto_(nichijou) +3578 airi_(queen's_blade) +3579 sakutarou +3580 cerulean_(kemono_friends) +3581 oozora_akari +3582 pecorine +3583 queen_elizabeth_(azur_lane) +3584 banned_artist 
+3585 hammer_(sunset_beach) +3586 haruyama_kazunori +3587 mizuki_hitoshi +3588 kouji_(campus_life) +3589 tani_takeshi +3590 ebifurya +3591 itomugi-kun +3592 tanaka_takayuki +3593 bkub +3594 rebecca_(keinelove) +3595 yohane +3596 a1 +3597 carnelian +3598 ichimi +3599 blade_(galaxist) +3600 futa_(nabezoko) +3601 iesupa +3602 kirisawa_juuzou +3603 peko +3604 warugaki_(sk-ii) +3605 yua_(checkmate) +3606 kanon_(kurogane_knights) +3607 dd_(ijigendd) +3608 masao +3609 tsuda_nanafushi +3610 kantoku +3611 hamu_koutarou +3612 matsunaga_kouyou +3613 lolita_channel +3614 hisahiko +3615 shino_(ponjiyuusu) +3616 ido_(teketeke) +3617 minaba_hideo +3618 tsunako +3619 drawfag +3620 houtengeki +3621 zounose +3622 sayori +3623 ueyama_michirou +3624 tsukudani_(coke-buta) +3625 hews_hack +3626 minami_(colorful_palette) +3627 abubu +3628 shimazaki_mujirushi +3629 mizuki_makoto +3630 tomose_shunsaku +3631 ha_akabouzu +3632 ixy +3633 tonda +3634 matsuryuu +3635 kanikama +3636 yaegashi_nan +3637 otoufu +3638 creayus +3639 miyo_(ranthath) +3640 niwatazumi +3641 rappa_(rappaya) +3642 itou_noiji +3643 ilya_kuvshinov +3644 niiko_(gonnzou) +3645 ebi_193 +3646 katahira_masashi +3647 mizumoto_tadashi +3648 ruu_(tksymkw) +3649 suzuhira_hiro +3650 mizuhara_aki +3651 mochi_au_lait +3652 as109 +3653 saku_usako_(rabbit) +3654 ishikei +3655 dei_shirou +3656 yuureidoushi_(yuurei6214) +3657 takeuchi_takashi +3658 mikage_takashi +3659 karaagetarou +3660 nishi_koutarou +3661 yuuji_(and) +3662 hisona_(suaritesumi) +3663 frapowa +3664 eromame +3665 onikobe_rin +3666 bow_(bhp) +3667 agahari +3668 gaoo_(frpjx283) +3669 yokochou +3670 naga_u +3671 homare_(fool's_art) +3672 makuwauri +3673 shiseki_hirame +3674 butcha-u +3675 taisa_(kari) +3676 null_(nyanpyoun) +3677 gofu +3678 mishima_kurone +3679 beni_shake +3680 happoubi_jin +3681 kure_masahiro +3682 mattaku_mousuke +3683 mitsumoto_jouji +3684 clearite +3685 koyama_shigeru +3686 fujima_takuya +3687 ojipon +3688 koto_inari +3689 dr_rex +3690 
kawashina_(momen_silicon) +3691 milkpanda +3692 lolicept +3693 unya +3694 wa_(genryusui) +3695 boris_(noborhys) +3696 tima +3697 murakami_suigun +3698 cato_(monocatienus) +3699 bai_lao_shu +3700 fumio_(rsqkr) +3701 imizu_(nitro_unknown) +3702 kagami_hirotaka +3703 urushihara_satoshi +3704 zen +3705 hoshizuki_(seigetsu) +3706 hankuri +3707 ginhaha +3708 yabuki_kentarou +3709 aoshima +3710 maturiuta_sorato +3711 shimada_fumikane +3712 nori_tamago +3713 slugbox +3714 shirosato +3715 asanagi +3716 pokemoa +3717 misaki_kurehito +3718 usashiro_mani +3719 engiyoshi +3720 noai_nioshi +3721 goma_(gomasamune) +3722 shichimenchou +3723 eiri_(eirri) +3724 koruri +3725 cis_(carcharias) +3726 pageratta +3727 eroe +3728 maruki_(punchiki) +3729 meow_(nekodenki) +3730 satou_kibi +3731 iizuki_tasuku +3732 kei-suwabe +3733 catstudioinc_(punepuni) +3734 lasterk +3735 6_(yuchae) +3736 toosaka_asagi +3737 imu_sanjo +3738 himura_kiseki +3739 mikeou +3740 ootsuki_wataru +3741 ooyari_ashito +3742 negom +3743 lpip +3744 ayu_(mog) +3745 e.o. 
+3746 sekina +3747 space_jin +3748 seo_tatsuya +3749 wakabayashi_toshiya +3750 yamamoto_souichirou +3751 shirou_masamune +3752 yuuhagi_(amaretto-no-natsu) +3753 awa +3754 yamashita_shun'ya +3755 kuromiya +3756 otohime_(youngest_princess) +3757 yang-do +3758 nanao_naru +3759 masukuza_j +3760 kashiwamochi_yomogi +3761 ban +3762 nagisa_kurousagi +3763 ichikawa_feesu +3764 jjune +3765 tanaka_kusao +3766 yagisaka_seto +3767 akinbo_(hyouka_fuyou) +3768 kara_(color) +3769 nagian +3770 kouu_hiyoyo +3771 sheya +3772 nanase_nao +3773 eto +3774 yume_shokunin +3775 non_(z-art) +3776 nishieda +3777 komatsu_eiji +3778 utano +3779 ryoji_(nomura_ryouji) +3780 isshiki_(ffmania7) +3781 dandon_fuga +3782 colonel_aki +3783 pas_(paxiti) +3784 sakurazawa_izumi +3785 sano_toshihide +3786 hidefu_kitayan +3787 satou_yuuki +3788 jin_(mugenjin) +3789 pote_(ptkan) +3790 chan_co +3791 sakiyamama +3792 setz +3793 hase_yu +3794 aaaa +3795 yopparai_oni +3796 youkan +3797 shinama +3798 asamura_hiori +3799 eu03 +3800 mitsumi_misato +3801 torinone +3802 oouso +3803 aono3 +3804 yoshi_tama +3805 kazenokaze +3806 cutesexyrobutts +3807 7010 +3808 ucmm +3809 minato_hitori +3810 ikari_manatsu +3811 agawa_ryou +3812 fuantei +3813 chanta_(ayatakaoisii) +3814 yano_toshinori +3815 swd3e2 +3816 shibasaki_shouji +3817 oryou +3818 aldehyde +3819 yamato_nadeshiko +3820 jako_(jakoo21) +3821 rokuwata_tomoe +3822 kenkou_cross +3823 manji_(tenketsu) +3824 akkijin +3825 haruhisky +3826 yuuki_hagure +3827 sakuraba_yuuki +3828 sugimura_tomokazu +3829 lm_(legoman) +3830 fkey +3831 ragho_no_erika +3832 hayashiya_zankurou +3833 chado +3834 aozora_market +3835 ino +3836 morino_hon +3837 hemogurobin_a1c +3838 sumiyao_(amam) +3839 komaku_juushoku +3840 sakazaki_freddy +3841 onija_tarou +3842 ghettoyouth +3843 y.ssanoha +3844 tsuki_wani +3845 ichiba_youichi +3846 urin +3847 moneti_(daifuku) +3848 kurimomo +3849 gogiga_gagagigo +3850 shiromanta +3851 misumi_(macaroni) +3852 namori +3853 bosshi +3854 mutsuki_(moonknives) +3855 
bbb_(friskuser) +3856 cait +3857 caffein +3858 hairu +3859 piromizu +3860 sky_(freedom) +3861 kiku_hitomoji +3862 miuku_(marine_sapphire) +3863 udon_(shiratama) +3864 meito_(maze) +3865 porurin +3866 kusanagi_tonbo +3867 sakino_shingetsu +3868 momio +3869 sutahiro_(donta) +3870 bubukka +3871 ogino_atsuki +3872 nanakusa_nazuna +3873 kamelie +3874 niichi_(komorebi-palette) +3875 ryouka_(suzuya) +3876 kasuga_yukihito +3877 mattari_yufi +3878 hizaka +3879 komusou_(jinrikisha) +3880 kobuichi +3881 umekichi +3882 erubo +3883 harada_takehito +3884 m-da_s-tarou +3885 nanaroku_(fortress76) +3886 tetsu_(kimuchi) +3887 robert_porter +3888 ameyama_denshin +3889 tenken_(gotannda) +3890 michael +3891 suzuki_toto +3892 amazon_(taitaitaira) +3893 seki_(red_shine) +3894 moisture_(chichi) +3895 karukan_(monjya) +3896 petenshi_(dr._vermilion) +3897 watarui +3898 kouno_hikaru +3899 shinapuu +3900 dairi +3901 kawata_hisashi +3902 annin_musou +3903 mtu_(orewamuzituda) +3904 nigou +3905 jetto_komusou +3906 mogudan +3907 kou_mashiro +3908 obiwan +3909 ama_mitsuki +3910 kishida_mel +3911 kuro_suto_sukii +3912 shuugetsu_karasu +3913 mery_(apfl0515) +3914 herada_mitsuru +3915 9law +3916 azasuke +3917 ryuun_(stiil) +3918 sousou_(sousouworks) +3919 tk8d32 +3920 donguri_suzume +3921 waero +3922 kusaka_souji +3923 takayaki +3924 magukappu +3925 nogi_takayoshi +3926 saki_chisuzu +3927 774_(nanashi) +3928 at_classics +3929 mosha +3930 hiro_(hirohiro31) +3931 amazuyu_tatsuki +3932 murata_renji +3933 optionaltypo +3934 paseri +3935 tsukumo +3936 mosho +3937 celebi_ryousangata +3938 matarou_(genkai_toppa) +3939 mudou_eichi +3940 shirabi +3941 ishii_hisao +3942 yamaarashi +3943 hera_(hara0742) +3944 yukataro +3945 muririn +3946 kaga3chi +3947 kfr +3948 nekotoufu +3949 swordsouls +3950 jeno +3951 yume_no_owari +3952 marshmallow_mille +3953 yuzuna99 +3954 untsue +3955 satou_shouji +3956 komeshiro_kasu +3957 coffee-kizoku +3958 saru +3959 nardack +3960 tasaka_shinnosuke +3961 hirano_katsuyuki +3962 
gomennasai +3963 jason_(kaiten_kussaku_kikou) +3964 inu_(aerodog) +3965 yanagi_(nurikoboshi) +3966 gurande_(g-size) +3967 neko_majin +3968 nora_higuma +3969 yamu_(reverse_noise) +3970 jizeru_(giselebon) +3971 sakimori_(hououbds) +3972 cool-kyou_shinja +3973 sakimichan +3974 culter +3975 gochou_(atemonai_heya) +3976 akairiot +3977 tsuji_santa +3978 watanabe_akio +3979 sako_(bosscoffee) +3980 riichu +3981 akagashi_hagane +3982 deco_(geigeki_honey) +3983 mamuru +3984 kloah +3985 minamura_haruki +3986 shinoasa +3987 osashin_(osada) +3988 danbo_(rock_clime) +3989 totokichi +3990 mo_(kireinamo) +3991 socha +3992 sakimiya_(inschool) +3993 juurouta +3994 ishikkoro +3995 eretto +3996 kaiga +3997 natsu_(anta_tte_hitoha) +3998 futatsuki_hisame +3999 bococho +4000 umigarasu_(kitsune1963) +4001 blew_andwhite +4002 shangguan_feiying +4003 rifyu +4004 yuzu_momo +4005 satou_daiji +4006 mikagami_hiyori +4007 nironiro +4008 shinshin +4009 caryo +4010 body_mahattaya_ginga +4011 wata_do_chinkuru +4012 kotoba_noriaki +4013 kisetsu +4014 anti_(untea9) +4015 joy_ride +4016 nekobungi_sumire +4017 kuro_chairo_no_neko +4018 ikeuchi_tanuma +4019 kuavera +4020 uni_mate +4021 kichihachi +4022 nameo_(judgemasterkou) +4023 riyo_(lyomsnpmp) +4024 diesel-turbo +4025 abe_kanari +4026 (ysy)s +4027 kaisen_chuui +4028 gensoukoumuten +4029 goma_(yoku_yatta_hou_jane) +4030 chita_(ketchup) +4031 n.g. 
+4032 mota +4033 hana_kazari +4034 nishimata_aoi +4035 zuwai_kani +4036 otokuyou +4037 asutora +4038 em +4039 ryuuichi_(f_dragon) +4040 hayashi_custom +4041 kamizono_(spookyhouse) +4042 shikushiku_(amamori_weekly) +4043 hetza_(hellshock) +4044 arinu +4045 souji +4046 ririko_(zhuoyandesailaer) +4047 s-syogo +4048 goto_p +4049 uni8 +4050 yuki_arare +4051 michii_yuuki +4052 aki99 +4053 fuuma_nagi +4054 sazaki_ichiri +4055 senomoto_hisashi +4056 mokufuu +4057 neocoill +4058 ramchi +4059 daiaru +4060 aoi_(annbi) +4061 oda_ken'ichi +4062 lack +4063 okina_ika +4064 kuro_(kuronell) +4065 ominaeshi_(takenoko) +4066 sousouman +4067 marumikan +4068 masakichi_(crossroad) +4069 konpeto +4070 givuchoko +4071 shin_(new) +4072 jabara_tornado +4073 j7w +4074 kazuma_muramasa +4075 kumiko_shiba +4076 koyorin +4077 asteroid_ill +4078 bekkankou +4079 mikazuki_akira! +4080 redrop +4081 yoshitomi_akihito +4082 kurou_(quadruple_zero) +4083 takana_shinno +4084 dr.p +4085 harigane_shinshi +4086 yutakasan-love +4087 otoi_rekomaru +4088 boushi-ya +4089 homura_subaru +4090 tebi_(tbd11) +4091 pekeko_(pepekekeko) +4092 kuroboshi_kouhaku +4093 aikawa_ryou +4094 makuran +4095 fuukadia_(narcolepsy) diff --git a/pycls/__init__.py b/pycls/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pycls/__pycache__/__init__.cpython-37.pyc b/pycls/__pycache__/__init__.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..237625bf7dcbb4ece8a71c0b24c0588a5f8c5203 GIT binary patch literal 132 zcmZ?b<>g`k0^5egaUl9Jh=2h`Aj1KOi&=m~3PUi1CZpdg`k0^5egaUl9Jh=2h`Aj1KOi&=m~3PUi1CZpd|9 z0NEqfnaR{S=u2caBVR{4zPo@p*EJ<5%)?X1V+< zKFR%=&HbIlnZL960{3ecpUw9(|7Y<wz-XeaTya@7(_qakIm`FoBr_bp#qBKO z@9}$D#BY5AIcjEIOhR*71rylN-!dq0e+uc0r&$l2k?hv z9^iZACRrd?KNJxr*T`!h3g|~?NUDpqk*~^ z_5$_NFnS!&fT$aL>bj=8_4zStT&pgom9F324)8AR0a4aBx2%W+sW4`(o=$aX6vsFi zBs3cCp(7i99CmR+{Xx7L(f(Kjq45#`4^0evemKAzy{P+etOD!K765qubQ{&;us`et zI8K5-4hLwO1~J}1L$V=m+&)9#v9SX$*Ay|B~{nbam`TJjP 
zZT;$(?D;M1%iwmQ8^z<_Grd3lfjwW}-ulO9-~73?v2;5vg7@KI`~mEazXXsL9{Iho z0FjJART#5DHl6`6k{6Qx@Y{(lPv`ue-|wZRI3dw)lFEZ_x0hBRR=>9$#YuSVA}E|; z3Jb%%ZZDoA(c?ic^2r=eZ;h0VKkXxFb zk*aUQg~w`^R25Rc8^mfjc;wU2A0+WSWf>Zs^R@%1TpA~8kE&2ML$$vPM4b3EQT;@{ zS})KVNYY~fQaK6YWHjTkn^ogsFHF>nKY9P6I*34JcQ}k_5|Gi&Y`-54!nqAU>29lW zI11EwJKF6*X9TJbhxC#VDpid(Rjbp&EA5UuKRWAy`TuVB2H*a_&C)W9^m>~H-7pT+ zWs60Zu+;R2yRc%-%K~^5&~P(^-i3}+Z>iT*aH=lR(>!q2-J_SA(UwZU$8eCTeMj&& z_Rk5E0IDtQ2mjMSR{dxyVLdI{ZO?3_Mb|UhZd%jgIH1f!gVKnmMbFVp3wGO0(@RTi zH26tcUDiB(6}u)fQe{7zsq7i9mrBc4daUCbxa}Ca3E1^C&rOx4y@DG|pu#pVoN1(G z!_tVQnwg1QJ~ILJ1hq5IWvqIMvgu1- zWIzRwme&k(WfiWZ*09&CrmZ!wuIZ}=on;~{szYFok(U8{#InZbn;=h%oO6&BX1@Y2 z&&nIHfN3FqA|-_#8Q$Wj()&VEdID4Isr2*X?~A}IKY^c+KNCe^O(2E)f+IWt3>lI~ z^~P?02s82^Ei7V2!EA=!QAr3*l|*!JJ{2FsXAQ`K{frV9Ibv8aA=aHUdB zq_<$NE_!v5o?@$RTE>_}U5xqhsK!5j^Sepdi|50L7ZsY=^SkkUR=O5>um!S8%En(MH6gZ=uORa4L%iI7vJevsN@)~Wq7Cp1INl(RX$l9yVG{j zku|g7t>U)f;1$L}Ro8ROhJmj=xQ0rawa#^6hP5z1ti+dVsJh&=^?NywJK&-cx8S&4E{dV!^|0CN}>T6T-VFAo8oU|Ddr zR=a7q`!f}tM22Hy(=t7L$F#I2UeQ`D4V`9lSMRu<-NIRy!^qXOb#&r8nDi_&UtZQg zr|FyqydJayc=lQWF1Pz}TG-138;laI| zb^{%QF{R&Y+ot91vnI5TXJaUyPRpPROrMQ0G{H<_Hrxy+E4ek>fqQUKwQX9sWwdN( z9kOh+(4=EDJCMtEQ`3#KsCOEcl~$GwOJ8ki&OHkM4-52FZZr?Ygg7ByxEHpf>{~)_ zFmMw-mUg5Wk7ssg-n{XvZnupoJKxK_f-(v|6=c!KF!OnX``;03@+J@93^m2EOqn; z3ZhXDTQ8l?ST|LWjzwvdTvn;eT?+;06jVP0W6%OGi19Xbi_U%h*fcoa_z-O_f-Szp z->~s^-#kEA9%1y?3IuP;SJ5Qc_ueDpipf)_6ZYc{^g% z{c35t;>k~>0w`LWzfb#fS&06+;=HVMpC=~K!t{ZPTwmE#D$#fCgpQV#?aYL^>3-n& z@xdVP0PkVt7MeQ9PTc8%)+BboxxfZppd;8{P0o|Cm}S}srNiVdH6gFcq{z}F&#MS_ zrCgaFu@s_$(bP!we+Jcpq}^9n#cr@Ygs`r$5aqh}TDBR-T&8i%0;rJAqq4LFbiW=q za)W{?r`c>r``)R09E084aJ#B;)I-)4Z(*nzt$_qbh2*PTyl^4y2%)5dJGAWu!9SFX B!g>Gz literal 0 HcmV?d00001 diff --git a/pycls/core/__pycache__/model_builder.cpython-37.pyc b/pycls/core/__pycache__/model_builder.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..32012be5eee30e1a9ff81c22790510f93ec8ede5 GIT binary patch literal 1317 
zcmZWp&5qkP5GJV~%d)NQ?lzkSZ3>~6z&+T#6)4goKfCCqyInNRp*mF1inLZs|gC2I#Q(uYKp7IJkbx7GokpFi>u`pbOm zmIuru*zz|xC`u5;lyF=Tf-tI_#KpbDOFYmx+^+`7z{*@6RKsLwd5?$HC>dGa=i_RU zOe{a(QMH%sVZ_{<57_=KP7c_Rjo2QW(BOABIiy1x{)&4=Vj-l60fnoidDPaDd3 zoHw;pqRmy=)bXONEhA^|AXIcV#oEai1rYyn-JLOIxRb?V%l*KLe>rJ5H}SK&tC>oC zwpdsynB39=Xl~q^MPM6Tp2C)|;ZSIamqa-??k&3V-k@t_Z;WoyHQu1*a07V0#UKS+>R!3xCFsY?$uioY4K6U~y|>Oa z+91HhU++I@OHO61E?Ha|cxF48RJUUBgI}&6#C4-$*{)WNP>ilWf11@s(n@0?Sp4nz z^O&+1WzLM7td6r6S;?~t&J1?F<{gHFr;hgFjg~VV_TsdpQcs#zty-1#qUoM6NNMl% z`{R@EwEN>v=O=oQ+SsIauUIE_^jJzJO#M!TXoU6`ji@rEgR|$SDTMP(d#VxnrFQd{ zW_rAREN|Lc=_qf7VD;`jP3l?)t4w5^Gp_F*m(U#LMGL7OXDU12DyBqxHijew`)#a$ zBJnZA`uCUf%cf%Ubw_F4$vFi($<%z+yh9FUq@2kC`DoQc`0(<96G$=w4_@(2z@ zj_}wyz&?pcNXD=O91(FJG`WGsEJGdGwwcDjY@k^-=pl?1l$ESMg+_%$n2>SS*EObH zomFKHX?8Pca}j|RWaWaC0Z;&#O{PCS0i}+_)=3}W7a)^103u>zd<1n#X_Kd^KD4>c zT2*p6<4sYNb)h}`sRz4n!dr1@9PjtH4JI>~#zk4!nkQQ?eXVExTnUrrdzyaIg!_LD zL<}=0%$&Vv(@&* ResNet; > 1 -> ResNeXt) +_C.RESNET.NUM_GROUPS = 1 + +# Width of each group (64 -> ResNet; 4 -> ResNeXt) +_C.RESNET.WIDTH_PER_GROUP = 64 + +# Apply stride to 1x1 conv (True -> MSRA; False -> fb.torch) +_C.RESNET.STRIDE_1X1 = True + + +# ---------------------------------------------------------------------------- # +# AnyNet options +# ---------------------------------------------------------------------------- # +_C.ANYNET = CN() + +# Stem type +_C.ANYNET.STEM_TYPE = "plain_block" + +# Stem width +_C.ANYNET.STEM_W = 32 + +# Block type +_C.ANYNET.BLOCK_TYPE = "plain_block" + +# Depth for each stage (number of blocks in the stage) +_C.ANYNET.DEPTHS = [] + +# Width for each stage (width of each block in the stage) +_C.ANYNET.WIDTHS = [] + +# Strides for each stage (applies to the first block of each stage) +_C.ANYNET.STRIDES = [] + +# Bottleneck multipliers for each stage (applies to bottleneck block) +_C.ANYNET.BOT_MULS = [] + +# Group widths for each stage (applies to bottleneck block) +_C.ANYNET.GROUP_WS = [] + +# Whether SE is enabled for 
res_bottleneck_block +_C.ANYNET.SE_ON = False + +# SE ratio +_C.ANYNET.SE_R = 0.25 + +# ---------------------------------------------------------------------------- # +# RegNet options +# ---------------------------------------------------------------------------- # +_C.REGNET = CN() + +# Stem type +_C.REGNET.STEM_TYPE = "simple_stem_in" +# Stem width +_C.REGNET.STEM_W = 32 +# Block type +_C.REGNET.BLOCK_TYPE = "res_bottleneck_block" +# Stride of each stage +_C.REGNET.STRIDE = 2 +# Squeeze-and-Excitation (RegNetY) +_C.REGNET.SE_ON = False +_C.REGNET.SE_R = 0.25 + +# Depth +_C.REGNET.DEPTH = 10 +# Initial width +_C.REGNET.W0 = 32 +# Slope +_C.REGNET.WA = 5.0 +# Quantization +_C.REGNET.WM = 2.5 +# Group width +_C.REGNET.GROUP_W = 16 +# Bottleneck multiplier (bm = 1 / b from the paper) +_C.REGNET.BOT_MUL = 1.0 + + +# ---------------------------------------------------------------------------- # +# EfficientNet options +# ---------------------------------------------------------------------------- # +_C.EN = CN() + +# Stem width +_C.EN.STEM_W = 32 + +# Depth for each stage (number of blocks in the stage) +_C.EN.DEPTHS = [] + +# Width for each stage (width of each block in the stage) +_C.EN.WIDTHS = [] + +# Expansion ratios for MBConv blocks in each stage +_C.EN.EXP_RATIOS = [] + +# Squeeze-and-Excitation (SE) ratio +_C.EN.SE_R = 0.25 + +# Strides for each stage (applies to the first block of each stage) +_C.EN.STRIDES = [] + +# Kernel sizes for each stage +_C.EN.KERNELS = [] + +# Head width +_C.EN.HEAD_W = 1280 + +# Drop connect ratio +_C.EN.DC_RATIO = 0.0 + +# Dropout ratio +_C.EN.DROPOUT_RATIO = 0.0 + + +# ---------------------------------------------------------------------------- # +# Batch norm options +# ---------------------------------------------------------------------------- # +_C.BN = CN() + +# BN epsilon +_C.BN.EPS = 1e-5 + +# BN momentum (BN momentum in PyTorch = 1 - BN momentum in Caffe2) +_C.BN.MOM = 0.1 + +# Precise BN stats +_C.BN.USE_PRECISE_STATS = 
False +_C.BN.NUM_SAMPLES_PRECISE = 1024 + +# Initialize the gamma of the final BN of each block to zero +_C.BN.ZERO_INIT_FINAL_GAMMA = False + +# Use a different weight decay for BN layers +_C.BN.USE_CUSTOM_WEIGHT_DECAY = False +_C.BN.CUSTOM_WEIGHT_DECAY = 0.0 + +# ---------------------------------------------------------------------------- # +# Optimizer options +# ---------------------------------------------------------------------------- # +_C.OPTIM = CN() + +# Base learning rate +_C.OPTIM.BASE_LR = 0.1 + +# Learning rate policy select from {'cos', 'exp', 'steps'} +_C.OPTIM.LR_POLICY = "cos" + +# Exponential decay factor +_C.OPTIM.GAMMA = 0.1 + +# Steps for 'steps' policy (in epochs) +_C.OPTIM.STEPS = [] + +# Learning rate multiplier for 'steps' policy +_C.OPTIM.LR_MULT = 0.1 + +# Maximal number of epochs +_C.OPTIM.MAX_EPOCH = 200 + +# Momentum +_C.OPTIM.MOMENTUM = 0.9 + +# Momentum dampening +_C.OPTIM.DAMPENING = 0.0 + +# Nesterov momentum +_C.OPTIM.NESTEROV = True + +# L2 regularization +_C.OPTIM.WEIGHT_DECAY = 5e-4 + +# Start the warm up from OPTIM.BASE_LR * OPTIM.WARMUP_FACTOR +_C.OPTIM.WARMUP_FACTOR = 0.1 + +# Gradually warm up the OPTIM.BASE_LR over this number of epochs +_C.OPTIM.WARMUP_EPOCHS = 0 + + +# ---------------------------------------------------------------------------- # +# Training options +# ---------------------------------------------------------------------------- # +_C.TRAIN = CN() + +# Dataset and split +_C.TRAIN.DATASET = "" +_C.TRAIN.SPLIT = "train" + +# Total mini-batch size +_C.TRAIN.BATCH_SIZE = 128 + +# Image size +_C.TRAIN.IM_SIZE = 224 + +# Evaluate model on test data every eval period epochs +_C.TRAIN.EVAL_PERIOD = 1 + +# Save model checkpoint every checkpoint period epochs +_C.TRAIN.CHECKPOINT_PERIOD = 1 + +# Resume training from the latest checkpoint in the output directory +_C.TRAIN.AUTO_RESUME = True + +# Weights to start training from +_C.TRAIN.WEIGHTS = "" + + +# 
---------------------------------------------------------------------------- # +# Testing options +# ---------------------------------------------------------------------------- # +_C.TEST = CN() + +# Dataset and split +_C.TEST.DATASET = "" +_C.TEST.SPLIT = "val" + +# Total mini-batch size +_C.TEST.BATCH_SIZE = 200 + +# Image size +_C.TEST.IM_SIZE = 256 + +# Weights to use for testing +_C.TEST.WEIGHTS = "" + + +# ---------------------------------------------------------------------------- # +# Common train/test data loader options +# ---------------------------------------------------------------------------- # +_C.DATA_LOADER = CN() + +# Number of data loader workers per training process +_C.DATA_LOADER.NUM_WORKERS = 4 + +# Load data to pinned host memory +_C.DATA_LOADER.PIN_MEMORY = True + + +# ---------------------------------------------------------------------------- # +# Memory options +# ---------------------------------------------------------------------------- # +_C.MEM = CN() + +# Perform ReLU inplace +_C.MEM.RELU_INPLACE = True + + +# ---------------------------------------------------------------------------- # +# CUDNN options +# ---------------------------------------------------------------------------- # +_C.CUDNN = CN() + +# Perform benchmarking to select the fastest CUDNN algorithms to use +# Note that this may increase the memory usage and will likely not result +# in overall speedups when variable size inputs are used (e.g. 
COCO training) +_C.CUDNN.BENCHMARK = True + + +# ---------------------------------------------------------------------------- # +# Precise timing options +# ---------------------------------------------------------------------------- # +_C.PREC_TIME = CN() + +# Perform precise timing at the start of training +_C.PREC_TIME.ENABLED = False + +# Total mini-batch size +_C.PREC_TIME.BATCH_SIZE = 128 + +# Number of iterations to warm up the caches +_C.PREC_TIME.WARMUP_ITER = 3 + +# Number of iterations to compute avg time +_C.PREC_TIME.NUM_ITER = 30 + + +# ---------------------------------------------------------------------------- # +# Misc options +# ---------------------------------------------------------------------------- # + +# Number of GPUs to use (applies to both training and testing) +_C.NUM_GPUS = 1 + +# Output directory +_C.OUT_DIR = "/tmp" + +# Config destination (in OUT_DIR) +_C.CFG_DEST = "config.yaml" + +# Note that non-determinism may still be present due to non-deterministic +# operator implementations in GPU operator libraries +_C.RNG_SEED = 1 + +# Log destination ('stdout' or 'file') +_C.LOG_DEST = "stdout" + +# Log period in iters +_C.LOG_PERIOD = 10 + +# Distributed backend +_C.DIST_BACKEND = "nccl" + +# Hostname and port for initializing multi-process groups +_C.HOST = "localhost" +_C.PORT = 10001 + +# Models weights referred to by URL are downloaded to this local cache +_C.DOWNLOAD_CACHE = "/tmp/pycls-download-cache" + + +def assert_and_infer_cfg(cache_urls=True): + """Checks config values invariants.""" + assert ( + not _C.OPTIM.STEPS or _C.OPTIM.STEPS[0] == 0 + ), "The first lr step must start at 0" + assert _C.TRAIN.SPLIT in [ + "train", + "val", + "test", + ], "Train split '{}' not supported".format(_C.TRAIN.SPLIT) + assert ( + _C.TRAIN.BATCH_SIZE % _C.NUM_GPUS == 0 + ), "Train mini-batch size should be a multiple of NUM_GPUS." 
+ assert _C.TEST.SPLIT in [ + "train", + "val", + "test", + ], "Test split '{}' not supported".format(_C.TEST.SPLIT) + assert ( + _C.TEST.BATCH_SIZE % _C.NUM_GPUS == 0 + ), "Test mini-batch size should be a multiple of NUM_GPUS." + assert ( + not _C.BN.USE_PRECISE_STATS or _C.NUM_GPUS == 1 + ), "Precise BN stats computation not verified for > 1 GPU" + assert _C.LOG_DEST in [ + "stdout", + "file", + ], "Log destination '{}' not supported".format(_C.LOG_DEST) + assert ( + not _C.PREC_TIME.ENABLED or _C.NUM_GPUS == 1 + ), "Precise iter time computation not verified for > 1 GPU" + if cache_urls: + cache_cfg_urls() + + +def cache_cfg_urls(): + """Download URLs in the config, cache them locally, and rewrite cfg to make + use of the locally cached file. + """ + _C.TRAIN.WEIGHTS = cache_url(_C.TRAIN.WEIGHTS, _C.DOWNLOAD_CACHE) + _C.TEST.WEIGHTS = cache_url(_C.TEST.WEIGHTS, _C.DOWNLOAD_CACHE) + + +def dump_cfg(): + """Dumps the config to the output directory.""" + cfg_file = os.path.join(_C.OUT_DIR, _C.CFG_DEST) + with open(cfg_file, "w") as f: + _C.dump(stream=f) + + +def load_cfg(out_dir, cfg_dest="config.yaml"): + """Loads config from specified output directory.""" + cfg_file = os.path.join(out_dir, cfg_dest) + _C.merge_from_file(cfg_file) diff --git a/pycls/core/losses.py b/pycls/core/losses.py new file mode 100644 index 0000000..feea0b6 --- /dev/null +++ b/pycls/core/losses.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +"""Loss functions.""" + +import torch.nn as nn +from pycls.core.config import cfg + + +# Supported loss functions +_loss_funs = {"cross_entropy": nn.CrossEntropyLoss} + + +def get_loss_fun(): + """Retrieves the loss function.""" + assert ( + cfg.MODEL.LOSS_FUN in _loss_funs.keys() + ), "Loss function '{}' not supported".format(cfg.TRAIN.LOSS) + return _loss_funs[cfg.MODEL.LOSS_FUN]().cuda() + + +def register_loss_fun(name, ctor): + """Registers a loss function dynamically.""" + _loss_funs[name] = ctor diff --git a/pycls/core/model_builder.py b/pycls/core/model_builder.py new file mode 100644 index 0000000..9263d3c --- /dev/null +++ b/pycls/core/model_builder.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""Model construction functions.""" + +import pycls.utils.logging as lu +import torch +from pycls.core.config import cfg +from pycls.models.anynet import AnyNet +from pycls.models.effnet import EffNet +from pycls.models.regnet import RegNet +from pycls.models.resnet import ResNet + + +logger = lu.get_logger(__name__) + +# Supported models +_models = {"anynet": AnyNet, "effnet": EffNet, "resnet": ResNet, "regnet": RegNet} + + +def build_model(): + """Builds the model.""" + assert cfg.MODEL.TYPE in _models.keys(), "Model type '{}' not supported".format( + cfg.MODEL.TYPE + ) + assert ( + cfg.NUM_GPUS <= torch.cuda.device_count() + ), "Cannot use more GPU devices than available" + # Construct the model + model = _models[cfg.MODEL.TYPE]() + # Determine the GPU used by the current process + cur_device = torch.cuda.current_device() + # Transfer the model to the current GPU device + model = model.cuda(device=cur_device) + # Use multi-process data parallel model in the multi-gpu setting + if cfg.NUM_GPUS > 1: + # Make model replica operate on the current device + model = 
torch.nn.parallel.DistributedDataParallel( + module=model, device_ids=[cur_device], output_device=cur_device + ) + return model + + +def register_model(name, ctor): + """Registers a model dynamically.""" + _models[name] = ctor diff --git a/pycls/core/optimizer.py b/pycls/core/optimizer.py new file mode 100644 index 0000000..bbba1cc --- /dev/null +++ b/pycls/core/optimizer.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""Optimizer.""" + +import pycls.utils.lr_policy as lr_policy +import torch +from pycls.core.config import cfg + + +def construct_optimizer(model): + """Constructs the optimizer. + + Note that the momentum update in PyTorch differs from the one in Caffe2. + In particular, + + Caffe2: + V := mu * V + lr * g + p := p - V + + PyTorch: + V := mu * V + g + p := p - lr * V + + where V is the velocity, mu is the momentum factor, lr is the learning rate, + g is the gradient and p are the parameters. + + Since V is defined independently of the learning rate in PyTorch, + when the learning rate is changed there is no need to perform the + momentum correction by scaling V (unlike in the Caffe2 case). + """ + # Batchnorm parameters. + bn_params = [] + # Non-batchnorm parameters. + non_bn_parameters = [] + for name, p in model.named_parameters(): + if "bn" in name: + bn_params.append(p) + else: + non_bn_parameters.append(p) + # Apply different weight decay to Batchnorm and non-batchnorm parameters. + bn_weight_decay = ( + cfg.BN.CUSTOM_WEIGHT_DECAY + if cfg.BN.USE_CUSTOM_WEIGHT_DECAY + else cfg.OPTIM.WEIGHT_DECAY + ) + optim_params = [ + {"params": bn_params, "weight_decay": bn_weight_decay}, + {"params": non_bn_parameters, "weight_decay": cfg.OPTIM.WEIGHT_DECAY}, + ] + # Check all parameters will be passed into optimizer. 
+ assert len(list(model.parameters())) == len(non_bn_parameters) + len( + bn_params + ), "parameter size does not match: {} + {} != {}".format( + len(non_bn_parameters), len(bn_params), len(list(model.parameters())) + ) + return torch.optim.SGD( + optim_params, + lr=cfg.OPTIM.BASE_LR, + momentum=cfg.OPTIM.MOMENTUM, + weight_decay=cfg.OPTIM.WEIGHT_DECAY, + dampening=cfg.OPTIM.DAMPENING, + nesterov=cfg.OPTIM.NESTEROV, + ) + + +def get_epoch_lr(cur_epoch): + """Retrieves the lr for the given epoch (as specified by the lr policy).""" + return lr_policy.get_epoch_lr(cur_epoch) + + +def set_lr(optimizer, new_lr): + """Sets the optimizer lr to the specified value.""" + for param_group in optimizer.param_groups: + param_group["lr"] = new_lr diff --git a/pycls/datasets/__init__.py b/pycls/datasets/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pycls/datasets/__pycache__/__init__.cpython-37.pyc b/pycls/datasets/__pycache__/__init__.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd79509453e1c283e20de5219024755628ef1db9 GIT binary patch literal 141 zcmZ?b<>g`k0^5egaUl9Jh=2h`Aj1KOi&=m~3PUi1CZpd1Q@$s2?nI-Y@dIgoYIBatBQ%ZAE?Lg*z24V&P DS@0p+ literal 0 HcmV?d00001 diff --git a/pycls/datasets/__pycache__/cifar10.cpython-37.pyc b/pycls/datasets/__pycache__/cifar10.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7da02c70520c1b3b04411ca9949d6c16483e5db GIT binary patch literal 2902 zcmZWrTaO#J6(%{?9ciz=#OoHR5}*kLwAyuUMcs|Nj@Ky+#GBMxAjQDRbUYksM;^_L zNvgH95)fql8WaWk(mqM(Q-S_~JoX2)KcIXnkf;3%d1=q#NV^+{0uK*4hveaRZu($# zH6YO5|Ke|-{|C5laI$PRF#FKupMh`^5>7eGX-Lh;LS{xQw9V)YTt+Avc%esYNq?|n zR)VmxSZRh$BfDxwD_GUCCjaYAfmXdT(PpiuQZBO8)uz ze($e;fi`-)Soz}P{ocR-1+BSc{_)9vVuTlj!MG1yB1s86p5Vhni!*M$An zdoa4FHjG}abm;RYUxoP!Ujv2LwRbP;#^ToQc++V9(@Ux+7F>Z1M+f>n=<;_!6zS6$ zo6tFxHr13<7<#p<~A=E?NUR#B=}w;UF{&9tLLoE6$usZ`_FkE%RV7eaDaRi#iV z2XWC#@Ok4O+$#&I#4u4=(QBQ*^kmxUblOeLN~vwUO1tUvOe(3@?n;>om6gSPAxfcL 
zIBO6q?dD~#mkMoXMYq&Jgl3NFosmR6U@b3WZa!!)%8F_T$9Yj6KS^`hwzMtNysK9( zHaceC--);e$bS3o&T%mi;>(}xY15EN^-fNi#54p-gaxZQ;oDA!Bv@xq937+ z)_@Rtlde&pZBUoGHaKQ&=gv|vK~KVlVYLF?Soa-Z6xfH;Q})NJQ>JL2p-s8<6`NY0 zLfHM9JR-j(6N}s1NS$rcMb~!^H4{&?^JD}@0eAP)A}KkbE%AJ#vJ1=w3}uxT+O4wW zX`X5uF>Bjm3q*AABv#3BjVkSG$_+9#xsC#j8e${%6IhUVGNDa+gO0D9ztfn($RrfM zL-V1dta<9!z$h|^94~3W`R?7Ku+nD77$oH7<;5g z7?}br&-W(ufPD7O)K+fan}JLB?FpPUbDF8lO$FNl!{muSXO}lpeWnvk)gYmVG05WOvOe-r|gLF`eQD?ky zGOgFpDQ~w!5NMLbBQ zb{A3Y*4`X092P%{SK*MbSqs$92%EK7J$nf_u@3ULpv#*;2)oV#dY!pI1G)k68_*`( zf=C{3eILp52woc1xb+=42Hko>6;sySnnJGaLrDa8z72WXf!MC2nZnEkDX;JMSFmb) zg|ZZ5dHmx~L|VlFOiLZ7t!xnYQj<-KRQ;h8Pr>0Jt#yoR)uoL5_m}7!*+A3k3_Y%G ziFaXubeE0O(61mRBG(Rd0LiyGE~GP8k<+ z=h)&~&;<>)yTAaNCO>xBHlRGmIaI>!MJj-Rw=kG7h!>&C4m*(9Hejo^ld>9#O<=_y zkY&PivVy12YrGueEZ=`Z)Js*S(m@nGgq@3w_bNQ$QpUTGIgfV?EOZZ}K86pf--n#H zjb)Y{3KRhFjCziV0|<3Y!!N7gMNys>QS=DDneZlbyngPK^|wPXFH|B7geZba7!683 z%#m+K(X(NkFTTK8yiB4a@oP652Z{y zW~#mAoaICKFH(x+SZ|o2Q_JH~db2(uDMi{z%A%X~>I@TrC82=f-w1(bsZ-l+7*Ozix&=SrXwwQTNF>((0l{A0I{*Lx literal 0 HcmV?d00001 diff --git a/pycls/datasets/__pycache__/imagenet.cpython-37.pyc b/pycls/datasets/__pycache__/imagenet.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..588f332b114177d7e1a7ef0f78890a8153d1e63d GIT binary patch literal 3347 zcma)8U2Ggz6~1?Vc4x=C_S$hAJ1Jo$`nM{qouVRW5^r4FO^uob;gD3*CZoyR+3{>< zXV!Pd!QQL|s{4YV0;&9{2#K}ILy;=r0SUna5xgN9iMJ|OLTv%Hn0F*7FPw8{e;^cz zS>3<0=iYnn`Mz`ReX3M434G_j^4j&QlZ5;U2g5H9oeNNs-$TWSMHppDNG&Q`#ZqLe zTB>X{OP6h~omU7YZNoBXHlxrsrDVb?4kRV3By}fctAI&>my6>^NwspfP`uusL?( zCBja4rIy)RXq`Mzty4_dB-Q!u^hVq9y@r=on3Fn*m)4HpprlcyLUo(I|DDyZh426K zUfuugtzZ4%dWFzxu*Ae)G3S|9Hp$$JEi!f15MC+aSmJNy*rT!4~% z8Y+(fN(2!CbQGq(L@W(`3N84R<)O`?U!l#TPoXtf5xi0o#?UJt;vZLqweiDNhq4MK zc@CM_vfmC>V+}^L%YE@GxaUwL_Oys=XAW0K3xsoIvPlGsG z=RD>j2g|jcROG_g_dPDOAZo_Kw2?)2Hq%bf6ftGH!@caF&2Cgxp(S3}6os*_E>(X- z_%U$f2cKHpird~|caO1XFIgNAbFs7MhRL8F$dbhX=^FuUJA1;g?H~$L+kO}kfk-BV 
zo}{K)q-C0e-x$o=P}8NT1z!$|0Y!rOXXp&TBrUZ^TH2wqPbFNmpT{u(3m{UG8X2=v zfD6H1BL@ID0BB}ROD90vJdkpb7JUMBY4-|B*T}VJ_W|ryv8ODMeRW75pog-r?bhU) z8m@V*vaf?)sXsZ;(R%u!(o?)BK=KJ|{3bhZw9%&=e%os?)mQ2bD zKrFL6J0e0-Nw>TTB2?>^Z|}=|APM&*&|>5#j4&j9g5@++J#uXhd_(#WYv>gY0dNI~ zyA5#%J1E`4_2~JfPkiRuV-Gffs|xpo4#aV{a8O$kv?)Q)s##d@gTgsPbhpEoF^Uu1 zQ6t^QhTN^)$EVu4Fm|0VS+0%djjY8G)u3$RLUT|BXz%0;fF^hz!txbmTieWNCoQO| zd>+&U4FtV)zhUM%7PzS!w>!&caY3{!N4g*XKP($fgj45(b`~MJ8#+nSoo(#2Z+N^C zH^KYB<^&zyJwBF7wM^WomN`w1>7=i zxs)BSL=?Per|oek^{nZU7+ssf-$Iw6DsoPz<3&uUp%+ziybP;ari|6>1jcYZD?gZN z<=}L<7oIY5Uw=Y+=RSS z7wR)tSAnjN!bs4L&|}U#euOUJnADE2%Cz5!FZkbk6~AB9tJ9Jy!h|~`N-&8hVj8+E zZrivOvt1r{I4&df%U@X8Xo&pAC3|zjS{GAWF%P=H5GS;oVE}t35O|)@F0XH1789-) zr5=~_ML{m#i4#v4Vc>7!H6sf4`o=~3%F45w!|wX(rZC+&jJX|wDL}hR>njbR+M8=t zMbb^4?>-KR3A>Xy6j=HUSejtW64g{RT2^P&GA%1*#ZXLWCuw(f%%kzN5r@#}SY+Z~ zgAS%Lz!%uF09;xifVRrteHs!xRp8x7x&qmEPc(9G`|~l;*~r zJ^oP`RrBzsy7jn#5CJ$De-LC4VoAE!@q`j}M7|luPWs8mc^$Pi=>VYz5%YF4ob}MT z;f4S04%_$AAobd|?ZC`LK4+C#IKFwG1ICJ@9Em(ZxTOPdW{P11w-_9QLdxcNn@U@& zG=t$3D;gY*Y()KVwF)ZFncx?U?O^*e}}l_Kp({{(;3< z?AkUzj=N&Akj~%;GvB~!yu0k-mBBI3e+){3$B>4mBrlM z5@p%cGJRZ1OlK>&G52baHJgE-g_{W=+Szl8e-^Ej>HIwQmT_m@;oRBdPoer6}{sj$Yjnx1E literal 0 HcmV?d00001 diff --git a/pycls/datasets/__pycache__/loader.cpython-37.pyc b/pycls/datasets/__pycache__/loader.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..285adf4197d482f11c40db3202805720493e1b2f GIT binary patch literal 2056 zcmb_dUvC>l5Wm~|>$9CWX_A(ff@y(bAvv_-u|-W2w~a`hDt3i9Ax_uZb9~PI)9#we z<{}}&122{M06e7PiLbP;JoPK^#O&U=Y11O{z@Bb)c6MfOW_IRx_nJ*dfpO=jKfZT> z`%5eqLj&e6y!2NfSaB7sVicKtBFaegbjm_J@RK|a z2}qjg;A&{=-6b|#Kzi@PQGKc!aPw^g!N;KIg|RNF=-ZN zfeMQ-Y^12r3JbKCCd-nx3LTrztz)~eAv4ze=604snA3pKHXDa+e3q#8C?isK82u2Y za)*YjJHITF30(WekHh9_>3c*yG?M(6ES+iAGdiJva=;Y;2yX8Pab&> z9zPinagW?!841SRn6fCQTo<<75mD~RYT|X_gtxc9wbMJ`bz%F>{=wJ1g8_FQ@Akb% zy+`{8-*#%eAuJ(lUg{?yH)tM5jEGJmbwQSaxV2ZFajQ}%4dH}YmhPVl`3RPOf4Mo% 
zl5lfA!#JJM%}RAMp9L|kXeFbYRng}&zTyRoyY@=yML3MMfhcHIwNXp8H3u!B4P>hb z9tU0%9vhMCpc4qi#jt>pb^RGIOkwH-O*Bb$X2{tDYF$xIROoa-f#(L)n>Jq`5MCj=DetUv%kyv+c%Hi^8>+i#rLKSs%I(FP8&W{l;3%>oKDkmF1Q`jtZ~>#} zP*$zGaqgh%mq9czaxb3ST>M6eE_UCS@Y9v(BR%Qjv)__Tg?s5Ol?q48bhU(g*98&f nOvG)Mkn^tgux}?Bp2p!vsJgY>5pV literal 0 HcmV?d00001 diff --git a/pycls/datasets/__pycache__/paths.cpython-37.pyc b/pycls/datasets/__pycache__/paths.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e9103017520148771c74d6df4ca0d2f1a14f7abc GIT binary patch literal 815 zcmZWnJ&)5c7`9zENk4KU6%s>`U?@WaLSj159Rwtn6Dmv@u%g&)bC))i9ZpKbz`)O7 zxQdCt zie0u0?uiIo0@p(0pi4@ZU0mtqs?z{-@#|K)s3y ztaGOb7T9lxN>Rrt{0g9iwGWqST^Pu<53qmENH&mEk!NMuEG~L2%yP`na!lhtNxHks z1#|yM(h02R6YC2ht6T`nDx*WdEWfOZo%Lm*-L-)btD;mwq@hS&zY)p#hjWp8=#&S;VE6~VmBZWs literal 0 HcmV?d00001 diff --git a/pycls/datasets/__pycache__/transforms.cpython-37.pyc b/pycls/datasets/__pycache__/transforms.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b4898d3646c5ac135d3b79c8bfb5b7a6b2b14765 GIT binary patch literal 3422 zcmaJ@&u=706|SoOIX~@nW*yrx%izE&vt;eC2o$6x&azpNmB5M;(4x_Xp)=Lvnej~b zY;`%dhaMEgSq?}m?U56*l@k|E`AazVi6{r~jSCkp@V)AB+i{}wXsTZQ>VEaT?|oH2 ztJTU3&rd)4^tXShG4=&D&L0cLoA@U^6q2=>q>I{> z%hH#nPk7stFUqp4!185P)-YC*b-94LELY@`d;udB*^t-KQ@q|KO6_UVNte+U^`v%i5`3YzmGb33&r6Znx$+A-&WZ%e~Z2U>dZ=6YOS)AQ@MiC z)Vga*^a<*FY2No_@ewS1T})qlCsg#+q&W%I?QTDaqHxrV$dA!rYJ6$Fa{ui|&D^!t zrsDW`OJolG-3>D*=?9arWoKeA&g^j*M46RnneoA2nes99zprog<8in(J(4myO15O6 zgCx{jlcVk^nV0jvWUBxn*_<3@C8#YDecM%yKM7qVX z*I0(YK}3_x!qS$dC`OqNyORv2VOApRU?6qh0Ff6ZGl#y{!SD)N2`yt{iK}ZgW&!LX z>a;Hxf3vUO!GQmK&rPbDeatlfl5DCgJ~S;VASl529q3^ST)~%bibej@L7UiZ3~^o+By3l9uT5_ zFG)g0WPYGjtQ4}1WiD7F<8kKgM+0KiIcj{8IkA$Vx(e08g%cfe;s)N5R8cTd<#q1! 
z8)BWSRkRDxh$aPSe?r56W~&Az48inF%wQpPkj78NDtjzu*38yoXr0&~j+OEib}D90 z(PPhhT)=13n$*FMzsO*@_!-N9=%wDpdvo&biR6#k`Ix63Nasv93V3rUnq3vop}mq~ zhEHk%8#tq!+iAImA&N+0&{Wr9Wgfj}5NXvwtL5h`w26UcZN~c}ahRvoIzujXByGA`Xa( z-4n1qO7a;b;KbLZVfhqaLc-rDuur;>H_G+PT;4e&Jwa~Q0B5Ks@yO_3t70I}Ec7Dn z1KrP@-YAZhsREr@-4E}8*`dk$>cgF1J$SeCi-$W8?!BA2L86Z)VNQMzs{;MQI|$@D zK9tAzTVxTd+fKg|g$HftfU2tza>(QGN0^sTzL8!uxy{SSfOWCPm$}+RZxJ02jUqaK zK;x^?$+4tZtS|{IbByP`h}flGa6Ps0V}|j>ePiJ`O3x9)Lgm_%0I+52- zod%n^&-vjk4cgL){G7kPKJ#)Bf$UhP5DU!l=dxl;2O?9_j&`+2G);Ywx|EY0bT3~W z$wP#ZO8$mZCS551pVLZOMhcZ@6<9VB>}Fa{9aB?VsL!MqEt)lD>A};Ak+-UuCDh-) zH0q>fGg?CstN2w+$ZKgWEt7RA(WO$V#Ub< z{8*%Pd~)*<)7Mh2moSSRi(|SCm8M$-O&&(wa6-q&ZKTjB{6@a)B06~fy*)SnwY~dm z^Ze($=O5eo#f7WyVq)usoL`jh7gDO0sY#r}?IzxjMXa2Zx;jGC&onpV>5=@lLy<1GO9K4z=6W zAen?+-BBb%ZTG;ix9#?G3IAroUz;mBX_7PxehEJkl5o-v->7;Ws=kI(w9FUz4g5Cv zI?`d0tJg8Bdrp6%enJ0_&`6Q@pjDMF40Np;f)?mA7b~2jYzXB?3B|x^$))qmYS26= z`bAs1=*Jm*xT&EgL$|obuFr7FrY^3m4#?`RuvKUc_1tAo9?!3>{w12+8-<5Ep`MfE zo%{Ei!+|D!`MO)HT7&y%bbz*at|C7%HUe-sNX%%iKi&l^GB>{s<6gTzCYhgvfmVh& z=0%~=m3Msp+VW{w$bTWa?MMNnRqCx0~9?Q4kP2N*04UmHQP z`7PXdamCrrohI>z?{p!+)$0eS)E%;Gnb+yaxZCL%(i(ndPP9Ls9OYXfMa%CGbXhWI yxuI!JSPHoL#3j#+gy#3vySVJ`kHR;oE1@j0g^$~TZ~4COmMi6|U-!%9>;DI*a|6`? literal 0 HcmV?d00001 diff --git a/pycls/datasets/cifar10.py b/pycls/datasets/cifar10.py new file mode 100644 index 0000000..059b2e2 --- /dev/null +++ b/pycls/datasets/cifar10.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +"""CIFAR10 dataset.""" + +import os +import pickle + +import numpy as np +import pycls.datasets.transforms as transforms +import pycls.utils.logging as lu +import torch +import torch.utils.data +from pycls.core.config import cfg + + +logger = lu.get_logger(__name__) + +# Per-channel mean and SD values in BGR order +_MEAN = [125.3, 123.0, 113.9] +_SD = [63.0, 62.1, 66.7] + + +class Cifar10(torch.utils.data.Dataset): + """CIFAR-10 dataset.""" + + def __init__(self, data_path, split): + assert os.path.exists(data_path), "Data path '{}' not found".format(data_path) + assert split in ["train", "test"], "Split '{}' not supported for cifar".format( + split + ) + logger.info("Constructing CIFAR-10 {}...".format(split)) + self._data_path = data_path + self._split = split + # Data format: + # self._inputs - (split_size, 3, im_size, im_size) ndarray + # self._labels - split_size list + self._inputs, self._labels = self._load_data() + + def _load_batch(self, batch_path): + with open(batch_path, "rb") as f: + d = pickle.load(f, encoding="bytes") + return d[b"data"], d[b"labels"] + + def _load_data(self): + """Loads data in memory.""" + logger.info("{} data path: {}".format(self._split, self._data_path)) + # Compute data batch names + if self._split == "train": + batch_names = ["data_batch_{}".format(i) for i in range(1, 6)] + else: + batch_names = ["test_batch"] + # Load data batches + inputs, labels = [], [] + for batch_name in batch_names: + batch_path = os.path.join(self._data_path, batch_name) + inputs_batch, labels_batch = self._load_batch(batch_path) + inputs.append(inputs_batch) + labels += labels_batch + # Combine and reshape the inputs + inputs = np.vstack(inputs).astype(np.float32) + inputs = inputs.reshape((-1, 3, cfg.TRAIN.IM_SIZE, cfg.TRAIN.IM_SIZE)) + return inputs, labels + + def _prepare_im(self, im): + """Prepares the image for network input.""" + im = transforms.color_norm(im, _MEAN, _SD) + if self._split == "train": + im = transforms.horizontal_flip(im=im, 
p=0.5) + im = transforms.random_crop(im=im, size=cfg.TRAIN.IM_SIZE, pad_size=4) + return im + + def __getitem__(self, index): + im, label = self._inputs[index, ...].copy(), self._labels[index] + im = self._prepare_im(im) + return im, label + + def __len__(self): + return self._inputs.shape[0] diff --git a/pycls/datasets/imagenet.py b/pycls/datasets/imagenet.py new file mode 100644 index 0000000..b2c68fd --- /dev/null +++ b/pycls/datasets/imagenet.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""ImageNet dataset.""" + +import os +import re + +import cv2 +import numpy as np +import pycls.datasets.transforms as transforms +import pycls.utils.logging as lu +import torch +import torch.utils.data +from pycls.core.config import cfg + + +logger = lu.get_logger(__name__) + +# Per-channel mean and SD values in BGR order +_MEAN = [0.406, 0.456, 0.485] +_SD = [0.225, 0.224, 0.229] + +# Eig vals and vecs of the cov mat +_EIG_VALS = np.array([[0.2175, 0.0188, 0.0045]]) +_EIG_VECS = np.array( + [[-0.5675, 0.7192, 0.4009], [-0.5808, -0.0045, -0.8140], [-0.5836, -0.6948, 0.4203]] +) + + +class ImageNet(torch.utils.data.Dataset): + """ImageNet dataset.""" + + def __init__(self, data_path, split): + assert os.path.exists(data_path), "Data path '{}' not found".format(data_path) + assert split in [ + "train", + "val", + ], "Split '{}' not supported for ImageNet".format(split) + logger.info("Constructing ImageNet {}...".format(split)) + self._data_path = data_path + self._split = split + self._construct_imdb() + + def _construct_imdb(self): + """Constructs the imdb.""" + # Compile the split data path + split_path = os.path.join(self._data_path, self._split) + logger.info("{} data path: {}".format(self._split, split_path)) + # Images are stored per class in subdirs (format: n) + self._class_ids = sorted( + 
f for f in os.listdir(split_path) if re.match(r"^n[0-9]+$", f) + ) + # Map ImageNet class ids to contiguous ids + self._class_id_cont_id = {v: i for i, v in enumerate(self._class_ids)} + # Construct the image db + self._imdb = [] + for class_id in self._class_ids: + cont_id = self._class_id_cont_id[class_id] + im_dir = os.path.join(split_path, class_id) + for im_name in os.listdir(im_dir): + self._imdb.append( + {"im_path": os.path.join(im_dir, im_name), "class": cont_id} + ) + logger.info("Number of images: {}".format(len(self._imdb))) + logger.info("Number of classes: {}".format(len(self._class_ids))) + + def _prepare_im(self, im): + """Prepares the image for network input.""" + # Train and test setups differ + if self._split == "train": + # Scale and aspect ratio + im = transforms.random_sized_crop( + im=im, size=cfg.TRAIN.IM_SIZE, area_frac=0.08 + ) + # Horizontal flip + im = transforms.horizontal_flip(im=im, p=0.5, order="HWC") + else: + # Scale and center crop + im = transforms.scale(cfg.TEST.IM_SIZE, im) + im = transforms.center_crop(cfg.TRAIN.IM_SIZE, im) + # HWC -> CHW + im = im.transpose([2, 0, 1]) + # [0, 255] -> [0, 1] + im = im / 255.0 + # PCA jitter + if self._split == "train": + im = transforms.lighting(im, 0.1, _EIG_VALS, _EIG_VECS) + # Color normalization + im = transforms.color_norm(im, _MEAN, _SD) + return im + + def __getitem__(self, index): + # Load the image + im = cv2.imread(self._imdb[index]["im_path"]) + im = im.astype(np.float32, copy=False) + # Prepare the image for training / testing + im = self._prepare_im(im) + # Retrieve the label + label = self._imdb[index]["class"] + return im, label + + def __len__(self): + return len(self._imdb) diff --git a/pycls/datasets/loader.py b/pycls/datasets/loader.py new file mode 100644 index 0000000..49ea817 --- /dev/null +++ b/pycls/datasets/loader.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""Data loader.""" + +import pycls.datasets.paths as dp +import torch +from pycls.core.config import cfg +from pycls.datasets.cifar10 import Cifar10 +from pycls.datasets.imagenet import ImageNet +from torch.utils.data.distributed import DistributedSampler +from torch.utils.data.sampler import RandomSampler + + +# Supported datasets +_DATASET_CATALOG = {"cifar10": Cifar10, "imagenet": ImageNet} + + +def _construct_loader(dataset_name, split, batch_size, shuffle, drop_last): + """Constructs the data loader for the given dataset.""" + assert dataset_name in _DATASET_CATALOG.keys(), "Dataset '{}' not supported".format( + dataset_name + ) + assert dp.has_data_path(dataset_name), "Dataset '{}' has no data path".format( + dataset_name + ) + # Retrieve the data path for the dataset + data_path = dp.get_data_path(dataset_name) + # Construct the dataset + dataset = _DATASET_CATALOG[dataset_name](data_path, split) + # Create a sampler for multi-process training + sampler = DistributedSampler(dataset) if cfg.NUM_GPUS > 1 else None + # Create a loader + loader = torch.utils.data.DataLoader( + dataset, + batch_size=batch_size, + shuffle=(False if sampler else shuffle), + sampler=sampler, + num_workers=cfg.DATA_LOADER.NUM_WORKERS, + pin_memory=cfg.DATA_LOADER.PIN_MEMORY, + drop_last=drop_last, + ) + return loader + + +def construct_train_loader(): + """Train loader wrapper.""" + return _construct_loader( + dataset_name=cfg.TRAIN.DATASET, + split=cfg.TRAIN.SPLIT, + batch_size=int(cfg.TRAIN.BATCH_SIZE / cfg.NUM_GPUS), + shuffle=True, + drop_last=True, + ) + + +def construct_test_loader(): + """Test loader wrapper.""" + return _construct_loader( + dataset_name=cfg.TEST.DATASET, + split=cfg.TEST.SPLIT, + batch_size=int(cfg.TEST.BATCH_SIZE / cfg.NUM_GPUS), + shuffle=False, + drop_last=False, + ) + + +def shuffle(loader, cur_epoch): + """"Shuffles the 
data.""" + assert isinstance( + loader.sampler, (RandomSampler, DistributedSampler) + ), "Sampler type '{}' not supported".format(type(loader.sampler)) + # RandomSampler handles shuffling automatically + if isinstance(loader.sampler, DistributedSampler): + # DistributedSampler shuffles data based on epoch + loader.sampler.set_epoch(cur_epoch) diff --git a/pycls/datasets/paths.py b/pycls/datasets/paths.py new file mode 100644 index 0000000..93eef0c --- /dev/null +++ b/pycls/datasets/paths.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""Dataset paths.""" + +import os + + +# Default data directory (/path/pycls/pycls/datasets/data) +_DEF_DATA_DIR = os.path.join(os.path.dirname(__file__), "data") + +# Data paths +_paths = { + "cifar10": _DEF_DATA_DIR + "/cifar10", + "imagenet": _DEF_DATA_DIR + "/imagenet", +} + + +def has_data_path(dataset_name): + """Determines if the dataset has a data path.""" + return dataset_name in _paths.keys() + + +def get_data_path(dataset_name): + """Retrieves data path for the dataset.""" + return _paths[dataset_name] + + +def register_path(name, path): + """Registers a dataset path dynamically.""" + _paths[name] = path diff --git a/pycls/datasets/transforms.py b/pycls/datasets/transforms.py new file mode 100644 index 0000000..f5f2092 --- /dev/null +++ b/pycls/datasets/transforms.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +"""Image transformations.""" + +import math + +import cv2 +import numpy as np + + +def color_norm(im, mean, std): + """Performs per-channel normalization (CHW format).""" + for i in range(im.shape[0]): + im[i] = im[i] - mean[i] + im[i] = im[i] / std[i] + return im + + +def zero_pad(im, pad_size): + """Performs zero padding (CHW format).""" + pad_width = ((0, 0), (pad_size, pad_size), (pad_size, pad_size)) + return np.pad(im, pad_width, mode="constant") + + +def horizontal_flip(im, p, order="CHW"): + """Performs horizontal flip (CHW or HWC format).""" + assert order in ["CHW", "HWC"] + if np.random.uniform() < p: + if order == "CHW": + im = im[:, :, ::-1] + else: + im = im[:, ::-1, :] + return im + + +def random_crop(im, size, pad_size=0): + """Performs random crop (CHW format).""" + if pad_size > 0: + im = zero_pad(im=im, pad_size=pad_size) + h, w = im.shape[1:] + y = np.random.randint(0, h - size) + x = np.random.randint(0, w - size) + im_crop = im[:, y : (y + size), x : (x + size)] + assert im_crop.shape[1:] == (size, size) + return im_crop + + +def scale(size, im): + """Performs scaling (HWC format).""" + h, w = im.shape[:2] + if (w <= h and w == size) or (h <= w and h == size): + return im + h_new, w_new = size, size + if w < h: + h_new = int(math.floor((float(h) / w) * size)) + else: + w_new = int(math.floor((float(w) / h) * size)) + im = cv2.resize(im, (w_new, h_new), interpolation=cv2.INTER_LINEAR) + return im.astype(np.float32) + + +def center_crop(size, im): + """Performs center cropping (HWC format).""" + h, w = im.shape[:2] + y = int(math.ceil((h - size) / 2)) + x = int(math.ceil((w - size) / 2)) + im_crop = im[y : (y + size), x : (x + size), :] + assert im_crop.shape[:2] == (size, size) + return im_crop + + +def random_sized_crop(im, size, area_frac=0.08, max_iter=10): + """Performs Inception-style cropping (HWC format).""" + h, w = im.shape[:2] + area = h * w + for _ in range(max_iter): + target_area = np.random.uniform(area_frac, 1.0) * area + 
aspect_ratio = np.random.uniform(3.0 / 4.0, 4.0 / 3.0) + w_crop = int(round(math.sqrt(float(target_area) * aspect_ratio))) + h_crop = int(round(math.sqrt(float(target_area) / aspect_ratio))) + if np.random.uniform() < 0.5: + w_crop, h_crop = h_crop, w_crop + if h_crop <= h and w_crop <= w: + y = 0 if h_crop == h else np.random.randint(0, h - h_crop) + x = 0 if w_crop == w else np.random.randint(0, w - w_crop) + im_crop = im[y : (y + h_crop), x : (x + w_crop), :] + assert im_crop.shape[:2] == (h_crop, w_crop) + im_crop = cv2.resize(im_crop, (size, size), interpolation=cv2.INTER_LINEAR) + return im_crop.astype(np.float32) + return center_crop(size, scale(size, im)) + + +def lighting(im, alpha_std, eig_val, eig_vec): + """Performs AlexNet-style PCA jitter (CHW format).""" + if alpha_std == 0: + return im + alpha = np.random.normal(0, alpha_std, size=(1, 3)) + rgb = np.sum( + eig_vec * np.repeat(alpha, 3, axis=0) * np.repeat(eig_val, 3, axis=0), axis=1 + ) + for i in range(im.shape[0]): + im[i] = im[i] + rgb[2 - i] + return im diff --git a/pycls/models/__init__.py b/pycls/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pycls/models/__pycache__/__init__.cpython-37.pyc b/pycls/models/__pycache__/__init__.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..58e45f14e8cdd8eef6fde32e51ea69cc4a2c1899 GIT binary patch literal 139 zcmZ?b<>g`k0^5egaUl9Jh=2h`Aj1KOi&=m~3PUi1CZpdjG%)HE!_;|g7%3B;ZK*7?SR6CGqpMjVG E01q@F0{{R3 literal 0 HcmV?d00001 diff --git a/pycls/models/__pycache__/anynet.cpython-37.pyc b/pycls/models/__pycache__/anynet.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bf4beebbffd2ea32a5f7a8c853aef7e1ff0def19 GIT binary patch literal 13481 zcmd5@+i%=fdgpC8oU87Zx`)=pxN}-TduwDAeU;W{qy`w1q&WmI-aPlE`|68h}xJpHFRaa}M zo0_Jm%BEg1)QFRcIc6mtbF9rwB_q%EMs_n-$*GEG?B~70fm$iLhMW3Asf@X%o5pec zBgM7c%omEA@iOZZ5B16bGV$NAtTT)7Pr+B%!TlNBbv*mgXfBq4rTP~@gURzzOL9lYuKq>A8?EXAR 
zpag25x$0l&dumtds{8tZvSIqK28OHcr@G34`cVB`?c&~?g3suk-@M}mqW-}1?ckng z`+>J+K>6N1Je1~_PT79)DD!JZuy%`q$`ewV~MfdB?&Y2~? zCfIFz_G_Q*y=FICf$eX#+bt1zZpjSucRZhuEbvufCce5}4)aU$MZAYAW5e6^!{TM% z_XOLsC`3zy=4wl9R)dljW@LN#nM!nuhWmf3h#chb)x`_UXd^SnvwYZ=(X~> zuz-=c!k0#ulf3M`Qu0I^{TCKV1_XnUUEwDjISTPPhd~ZPL@2|sK+#5z;n6aW@nc`=;G|u`#weGxN5r6* z0a*qrjg@-UFIiy+8K|# zKA(KMMiaq}NA`LFyZ;7ASJ{VwI8b3I)P3#{?E%DkpwV>f>&ZQuiLzjCAN?=3k|9px zJTxBEy=^qL-toj)FiM&@N4~mqc~>YSzk}O8bw^RN>Lm8gSkl_~iBg(Eqs@WP zvW^3UZo#fNVcv1>Z&e%dowVb)tr}!O35Zug!a^*dn%8I~LZTfgsp4fmL?x7?@)k~{ zz%13!@+rxyf!Et;PPzFPK?ar1%}BM=UE_-k)Gpb-?M-8?#{b*t<`*R-o8>kZ{P76Tc6rH7t1vwiW`qTt^5!dUBgv2EOD}%ijO9b zu+#9KG~zV$Qfp!2RgR`XP3`IjIy@+~R4aGBJ=6-@ZNbhDSYvx-^Sr&becrA%UEB8@ zVYi^7t)^c}MIfI=J7j;Ps4*nyfggyiT2M;M;TA99i7>V8w6=oK#I3sPh1$wysI5^s z@XBNaI2zF~aMZqsJ<`Y|YNS0_AO<4Y3& z)xg)gT41cF)=f#%Zc0Me!kr8_+4Y=jeo1)ePene{` z&B3*j5#}~L(exUQLIkS?sII_F@kw(^ zq;^t0rFLeA$E>FuJ)=XRaF?UTkL{eK**Q?IDxc=}Ao(s-c~8Bc{RE~6DyuTWv1<3~ z4Oe*0k{&hi75p$vHLAO?tWu5oxIIH2vWq zw10%%mrWq7XNSA;1YmzI8~N)1wwvxM>qRGmvsD(f{t2ALyS$L#_-&p=5Ek@j;#(k) z!?$r*j-e}lmp8sc@*W8hE51uY)e)CSJ|LkH6d#gYCb>dFq=>5^p|x6XRvW0P#6KgZOlFU+3f9jGKk8PfK5K!Cd9VI9||s7lCr!MSNnfFnUg0{*L{o zeeLy~(s}!ueJQ@xgYY-(_`f3s+_xCd>H68d0V^=z^9LNp1wO<;RG<>ji@1i3LV!EN z{{F;Jvhf7FK$J;m{Y(yDx@NO1VJrh!xaKEfIV0V##BgY|MQi=BAV!??bl41w4xGQ^ zZ`9ik-}o7Y5ut(Zv~%W=D25(BV$EqGe5y2~pAEfTMEs*}FgZaGxDFo%yDlT6C=^7H zD8s^sd_oV5{ZwGCr`Ii+;AG-JQHrRP4YO=sl&6Us_=JQ`sIEpbk@iYjvk!TXMk_QJ zT0Fgl`YC?n%hD0@X~unU5ho9I|EO`|H|fY^*J(OsM}I?hRM|%d4`jX;NYn&rGj&hwS%t(owc#rr2$@fXhB)34~L`E>`7-h?T4t~@Hbl{9uWFj*&+=axP z$(SSa_?SbAn1=g_fd_$jKlyMRc$k(Y?az>D)A10u=M6oyrNz$6OZT@tuj9RiwBW79 zomxGR0f+ti(qbtxZbP{=ttmh2Gr$`iVjO63PSOuBZsXwr(-MbzFM-R<*Y`#T7r*3p zdcj3*X|ZoOlAiNO4O`%&9Mu4Lh!c8b2m3kfhbZ94H3{ z(36b~3N(ayf1fOaBVz;^8AQ=QDHnm}6rYqCb9TvtT$@3?+K57gQFux()z>y#bvHCt z9d9Rbt`v*-JrZeF`?i(J^XJ&3YKoRed_E3LjjNpkWiuK|G(A!}EK^9JFnxs0AdIG& zI%1eKv5dR2%3k`bDmO z$o+bc8T7qq9CyF7^Uir2`TThsXF~}*8_MSR40=XGp{sX|ZmMe@m`5OS7e5++#PfUG 
zzhe`6C^^&X%0{n$eb^-R2SSE5J*gNdC`8TIy2iBfkV#s)o9?DgDSK9+A;DI=X(Y`F zNSJsV;HSFj$hkokZfjeW5earZfw@h?R z@sCo-YS)ZgnF%J*&dftpMx3emD@^YPTRJVjp^a$ijGKXHnQ=}?pl6>ka)@v*GY8a9 zhI59{TobLWwl8ys<3{Ks0HzsGx|wO6!p&62f%Nm!l}vNAvml#l8I=SFJYQKLl2 zaF@ErMm|NE;OUc(e`0F~l8@7)T6J7Dix~bJesI8MofPEOu=h3zPFx+$KngMgVReyJ z#;CfOiZc(>&AIuZ45TO~@RrUc#2;}6(ks|MCJpJ|@8u%@5jE?-zb8G~aAxvi7LAQN z7K`>TlvU57oo2Yz>&O+(K=ch-B3d5VdZ|>uo&+L_EFW70VDR~Lp)&);f5kqSHM~4Z z2*asEV%vG-E=9qRh$+F-rw)Ap!xI?iLyq+G*Aza}v>f$+7H5c}~YgiOHmNsZ!}>Wzhd$PgOP z?+1^qQA7!@Of)`3d!s_5%yJKlXB5W%2S16cHc{QbGmFt15YSOwe=}_OT94C(Wg(El$ zje67B4pW%!-@G#Gbk^~fg z#LreqMu6gMq8-V^a%Xl}NWW@Syut~`f6678yFhUEbYcIJZMD%-d z1<}_RxKM|&4y+?Zt`+=x085!sMTt;#!%T0rm>4cmjE34g#o3GFCX$}_VV3lQ2AP_; zz+Nzv-dPT?*5;v#B?%wzWx5$mrfA|+S8rlT$X+(Sexs{4v0Py<$3m&AW6mMtYO`2Q ziBet;Q|Q^Q$^JexS?T6-1_?bQS~n$8*U?u#+r#3n6ejW4yfDj5En43cEv3tk5`>^W z2$y772%%c9SSwhp$OShQEa>23u~7)SNV#y-6s@R@tK%rsbf)^!`G)WnrsU`yZqTb} z#-&3DWqOw4|1#8A-EqSm*p@R#lpJ_@2Q9Px9Q7l(|M~Ju7KW(!8Vv zNsHny<#7zUuHbBZeIj}%w0ts{TAw~pWBlgJf=a|CURVRXEhc%Pxqhf^WB2_~U)cm% z+xF!ou&{9-1xNlytdsBuVuPeXvPse;X_34`VuO@sqU9%bGPs&V#!k_qC-ZXohvmiP zFtfD0c+*+_;cYB!lBb`9*(*10E&L$nU0uAr{P9v~escXPkLji5JJ+u+E``>WTg%SP zyEkz5(Vbg&Z#$nXg{h@Q=N6V4<8UWT-Mn>m@kW>{-@WN9+_=27gde4i(oj9_u+`+?QXNt5}(kF~wAAE)-l_a^ox_onuy zp*k7YL~F*}G!0~s?H0Qe-O28Fx6qyHPNO()(MWbD_KNqrpC~~N_p^xhS+bAP;Y_*n z@}{s%Zoe(@{~ zk{%&$jQ9z@B7RE3)$xg?gvnmh0BB9uwddQ@oY&P~!JQHSokLuBCO1RxKM`EDadSxq@fLZzV5745wWc z)lI)*Nppb}-_nOw%(n0zzP1Db7wS#3;af}8gb5=5#*N6 zi8pWvr{sS%%x?vC{G$lf0GJ6=@-j>(*BVxUjtxH8^q?Cc;*a?bD`<(@z4>NSrpMt# z^kl6iym`oUwZ0bF;4IhiN17HbYj}tsOYMsE%DgF;Y)e&&77l;R2M9|!EhT3{WMxs- zQ)DtI1K&skrP=SxSe_9Iq4j<=03T2Ya(30g0umTXHLquNt6&u-$0pBA&P~2Nd1CUF N$#au4IL=I-{67!_PAC8X literal 0 HcmV?d00001 diff --git a/pycls/models/__pycache__/effnet.cpython-37.pyc b/pycls/models/__pycache__/effnet.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3f91695257e1a8847698223cb281017b311e83f7 GIT binary patch literal 7300 
zcmcIp%WoUU8Q&Lomk+&Y`4Kk{Ht54lYdLk$0!^KVEHSa0n08I34u~XMio232lS_Jb zWy#E{2&w=*1t<#i(4Hg|Jro6c=&`2){TF&D=Gser@2M!z{(iGdkyhlyFx(;L+u3hs z=kdMgx|f`;Y|o?dMy0g)@7Fez8^5wZPmj1v3YFYxZr;xBT3vnx6~un{#_+YaSTecYtv+ z%mOg3p9dzNVamW1{30;L408gQl0O5?OomwmX4anrW-h~=1ZLh}0A?Y>obru^RxOVf zYwPRXPB#eSdJtFo13&0ROAl~ftzOlo*;(J{@MT=gtP#D8MxeEHkj3!zmicpS9mMg? zPu-T49;=m057V-Lu31{k!E&y2u-uiP?T^Y2t$Q1&r4H%M!ekPsu7F(SdW2`9XtxIbg zy)w7$4Yp!wgdLnT?|GeGJBmE-_u9WNUbsEz2Ny>7eLuV(UGUp+I|||p!~30Hl);oz z7lQTmFo>6i_XRh68Z}a<&}a40e0J8Q@m?M}po_6996thxwN0Hzg3u@``qpQ5qIm}J zW@5zFW^U6~(D8Ghndouj{2mD56=H3_=-US%k5|Honax?>xnl_*U(RLk?q}?s2WEjp z^YeZ|%~4jQE7l+A*nuE;3lu8e2}Bt5yr?@0qz%G#{Xjayw(obt4QZ`)+fmh!W-yGT z+Xt1xc&lGE#4P6^*g#bUMAe7QsxHcSt)2j#Lus!J!h08eX@MvAWbtx4?%b{qME{~M z42+V-<+?O$HyYBscKw=kILccCryASc=(a4V_eixDmi_iH?%oTQ?`_-|40@QVPEO$;BfesLdg~M2)a{ymvxRk-NK~7sd zkKv=LlD|s31A%MD-9cDc-wN?E2w$!2R9>n?-HraB>z~`HcBn=yd~!|Jh#K&)4I5i6 zQ_x(fRn+NmG*W(iLUm+yOq!SPp+-uk^wF%^(!{h(iyEg~;aKWpqQgW$pbqrK%u*UM z7Y{_|wsf+6XY3Z=#_$Q1Q}U7#-{)M}x-;pzV+K27i631cpi2d{e3&lmYj%OSx z`wD7Ay;6)b6&*TwJuD<2%ai&Wqfr}ut8sTL2u8vAcIcn4?R2_v%ENPwTD7v)8+7he zwl7rDb9KfG@<}@K8F8x*^DkA^1gbgZfl^+67lS8~Ts0@Ag?=Xrj_2*0T<6er6dSb( zYp1tFyR^cgs{mvyEd;DPJjK2V3lFOSF9JUd4`Ri+J$e=kR)6eOER3@cv;zy@I7$d} z^=a#^On4KRIn1A0ZdaUQyF#FBN1+jbpkdr?_XItj;+k|CsS-$Q-3xZ6k1Z&-6Cp_D zicIGjQX>K*YU%Sj>u82&how(__Cq$5AF0z*;c?Qj6$K5Z#9L5a)mFbuu&;9inP3;N z>{zPZM@u!g@TH*!>`3z0!BpQSpaF1-)muN4)T17RH&Bo@d%`=9t`J-X-oAg zt&j%$d)?3*iovEVj$0h;B0BM+JKZ578L~eO(tg0!B?5I$Dw~q37J_=L{7942!oY|w zOEYY}WVnY>oTVF+pK&UU>@Y$_dB$jAEQy~tOr$0^reyI`G!T}7H=8h-Fi>fTM?+;~ zVIKMr=4lAe&Oiz*v}gC{a5+#IgorTm ziE&!PlQ%n~zJaZ%Qk@seXb75H!Ci{C3Em>85nLhoA;DFGcL1ati#Bdh;YP~p7Zq-G zC5xtwa7&gLNYhZm&ctJT&k&+zu6suVM`bd9iq446L_?Tn>!(2Gf-!H{Mj3YBMNE?s z`DvM9+NK8AoHD~-6U25<4+|N{@}y?iMbz7f+Z(~?1X`2JbVS@zhl{iDNt?yxR22Hf zwl;ANoX=U?8~Aq0+PaUS5mNV$H?_awJ`PRo!uSvqV>{kBV{KN7PDBOTX~tUO?CGo8 zhgWvp#HCRZXA=u%(b&yr?=L01q1d|xh_ddRxaYV&#PdLp&7{D4Xp*N(*|Ql!-7VZo z9ZPn9QTA+Pu6?}Ad%b9f8-Xm~3Yy&@qzw`2w`0Z2YVk3sAbx~>OT8=IH7~o<+G`%u 
z0LKL;zvf99wQ9z!F|W_V3Y2ukUB&0)!V_|pRS}8t;?5ZyTsuIJ^I?fPndtCJIuDxY zX~sgAZs&Hl=ZhemRv(#DKKtmt7>NH4RfWqM@(b7zGYsjDo|ei`z<$Y9rc|aRi0z;r z7M_&lNma&f#6kaRee}W+g$eo%nKA$Ai3WvWAfM|Q1$5AmjLW)P_aqNflILeVbb-`K08w9+(J%%@j_^bF_`DS&& zf24ypGUwdNDc3s|I-VfSdK_NVCNcV-(5ZxBobVVejHz+FD8Gsxfon`jzblCCpdJ<;k>!b{A5stLKH@G^Msew( z1Olxt7mI!aK-tw)&@%^xJEY!nyVP1_ zLq{((v)G}6Ov%T)m2OlC2XO`I+2MfqlhO0dAc_%NJfgpvXW}vFd)w0RBWY|$X>2JX z!9WkuB9M20WZKx`{RnKXl#Yu;SGT1UEXxw+h^HRbGb6yZ{YR838Yfe=kjU^Q!v zW_o}55bqQ7AXJ=VH=4C;-Ysdb)^0SfG^Bm&>MEaw+WR-s0H`6YM$Nm4OKH z4z8&X8~diuykmiZs;zVBapwwTsGz+5r4NkV(g(TSncdmlx!w8Q1%|7X zxuoP<%E#xEVltO3B=hj`v&qb!Loc7q?iTO9eoKoB=yj9)9y8g+EDJGu4tLq~&JW3W zwKDA)&VRhC4CBa*(4t|imZa6|MzJ(U-J!I5gN=r?x6v=!LZuJAv zjswAHP%&&#v+1@5~AW~!PysaEg3N$Q@s3tSy?+x#vW&8 z|H*xEgISm{o9*RknOW-GUr;Az_M|&6h^?Mh2}F59{nfIx6?gG(6q(Ea9&+kcI^*}b z=|QkN(gaZ2C0@Z?xWX{YxfF)VLCS@6cxNCGln=u7ZhEzmg^eKgQUOZSKHL1*zNT{3 zN>0>`R$Ww8z?ljVy@J~F6a^KJnXZ-YrIfs{)00GJ0W92d%DCl}^paUH%P*7*kX(_vS+LW_b+?UUgaJw!c_`A6Mnd z)XY#UiDgk#DgyMOK;QcyqEG!3{R#a6^O`6Bg}k)q%&sU!77{dFV$WQ6=FB&*2p(AeiABj4+xIpX$==Tht&%-}W7XBW691B=EbS>SbP$T_#lbA zgRytqYEorFx3YOQOc}IDSCQj?8oUadefiP)>#QHGPsWU;W3kRc8Hz}*55}EDl(IO; zI*)p3BwK^AT09MgOLb&BEaF+qa^EuiFgc^i!W|g z4cF1CtzK2_lzv4qW%lE=X>ve@p-4~Jqu?Nu*R{Q075jDB2Ug z1AchhJZj}FFTI0HY2Kl0r1T;eoK;b3QVNOr711I3ITd35i=v__RN^txs}@*57o&h zn-PFv1c(+006^|2<3}FQMih2ldqRf2$m{Z~?+qgE9Y7Qwc_(2qj6{ngH0D8p*udjy zeB6hM4#K;iS%{CRIkgZWN9N#OZsyjJdBs^e0-AQ=syck>)6qZ??o&RvDMWDb3#5G5 zi)5gc6ai`(Xbr_T!Uch;HARh!0KQi;DZw|Aatz%3o01tM>fBY z5q@Mr3307Ul3TZNjEWDW@h5^YoSQR9Zfk&sI}oqXW5Nqji|KBLGpwA>bbadN*tT<; zJ2Rl*X{DU)KA{b6%SnjNXB)I;~D?dF9-d3xJOL%*bt&&|d+Z zRPt(G1FQh0H_b_FGfE>K%1B%R!$=hHD6{z>zkt> zMDY+spJN8#$TEHfh^v!9PNzpgd*64{VZX$SUxW3E`gOlA^~!?=MKb>o78A(W2K|7n zl6z#C+y=M5LwF1NIyi9gV6fvOJP?uL6S!Z&y~;N~H@cA3ftjkl^*P|dx51{Catgam z8m0a8CE_ycVuHdKmlgj8fw!JRdI;eF66g*jga=6H)S7dRDJ#G>kp2PF*3>Sq6IHk* z5KW;@d6~HEsmL0TVkLJ@P5q6-imS!tjH=$n<$uxTLQ0u2t?7DQ%Z8SV5HLuuuu_hr 
zONYzM{<}MM<%(R*fu;{{b&cHpD}n1I5KzL|}V)y0q;8l~B2|c*xP@U&C;FAm5e6O%}D+uB=mO=2feEB~s#vh_~ zl_$IV!S>6oQY-!ubl#-blS{q6y)B9@oxS}RpFV4U(~iHm+_CAZngA*c5LC+1)sb>_ z7Ag9ufJ*>SBa$aawg0SCtIG}kl;v_}dp=&KDyUp<6(TiMUr&-Z_+*^%Q# zLth7!z1I$Qca#Oi3su|OZ*K?tzj@wPpg&Y!Dfh|N?$citefIY5v-Xx+*m=1fJl)#Z z+iUMBTMU5$_&RD+z3^dk0hLO+zIA|vZfm3$R@b`!1h@YL1)hR`jN+#tlyfu+c~8LQ zrxpaABorbj14!SIK7~2>t-y?9P)hEbi*%K&(q+0z*Gz}tk6!uJ>163zd^H`^GX-W0 z0_6q)pnRBMy&eScCm|_EDnY=q4(yJ5sz#|zCrXkFjWEN&y?P4@uC+rO{1{$=gjgjO z)mDKi??JnHi$8}#p)CgEf^%=_zbvhxjNwlgP;M_yfp&FMRW8~z8LE|HVkhHK3rMsZ z_X^aN3zUD`!gLIGCXT>WE~ZOd0(`TL9ZuQ6RH!{)1Ok$#R`UuR=66z6eN()u#bj_+=tyODo{l5o^LP7ul literal 0 HcmV?d00001 diff --git a/pycls/models/__pycache__/resnet.cpython-37.pyc b/pycls/models/__pycache__/resnet.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f90ccf07559970934a722a186d8e5bf554e9b3f9 GIT binary patch literal 9154 zcmcIqO>7%UcJ4nmn-oP+lx)c!$Kh<8u{k>{kL}srWHP(4Y-?r=*c?ZaGZSKWdnr~+ zB1JW|)h$_~Ng!DduqOjKAZpKM}PVEKfC^pVf-iE3@-v?=B{NK zrm<_+9aB>>3B|2v6N=4Tu3Mjpj;FSpe!VmMPaQO)y3ZIY7o_e8(&o8%5GN#ooMM43pZ+2Q`ybUt*Ucs z2I`f_myv-)kw=KD|wZfU3Vm)ihDjWuedr_fewvP{l@U3iH?2 z!0a1+^Vm8ucHHP%Yzyny=^H2J3-gy|AI~lsn7Z9tS`A{^3J!v(5^o2UST@3FvnzKS zajP3vHuu9OH#UwcVPiK~s#ztN53P7nsooe{P7?)qzny8e7{y%Kig zO0>VX*OhS~su@+hM}wxXsJVOTm?NUI6C6dVxEw`+q*<*<*_F!G)5XjjnqocAGxIU^ z=r@L(155tr_Ra0?Zg8`ABt&==-4u_9PRbbRjh`uGvy8SC-6AB>gFTJ`$LJYyp{0iGv zcd-Yq%0hfT4H}|XU^$8_+qhY34u_-P3<~{5@1vqE?i(*G+Fa9!?PEunC!qM5*lxSn zT#G*T(gr0fXq%l{)jB1Kk+hC=h^4d9YD85}Wv~f>lovr(p6|E9R_y!A3PUx!EE;=p z>mXP@*m~0Kc5VyhH4e7?J+zhks1*hcsjSUr)mBavbT*aq46ozWCWg=XezVhvBH!l? 
zkn{BI%cy`9Mno!Ln8d?gDOvi`@Y_uiXu9Z^R(&5OdKXpSIEFxI-RUrn{ zsUYx3-^W{82&}T{LTGynXQ65&xx<~HUlJRZ++U&*8*PGI$ZH=kENtPtbfKFT=yu;owa-ihfQ^I{onqA;AEE)qNjK-LL?HEOo?Hlc)Kf`SHYTDS4{(oF` z)YMKO!=U4{IuNF)>|R5NR=6b>aR)H9gS|+3yTHvb-rt2zXItT3r_l`7t0nmsz2V;P zh6jKJv!SN$HR9%W4R+(UkfgcFx>r;7%9Ax^KmP2oavJ_dsGQZ{(Nhps9;?FY%A=?L z!`hQa%lB85+wf)3*;nQUfQfo~o2{_XK?~Yj!En5J@+P%tvJ)CBmq7anB`TsaEXSO; zE}La*(Y$Q-E=_J%Di|r$*m#KwZ`f5d4i^b*PK*b}PYVMuy$`E7Fn^K%8~_BBG0Cud z&Fxl4$RMoRdWzTaqRMm{N5E5-p^+ew%72P7j(is<<~4*AODjaxlU2MS3Hc-;ktE}; z5rme@Fe1N4t@L9<31;vhT_8wc;U(}WX1xMyNE6ibqIJ{7hGiT+&a+twh>XC)lf?1f zjghr}1wn2d-eLrKuY%SY0LkK2`h0W^)mfVc_5;reDEdx6bCOBG0*rUGrqgFf%otbk zF)6u4orniR#*B7^koeC~B34>b>no!*q#nmew}-$-dYwaIQTZ=4NH5zKfX^1@rWLxR z z^zdFnv;7&i_2-RJdS#T^WE4UVc0>{cD)ShGLz6R^6FH#AoIlU#k=HWGMiuBG?_l9t z5kH`oMhr<%DSv)cf$1yLCpy%qjBK7UykNg-P9)}+J z2?&$(B;aYGj04YG=gn$}KHn5|NH1&jK~fZUSsZVkL81vlda>5wE}~5xM;%f4DyKz< zdi5+jFCLJ1T6u8E?sdA&o!%l^Edj5qvO)JBRt_uQt$cjru$uTT1UGciFU^jmGkOQr zS3ytTJ#k+FA6noT_+-zMC6!CV5cviik~EPNhbWfh1&zRG$SwM%nUiowq-apw-_o&Q zb#{MCb(yDu{Agdu#2;efTX971`_ zp`j7ZsXWcjXNi)l_>3|)H8&V_m<4x1X6R84c`?;<cojG3>*R$q4+si*#oyr-8^`ckPB<*oq0xddFvB8N&!+XnqTxO5% zjV=!9VR!ghc5xif^*l-RCQ5n@73lvRBQeaZ#f`0?SK^;ace8@U)sQRr*5T#PG4D6R ziz!CJbiRA@*BIt3tnkc7urgGo0y?I!(#Fl9h!N1eFya1wF<`%A$C8)(c8JJh;E^AK z`*-_h2-kR!qY$qRPX{h?KF;)W@Y=k7zE29~4yGVtuCR}36l($Lr~cH5Wg6s4v!XdV zQs?NhXKy?oP)-7gqpg6`E9@M|hnSPft*akP|ku?v!0Vdl<4_{bdzrW4N63eGOVIiGD}tgI?CFaF!fdKw9%Og zYv;aW>VR{sZl3^d`Vv;DM|FKAc=ZL%V@E5MdY)M z8}+XWC6au^$fV~v9kvz+yS*8z$29FsMvQctz8Dcqks)kzJl(c9{Q#ev(+`;#kv1}A zSxP?$>VbYcuDlDm^vD#DG=>G-<%V}Gmv*gp6JbCrqcn$8r@urp;QeN~2tb&qJuW zF-mI}sDqs3(AP^{*7LRgH^dSxs5)O!FpCxgF)?$b{KL(n-&Fa@ryThdXA%uxnXEWY zk`-)03qv1+hGxb7Iw8aKxQ_v4I(V&*p^ejo)6W(v3B*$lO`0iz3H>7q%keNxz<#6V zO{&9s8_tMZ%A+qPd4zNh!nfBYblhP==PZxFAsu(neH4^cXGRQ_oAij|r*nL^*YZ>49ybn?)UQKn-&aI%lEDGK^G_&=7o+nq zCA0SyLlcSLh{Z#>{zh4tSMc(+QP4lrw6ufCiDydX@~#iTGWU9qgycrj+TQ4qkW&8x zeTj|=IQ+?bo|ltEZ=$3Z?JZs36f0dU15QY9TL0?fr=|3C$iyd7pghEl20$2aH@d3N 
zgfvB;3!$B9BT0a#AK@u^#!N|`x-k+D08@{_l%8oY{W-xW(LxWce3I#%ijl@RF_Y*i zgCmG~mf`Kz`^p>ce(xPdS>zoPcdCL@r{{x8>rC!95WPQK2L`{foBTP}SS@O|p@FvC zpsGX_ISGxaSR|P%_c^0&`w&Ms;TS3qB5%*v5Br2oA zk%nE;1xFis%hLx2+F9grOp$+yqd$L&DO1QI4Jidsrf^z1 zqD;p{!MW!4jF{$eN(@ZJn%u@LC50QD2I_tAm`*w?BG`+!E75j$zaz*h!Y?Mn2(7{q zV&w-RV({B+A$jY23-^a1yPgMuhk2`{dJ|R;ap* zYWTu;s6Cq)-^ZUlSb3x}4_2P6f6Cjh+)j53UZrwcAtpz{pzKx{vufg42*!~;8f8Vd zP;R5#(Z^y_wWp8$`;V5_)>hV(Lq~JNP>6lY3D1m#>EBVbL|AliG;htrX)N zMo7qfDLL@<6#mJilc8#2J_aYhVu? zex4oZcf5XP-Wa&?RFa7#87ahMgm9(~-^WF6G0SGG2r8cw!IO7IZr(_r%;1TKBuox@ zSyxPrJt^Ty9+H$pfbELuu_v=}84^Wvn-fKrD4kQBsr9a%BMfwi4tc=;!{>wd00sKo zf^sCojizIOQ=pA@E|B{@M?6L@e~WaDg||ge$j9w1;;jzjV!4xg-k_7rCuGoL?vu>Pt4WMx%T 0.0: + self.dropout = nn.Dropout(p=cfg.EN.DROPOUT_RATIO) + # FC + self.fc = nn.Linear(w_out, nc, bias=True) + + def forward(self, x): + x = self.conv_swish(self.conv_bn(self.conv(x))) + x = self.avg_pool(x) + x = x.view(x.size(0), -1) + x = self.dropout(x) if hasattr(self, "dropout") else x + x = self.fc(x) + return x + + +class Swish(nn.Module): + """Swish activation function: x * sigmoid(x)""" + + def __init__(self): + super(Swish, self).__init__() + + def forward(self, x): + return x * torch.sigmoid(x) + + +class SE(nn.Module): + """Squeeze-and-Excitation (SE) block w/ Swish.""" + + def __init__(self, w_in, w_se): + super(SE, self).__init__() + self._construct(w_in, w_se) + + def _construct(self, w_in, w_se): + # AvgPool + self.avg_pool = nn.AdaptiveAvgPool2d((1, 1)) + # FC, Swish, FC, Sigmoid + self.f_ex = nn.Sequential( + nn.Conv2d(w_in, w_se, kernel_size=1, bias=True), + Swish(), + nn.Conv2d(w_se, w_in, kernel_size=1, bias=True), + nn.Sigmoid(), + ) + + def forward(self, x): + return x * self.f_ex(self.avg_pool(x)) + + +class MBConv(nn.Module): + """Mobile inverted bottleneck block w/ SE (MBConv).""" + + def __init__(self, w_in, exp_r, kernel, stride, se_r, w_out): + super(MBConv, self).__init__() + self._construct(w_in, exp_r, kernel, stride, se_r, w_out) + + def 
_construct(self, w_in, exp_r, kernel, stride, se_r, w_out): + # Expansion ratio is wrt the input width + self.exp = None + w_exp = int(w_in * exp_r) + # Include exp ops only if the exp ratio is different from 1 + if w_exp != w_in: + # 1x1, BN, Swish + self.exp = nn.Conv2d( + w_in, w_exp, kernel_size=1, stride=1, padding=0, bias=False + ) + self.exp_bn = nn.BatchNorm2d(w_exp, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + self.exp_swish = Swish() + # 3x3 dwise, BN, Swish + self.dwise = nn.Conv2d( + w_exp, + w_exp, + kernel_size=kernel, + stride=stride, + groups=w_exp, + bias=False, + # Hacky padding to preserve res (supports only 3x3 and 5x5) + padding=(1 if kernel == 3 else 2), + ) + self.dwise_bn = nn.BatchNorm2d(w_exp, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + self.dwise_swish = Swish() + # Squeeze-and-Excitation (SE) + w_se = int(w_in * se_r) + self.se = SE(w_exp, w_se) + # 1x1, BN + self.lin_proj = nn.Conv2d( + w_exp, w_out, kernel_size=1, stride=1, padding=0, bias=False + ) + self.lin_proj_bn = nn.BatchNorm2d(w_out, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + # Skip connection if in and out shapes are the same (MN-V2 style) + self.has_skip = (stride == 1) and (w_in == w_out) + + def forward(self, x): + f_x = x + # Expansion + if self.exp: + f_x = self.exp_swish(self.exp_bn(self.exp(f_x))) + # Depthwise + f_x = self.dwise_swish(self.dwise_bn(self.dwise(f_x))) + # SE + f_x = self.se(f_x) + # Linear projection + f_x = self.lin_proj_bn(self.lin_proj(f_x)) + # Skip connection + if self.has_skip: + # Drop connect + if self.training and cfg.EN.DC_RATIO > 0.0: + f_x = nu.drop_connect(f_x, cfg.EN.DC_RATIO) + f_x = x + f_x + return f_x + + +class EffStage(nn.Module): + """EfficientNet stage.""" + + def __init__(self, w_in, exp_r, kernel, stride, se_r, w_out, d): + super(EffStage, self).__init__() + self._construct(w_in, exp_r, kernel, stride, se_r, w_out, d) + + def _construct(self, w_in, exp_r, kernel, stride, se_r, w_out, d): + # Construct the blocks + for i in range(d): + # Stride 
and input width apply to the first block of the stage + b_stride = stride if i == 0 else 1 + b_w_in = w_in if i == 0 else w_out + # Construct the block + self.add_module( + "b{}".format(i + 1), + MBConv(b_w_in, exp_r, kernel, b_stride, se_r, w_out), + ) + + def forward(self, x): + for block in self.children(): + x = block(x) + return x + + +class StemIN(nn.Module): + """EfficientNet stem for ImageNet.""" + + def __init__(self, w_in, w_out): + super(StemIN, self).__init__() + self._construct(w_in, w_out) + + def _construct(self, w_in, w_out): + # 3x3, BN, Swish + self.conv = nn.Conv2d( + w_in, w_out, kernel_size=3, stride=2, padding=1, bias=False + ) + self.bn = nn.BatchNorm2d(w_out, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + self.swish = Swish() + + def forward(self, x): + for layer in self.children(): + x = layer(x) + return x + + +class EffNet(nn.Module): + """EfficientNet model.""" + + def __init__(self): + assert cfg.TRAIN.DATASET in [ + "imagenet" + ], "Training on {} is not supported".format(cfg.TRAIN.DATASET) + assert cfg.TEST.DATASET in [ + "imagenet" + ], "Testing on {} is not supported".format(cfg.TEST.DATASET) + super(EffNet, self).__init__() + self._construct( + stem_w=cfg.EN.STEM_W, + ds=cfg.EN.DEPTHS, + ws=cfg.EN.WIDTHS, + exp_rs=cfg.EN.EXP_RATIOS, + se_r=cfg.EN.SE_R, + ss=cfg.EN.STRIDES, + ks=cfg.EN.KERNELS, + head_w=cfg.EN.HEAD_W, + nc=cfg.MODEL.NUM_CLASSES, + ) + self.apply(nu.init_weights) + + def _construct(self, stem_w, ds, ws, exp_rs, se_r, ss, ks, head_w, nc): + # Group params by stage + stage_params = list(zip(ds, ws, exp_rs, ss, ks)) + logger.info("Constructing: EfficientNet-{}".format(stage_params)) + # Construct the stem + self.stem = StemIN(3, stem_w) + prev_w = stem_w + # Construct the stages + for i, (d, w, exp_r, stride, kernel) in enumerate(stage_params): + self.add_module( + "s{}".format(i + 1), EffStage(prev_w, exp_r, kernel, stride, se_r, w, d) + ) + prev_w = w + # Construct the head + self.head = EffHead(prev_w, head_w, nc) + + def 
forward(self, x): + for module in self.children(): + x = module(x) + return x diff --git a/pycls/models/regnet.py b/pycls/models/regnet.py new file mode 100644 index 0000000..46b0158 --- /dev/null +++ b/pycls/models/regnet.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""RegNet models.""" + +import numpy as np +import pycls.utils.logging as lu +from pycls.core.config import cfg +from pycls.models.anynet import AnyNet + + +logger = lu.get_logger(__name__) + + +def quantize_float(f, q): + """Converts a float to closest non-zero int divisible by q.""" + return int(round(f / q) * q) + + +def adjust_ws_gs_comp(ws, bms, gs): + """Adjusts the compatibility of widths and groups.""" + ws_bot = [int(w * b) for w, b in zip(ws, bms)] + gs = [min(g, w_bot) for g, w_bot in zip(gs, ws_bot)] + ws_bot = [quantize_float(w_bot, g) for w_bot, g in zip(ws_bot, gs)] + ws = [int(w_bot / b) for w_bot, b in zip(ws_bot, bms)] + return ws, gs + + +def get_stages_from_blocks(ws, rs): + """Gets ws/ds of network at each stage from per block values.""" + ts_temp = zip(ws + [0], [0] + ws, rs + [0], [0] + rs) + ts = [w != wp or r != rp for w, wp, r, rp in ts_temp] + s_ws = [w for w, t in zip(ws, ts[:-1]) if t] + s_ds = np.diff([d for d, t in zip(range(len(ts)), ts) if t]).tolist() + return s_ws, s_ds + + +def generate_regnet(w_a, w_0, w_m, d, q=8): + """Generates per block ws from RegNet parameters.""" + assert w_a >= 0 and w_0 > 0 and w_m > 1 and w_0 % q == 0 + ws_cont = np.arange(d) * w_a + w_0 + ks = np.round(np.log(ws_cont / w_0) / np.log(w_m)) + ws = w_0 * np.power(w_m, ks) + ws = np.round(np.divide(ws, q)) * q + num_stages, max_stage = len(np.unique(ws)), ks.max() + 1 + ws, ws_cont = ws.astype(int).tolist(), ws_cont.tolist() + return ws, num_stages, max_stage, ws_cont + + +class RegNet(AnyNet): + """RegNet 
model.""" + + def __init__(self): + # Generate RegNet ws per block + b_ws, num_s, _, _ = generate_regnet( + cfg.REGNET.WA, cfg.REGNET.W0, cfg.REGNET.WM, cfg.REGNET.DEPTH + ) + # Convert to per stage format + ws, ds = get_stages_from_blocks(b_ws, b_ws) + # Generate group widths and bot muls + gws = [cfg.REGNET.GROUP_W for _ in range(num_s)] + bms = [cfg.REGNET.BOT_MUL for _ in range(num_s)] + # Adjust the compatibility of ws and gws + ws, gws = adjust_ws_gs_comp(ws, bms, gws) + # Use the same stride for each stage + ss = [cfg.REGNET.STRIDE for _ in range(num_s)] + # Use SE for RegNetY + se_r = cfg.REGNET.SE_R if cfg.REGNET.SE_ON else None + # Construct the model + kwargs = { + "stem_type": cfg.REGNET.STEM_TYPE, + "stem_w": cfg.REGNET.STEM_W, + "block_type": cfg.REGNET.BLOCK_TYPE, + "ss": ss, + "ds": ds, + "ws": ws, + "bms": bms, + "gws": gws, + "se_r": se_r, + "nc": cfg.MODEL.NUM_CLASSES, + } + super(RegNet, self).__init__(**kwargs) diff --git a/pycls/models/resnet.py b/pycls/models/resnet.py new file mode 100644 index 0000000..f86482c --- /dev/null +++ b/pycls/models/resnet.py @@ -0,0 +1,275 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +"""ResNe(X)t models.""" + +import pycls.utils.logging as lu +import pycls.utils.net as nu +import torch.nn as nn +from pycls.core.config import cfg + + +logger = lu.get_logger(__name__) + + +# Stage depths for ImageNet models +_IN_STAGE_DS = {50: (3, 4, 6, 3), 101: (3, 4, 23, 3), 152: (3, 8, 36, 3)} + + +def get_trans_fun(name): + """Retrieves the transformation function by name.""" + trans_funs = { + "basic_transform": BasicTransform, + "bottleneck_transform": BottleneckTransform, + } + assert ( + name in trans_funs.keys() + ), "Transformation function '{}' not supported".format(name) + return trans_funs[name] + + +class ResHead(nn.Module): + """ResNet head.""" + + def __init__(self, w_in, nc): + super(ResHead, self).__init__() + self.avg_pool = nn.AdaptiveAvgPool2d((1, 1)) + self.fc = nn.Linear(w_in, nc, bias=True) + + def forward(self, x): + x = self.avg_pool(x) + x = x.view(x.size(0), -1) + x = self.fc(x) + return x + + +class BasicTransform(nn.Module): + """Basic transformation: 3x3, 3x3""" + + def __init__(self, w_in, w_out, stride, w_b=None, num_gs=1): + assert ( + w_b is None and num_gs == 1 + ), "Basic transform does not support w_b and num_gs options" + super(BasicTransform, self).__init__() + self._construct(w_in, w_out, stride) + + def _construct(self, w_in, w_out, stride): + # 3x3, BN, ReLU + self.a = nn.Conv2d( + w_in, w_out, kernel_size=3, stride=stride, padding=1, bias=False + ) + self.a_bn = nn.BatchNorm2d(w_out, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + self.a_relu = nn.ReLU(inplace=cfg.MEM.RELU_INPLACE) + # 3x3, BN + self.b = nn.Conv2d(w_out, w_out, kernel_size=3, stride=1, padding=1, bias=False) + self.b_bn = nn.BatchNorm2d(w_out, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + self.b_bn.final_bn = True + + def forward(self, x): + for layer in self.children(): + x = layer(x) + return x + + +class BottleneckTransform(nn.Module): + """Bottleneck transformation: 1x1, 3x3, 1x1""" + + def __init__(self, w_in, w_out, stride, w_b, num_gs): + 
super(BottleneckTransform, self).__init__() + self._construct(w_in, w_out, stride, w_b, num_gs) + + def _construct(self, w_in, w_out, stride, w_b, num_gs): + # MSRA -> stride=2 is on 1x1; TH/C2 -> stride=2 is on 3x3 + (str1x1, str3x3) = (stride, 1) if cfg.RESNET.STRIDE_1X1 else (1, stride) + # 1x1, BN, ReLU + self.a = nn.Conv2d( + w_in, w_b, kernel_size=1, stride=str1x1, padding=0, bias=False + ) + self.a_bn = nn.BatchNorm2d(w_b, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + self.a_relu = nn.ReLU(inplace=cfg.MEM.RELU_INPLACE) + # 3x3, BN, ReLU + self.b = nn.Conv2d( + w_b, w_b, kernel_size=3, stride=str3x3, padding=1, groups=num_gs, bias=False + ) + self.b_bn = nn.BatchNorm2d(w_b, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + self.b_relu = nn.ReLU(inplace=cfg.MEM.RELU_INPLACE) + # 1x1, BN + self.c = nn.Conv2d(w_b, w_out, kernel_size=1, stride=1, padding=0, bias=False) + self.c_bn = nn.BatchNorm2d(w_out, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + self.c_bn.final_bn = True + + def forward(self, x): + for layer in self.children(): + x = layer(x) + return x + + +class ResBlock(nn.Module): + """Residual block: x + F(x)""" + + def __init__(self, w_in, w_out, stride, trans_fun, w_b=None, num_gs=1): + super(ResBlock, self).__init__() + self._construct(w_in, w_out, stride, trans_fun, w_b, num_gs) + + def _add_skip_proj(self, w_in, w_out, stride): + self.proj = nn.Conv2d( + w_in, w_out, kernel_size=1, stride=stride, padding=0, bias=False + ) + self.bn = nn.BatchNorm2d(w_out, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + + def _construct(self, w_in, w_out, stride, trans_fun, w_b, num_gs): + # Use skip connection with projection if shape changes + self.proj_block = (w_in != w_out) or (stride != 1) + if self.proj_block: + self._add_skip_proj(w_in, w_out, stride) + self.f = trans_fun(w_in, w_out, stride, w_b, num_gs) + self.relu = nn.ReLU(cfg.MEM.RELU_INPLACE) + + def forward(self, x): + if self.proj_block: + x = self.bn(self.proj(x)) + self.f(x) + else: + x = x + self.f(x) + x = self.relu(x) + return x 
+ + +class ResStage(nn.Module): + """Stage of ResNet.""" + + def __init__(self, w_in, w_out, stride, d, w_b=None, num_gs=1): + super(ResStage, self).__init__() + self._construct(w_in, w_out, stride, d, w_b, num_gs) + + def _construct(self, w_in, w_out, stride, d, w_b, num_gs): + # Construct the blocks + for i in range(d): + # Stride and w_in apply to the first block of the stage + b_stride = stride if i == 0 else 1 + b_w_in = w_in if i == 0 else w_out + # Retrieve the transformation function + trans_fun = get_trans_fun(cfg.RESNET.TRANS_FUN) + # Construct the block + res_block = ResBlock(b_w_in, w_out, b_stride, trans_fun, w_b, num_gs) + self.add_module("b{}".format(i + 1), res_block) + + def forward(self, x): + for block in self.children(): + x = block(x) + return x + + +class ResStem(nn.Module): + """Stem of ResNet.""" + + def __init__(self, w_in, w_out): + assert ( + cfg.TRAIN.DATASET == cfg.TEST.DATASET + ), "Train and test dataset must be the same for now" + super(ResStem, self).__init__() + if "cifar" in cfg.TRAIN.DATASET: + self._construct_cifar(w_in, w_out) + else: + self._construct_imagenet(w_in, w_out) + + def _construct_cifar(self, w_in, w_out): + # 3x3, BN, ReLU + self.conv = nn.Conv2d( + w_in, w_out, kernel_size=3, stride=1, padding=1, bias=False + ) + self.bn = nn.BatchNorm2d(w_out, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + self.relu = nn.ReLU(cfg.MEM.RELU_INPLACE) + + def _construct_imagenet(self, w_in, w_out): + # 7x7, BN, ReLU, maxpool + self.conv = nn.Conv2d( + w_in, w_out, kernel_size=7, stride=2, padding=3, bias=False + ) + self.bn = nn.BatchNorm2d(w_out, eps=cfg.BN.EPS, momentum=cfg.BN.MOM) + self.relu = nn.ReLU(cfg.MEM.RELU_INPLACE) + self.pool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + def forward(self, x): + for layer in self.children(): + x = layer(x) + return x + + +class ResNet(nn.Module): + """ResNet model.""" + + def __init__(self): + assert cfg.TRAIN.DATASET in [ + "cifar10", + "imagenet", + ], "Training ResNet on {} is not 
supported".format(cfg.TRAIN.DATASET) + assert cfg.TEST.DATASET in [ + "cifar10", + "imagenet", + ], "Testing ResNet on {} is not supported".format(cfg.TEST.DATASET) + super(ResNet, self).__init__() + if "cifar" in cfg.TRAIN.DATASET: + self._construct_cifar() + else: + self._construct_imagenet() + self.apply(nu.init_weights) + + def _construct_cifar(self): + assert ( + cfg.MODEL.DEPTH - 2 + ) % 6 == 0, "Model depth should be of the format 6n + 2 for cifar" + logger.info("Constructing: ResNet-{}".format(cfg.MODEL.DEPTH)) + # Each stage has the same number of blocks for cifar + d = int((cfg.MODEL.DEPTH - 2) / 6) + # Stem: (N, 3, 32, 32) -> (N, 16, 32, 32) + self.stem = ResStem(w_in=3, w_out=16) + # Stage 1: (N, 16, 32, 32) -> (N, 16, 32, 32) + self.s1 = ResStage(w_in=16, w_out=16, stride=1, d=d) + # Stage 2: (N, 16, 32, 32) -> (N, 32, 16, 16) + self.s2 = ResStage(w_in=16, w_out=32, stride=2, d=d) + # Stage 3: (N, 32, 16, 16) -> (N, 64, 8, 8) + self.s3 = ResStage(w_in=32, w_out=64, stride=2, d=d) + # Head: (N, 64, 8, 8) -> (N, num_classes) + self.head = ResHead(w_in=64, nc=cfg.MODEL.NUM_CLASSES) + + def _construct_imagenet(self): + logger.info( + "Constructing: ResNe(X)t-{}-{}x{}, {}".format( + cfg.MODEL.DEPTH, + cfg.RESNET.NUM_GROUPS, + cfg.RESNET.WIDTH_PER_GROUP, + cfg.RESNET.TRANS_FUN, + ) + ) + # Retrieve the number of blocks per stage + (d1, d2, d3, d4) = _IN_STAGE_DS[cfg.MODEL.DEPTH] + # Compute the initial bottleneck width + num_gs = cfg.RESNET.NUM_GROUPS + w_b = cfg.RESNET.WIDTH_PER_GROUP * num_gs + # Stem: (N, 3, 224, 224) -> (N, 64, 56, 56) + self.stem = ResStem(w_in=3, w_out=64) + # Stage 1: (N, 64, 56, 56) -> (N, 256, 56, 56) + self.s1 = ResStage(w_in=64, w_out=256, stride=1, d=d1, w_b=w_b, num_gs=num_gs) + # Stage 2: (N, 256, 56, 56) -> (N, 512, 28, 28) + self.s2 = ResStage( + w_in=256, w_out=512, stride=2, d=d2, w_b=w_b * 2, num_gs=num_gs + ) + # Stage 3: (N, 512, 56, 56) -> (N, 1024, 14, 14) + self.s3 = ResStage( + w_in=512, w_out=1024, stride=2, d=d3, 
w_b=w_b * 4, num_gs=num_gs + ) + # Stage 4: (N, 1024, 14, 14) -> (N, 2048, 7, 7) + self.s4 = ResStage( + w_in=1024, w_out=2048, stride=2, d=d4, w_b=w_b * 8, num_gs=num_gs + ) + # Head: (N, 2048, 7, 7) -> (N, num_classes) + self.head = ResHead(w_in=2048, nc=cfg.MODEL.NUM_CLASSES) + + def forward(self, x): + for module in self.children(): + x = module(x) + return x diff --git a/pycls/utils/__init__.py b/pycls/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pycls/utils/__pycache__/__init__.cpython-37.pyc b/pycls/utils/__pycache__/__init__.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..96f4f6f1bcfdd4536461b9519cdfa59dd4615e7d GIT binary patch literal 138 zcmZ?b<>g`k0^5egaUl9Jh=2h`Aj1KOi&=m~3PUi1CZpd0FjiSxRgwqBJJfW zwgjpOpj`6?J;>;(Z_-C7_S#ckp{LF)X+=ROa5%gBX?JJ7Z{|_E-PGXu>CKhiC1Xl8qK$RWAk~cNnXsa#*4XxJHD{#GxZyR37MaWDwZ=?l zz10@PYcZQSZ#B0C-(uy;fhk3NbP?_>eq7v=%EWTH%n5QrM{qZtF)G;q6cZV5_{<=nj1i~ zEzmkwgL;>yBKsJ2u`5bH-R;8;fN_-|oElr7vxaag#Kp_?lS?~kmrl|iqUg!&?kDmX z1mRhw6UctQw*egUAkMf6fh#d^3q2c*bZ)@h!8uG6$5K}8^A>t8nl_pZFuex<3_j9H z^P=#FlZ+!5(n^Ctloosw2j)4CSRoCBcnQlJMGGsrgw=)d5@Z5maF=ZmjaAzC-hqDghj9)4 zfmuM5J=TQ&|t9EH;xsUx=wI7ul ib%Qm*tFV;nogblZPSWUSJgLCiGL+IG2L0BybN7Fa%-%Qv literal 0 HcmV?d00001 diff --git a/pycls/utils/__pycache__/checkpoint.cpython-37.pyc b/pycls/utils/__pycache__/checkpoint.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..44434ad469c82e9fb7a099608fa29c06144c9d70 GIT binary patch literal 2798 zcmZuzTW{Mo6ecN3krT(cv}@O*1JvDGV}sM(fTGBXb!oZ<+tMOwhr!q;s3H@~u_TY8 zi^i3KVd?80_Pz%Pd)i;(>wp3M3w_znp&HxCCP0T2Ns;F}-?{Mf#l?97&!<2C@#EhM zg#3jcCyxW2&)_Ydf)J!l1Qo1D+tju!br_)}L{E}zKY)**mq!Bo`CbrrWPH%ilS~(_7oa!UZc^jKd;9HrnhV(r zWj~KQp;?$&7~{H=heKxr-eMVqCS5va+Uaub9-}sNxV9s;O5_uX+DJG$j!?H?FIPdK zbA@`$?A>a5hUJCf{YZBV?}BEE!Omvx{-cNC-K`x{Lm6)0`DXJ^qG~W@#mi6DI(aJB zhKE9AhsBzRbW}*a)<2AU#Va|`3G}Cjwfcu(vz*>;Vpwqn2 zfs(Pz^epV_on*fwVant}R 
zQnd(cz!t1^!_d7mR(LtT-b)G{=V|};ySO0|>5<_zG)^xnW*wlljvm@8ZbRqfpBMkY z)AX#pEeB=}=fG8rSL8}dF(swxn8s;$QfS%s`7`H)|Hsz3|54~oRM-bWn8(gjpbgD2 zp$&Z#Jz@jiq6MO&4QOFw2PrEq2)au&7fd*oNFb{s@(9M8eAv1xwNz=6L4+oI7^F%D z5xi3vMG!rSl3w%}eD&Tbju2ac?*$ORP~(K5Zz0$H#00?{04e1aKWYV*sZ|t{$hD68 zvbMf2Gx@ZyZr{M|s!$tV?L@^)bk2}-vU-*l%YyObQy`}>PLv(0*I+X^9YgxeiKcOS z@AvT7S9S&~bE+^{CScLdsFbzydFYf78W(NC>#_?(umenY&T##-HBZHQ(a8rr5j?iJ zSc(9$U_4L?L&Jta>jj>oP&Az?>$wki?rd$Fm5nbqH@;7HH(lz+G zO3!DZol9n|R;~F6c0I{zSD_yxb@*T^nRrkaJ~SEAV^Y+u-#}K={0Q&~=T}CFcDtV7 z2VC7KNq27SACt^03AQ7rWF;g9>}z*iJ0`;YosYPlFPY#MF>$zhVO%eH38jIO(gk~Y z!*jb4mWEk7wYDDuI^WV5<_GEx(5cB)0~)hX6{SK)TAHQ2uah(xN)_4xYqI8|VPaq! zV1g7v8|L~dkE^it)964#*cMeNBcI0Ke)LWF#e+w?#)G{FJ!$6cW>5)YZMYbueVZta z3&kpik_2^BLWXv&CZ;ygp;C|)!pu)2$1F|zA?EtCR4w6lGcJA;bh5!j9G`Nw%&s_$ zGV0N~Q^(*(8Yk2tZIhfW;G?R)Hta8J2g(!2{t^9!B5~ldmd25z=eoY7@n%W8wTzW? z#7jP&FCF1tf?_i={f)yDdPx{iZQ99+V|%0(=zO;>%0lb9~m8j25a8B2@*uw(z6V%&yx z_M$`BD)pTPB;*F;v_aJ^n88f9YUaXF{p<$%qh%Ll?P)5xAnj#D}`}wJ;bUr1I8v>dm-@GD=)u| Zn-pkG&U^$z>zp(0_^$8yHQ$H!!oQgrw*>$I literal 0 HcmV?d00001 diff --git a/pycls/utils/__pycache__/distributed.cpython-37.pyc b/pycls/utils/__pycache__/distributed.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..002be998868f3e48ba035c983a34930519ea6ae5 GIT binary patch literal 1883 zcmaJ>&2QT_6elIwRum^#inQ3UVIVLdn6b_tdf1`pz8Vi}Ff5mb07J)v;=sWO$zYh^$7-SJUD)PbAeM~5NJ&XkZYFa!=cKC_t~XZ7A}lu0+?LjJsHX5tjJYnP1ot$%W7L?Y8CtL;5jqWq7;Zr z#*IOwq&a=yrN=EoF!RQ1?iMOOhF5`F&nI#%Yr=hVNRk;8BX$&S0tA@>WGOE_aZB46i zC~b@p{6TbxqDcP!yf?}xvNxLwkxflcaLXYdy<(cAW=Tt{;QofHZZWMkm5J+e#TdjU z=rZU82{qz1ad)`j*@W$|@&GP~l`C&ft|`U4KMrhT+!XYx1!o9*?b@5uIUwj&0O0ij zMZs)mFN4bRRLuZG2p}N`916H91sm$TEV{FGn-mXwy_buJFBhGlYMt`rOlG25hw9jP zBJC&_Rdku_RKy09RRJbszZ2;OxK;&8DR>n@0%9RAR3bIHUfprh%UZ?GRGsj4fA8=p ze*Deu_uuaKt0rJ^!fh2id3JbI1qaW5IO=#OYbfXPt~Ws0ZR+(Jer^GI54LFoA%5Fy z(tvJKkGel^Z#hhMoyKbdDWZ@fZ}w4NLjUH||I5#Q$9I9&VciwAgexF);WDxtY}Bqg 
zFYkfH9oTh=J&oT9LJwB}8`I}XEZ{OE3;JSbPWt4h4;LPKI`-xs&Zbx1!k^PCf9_-X z&!c1VVtY>ET^smrZ~*rKa6fR;+Y6ApY6#yp$F27rWX_))lQ}_9@Vnr^fz+7wI%t#d z6k3L-X$-AXCX(ea#kf^@25skX1I4(2&Vs&@naN?o)idS@Af>XRv@1U2(6W>Jh{fDyJ;_y|f^rt(WBX_78TjvD5w%uRHB8?6mhN*^7%}H26e5lX~BK-$&}}?d=|c_T?{s{QN^m z$X_^FH6I3#pldXOBt%fb3Yt*&WC?RmFY%rd;R*kW2w(b_UebYiM+7hrWB~I}up=@E z=l8y!6(+A$l}WAhauiFYDix2@QWR3{{|!;dVTN|qymp{t7)Sz<2}Vu4*JKD-gm)Ds z{%hisqyrIryYqOQ7vjmwOx7l^%K6?4mDV~=%UG^_@laLMIE^!08Mk2eGuB4JCq~7t z-vMKYp%>(mE!YKJQVMl|Nc3>v*^ZvokOA(n5$AcC8_oyJ`dSu4yX69KqtZ+csO@r| z6{*&o|4IJ&;$U1&<-z=1i1J(?h}5K7nuGd0D|BmTCWo;r5MLFjK37P|E<$s7>HNLT zg!k7l0)$kiQ^~pQaXziYtiW-f^Rro6wEu*hiz?&Xz1MEH7&BQE;EN!RGXttX8bo!` zVBj0jHA0D~&zvWN$-3c^N6__sFxOdgjFH3_zUW*5XCe?GJOdGlE<8iggW7J{{u9*2 zw~Z|3zyGgl;=L27ortS49;Ug{^6XPR*zfgXXy4`9$Wq5c5Sgo_j`MPpc!}Z=ZZ$}u zPDiN$ml@005qFYir!cLYr%shD%~~>U(}r%aB9AfehhQ#1HJpqHSZdhByC4%E&&zKN zJRO*I7QvEGV!}me!bt?4?vl0o@(zzdNP$V!NE#c6R8FgM(6d{OAkJoTChfLU7+~Uc zF0}2Mv688Pb7g~LH?%?FDv<0n+nSkFpj>z}yjSO=GA(R)^z7-6N6#PFoyM^e+<>VL zWTH@T|EZyo0p|J)fs7xKSoNTCPktVE}291#OQO1^U_i&T~0J z+zPnaWNZHrAl-spHc0DQmZC*yJ*dDN2yHhNg1Zv!Ax?K-*!=bezV(uf!WwM2Y|Z0m zFlhiT$t&O!ptiSw@fJSx&XSm55<1=*@SnV0bf_ptLhQJML?TyK>mk8jJu{Y7+WMz? zQP=={n9%zWUVQ*&MMQv(^|5S-xD7tUsqs(Z^;n$E25FnJ#Y_vr>i95pf_I5rb(ZJBC}IIRB&UdySV?2H1B}#bLefFeY(r$A8w?% zsa-^b(o|b$`e?S%VD<`ytFUkiUwbW-3+k>pyy9?$(|6SIS+o`TQCFcKUhUo$!?+;{ nKNPmR{_Zs2grGW^$jrDcZ;N50ht0M86^3b*kI zH1%l!dWP26Ov|DnoqC0%S!(T~`gH%sgNLN2vOH56ljLSXldfWvv;^;vFPLJf61=Ft zhQ*-mI@Z2m&w5O%{>N>lx^k`Ar1?T?FUh*Sy*x`>SOahl82GUMR$CT#<> z{2hcsL)cLX`oJ35)H{at)WXPQsJs0? 
zzTihi&J#*hn-Q}gBD*ZjS{avZ$uvt$|AWtA?I3zhEAKD~K&&76N- z-h~_3?%%CWoA3gLB|XJEiOSL>hkw=jq{%BuR;`STV|SOyUN1dlYK@!>`s8faS+Nfj zcetAD{bXP^a5AW0(3W7@h0JMNhY!Boez>!_ac_51hf*a%$)ilQ^<4bu$;Q@hd>j`b57c0FEXj5_Tey#q{DR3Ne&tOk=o1o z{yr1h$%+=&({yq}aZluRFPZ`sa}k@PjcRW%B=Rz8BD4)6nj70qOl+o$X3rU5lE09c z2UxyoG~2wxn*C!+i(}cOiAp3>V>C~ssTSGH_(JzsPo{$zE*oSM5Xhak9PB`w#tsgw zX`?F{S{HGMg^7HpFaj`Ch4Bk?C>~e}DO_45nn%hi0rpqXDae7XfC|oAseKA%K%LT_ zN7RK8_7xcO0Rs5j2hI@6AE54*P%hNJicA~X73huEA0YreD)FH!UQXfzz;!(ygXq zFI3|!Rbwv^jjU)u!8B5c;yG(Bt*ovrE-o%tnP12wT<_*GIrI+gthTHdCoH(Z3kApD*kZ-LY6}AnZ5{1x z*Qay{<0dba_IJilmiEEOyQ~oJnMiw|!SzZdvaYyh436<>N3euy8}3(p1lD>eE`fr_ z^ZeFyn6`VxVJx#gi)P;5vss(AVO||Tp>zSzOotVK#*x!|$BM~V^0<`RE0Y6o2eNOvvOnDl|+BZ}QBrc|4sIZOr5QO%| z2Pw>hEb7KmgZ*!fc{PY*!?~lk4(%2m< RfQ6dhv>jTG9b5}8{R0oQgY5tS literal 0 HcmV?d00001 diff --git a/pycls/utils/__pycache__/logging.cpython-37.pyc b/pycls/utils/__pycache__/logging.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7d70c65240da862903fcf8f4ac6bf7dfb454a452 GIT binary patch literal 3790 zcmZ`*TW=f372cWMC07)6u_N0x>NcCi2}Bg7(iBZwLown+wrjwV5m{*hHt}|~v!qsD z?y@sOH)1HDfK=EoO;PmK4-)d!zcjCX>R<3vzcWixm%1e8va_>uzB%VRmroWJ8V-IR z{pE)z^G(P3H$6;$E+!wNsh8-4({Th7JZBwdN8aIfbUUscy^d!`zvJ7n)~|DiIsKqB z$Eq(Iod({x`Fy|GX=2UmEC|1|C~BQ0Q5V6H(^(dCqJi;}m={fqm&Jlu#Q26-63ZA@ z#3gYV;}vmLtcWW|taDZH$4=|oD0o=*dRftH{|8u|%@#9m((R2_|M2c=H_Ou^?x!tv zD|~k~&x*7tTfY?F&{IEED(>OcsG+ndhq|4Yy-{;dl|`gry>+x(Z)@+G~L zHR|fr1?Wr=l{c|_LlASyiDR;-ELQVH*qCb?JHBOv}}T%VU|M~%J}w! 
z7L#CK1*(EKk4Lj~;oLW|Xz%K{S6z7tPv@*dTCmo6f{AvH*@+{!d$7lz8+qt`T@&mj zY^3XZf#6TM{6Np`HO7u`*PUWT&+j#l7w~RTFP(6#dh7JgtaK`& zxiT^J$w9`GWvNZD8y6x^r83@w&3lhbJ-YX3>$5xCk`!-(hmY<@8+RXX%LP1El6Z-| zgIMnxZ?DV>Q-AcucC_(e%gjZz``&|xcg_4xtg__Ovgl?#tHl=UEKUSRNwqxVh_sei z04;`tXtHS);?y!a#|_+oH+h3K`3hg;@*S)tL`(Y7=%QJhP@Ex~jGYrkL>*n(O0~?O zD>c(#D%&kCuVJT|>!tc3NS8|ZvPb@VO4|xj13N6BRIcNTii=#nhxrTvNhde32tLMy zeD(U?UEP&%{^e+)gIRQm>>JXKd$Nen4Rzw*k9CuyFOY5EEQ6S z2yKXHpOZ%Z_83jQk1m6xZ&ttN2RKhVW3JgD0BU~h3g_4x4~lmE%HTB&_^ ztNmg3pyiuJrr;ntE|S#v-Moyosf#qp`f+aR8`VU<0Vyrl@b>$LJvHpv1tq;|tcxts zN!cIV-XK)On;IXz4MRq!QSQ`VPq%M_%5$oeI(nM!tIqsTrO`zEDERJ{I-s`M3XGrW zw69DtfH0{z4Khy3r02W-qJHXQIT9l<|$ww$lX z{vLS2Ehk*Wa(Z&@lQ$;I)JCfh%UD!~uW~~b$m0-Z>g8z&fFZee+c3E~!(kN;@s<;9 zOnStL_rNn}%%0YyH@0Xryh4K`hwK^x`Cs{U=N}VBY_?qaD|~0XZiZ~&BALNQenu-S zzn~?UJI_>Q?$5EKGPg5&2ka4Nw0Fkdv;F4~llCm)tC(2C(3V-XElEaG?;@MOb!;CZ`dD z*g<&6DHKIjtLu;$DxB-i3kKQViy2d1U~S3^)CF|()C`soE(AieF2tE&D!j4;MR99{ zuSitI%F)e1EaBFt-uF6Uz;BVBjGqq5WcM6PD6(3X{B(-^YQ^hgpg%#GcpZG6;N`E_ znxuq4_)rMJ^#^&Tr&M;K!q-uzbBeaQDGCW_Q2q|<@;*A_;mfL|!CACYy|YSnk5JA1~qKGdlYD-ievr)7dvD&F!1LN-nk$~Fbf*1lXybmW!#K}paYKb!5sFNrL@VV!$W-V!D~L&I zxPx^?>;d246(a1HBF)wRA<~b4+Y)K{fSr=(jN6_g4J(jg=WUFJ>%r6)2=hmrHYLnk zlbmavRK1QZ=LkZ{&{F%x6wwM1WY6kxj8uE#oT;BP$0E48-ih#vq;=iB&L>risDBk$*<`#Y%{5!> zw{4l<7QiU8ogq*g-!7T?Gi#F4K^5^YRi7c`rR_<{NIoQgOdZ+HCM{cC+M>`F12#q5 xXl(t?Vm+hAnJ!JR4uTK!^fvjbBHMX9K#sS6ei>ua^IZA|bHPgB2ThE@e*yDteDVMQ literal 0 HcmV?d00001 diff --git a/pycls/utils/__pycache__/meters.cpython-37.pyc b/pycls/utils/__pycache__/meters.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6ff94ec6049fdf543873f0e66aed7950276c7131 GIT binary patch literal 7415 zcmc&(OOG5^6|Q$zKd0y68OM%bfBSh0_U#ExIUrdYDdfo1;x@OXj2@;`N_uNO_syg>PzkB<| zdc9)cdhR!Wc<({|;aH!N9( zX$<{#IlEKIMyljq+Z{B9OYJ4Jy|DC=(QdwKgnn55$Oy~P@{!$MLBA4K z(XUE>75!RRN53xpwa|LYXf-CKYf%!Zc7O5)D5He_aijy?#?dzH+#cKbxq#K zi7pBQ=@Mjnu&?p3e6+yUF*;~w9&UiOM1?s8`{VA)SOyVXX>6f_^vZ~`s#Re 
zCsIkg*%@s{NoO;`XToT})_AkCxzqbF3LpKT7bdqi<3#mFyPG>>HS8o?y=KEOK6@6Q zo?N{9*fWnke)!1U%dDQc`?#*2VAXO`C+r-=sTcO{498(wilgp$6kXZ#AT9CbV0{hG$3BQ*d1ljWm}&{_E(d0pg~k<>m?Sb_Bt{#;Y(s>h4dpta z6S}C~Oxd1Rb-Sch-1fC<+vTtU;Vz}sx4NA{M~P}Dm#;;gcwa@3^kyvMn|C^c{b=*y zPT1K?qHuFZjfa~rf$?sl#-r9&m;CTzZKGU8i5~<>j42HGV<BR$Qnwf&h$G-J~1?CnLPP#6?eTb6K{x}xK=%$7T07|5O0oZ%X5a&oT7a6apl zKyyo|;n@V@q|2X}(7d_bvQulc2kl2;uQL(>-a}rE-6#p5@jy@AA|GOJ=Hx<=htt!( z1S5)u3LlQXn2+l01D!j&mkUg6EMfw4PH~Xi7DYkZLNPDW2_OpVzBVH=*d2^-bp`=m z%`kdT8O*0`w<`39QD+zhL0Sm{M78|^+qEEgf4?)x?v#Qc9Cw32ox^+82FXPdq$ope zl6-@Najih;ZmJ1F@x|=Ah1IZz+7Iht19drE3Y(}a;c~cwx*D#AYp84CIf!&4t=v?d-bjbD^LaQ+ILO2h zC!Hid9kp@=-2!h&J&0F{*|+FSYfx6;B<>MwFDYT%KjMtCgoaAU@oHhbR%q8Ha5RK2 z32iP8BFkwt+8cLo2R)`&MOmfp^&2-|zm}G-z3_JM@{Q|PUrT*NOPQgF7t#tR2h4;@ z#GO{&xc+Kz<#8BS_2WRbp7W8A3cpbz*U~8DZ#9z;?+3|foJMO;og`XXFyONeMi8bx`Dj!fF zdbP96MomwVFqq}~Hrlqdp>1j$na+iyx@(4HJ6A?LV8+>qCnKe)osX1c#GktIgg=vL zSu0v%o@iUlKrCRccd-glzNZtWpL-Eo;zBBaFfGB&7U7fH{vNPk1e}Ku^e)%U5Ngmn z=qJ{S$=_t7U?IgPx{M3FN+?v6i0&8-imopimeH$kBWhV2052QHd1K80z;^m>;!W)% zY%4xC51piR?4KBf+M#n?CQ92dl1jhYubo&j0zA1Pf}{*_;1f1WY}>9#G?rL4aqk{px_j8Frf!fN z>_w?7kuY^=r>RdTW7bQ{(h7S!JE;w}`hR$xJ1LXHO zkufXvsR6!Azfm?3sofhT&{~A8LpM(AyLvv<!lA8f@$ zs@@{G34+}zQ@av_U1Y0Ew7mr}uex>>s!qEPlXW4aF+d7HOlvp-n&awOPosi4h!FgsH{ANob47`csN@rrcm;%} z?$2PehR!RwnC^$1>GMF&O8M8+A)(!SrOkU z?%@i^IhVv@sND7`A8a||f^Eu@iI)F_k}cVhtVN`(G4o_;zU2QBDytvj^^2jhqAz{Q zuXD>`n3IBlC4T^9e|eCs=w|o$ApQ2wDEIsz;ITvs%wf$UL24TfsEwy6*}+c{9_wSD zDNvt2`VpY(01VGYW`Nn&V0jkWytB~vjICy=ISidvLNvGrEs-V#t z>jWtEc!`(<$=mtFKXj%%M0U5^C?85{ zl?|n2+Mn?i;u~{#D4BMiE!{#gp^_z0w)M__5&V;Y|5r@R_LoJpN2CXwcYZGT{}0S7 z^jrWj z_OL5DF2U*?u{uAqx^f?TA+fSrzfM1r=dtBX8VPnj8DOJrxl>ncE3Jra6*1c9jZ1*q z+@$7_RE%r+4D3p6M9o6Dox`(5rZ=~`GZv;dtQxM&zkLJZT7M*JN&;Cum7HW5ElxzYE-I%txA3a z*lI;_1~9llIfCxA+8qxD_yeUk9>v-J2RLKfJJ6@mtMW(Dmi$Gug@x<_2c@ob(^A$K zCreMw%(kx*qta#lj_z1RTlgbur?;z56|-^uZheA9@d=bqz45I+|JG@8gpg4yE literal 0 HcmV?d00001 diff --git a/pycls/utils/__pycache__/metrics.cpython-37.pyc 
b/pycls/utils/__pycache__/metrics.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..23233a917bcb9b1add4a1137b999471a7288d552 GIT binary patch literal 3724 zcmbtXTaVku6`tWulvdiCw`+TyI?TpREH{x;_mb4zIQBXh7>M1VPEk~FO3<8HEmsmL z&rs`KfdY!!LEVP}&OeaQr~Z?oKVeYxrB8nAb6(nWhSX)(a)6=~=FEk|Gv|Es%{imp zTCGCh`SACD`Q7RYA^*b8DBV5CVHWk!cK4PBc}Q`c74ZrT*znH{HFYL=jF zHC<*i=P7BHS&6w&S6G=`MV6BO|bHOf#C- zZ9~46f-g_#A82YEm>Ef}40_CD7X0i#+YgMPIkYlsWRD5dbmYK0XKaj0nKh2VLAdX=8-jw_gT`)_&I0YZvTL;P+D6#>f1t@*>){s7fozCdj27~%*W6wBWU^k*;GBU>G z8{?4${u%cK!VHU9hY^3Q?7dDBq{@{2ZrxBu<2^;&usLhst=|of{I*|s{8#erwX6F{ zmtP$mGZr7qt1L(Z$bNdHxfd00e=x*$E zk}&AV8_<}{u9uZ1qXAdTx1{6(DfNjEiBNW$h;Uz7s6m-vqoS#BLANUNqaFb42T~dQ zQN5&WgkCBqM_d_g*huWnTf1~9Ww`) zPB0Rz0UmP1LLae$c_$cxTdWzf4GqnZtszBzJS$jnft(}E*qE@wY{5Ku{{4|AaMAni zP4IIKhUz)ZQ0nMf`t)SD=kA1G;irB`!Aw)q~!YR~Uh8D7T?V zFtob~ zsOr4hpG?*xhIkv=Q_nCk;c3*UJpc?-azGD^TpDCZhme(r=5u;<9Z&@9J|@rU zSMNX`vxYWgH#@ZtoF|6(Co?ju?T81Nnb`*=DDDOUSsqo!`Rxz`bnd$Oq1-@U!l`QlWCAfkZC%{MW!|-Z@fUB2n@#qyU`QLL48x;kg~u0)vY`CRO!xL z|NfokC#n_$ahVTn=y$kOl}JW0XpBP+{E3xwJBbgkF|`!Mez+gRF=TmFP5SAqXFZGp zDc(TkrCz`oT#VFGn+urg>tyFhxFsL-!ShOt1N`Xsl>g+50{irtHU9oo4ou1}i@=KL-W! z5(ol`%%YpLN?p1|4SEHBThOl3WqOf%P+z2C7v?Rd{s-Sh{mnV*yDw4y?*;WgZc z>h2uX%TuaX7Es+~Gpd)dt;txy)*RKBMpYnLX#vguwt(g}P4mry=5QeOkt^q#Ptknw z+ZoN5r!-&5J=C;bEoj|%1+4?Pf*t~=@xHx~+yc|&339J#a*H~SUqOM{TfBqfT@-k! 
z6z`$9hT_L4FuZ9_il1WZeH0&{xQ=3uYvLC;u!G`56gN;n2_%-}3 z=_caX>&u|Ub)v^hKjeME!(thY?9=__~xZt5IFycf0dJ^mE*=*y?R9tjV-n4R=F)%fY9=1iMyi@TG@ cxf__lzUwxN{Lrv-$>BC+9tCse8mS}6z-6^hg{QW;Ig&c?}n#rCM# zl~+P)UdwOT6;J#nzw*Ri;E8K*kdj)u*LSY%W8Ztue7m!gA{gKN{^xIxV}$mY~Rl>W%@G}GZ4oq`!%U>1` z3|eE0EZz`|EVYpx{DCogfts%(WXM^nzQpi4jDAJU$B?0rdA7}1vX0DP9dFPEPQW^O z3FkNl#Nh@`yo4CuOO~Jh+&6ksOpg1eG_q|b$EvO2yF8HRg~hWD9>Da+^V`Txwt6Z`&?+F z+Qm)4Y3vezL#~Pj_P=gb#d9r}!i@mFRrz@@df_O%*fDT;03q7W>Nu=gmUz^THHbnEh^1R{F~KMnYWQq-@qL994beLG*2Kh(u&ecJVHW!0uD^ z9^}FuW#pG(c%da=fxb%!EC}d~sj);W>S^4dRb+^b-r&_>j#lx-fuRm!G{$uWW$aQ1BPN$rI>^0ht2B#Ki{MCya}H7pEgf>duXx%et$?Prm1hKw)kG zyv&6gKHF+MG{eP3*NKL^fd&kdB?aeskPrw!au=PpvYDoiHGLh}R=WVDZh7ZBtaiX< zJ6nEFw!cP3I_4qrlputez>&&dv#H=Mm${Y6qi9f&=PrUHR1AofAA)YGn!1H~9Z^n+3M!DjFwP$vI?U^&@eCIo} zJ~!tPJU{#GpMU+`5+VQ8owLV6<1-Z5Ma4;jaLQRs8`Qj6gPFJ0u*}=;I1PvDHC9~c zxD6L=n>+7FqZnGlnGxXy?!F_ZwBhk0pTYPn_xLQ{C0^olc+c@NpU1n***2*z^zS|G zCCw~K6It0yMWquYQMVTdS(LOYNtnG&#euB7hi;@^rOImVwVG7-@MzTwC{Ix2-%({G zCmC(CQzkZY(uT@6F@iU{NtSfWULpgEx6>}PU2bK?;S9HPdW(!GcfMehBw`}NgN#39 zvpEiQ%F;c2Q%d);xpuidKVV}C!-brQV{RpkyCX`q{A=yPaB)N+=l(>_b;$YK`HnD( z+2`U~J_AE|{Rgi?0Sk0wWh<2RF#EpDPU5igHjG;PStUxMEDGYN56h%UO=ugu*HsI} zJZFv1s!Z9PFi4aoGp>qzQ4+-dZZetw_ouDjEdBM_Kl<-JtyhOL zoPH=$Ulm?NNf?N#rD#XFdqLtup~k$K)Q0g}L8lY^g@{`a{$*`-KkbC8{S(fU6S=Cj zlwr2oJ!!^rQune5ZNRJ6Jy9hiVtgpM2nlit6=5Z|LhsNLTV^+CnU?4x6<^1AQ=dRT zb0|d=1Hb$5MP|U)wsP9GS4igM^pu{mfr+4ZfXA>fA{&T}fyDtyj98{;BZAI*4tFl} z3df6j)f;c%@$w^)udLziEb52%C`%E9>5t?Y-JG;W3Zlm?3_b85J)`WtOtLRI;ZGrS3=n<&wPc%kQ`9|Z~ zqd08_v3!ii*@su_ybMW3=LN0vkOC@f^ntdP(C1Q~2DkL%=m6m-dWklMucNxk#5eQ; zwwqj1TU0RqG0H^W|4#$)O})$P+15u0zwyrETbQ_N>jz)4wWev~FdbXw@E%B*w#h{& zQB~|9YLl$j`Ct3g&i9#eTD@9URwqgfy@=~rp|00|`O@EBd->|c=C=Q8YxBwT?M>zF zYGN9*niYqMayu#S#i116*5AxTStznJ)hrjuI07l!%^nZbOqldKp$IU27YoHbRMk1< z;Oj6}g*Xj(C>lj9^0kPDwVOyN)RPTM9?FJ|vq@t($W4GD*>L?h*wu7OBZEOjk1iQi zEq@i>vZ5`|J2Spe3Sd z>CFld3Abd3p|*`E@^U(KbS^ve5VRCQ%lgYmO(7z5f{JO#*6P)QxDRnYf})Kq%1Vt!7UEYPP(Bg@bCyQsuss#?S!3fcA^})A9O?U6D$=! 
zMFm_U-KM=bj*$9zbfnX1FZQdpqQ|&OK?fh;i_(U75QdYs&bad6GH6O>@kHUDL$<_? z52h>9yMTlK32o3RB#^hx4Y-1irX_vXGZ~AGGZwc-`nG2GSD$J32XS06$e-L!FoJ&t zH;EQ*EPJ@URgPe;RD8(X+J}Y+^frbF7RN+@4dJ&D8xum6z5frMi6Mnfv&ue-!nbGC zN4u8}ZJ6&h)}1l+y%zAM_3$TY{VIkw?y64s)@lZs^16WtIw1mBR_Pd@A7%!%a9Qxy z*{+0%XJ}7>sX?jpjsfQ9XmC1U$LrH@`$Pk;5mdFA>(Pn*6kbSe|`x-uEDX!3V0Ner{7Yvw*zYo;PZmFz{WG56~$n!c3!EfH|T xeugj%=#7WWJ{JIFJgn2cDn1$~gU9-gAa9_u5jI7X5-nN9YYTUZZqX|`{{>gaFz)~W literal 0 HcmV?d00001 diff --git a/pycls/utils/__pycache__/timer.cpython-37.pyc b/pycls/utils/__pycache__/timer.cpython-37.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0840c242e0930f0f259591752a01acaa2bae3785 GIT binary patch literal 1031 zcmaJ=O^?$s5VaF0O}p)~5*$`SLJ_CUrQI7BK;=W?)(f0^zzP|6NlBVwyGV_CKw^Ig zh!cOwS5EsEI5Fe2D^{v-^%zJ(wzZ{JQ4BNA>Ki_`}8T(C>Llfvsv6~bPusH(` zLB;3%4TBKGH3Ol7Z{a+I;4RCdHhx#uN{??an#~HbhZdkS#cnRqC^qMqEdWk`LU_&Q z0t7@jMi4^}#~2dm+%1 zaKxcTBZ0rdZiZ-f>^u8`%fK)(pJm)dS{Y^05k^&uUra|B_UCf)v8mOh-2$w(W&*j* zF>12j7M0oCjV;mlOvdZ2OQbAUrIqrGun3g#_Oz4auyvf*DTZ3lYsVx?(=iL2pxka? zY_6>&KjWJ2#53$nj8hldvbc{<2O75h4is1K;NkUlI!w_y;4S$%Y9fOj6Komo*fMy6 zf=TpnE`a-)z;)Ua4=FbC41?JXiJQeb!9n-YI05Bi;Rd#`dF2VasK~3zxMBW9>HJFl z{}fGej#&)lWBd%iQ+|7v=I&W;@ANUo0!LO1@C>LP`B++LLvMv=8_upMD>dzDM|wm) zN$#Fe#P&rr%MvPsRbDG8-9XB^flWpJP|DAnyxQ;d zq=crBQuhdy5^5?MkMYmtQXy)(VNJ&+z8*Z`B1pm{N@BmB3H^X#NH!NYAC~k8=!avO QT=KNz^qj&CRTCxq3l8$i%m4rY literal 0 HcmV?d00001 diff --git a/pycls/utils/benchmark.py b/pycls/utils/benchmark.py new file mode 100644 index 0000000..44b9949 --- /dev/null +++ b/pycls/utils/benchmark.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +"""Functions for benchmarking networks.""" + +import pycls.utils.logging as lu +import torch +from pycls.core.config import cfg +from pycls.utils.timer import Timer + + +@torch.no_grad() +def compute_fw_test_time(model, inputs): + """Computes forward test time (no grad, eval mode).""" + # Use eval mode + model.eval() + # Warm up the caches + for _cur_iter in range(cfg.PREC_TIME.WARMUP_ITER): + model(inputs) + # Make sure warmup kernels completed + torch.cuda.synchronize() + # Compute precise forward pass time + timer = Timer() + for _cur_iter in range(cfg.PREC_TIME.NUM_ITER): + timer.tic() + model(inputs) + torch.cuda.synchronize() + timer.toc() + # Make sure forward kernels completed + torch.cuda.synchronize() + return timer.average_time + + +def compute_fw_bw_time(model, loss_fun, inputs, labels): + """Computes forward backward time.""" + # Use train mode + model.train() + # Warm up the caches + for _cur_iter in range(cfg.PREC_TIME.WARMUP_ITER): + preds = model(inputs) + loss = loss_fun(preds, labels) + loss.backward() + # Make sure warmup kernels completed + torch.cuda.synchronize() + # Compute precise forward backward pass time + fw_timer = Timer() + bw_timer = Timer() + for _cur_iter in range(cfg.PREC_TIME.NUM_ITER): + # Forward + fw_timer.tic() + preds = model(inputs) + loss = loss_fun(preds, labels) + torch.cuda.synchronize() + fw_timer.toc() + # Backward + bw_timer.tic() + loss.backward() + torch.cuda.synchronize() + bw_timer.toc() + # Make sure forward backward kernels completed + torch.cuda.synchronize() + return fw_timer.average_time, bw_timer.average_time + + +def compute_precise_time(model, loss_fun): + """Computes precise time.""" + # Generate a dummy mini-batch + im_size = cfg.TRAIN.IM_SIZE + inputs = torch.rand(cfg.PREC_TIME.BATCH_SIZE, 3, im_size, im_size) + labels = torch.zeros(cfg.PREC_TIME.BATCH_SIZE, dtype=torch.int64) + # Copy the data to the GPU + inputs = inputs.cuda(non_blocking=False) + labels = labels.cuda(non_blocking=False) + # 
Compute precise time + fw_test_time = compute_fw_test_time(model, inputs) + fw_time, bw_time = compute_fw_bw_time(model, loss_fun, inputs, labels) + # Log precise time + lu.log_json_stats( + { + "prec_test_fw_time": fw_test_time, + "prec_train_fw_time": fw_time, + "prec_train_bw_time": bw_time, + "prec_train_fw_bw_time": fw_time + bw_time, + } + ) diff --git a/pycls/utils/checkpoint.py b/pycls/utils/checkpoint.py new file mode 100644 index 0000000..1c4cc45 --- /dev/null +++ b/pycls/utils/checkpoint.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""Functions that handle saving and loading of checkpoints.""" + +import os + +import pycls.utils.distributed as du +import torch +from pycls.core.config import cfg + + +# Common prefix for checkpoint file names +_NAME_PREFIX = "model_epoch_" +# Checkpoints directory name +_DIR_NAME = "checkpoints" + + +def get_checkpoint_dir(): + """Retrieves the location for storing checkpoints.""" + return os.path.join(cfg.OUT_DIR, _DIR_NAME) + + +def get_checkpoint(epoch): + """Retrieves the path to a checkpoint file.""" + name = "{}{:04d}.pyth".format(_NAME_PREFIX, epoch) + return os.path.join(get_checkpoint_dir(), name) + + +def get_last_checkpoint(): + """Retrieves the most recent checkpoint (highest epoch number).""" + checkpoint_dir = get_checkpoint_dir() + # Checkpoint file names are in lexicographic order + checkpoints = [f for f in os.listdir(checkpoint_dir) if _NAME_PREFIX in f] + last_checkpoint_name = sorted(checkpoints)[-1] + return os.path.join(checkpoint_dir, last_checkpoint_name) + + +def has_checkpoint(): + """Determines if there are checkpoints available.""" + checkpoint_dir = get_checkpoint_dir() + if not os.path.exists(checkpoint_dir): + return False + return any(_NAME_PREFIX in f for f in os.listdir(checkpoint_dir)) + + +def 
is_checkpoint_epoch(cur_epoch): + """Determines if a checkpoint should be saved on current epoch.""" + return (cur_epoch + 1) % cfg.TRAIN.CHECKPOINT_PERIOD == 0 + + +def save_checkpoint(model, optimizer, epoch): + """Saves a checkpoint.""" + # Save checkpoints only from the master process + if not du.is_master_proc(): + return + # Ensure that the checkpoint dir exists + os.makedirs(get_checkpoint_dir(), exist_ok=True) + # Omit the DDP wrapper in the multi-gpu setting + sd = model.module.state_dict() if cfg.NUM_GPUS > 1 else model.state_dict() + # Record the state + checkpoint = { + "epoch": epoch, + "model_state": sd, + "optimizer_state": optimizer.state_dict(), + "cfg": cfg.dump(), + } + # Write the checkpoint + checkpoint_file = get_checkpoint(epoch + 1) + torch.save(checkpoint, checkpoint_file) + return checkpoint_file + + +def load_checkpoint(checkpoint_file, model, optimizer=None): + """Loads the checkpoint from the given file.""" + assert os.path.exists(checkpoint_file), "Checkpoint '{}' not found".format( + checkpoint_file + ) + # Load the checkpoint on CPU to avoid GPU mem spike + checkpoint = torch.load(checkpoint_file, map_location="cpu") + # Account for the DDP wrapper in the multi-gpu setting + ms = model.module if cfg.NUM_GPUS > 1 else model + ms.load_state_dict(checkpoint["model_state"]) + # Load the optimizer state (commonly not done when fine-tuning) + if optimizer: + optimizer.load_state_dict(checkpoint["optimizer_state"]) + return checkpoint["epoch"] diff --git a/pycls/utils/distributed.py b/pycls/utils/distributed.py new file mode 100644 index 0000000..5c6116a --- /dev/null +++ b/pycls/utils/distributed.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +"""Distributed helpers.""" + +import torch +from pycls.core.config import cfg + + +def is_master_proc(): + """Determines if the current process is the master process. + + Master process is responsible for logging, writing and loading checkpoints. + In the multi GPU setting, we assign the master role to the rank 0 process. + When training using a single GPU, there is only one training processes + which is considered the master processes. + """ + return cfg.NUM_GPUS == 1 or torch.distributed.get_rank() == 0 + + +def init_process_group(proc_rank, world_size): + """Initializes the default process group.""" + # Set the GPU to use + torch.cuda.set_device(proc_rank) + # Initialize the process group + torch.distributed.init_process_group( + backend=cfg.DIST_BACKEND, + init_method="tcp://{}:{}".format(cfg.HOST, cfg.PORT), + world_size=world_size, + rank=proc_rank, + ) + + +def destroy_process_group(): + """Destroys the default process group.""" + torch.distributed.destroy_process_group() + + +def scaled_all_reduce(tensors): + """Performs the scaled all_reduce operation on the provided tensors. + + The input tensors are modified in-place. Currently supports only the sum + reduction operator. The reduced values are scaled by the inverse size of + the process group (equivalent to cfg.NUM_GPUS). + """ + # Queue the reductions + reductions = [] + for tensor in tensors: + reduction = torch.distributed.all_reduce(tensor, async_op=True) + reductions.append(reduction) + # Wait for reductions to finish + for reduction in reductions: + reduction.wait() + # Scale the results + for tensor in tensors: + tensor.mul_(1.0 / cfg.NUM_GPUS) + return tensors diff --git a/pycls/utils/error_handler.py b/pycls/utils/error_handler.py new file mode 100644 index 0000000..8ef0702 --- /dev/null +++ b/pycls/utils/error_handler.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""Multiprocessing error handler.""" + +import os +import signal +import threading + + +class ChildException(Exception): + """Wraps an exception from a child process.""" + + def __init__(self, child_trace): + super(ChildException, self).__init__(child_trace) + + +class ErrorHandler(object): + """Multiprocessing error handler (based on fairseq's). + + Listens for errors in child processes and + propagates the tracebacks to the parent process. + """ + + def __init__(self, error_queue): + # Shared error queue + self.error_queue = error_queue + # Children processes sharing the error queue + self.children_pids = [] + # Start a thread listening to errors + self.error_listener = threading.Thread(target=self.listen, daemon=True) + self.error_listener.start() + # Register the signal handler + signal.signal(signal.SIGUSR1, self.signal_handler) + + def add_child(self, pid): + """Registers a child process.""" + self.children_pids.append(pid) + + def listen(self): + """Listens for errors in the error queue.""" + # Wait until there is an error in the queue + child_trace = self.error_queue.get() + # Put the error back for the signal handler + self.error_queue.put(child_trace) + # Invoke the signal handler + os.kill(os.getpid(), signal.SIGUSR1) + + def signal_handler(self, _sig_num, _stack_frame): + """Signal handler.""" + # Kill children processes + for pid in self.children_pids: + os.kill(pid, signal.SIGINT) + # Propagate the error from the child process + raise ChildException(self.error_queue.get()) diff --git a/pycls/utils/io.py b/pycls/utils/io.py new file mode 100644 index 0000000..d9d98d8 --- /dev/null +++ b/pycls/utils/io.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. 
+# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""IO utilities (adapted from Detectron)""" + +import logging +import os +import re +import sys +from urllib import request as urlrequest + + +logger = logging.getLogger(__name__) + +_PYCLS_BASE_URL = "https://dl.fbaipublicfiles.com/pycls" + + +def cache_url(url_or_file, cache_dir): + """Download the file specified by the URL to the cache_dir and return the + path to the cached file. If the argument is not a URL, simply return it as + is. + """ + is_url = re.match(r"^(?:http)s?://", url_or_file, re.IGNORECASE) is not None + + if not is_url: + return url_or_file + + url = url_or_file + assert url.startswith(_PYCLS_BASE_URL), ( + "pycls only automatically caches URLs in the pycls S3 bucket: {}" + ).format(_PYCLS_BASE_URL) + + cache_file_path = url.replace(_PYCLS_BASE_URL, cache_dir) + if os.path.exists(cache_file_path): + return cache_file_path + + cache_file_dir = os.path.dirname(cache_file_path) + if not os.path.exists(cache_file_dir): + os.makedirs(cache_file_dir) + + logger.info("Downloading remote file {} to {}".format(url, cache_file_path)) + download_url(url, cache_file_path) + return cache_file_path + + +def _progress_bar(count, total): + """Report download progress. + Credit: + https://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console/27871113 + """ + bar_len = 60 + filled_len = int(round(bar_len * count / float(total))) + + percents = round(100.0 * count / float(total), 1) + bar = "=" * filled_len + "-" * (bar_len - filled_len) + + sys.stdout.write( + " [{}] {}% of {:.1f}MB file \r".format(bar, percents, total / 1024 / 1024) + ) + sys.stdout.flush() + if count >= total: + sys.stdout.write("\n") + + +def download_url(url, dst_file_path, chunk_size=8192, progress_hook=_progress_bar): + """Download url and write it to dst_file_path. 
+ Credit: + https://stackoverflow.com/questions/2028517/python-urllib2-progress-hook + """ + req = urlrequest.Request(url) + response = urlrequest.urlopen(req) + total_size = response.info().get("Content-Length").strip() + total_size = int(total_size) + bytes_so_far = 0 + + with open(dst_file_path, "wb") as f: + while 1: + chunk = response.read(chunk_size) + bytes_so_far += len(chunk) + if not chunk: + break + if progress_hook: + progress_hook(bytes_so_far, total_size) + f.write(chunk) + + return bytes_so_far diff --git a/pycls/utils/logging.py b/pycls/utils/logging.py new file mode 100644 index 0000000..c9f1565 --- /dev/null +++ b/pycls/utils/logging.py @@ -0,0 +1,100 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""Logging.""" + +import builtins +import decimal +import logging +import os +import sys + +import pycls.utils.distributed as du +import simplejson +from pycls.core.config import cfg + + +# Show filename and line number in logs +_FORMAT = "[%(filename)s: %(lineno)3d]: %(message)s" + +# Log file name (for cfg.LOG_DEST = 'file') +_LOG_FILE = "stdout.log" + +# Printed json stats lines will be tagged w/ this +_TAG = "json_stats: " + + +def _suppress_print(): + """Suppresses printing from the current process.""" + + def ignore(*_objects, _sep=" ", _end="\n", _file=sys.stdout, _flush=False): + pass + + builtins.print = ignore + + +def setup_logging(): + """Sets up the logging.""" + # Enable logging only for the master process + if du.is_master_proc(): + # Clear the root logger to prevent any existing logging config + # (e.g. 
set by another module) from messing with our setup + logging.root.handlers = [] + # Construct logging configuration + logging_config = {"level": logging.INFO, "format": _FORMAT} + # Log either to stdout or to a file + if cfg.LOG_DEST == "stdout": + logging_config["stream"] = sys.stdout + else: + logging_config["filename"] = os.path.join(cfg.OUT_DIR, _LOG_FILE) + # Configure logging + logging.basicConfig(**logging_config) + else: + _suppress_print() + + +def get_logger(name): + """Retrieves the logger.""" + return logging.getLogger(name) + + +def log_json_stats(stats): + """Logs json stats.""" + # Decimal + string workaround for having fixed len float vals in logs + stats = { + k: decimal.Decimal("{:.6f}".format(v)) if isinstance(v, float) else v + for k, v in stats.items() + } + json_stats = simplejson.dumps(stats, sort_keys=True, use_decimal=True) + logger = get_logger(__name__) + logger.info("{:s}{:s}".format(_TAG, json_stats)) + + +def load_json_stats(log_file): + """Loads json_stats from a single log file.""" + with open(log_file, "r") as f: + lines = f.readlines() + json_lines = [l[l.find(_TAG) + len(_TAG) :] for l in lines if _TAG in l] + json_stats = [simplejson.loads(l) for l in json_lines] + return json_stats + + +def parse_json_stats(log, row_type, key): + """Extract values corresponding to row_type/key out of log.""" + vals = [row[key] for row in log if row["_type"] == row_type and key in row] + if key == "iter" or key == "epoch": + vals = [int(val.split("/")[0]) for val in vals] + return vals + + +def get_log_files(log_dir, name_filter=""): + """Get all log files in directory containing subdirs of trained models.""" + names = [n for n in sorted(os.listdir(log_dir)) if name_filter in n] + files = [os.path.join(log_dir, n, _LOG_FILE) for n in names] + f_n_ps = [(f, n) for (f, n) in zip(files, names) if os.path.exists(f)] + files, names = zip(*f_n_ps) + return files, names diff --git a/pycls/utils/lr_policy.py b/pycls/utils/lr_policy.py new file mode 
100644 index 0000000..9c751db --- /dev/null +++ b/pycls/utils/lr_policy.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""Learning rate policies.""" + +import numpy as np +from pycls.core.config import cfg + + +def lr_fun_steps(cur_epoch): + """Steps schedule (cfg.OPTIM.LR_POLICY = 'steps').""" + ind = [i for i, s in enumerate(cfg.OPTIM.STEPS) if cur_epoch >= s][-1] + return cfg.OPTIM.BASE_LR * (cfg.OPTIM.LR_MULT ** ind) + + +def lr_fun_exp(cur_epoch): + """Exponential schedule (cfg.OPTIM.LR_POLICY = 'exp').""" + return cfg.OPTIM.BASE_LR * (cfg.OPTIM.GAMMA ** cur_epoch) + + +def lr_fun_cos(cur_epoch): + """Cosine schedule (cfg.OPTIM.LR_POLICY = 'cos').""" + base_lr, max_epoch = cfg.OPTIM.BASE_LR, cfg.OPTIM.MAX_EPOCH + return 0.5 * base_lr * (1.0 + np.cos(np.pi * cur_epoch / max_epoch)) + + +def get_lr_fun(): + """Retrieves the specified lr policy function""" + lr_fun = "lr_fun_" + cfg.OPTIM.LR_POLICY + if lr_fun not in globals(): + raise NotImplementedError("Unknown LR policy:" + cfg.OPTIM.LR_POLICY) + return globals()[lr_fun] + + +def get_epoch_lr(cur_epoch): + """Retrieves the lr for the given epoch according to the policy.""" + lr = get_lr_fun()(cur_epoch) + # Linear warmup + if cur_epoch < cfg.OPTIM.WARMUP_EPOCHS: + alpha = cur_epoch / cfg.OPTIM.WARMUP_EPOCHS + warmup_factor = cfg.OPTIM.WARMUP_FACTOR * (1.0 - alpha) + alpha + lr *= warmup_factor + return lr diff --git a/pycls/utils/meters.py b/pycls/utils/meters.py new file mode 100644 index 0000000..c0d9883 --- /dev/null +++ b/pycls/utils/meters.py @@ -0,0 +1,239 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +"""Meters.""" + +import datetime +from collections import deque + +import numpy as np +import pycls.utils.logging as lu +import pycls.utils.metrics as metrics +from pycls.core.config import cfg +from pycls.utils.timer import Timer + + +def eta_str(eta_td): + """Converts an eta timedelta to a fixed-width string format.""" + days = eta_td.days + hrs, rem = divmod(eta_td.seconds, 3600) + mins, secs = divmod(rem, 60) + return "{0:02},{1:02}:{2:02}:{3:02}".format(days, hrs, mins, secs) + + +class ScalarMeter(object): + """Measures a scalar value (adapted from Detectron).""" + + def __init__(self, window_size): + self.deque = deque(maxlen=window_size) + self.total = 0.0 + self.count = 0 + + def reset(self): + self.deque.clear() + self.total = 0.0 + self.count = 0 + + def add_value(self, value): + self.deque.append(value) + self.count += 1 + self.total += value + + def get_win_median(self): + return np.median(self.deque) + + def get_win_avg(self): + return np.mean(self.deque) + + def get_global_avg(self): + return self.total / self.count + + +class TrainMeter(object): + """Measures training stats.""" + + def __init__(self, epoch_iters): + self.epoch_iters = epoch_iters + self.max_iter = cfg.OPTIM.MAX_EPOCH * epoch_iters + self.iter_timer = Timer() + self.loss = ScalarMeter(cfg.LOG_PERIOD) + self.loss_total = 0.0 + self.lr = None + # Current minibatch errors (smoothed over a window) + self.mb_top1_err = ScalarMeter(cfg.LOG_PERIOD) + self.mb_top5_err = ScalarMeter(cfg.LOG_PERIOD) + # Number of misclassified examples + self.num_top1_mis = 0 + self.num_top5_mis = 0 + self.num_samples = 0 + + def reset(self, timer=False): + if timer: + self.iter_timer.reset() + self.loss.reset() + self.loss_total = 0.0 + self.lr = None + self.mb_top1_err.reset() + self.mb_top5_err.reset() + self.num_top1_mis = 0 + self.num_top5_mis = 0 + self.num_samples = 0 + + def iter_tic(self): + self.iter_timer.tic() + + def iter_toc(self): + self.iter_timer.toc() + + def update_stats(self, top1_err, 
top5_err, loss, lr, mb_size): + # Current minibatch stats + self.mb_top1_err.add_value(top1_err) + self.mb_top5_err.add_value(top5_err) + self.loss.add_value(loss) + self.lr = lr + # Aggregate stats + self.num_top1_mis += top1_err * mb_size + self.num_top5_mis += top5_err * mb_size + self.loss_total += loss * mb_size + self.num_samples += mb_size + + def get_iter_stats(self, cur_epoch, cur_iter): + eta_sec = self.iter_timer.average_time * ( + self.max_iter - (cur_epoch * self.epoch_iters + cur_iter + 1) + ) + eta_td = datetime.timedelta(seconds=int(eta_sec)) + mem_usage = metrics.gpu_mem_usage() + stats = { + "_type": "train_iter", + "epoch": "{}/{}".format(cur_epoch + 1, cfg.OPTIM.MAX_EPOCH), + "iter": "{}/{}".format(cur_iter + 1, self.epoch_iters), + "time_avg": self.iter_timer.average_time, + "time_diff": self.iter_timer.diff, + "eta": eta_str(eta_td), + "top1_err": self.mb_top1_err.get_win_median(), + "top5_err": self.mb_top5_err.get_win_median(), + "loss": self.loss.get_win_median(), + "lr": self.lr, + "mem": int(np.ceil(mem_usage)), + } + return stats + + def log_iter_stats(self, cur_epoch, cur_iter): + if (cur_iter + 1) % cfg.LOG_PERIOD != 0: + return + stats = self.get_iter_stats(cur_epoch, cur_iter) + lu.log_json_stats(stats) + + def get_epoch_stats(self, cur_epoch): + eta_sec = self.iter_timer.average_time * ( + self.max_iter - (cur_epoch + 1) * self.epoch_iters + ) + eta_td = datetime.timedelta(seconds=int(eta_sec)) + mem_usage = metrics.gpu_mem_usage() + top1_err = self.num_top1_mis / self.num_samples + top5_err = self.num_top5_mis / self.num_samples + avg_loss = self.loss_total / self.num_samples + stats = { + "_type": "train_epoch", + "epoch": "{}/{}".format(cur_epoch + 1, cfg.OPTIM.MAX_EPOCH), + "time_avg": self.iter_timer.average_time, + "eta": eta_str(eta_td), + "top1_err": top1_err, + "top5_err": top5_err, + "loss": avg_loss, + "lr": self.lr, + "mem": int(np.ceil(mem_usage)), + } + return stats + + def log_epoch_stats(self, cur_epoch): + stats = 
self.get_epoch_stats(cur_epoch) + lu.log_json_stats(stats) + + +class TestMeter(object): + """Measures testing stats.""" + + def __init__(self, max_iter): + self.max_iter = max_iter + self.iter_timer = Timer() + # Current minibatch errors (smoothed over a window) + self.mb_top1_err = ScalarMeter(cfg.LOG_PERIOD) + self.mb_top5_err = ScalarMeter(cfg.LOG_PERIOD) + # Min errors (over the full test set) + self.min_top1_err = 100.0 + self.min_top5_err = 100.0 + # Number of misclassified examples + self.num_top1_mis = 0 + self.num_top5_mis = 0 + self.num_samples = 0 + + def reset(self, min_errs=False): + if min_errs: + self.min_top1_err = 100.0 + self.min_top5_err = 100.0 + self.iter_timer.reset() + self.mb_top1_err.reset() + self.mb_top5_err.reset() + self.num_top1_mis = 0 + self.num_top5_mis = 0 + self.num_samples = 0 + + def iter_tic(self): + self.iter_timer.tic() + + def iter_toc(self): + self.iter_timer.toc() + + def update_stats(self, top1_err, top5_err, mb_size): + self.mb_top1_err.add_value(top1_err) + self.mb_top5_err.add_value(top5_err) + self.num_top1_mis += top1_err * mb_size + self.num_top5_mis += top5_err * mb_size + self.num_samples += mb_size + + def get_iter_stats(self, cur_epoch, cur_iter): + mem_usage = metrics.gpu_mem_usage() + iter_stats = { + "_type": "test_iter", + "epoch": "{}/{}".format(cur_epoch + 1, cfg.OPTIM.MAX_EPOCH), + "iter": "{}/{}".format(cur_iter + 1, self.max_iter), + "time_avg": self.iter_timer.average_time, + "time_diff": self.iter_timer.diff, + "top1_err": self.mb_top1_err.get_win_median(), + "top5_err": self.mb_top5_err.get_win_median(), + "mem": int(np.ceil(mem_usage)), + } + return iter_stats + + def log_iter_stats(self, cur_epoch, cur_iter): + if (cur_iter + 1) % cfg.LOG_PERIOD != 0: + return + stats = self.get_iter_stats(cur_epoch, cur_iter) + lu.log_json_stats(stats) + + def get_epoch_stats(self, cur_epoch): + top1_err = self.num_top1_mis / self.num_samples + top5_err = self.num_top5_mis / self.num_samples + self.min_top1_err = 
min(self.min_top1_err, top1_err) + self.min_top5_err = min(self.min_top5_err, top5_err) + mem_usage = metrics.gpu_mem_usage() + stats = { + "_type": "test_epoch", + "epoch": "{}/{}".format(cur_epoch + 1, cfg.OPTIM.MAX_EPOCH), + "time_avg": self.iter_timer.average_time, + "top1_err": top1_err, + "top5_err": top5_err, + "min_top1_err": self.min_top1_err, + "min_top5_err": self.min_top5_err, + "mem": int(np.ceil(mem_usage)), + } + return stats + + def log_epoch_stats(self, cur_epoch): + stats = self.get_epoch_stats(cur_epoch) + lu.log_json_stats(stats) diff --git a/pycls/utils/metrics.py b/pycls/utils/metrics.py new file mode 100644 index 0000000..ea095a1 --- /dev/null +++ b/pycls/utils/metrics.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +"""Functions for computing metrics.""" + +import numpy as np +import torch +import torch.nn as nn +from pycls.core.config import cfg + + +# Number of bytes in a megabyte +_B_IN_MB = 1024 * 1024 + + +def topks_correct(preds, labels, ks): + """Computes the number of top-k correct predictions for each k.""" + assert preds.size(0) == labels.size( + 0 + ), "Batch dim of predictions and labels must match" + # Find the top max_k predictions for each sample + _top_max_k_vals, top_max_k_inds = torch.topk( + preds, max(ks), dim=1, largest=True, sorted=True + ) + # (batch_size, max_k) -> (max_k, batch_size) + top_max_k_inds = top_max_k_inds.t() + # (batch_size, ) -> (max_k, batch_size) + rep_max_k_labels = labels.view(1, -1).expand_as(top_max_k_inds) + # (i, j) = 1 if top i-th prediction for the j-th sample is correct + top_max_k_correct = top_max_k_inds.eq(rep_max_k_labels) + # Compute the number of topk correct predictions for each k + topks_correct = [top_max_k_correct[:k, :].view(-1).float().sum() for k in ks] + return topks_correct + + +def 
topk_errors(preds, labels, ks): + """Computes the top-k error for each k.""" + num_topks_correct = topks_correct(preds, labels, ks) + return [(1.0 - x / preds.size(0)) * 100.0 for x in num_topks_correct] + + +def topk_accuracies(preds, labels, ks): + """Computes the top-k accuracy for each k.""" + num_topks_correct = topks_correct(preds, labels, ks) + return [(x / preds.size(0)) * 100.0 for x in num_topks_correct] + + +def params_count(model): + """Computes the number of parameters.""" + return np.sum([p.numel() for p in model.parameters()]).item() + + +def flops_count(model): + """Computes the number of flops statically.""" + h, w = cfg.TRAIN.IM_SIZE, cfg.TRAIN.IM_SIZE + count = 0 + for n, m in model.named_modules(): + if isinstance(m, nn.Conv2d): + if "se." in n: + count += m.in_channels * m.out_channels + m.bias.numel() + continue + h_out = (h + 2 * m.padding[0] - m.kernel_size[0]) // m.stride[0] + 1 + w_out = (w + 2 * m.padding[1] - m.kernel_size[1]) // m.stride[1] + 1 + count += np.prod([m.weight.numel(), h_out, w_out]) + if ".proj" not in n: + h, w = h_out, w_out + elif isinstance(m, nn.MaxPool2d): + h = (h + 2 * m.padding - m.kernel_size) // m.stride + 1 + w = (w + 2 * m.padding - m.kernel_size) // m.stride + 1 + elif isinstance(m, nn.Linear): + count += m.in_features * m.out_features + m.bias.numel() + return count.item() + + +def acts_count(model): + """Computes the number of activations statically.""" + h, w = cfg.TRAIN.IM_SIZE, cfg.TRAIN.IM_SIZE + count = 0 + for n, m in model.named_modules(): + if isinstance(m, nn.Conv2d): + if "se." 
in n: + count += m.out_channels + continue + h_out = (h + 2 * m.padding[0] - m.kernel_size[0]) // m.stride[0] + 1 + w_out = (w + 2 * m.padding[1] - m.kernel_size[1]) // m.stride[1] + 1 + count += np.prod([m.out_channels, h_out, w_out]) + if ".proj" not in n: + h, w = h_out, w_out + elif isinstance(m, nn.MaxPool2d): + h = (h + 2 * m.padding - m.kernel_size) // m.stride + 1 + w = (w + 2 * m.padding - m.kernel_size) // m.stride + 1 + elif isinstance(m, nn.Linear): + count += m.out_features + return count.item() + + +def gpu_mem_usage(): + """Computes the GPU memory usage for the current device (MB).""" + mem_usage_bytes = torch.cuda.max_memory_allocated() + return mem_usage_bytes / _B_IN_MB diff --git a/pycls/utils/multiprocessing.py b/pycls/utils/multiprocessing.py new file mode 100644 index 0000000..380104a --- /dev/null +++ b/pycls/utils/multiprocessing.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +"""Multiprocessing helpers.""" + +import multiprocessing as mp +import traceback + +import pycls.utils.distributed as du +from pycls.utils.error_handler import ErrorHandler + + +def run(proc_rank, world_size, error_queue, fun, fun_args, fun_kwargs): + """Runs a function from a child process.""" + try: + # Initialize the process group + du.init_process_group(proc_rank, world_size) + # Run the function + fun(*fun_args, **fun_kwargs) + except KeyboardInterrupt: + # Killed by the parent process + pass + except Exception: + # Propagate exception to the parent process + error_queue.put(traceback.format_exc()) + finally: + # Destroy the process group + du.destroy_process_group() + + +def multi_proc_run(num_proc, fun, fun_args=(), fun_kwargs=None): + """Runs a function in a multi-proc setting.""" + + if fun_kwargs is None: + fun_kwargs = {} + + # Handle errors from training subprocesses + error_queue = mp.SimpleQueue() + error_handler = ErrorHandler(error_queue) + + # Run each training subprocess + ps = [] + for i in range(num_proc): + p_i = mp.Process( + target=run, args=(i, num_proc, error_queue, fun, fun_args, fun_kwargs) + ) + ps.append(p_i) + p_i.start() + error_handler.add_child(p_i.pid) + + # Wait for each subprocess to finish + for p in ps: + p.join() diff --git a/pycls/utils/net.py b/pycls/utils/net.py new file mode 100644 index 0000000..9abed63 --- /dev/null +++ b/pycls/utils/net.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +"""Functions for manipulating networks.""" + +import itertools +import math + +import torch +import torch.nn as nn +from pycls.core.config import cfg + + +def init_weights(m): + """Performs ResNet-style weight initialization.""" + if isinstance(m, nn.Conv2d): + # Note that there is no bias due to BN + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + m.weight.data.normal_(mean=0.0, std=math.sqrt(2.0 / fan_out)) + elif isinstance(m, nn.BatchNorm2d): + zero_init_gamma = ( + hasattr(m, "final_bn") and m.final_bn and cfg.BN.ZERO_INIT_FINAL_GAMMA + ) + m.weight.data.fill_(0.0 if zero_init_gamma else 1.0) + m.bias.data.zero_() + elif isinstance(m, nn.Linear): + m.weight.data.normal_(mean=0.0, std=0.01) + m.bias.data.zero_() + + +@torch.no_grad() +def compute_precise_bn_stats(model, loader): + """Computes precise BN stats on training data.""" + # Compute the number of minibatches to use + num_iter = min(cfg.BN.NUM_SAMPLES_PRECISE // loader.batch_size, len(loader)) + # Retrieve the BN layers + bns = [m for m in model.modules() if isinstance(m, torch.nn.BatchNorm2d)] + # Initialize stats storage + mus = [torch.zeros_like(bn.running_mean) for bn in bns] + sqs = [torch.zeros_like(bn.running_var) for bn in bns] + # Remember momentum values + moms = [bn.momentum for bn in bns] + # Disable momentum + for bn in bns: + bn.momentum = 1.0 + # Accumulate the stats across the data samples + for inputs, _labels in itertools.islice(loader, num_iter): + model(inputs.cuda()) + # Accumulate the stats for each BN layer + for i, bn in enumerate(bns): + m, v = bn.running_mean, bn.running_var + sqs[i] += (v + m * m) / num_iter + mus[i] += m / num_iter + # Set the stats and restore momentum values + for i, bn in enumerate(bns): + bn.running_var = sqs[i] - mus[i] * mus[i] + bn.running_mean = mus[i] + bn.momentum = moms[i] + + +def reset_bn_stats(model): + """Resets running BN stats.""" + for m in model.modules(): + if isinstance(m, torch.nn.BatchNorm2d): + 
# (continuation of pycls/utils/net.py)

def drop_connect(x, drop_ratio):
    """Drop connect (adapted from DARTS). NOTE: mutates x in place."""
    keep_prob = 1.0 - drop_ratio
    # Per-sample binary mask, broadcast over the C/H/W dims
    sample_mask = torch.empty(
        [x.shape[0], 1, 1, 1], dtype=x.dtype, device=x.device
    ).bernoulli_(keep_prob)
    # Rescale survivors so the expected value is unchanged, then zero the rest
    x.div_(keep_prob)
    x.mul_(sample_mask)
    return x


def get_flat_weights(model):
    """Gets all model weights as a single flat (N, 1) vector."""
    flat_parts = [p.data.view(-1, 1) for p in model.parameters()]
    return torch.cat(flat_parts, 0)


def set_flat_weights(model, flat_weights):
    """Sets all model weights from a single flat vector."""
    offset = 0
    for p in model.parameters():
        numel = p.data.numel()
        p.data.copy_(flat_weights[offset : offset + numel].view_as(p.data))
        offset += numel
    # The flat vector must account for every parameter exactly
    assert offset == flat_weights.numel()
# File: pycls/utils/plotting.py (header continues on the next lines)
+ +"""Plotting functions.""" + +import colorlover as cl +import matplotlib.pyplot as plt +import plotly.graph_objs as go +import plotly.offline as offline +import pycls.utils.logging as lu + + +def get_plot_colors(max_colors, color_format="pyplot"): + """Generate colors for plotting.""" + colors = cl.scales["11"]["qual"]["Paired"] + if max_colors > len(colors): + colors = cl.to_rgb(cl.interp(colors, max_colors)) + if color_format == "pyplot": + return [[j / 255.0 for j in c] for c in cl.to_numeric(colors)] + return colors + + +def prepare_plot_data(log_files, names, key="top1_err"): + """Load logs and extract data for plotting error curves.""" + plot_data = [] + for file, name in zip(log_files, names): + d, log = {}, lu.load_json_stats(file) + for phase in ["train", "test"]: + x = lu.parse_json_stats(log, phase + "_epoch", "epoch") + y = lu.parse_json_stats(log, phase + "_epoch", key) + d["x_" + phase], d["y_" + phase] = x, y + d[phase + "_label"] = "[{:5.2f}] ".format(min(y) if y else 0) + name + plot_data.append(d) + assert len(plot_data) > 0, "No data to plot" + return plot_data + + +def plot_error_curves_plotly(log_files, names, filename, key="top1_err"): + """Plot error curves using plotly and save to file.""" + plot_data = prepare_plot_data(log_files, names, key) + colors = get_plot_colors(len(plot_data), "plotly") + # Prepare data for plots (3 sets, train duplicated w and w/o legend) + data = [] + for i, d in enumerate(plot_data): + s = str(i) + line_train = {"color": colors[i], "dash": "dashdot", "width": 1.5} + line_test = {"color": colors[i], "dash": "solid", "width": 1.5} + data.append( + go.Scatter( + x=d["x_train"], + y=d["y_train"], + mode="lines", + name=d["train_label"], + line=line_train, + legendgroup=s, + visible=True, + showlegend=False, + ) + ) + data.append( + go.Scatter( + x=d["x_test"], + y=d["y_test"], + mode="lines", + name=d["test_label"], + line=line_test, + legendgroup=s, + visible=True, + showlegend=True, + ) + ) + data.append( + 
go.Scatter( + x=d["x_train"], + y=d["y_train"], + mode="lines", + name=d["train_label"], + line=line_train, + legendgroup=s, + visible=False, + showlegend=True, + ) + ) + # Prepare layout w ability to toggle 'all', 'train', 'test' + titlefont = {"size": 18, "color": "#7f7f7f"} + vis = [[True, True, False], [False, False, True], [False, True, False]] + buttons = zip(["all", "train", "test"], [[{"visible": v}] for v in vis]) + buttons = [{"label": l, "args": v, "method": "update"} for l, v in buttons] + layout = go.Layout( + title=key + " vs. epoch
[dash=train, solid=test]", + xaxis={"title": "epoch", "titlefont": titlefont}, + yaxis={"title": key, "titlefont": titlefont}, + showlegend=True, + hoverlabel={"namelength": -1}, + updatemenus=[ + { + "buttons": buttons, + "direction": "down", + "showactive": True, + "x": 1.02, + "xanchor": "left", + "y": 1.08, + "yanchor": "top", + } + ], + ) + # Create plotly plot + offline.plot({"data": data, "layout": layout}, filename=filename) + + +def plot_error_curves_pyplot(log_files, names, filename=None, key="top1_err"): + """Plot error curves using matplotlib.pyplot and save to file.""" + plot_data = prepare_plot_data(log_files, names, key) + colors = get_plot_colors(len(names)) + for ind, d in enumerate(plot_data): + c, lbl = colors[ind], d["test_label"] + plt.plot(d["x_train"], d["y_train"], "--", c=c, alpha=0.8) + plt.plot(d["x_test"], d["y_test"], "-", c=c, alpha=0.8, label=lbl) + plt.title(key + " vs. epoch\n[dash=train, solid=test]", fontsize=14) + plt.xlabel("epoch", fontsize=14) + plt.ylabel(key, fontsize=14) + plt.grid(alpha=0.4) + plt.legend() + if filename: + plt.savefig(filename) + plt.clf() + else: + plt.show() diff --git a/pycls/utils/timer.py b/pycls/utils/timer.py new file mode 100644 index 0000000..14f0bc4 --- /dev/null +++ b/pycls/utils/timer.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +# Copyright (c) Facebook, Inc. and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +"""Timer.""" + +import time + + +class Timer(object): + """A simple timer (adapted from Detectron).""" + + def __init__(self): + self.reset() + + def tic(self): + # using time.time instead of time.clock because time time.clock + # does not normalize for multithreading + self.start_time = time.time() + + def toc(self): + self.diff = time.time() - self.start_time + self.total_time += self.diff + self.calls += 1 + self.average_time = self.total_time / self.calls + + def reset(self): + self.total_time = 0.0 + self.calls = 0 + self.start_time = 0.0 + self.diff = 0.0 + self.average_time = 0.0 diff --git a/test.py b/test.py new file mode 100644 index 0000000..3441599 --- /dev/null +++ b/test.py @@ -0,0 +1,78 @@ + +import torch +import argparse +import numpy as np + +from PIL import Image + +from RegDanbooru2019_8G import RegDanbooru2019 + +parser = argparse.ArgumentParser(description='Test RegDeepDanbooru') +parser.add_argument('--model', default='', type=str, help='trained model') +parser.add_argument('--image', default='', type=str, help='image to test') +parser.add_argument('--size', default=768, type=int, help='canvas size') +parser.add_argument('--threshold', default=0.5, type=float, help='threshold') +args = parser.parse_args() + +DANBOORU_LABEL_MAP = {} + +def load_danbooru_label_map() : + print(' -- Loading danbooru2019 labels') + global DANBOORU_LABEL_MAP + with open('danbooru_labels.txt', 'r') as fp : + for l in fp : + l = l.strip() + if l : + idx, tag = l.split(' ') + DANBOORU_LABEL_MAP[int(idx)] = tag + +def test(model, image_resized) : + print(' -- Running model on GPU') + image_resized_torch = torch.from_numpy(image_resized).float() / 127.5 - 1.0 + if len(image_resized_torch.shape) == 3 : + image_resized_torch = image_resized_torch.unsqueeze(0).permute(0, 3, 1, 2) + elif len(image_resized_torch.shape) == 4 : + image_resized_torch = image_resized_torch.permute(0, 3, 1, 2) + image_resized_torch = image_resized_torch.cuda() + with torch.no_grad() : + 
# (continuation of test.py)

def load_and_resize_image(img_path, canvas_size=512):
    """Load an image as RGB and shrink its longest side to canvas_size.

    Keeps the aspect ratio; returns an HWC numpy array.
    """
    img = Image.open(img_path).convert('RGB')
    old_size = img.size
    ratio = float(canvas_size) / max(old_size)
    new_size = tuple(int(round(dim * ratio)) for dim in old_size)
    print(f'Test image size: {new_size}')
    # Image.LANCZOS: ANTIALIAS was an alias of LANCZOS and was removed in
    # Pillow 10; LANCZOS gives the identical filter on old and new Pillow.
    return np.array(img.resize(new_size, Image.LANCZOS))


def translate_danbooru_labels(probs, threshold=0.8):
    """Translate per-image probabilities into {tag: prob} dicts.

    probs: (batch, num_tags) tensor of sigmoid outputs.
    Returns a list of length batch; each entry maps tag name -> probability
    for every tag whose probability exceeds threshold.
    """
    global DANBOORU_LABEL_MAP
    # (num_hits, 2) tensor of (image_index, tag_index) above threshold
    chosen_indices = (probs > threshold).nonzero()
    result = []
    for i in range(probs.size(0)):
        # BUG FIX: was probs[0].numpy() — every batch element was reported
        # with the first image's probabilities.
        prob_single = probs[i].numpy()
        indices_single = chosen_indices[chosen_indices[:, 0] == i][:, 1].numpy()
        tag_prob_map = {
            DANBOORU_LABEL_MAP[idx]: prob_single[idx] for idx in indices_single
        }
        result.append(tag_prob_map)
    return result


def main():
    model = RegDanbooru2019().cuda()
    model.load_state_dict(torch.load(args.model)['model'])
    model.eval()
    # NOTE(review): this re-saves the whole model on every test run; looks
    # like a leftover export step — confirm it is intentional.
    torch.save(model, 'RegNetY-8G.pth')

    test_img = load_and_resize_image(args.image, args.size)
    danbooru = test(model, test_img)
    tags = translate_danbooru_labels(danbooru, args.threshold)
    print(tags)


if __name__ == "__main__":
    load_danbooru_label_map()
    main()