diff --git a/2020_0000_0599/error_images.txt b/2020_0000_0599/error_images.txt new file mode 100644 index 0000000..c61cedf --- /dev/null +++ b/2020_0000_0599/error_images.txt @@ -0,0 +1,10 @@ +0385/3928385.jpg: image file is truncated (45 bytes not processed) +0989/2262989.jpg: file is damaged + + +0985/30985.jpg: found 0 tags +0988/2430988.jpg: found 0 tags +0994/104994.jpg: found 0 tags +0998/13998.jpg: found 0 tags +0999/6999.jpg: found 0 tags +0999/175999.jpg: found 0 tags diff --git a/2020_0000_0599/selected_tags.csv b/2020_0000_0599/selected_tags.csv new file mode 100644 index 0000000..62a02e0 --- /dev/null +++ b/2020_0000_0599/selected_tags.csv @@ -0,0 +1,2381 @@ +tag_id,name,category,count +470575,1girl,0,3015777 +212816,solo,0,2483745 +13197,long_hair,0,2111456 +8601,breasts,0,1539975 +3389,blush,0,1428057 +1815,smile,0,1301354 +469576,looking_at_viewer,0,1278562 +15080,short_hair,0,1142798 +11906,open_mouth,0,1078681 +566835,multiple_girls,0,882925 +10959,blue_eyes,0,877345 +87788,blonde_hair,0,818305 +16867,brown_hair,0,784698 +429,skirt,0,777196 +16751,bangs,0,754229 +403247,large_breasts,0,730108 +13200,black_hair,0,679198 +1882,thighhighs,0,668916 +2373,hat,0,661724 +29,touhou,3,658813 +412368,simple_background,0,654400 +8526,red_eyes,0,639489 +380350,hair_ornament,0,622964 +5735,gloves,0,597274 +464575,ribbon,0,588507 +2241,dress,0,568047 +1821,2girls,0,565503 +1304828,eyebrows_visible_through_hair,0,564801 +540830,1boy,0,552918 +4563,bow,0,547574 +12590,shirt,0,536558 +515193,white_background,0,531106 +9294,navel,0,519259 +383159,long_sleeves,0,516237 +16578,brown_eyes,0,493775 +1709,twintails,0,476089 +464906,underwear,0,462432 +4607,cleavage,0,462046 +16509,school_uniform,0,454501 +667868,medium_breasts,0,443496 +6126,animal_ears,0,433204 +12289,sitting,0,432070 +376054,bare_shoulders,0,425419 +417660,very_long_hair,0,421179 +10960,green_eyes,0,419645 +375387,holding,0,394538 +391,panties,0,393621 +16613,jewelry,0,393001 +1681,monochrome,0,390556 +10953,blue_hair,0,383676 +658573,hair_between_eyes,0,375290 +1231860,kantai_collection,3,374290 +15654,purple_eyes,0,351507 +464561,hair_ribbon,0,340246 +15675,standing,0,338762 +613194,black_legwear,0,338587 +63,comic,0,336948 +465619,closed_eyes,0,325455 +466499,collarbone,0,323857 +444,swimsuit,0,323246 +11449,weapon,0,316124 +513837,greyscale,0,307292 +2355,ponytail,0,306610 +572080,closed_mouth,0,300873 +16442,purple_hair,0,300597 +8101,ass,0,298110 +15674,tail,0,297875 +89189,yellow_eyes,0,295826 +11429,pink_hair,0,291223 +15425,silver_hair,0,283901 +608813,full_body,0,283296 +374844,hair_bow,0,279671 +464559,flower,0,274961 +4334,braid,0,272947 +3843,jacket,0,265740 +3522,ahoge,0,264253 +384553,:d,0,262864 +2363,pantyhose,0,248599 +2376,hairband,0,239271 +547463,upper_body,0,237430 +472154,short_sleeves,0,237227 +6539,bikini,0,236936 +2750,heart,0,236437 +5827,boots,0,236363 +5403,red_hair,0,230209 +383282,pleated_skirt,0,228670 +465265,small_breasts,0,217853 +4569,sweat,0,214571 +301022,lying,0,213335 +2866,wings,0,212126 +1707,japanese_clothes,0,211087 +1300281,male_focus,0,210732 +8388,green_hair,0,210290 +461042,one_eye_closed,0,209887 +387884,detached_sleeves,0,208320 +1575,food,0,208005 +662939,fate_(series),3,204861 +6532,glasses,0,203546 +268819,serafuku,0,203043 +376766,white_shirt,0,202464 +484168,sidelocks,0,199234 +660909,cowboy_shot,0,195421 +568656,multiple_boys,0,193694 +16581,white_hair,0,190832 +11826,barefoot,0,189598 +13804,earrings,0,187315 +613209,white_legwear,0,187140 +384774,necktie,0,186672 
+3870,thighs,0,184737 +194013,frills,0,184179 +8672,shoes,0,183793 +406,sky,0,183362 +375669,open_clothes,0,174640 +664375,parted_lips,0,173799 +10228,elbow_gloves,0,164774 +435324,day,0,164530 +2177,striped,0,163056 +3985,shorts,0,163045 +13879,outdoors,0,161233 +2785,horns,0,157631 +431446,alternate_costume,0,156211 +401228,sword,0,154350 +1793,fang,0,153597 +6010,tears,0,153505 +390728,multicolored_hair,0,153366 +9843,midriff,0,152949 +2646,hairclip,0,152297 +4025,choker,0,151617 +410129,looking_back,0,150573 +10863,tongue,0,149722 +400314,sleeveless,0,148084 +15522,pointy_ears,0,147975 +399836,3girls,0,146285 +1283444,fate/grand_order,3,136586 +9864,belt,0,135898 +464549,cloud,0,134548 +2831,idolmaster,3,133121 +2772,shiny,0,131677 +581144,puffy_sleeves,0,130426 +72,cat_ears,0,130058 +474820,black_gloves,0,124583 +474821,white_gloves,0,122406 +400123,hair_flower,0,119203 +657955,artist_name,0,118512 +6054,teeth,0,117761 +375020,fingerless_gloves,0,116840 +9168,miniskirt,0,115912 +16750,pink_eyes,0,113918 +449194,2boys,0,113886 +1747,chibi,0,112446 +401481,on_back,0,112123 +445,kimono,0,111699 +4152,uniform,0,111691 +5576,armpits,0,110404 +71730,dark_skin,0,110110 +374791,white_panties,0,108484 +411263,signature,0,108334 +394970,wide_sleeves,0,108100 +261,yuri,0,107768 +6346,cape,0,107421 +3198,scarf,0,107215 +502710,tongue_out,0,107106 +656624,star_(symbol),0,106890 +1573,water,0,105762 +5948,hood,0,105175 +375171,indoors,0,103902 +3796,bra,0,103319 +4659,armor,0,103188 +1731,socks,0,101366 +10926,pants,0,101267 +4333,bag,0,101228 +125238,sweatdrop,0,100716 +510962,character_name,0,99919 +14599,:o,0,99911 +16580,grey_hair,0,99630 +385445,vocaloid,3,99602 +537684,blunt_bangs,0,98627 +234192,from_behind,0,98041 +389378,sailor_collar,0,97580 +374628,black_skirt,0,97499 +378032,flat_chest,0,97428 +381629,side_ponytail,0,96782 +13199,black_eyes,0,96005 +4068,necklace,0,95859 +87676,orange_hair,0,94878 +470,bunny_ears,0,92861 +273,apron,0,92584 +478849,collared_shirt,0,91700 +2999,pokemon,3,91077 +52138,twin_braids,0,90681 +89368,aqua_eyes,0,90260 +11628,bowtie,0,90177 +10644,zettai_ryouiki,0,89459 +384884,grin,0,89299 +1931,sketch,0,88887 +1254363,twitter_username,0,87949 +6176,vest,0,86269 +399541,hair_over_one_eye,0,86184 +375229,off_shoulder,0,85527 +11904,bracelet,0,85021 +9882,high_heels,0,84249 +632214,grey_background,0,84048 +560958,dated,0,83734 +102962,nail_polish,0,83617 +539367,hand_up,0,83427 +2060,feet,0,83396 +14620,white_dress,0,83377 +652604,two_side_up,0,82546 +5501,lips,0,82432 +399827,arms_up,0,81068 +4052,collar,0,80769 +670071,holding_weapon,0,79892 +659061,idolmaster_cinderella_girls,3,79206 +7820,military,0,78766 +4019,mole,0,78083 +475187,arm_up,0,77722 +641577,looking_at_another,0,77226 +3477,sweater,0,76981 +464586,tree,0,76813 +467104,stomach,0,76652 +659098,black_footwear,0,75687 +754325,groin,0,75565 +431755,head_tilt,0,74437 +1401711,red_neckwear,0,73891 +39127,black_dress,0,73013 +589,gun,0,72824 +3314,kneehighs,0,72405 +494744,shiny_skin,0,72221 +464553,cup,0,72214 +464535,book,0,72035 +572915,dark_skinned_female,0,71614 +59,maid,0,71509 +389404,cat_tail,0,70466 +412078,wrist_cuffs,0,69198 +4867,wet,0,68777 +451155,hand_on_hip,0,68724 +511141,from_side,0,68702 +10923,torn_clothes,0,68654 +412555,4girls,0,68526 +524070,shiny_hair,0,68459 +358,cosplay,0,68438 +465277,plaid,0,68238 +387214,military_uniform,0,67720 +590165,english_text,0,67582 +3918,4koma,0,67581 +4311,legs,0,67549 +375372,dutch_angle,0,67026 +13132,petals,0,67002 +6107,blurry,0,66977 
+463173,maid_headdress,0,66619 +427008,fingernails,0,66473 +490655,blue_sky,0,66438 +390401,red_ribbon,0,66286 +6295,leotard,0,66266 +684639,puffy_short_sleeves,0,66238 +385430,hatsune_miku,4,65997 +464572,pillow,0,65908 +1445905,v-shaped_eyebrows,0,65867 +494251,one-piece_swimsuit,0,65707 +438623,neckerchief,0,65542 +379475,sash,0,64872 +5126,hug,0,64812 +470807,red_bow,0,64743 +428173,blue_skirt,0,64503 +10447,kneeling,0,64200 +451371,see-through,0,64057 +16718,black_panties,0,63894 +1431357,azur_lane,3,63090 +94007,aqua_hair,0,63005 +440465,speech_bubble,0,62909 +391631,gradient,0,62842 +479939,sleeves_past_wrists,0,60728 +95405,orange_eyes,0,60522 +2585,bed,0,59611 +389813,single_braid,0,58955 +5569,coat,0,58720 +12239,hakurei_reimu,4,58655 +413564,two-tone_hair,0,58358 +8641,dress_shirt,0,58302 +464579,sparkle,0,58116 +426781,medium_hair,0,57813 +531371,gradient_background,0,57378 +379615,open_jacket,0,56735 +539584,fur_trim,0,56690 +2508,blood,0,56347 +551772,parted_bangs,0,56321 +567316,6+girls,0,56231 +2687,v,0,56101 +10422,witch_hat,0,55907 +675314,copyright_name,0,55769 +475418,bare_arms,0,55661 +542846,no_humans,0,55443 +442865,headgear,0,55369 +5832,fruit,0,55305 +626,underboob,0,55221 +468554,double_bun,0,55132 +394174,short_shorts,0,55065 +12242,kirisame_marisa,4,55042 +469714,bare_legs,0,55004 +6028,sideboob,0,54935 +2813,window,0,54791 +402217,^_^,0,54358 +442167,alternate_hairstyle,0,54305 +547302,pokemon_(game),3,54189 +9344,bodysuit,0,53801 +2335,fox_ears,0,53691 +1736,profile,0,53638 +449676,ascot,0,53620 +537200,symbol-shaped_pupils,0,53490 +381555,headband,0,53381 +592923,mahou_shoujo_madoka_magica,3,53303 +2279,headphones,0,52904 +427348,leaning_forward,0,52555 +5565,:3,0,52516 +9312,makeup,0,52282 +16005,side-tie_bikini,0,51630 +411323,capelet,0,51556 +1594634,clothing_cutout,0,51541 +377844,siblings,0,51361 +6536,bell,0,51276 +579466,looking_to_the_side,0,51248 +1915,umbrella,0,50413 +1723,parody,0,50164 +613623,striped_legwear,0,50043 +202817,hoodie,0,49802 +2770,pantyshot,0,49651 +395321,crop_top,0,49384 +1797,mask,0,49321 +382397,strapless,0,49292 +464808,holding_hands,0,49207 +416202,eyelashes,0,49149 +464546,chain,0,49147 +425206,magical_girl,0,48663 +474500,black_ribbon,0,48519 +4320,glowing,0,48399 +400120,traditional_media,0,48206 +3472,night,0,48132 +398263,eyebrows,0,48125 +395448,thigh_boots,0,48030 +1277433,mole_under_eye,0,47936 +1128907,pokemon_(creature),0,47882 +600250,hair_tubes,0,47722 +460802,rose,0,47717 +1407561,virtual_youtuber,0,47502 +394222,arm_support,0,47074 +89228,grey_eyes,0,46638 +444539,skindentation,0,46496 +51528,fox_tail,0,46288 +8091,drill_hair,0,46234 +407186,sleeveless_shirt,0,46212 +460262,depth_of_field,0,46208 +420531,nose_blush,0,45822 +541727,wariza,0,45814 +478565,bed_sheet,0,45664 +568880,sleeveless_dress,0,45260 +464584,tattoo,0,45166 +464534,bird,0,45129 +4009,turtleneck,0,45124 +653206,one_side_up,0,45058 +1269639,covered_navel,0,45033 +374938,frown,0,44956 +10231,mouth_hold,0,44633 +10701,animal,0,44445 +144876,embarrassed,0,44425 +547860,black_jacket,0,44365 +1976,cover,0,44343 +11325,ocean,0,44206 +258190,beret,0,44011 +1247160,black_bow,0,43986 +600177,detached_collar,0,43925 +687973,girls_und_panzer,3,43808 +635352,love_live!,3,43653 +499624,blush_stickers,0,43607 +484666,transparent_background,0,43423 +565513,floating_hair,0,43368 +469652,garter_straps,0,43306 +8714,watermark,0,43284 +1310137,remodel_(kantai_collection),0,43274 +418899,plaid_skirt,0,43038 +1328533,ass_visible_through_thighs,0,43032 
+377124,back,0,42751 +444002,from_above,0,42681 +391128,gradient_hair,0,42522 +13176,shadow,0,42410 +8831,skirt_lift,0,42371 +4207,eating,0,42345 +467863,neck_ribbon,0,42329 +12248,remilia_scarlet,4,42276 +447393,on_side,0,42215 +513475,multiple_views,0,42175 +481846,hands_up,0,42167 +574407,wavy_mouth,0,42015 +443395,thigh_strap,0,41865 +377140,blue_dress,0,41740 +479563,low_twintails,0,41717 +2270,eyepatch,0,41598 +2726,moon,0,41566 +1486996,animal_ear_fluff,0,41339 +3449,heterochromia,0,41253 +4244,leaf,0,41130 +221,school_swimsuit,0,41046 +2091,bandages,0,40976 +2904,chair,0,40938 +503552,red_skirt,0,40926 +403649,thigh_gap,0,40857 +319,lingerie,0,40830 +13133,fire_emblem,3,40826 +547073,underwear_only,0,40539 +475775,arms_behind_back,0,40264 +15737,bat_wings,0,40149 +460404,stuffed_toy,0,40064 +446950,fangs,0,39737 +421663,wavy_hair,0,39702 +3920,obi,0,39698 +2799,beach,0,39575 +375176,crossed_arms,0,39532 +647551,flying_sweatdrops,0,39468 +457597,swept_bangs,0,39348 +452032,looking_away,0,39296 +1094664,colored_skin,0,39131 +374620,black_bikini,0,39003 +28200,toes,0,38998 +12314,izayoi_sakuya,4,38919 +436576,short_dress,0,38843 +546821,halterneck,0,38697 +6059,blazer,0,38347 +375404,chinese_clothes,0,38201 +3986,sandals,0,38074 +2716,cat,0,38042 +1390441,black_headwear,0,37883 +458933,facial_hair,0,37880 +580545,blue_background,0,37667 +421662,5girls,0,37487 +452195,hair_bobbles,0,37315 +419938,expressionless,0,37223 +488167,scrunchie,0,37183 +12249,flandre_scarlet,4,37170 +454933,hair_bun,0,37151 +431235,knee_boots,0,36919 +3592,undressing,0,36796 +5831,katana,0,36795 +464822,precure,3,36744 +3875,sleeping,0,36713 +524961,black_shirt,0,36659 +464588,hat_ribbon,0,36622 +3988,scar,0,36541 +2689,formal,0,36194 +464574,polka_dot,0,36105 +466990,;d,0,36092 +490999,on_bed,0,36034 +476621,floral_print,0,35999 +413672,leg_up,0,35831 +481508,facial_mark,0,35738 +379915,stuffed_animal,0,35723 +426936,white_bikini,0,35706 +529256,fake_animal_ears,0,35659 +395400,buttons,0,35628 +420366,from_below,0,35618 +1441865,black_neckwear,0,35612 +356542,sunlight,0,35550 +1936,crossover,0,35492 +516477,outstretched_arms,0,35407 +494115,younger,0,35326 +436054,highleg,0,35165 +2993,phone,0,35140 +572731,heart-shaped_pupils,0,35073 +413179,trembling,0,34880 +719985,streaked_hair,0,34695 +477028,cleavage_cutout,0,34684 +14814,couple,0,34582 +16700,>_<,0,34410 +465719,muscular,0,34404 +3209,table,0,34212 +3649,helmet,0,34210 +4850,lipstick,0,34164 +7952,suspenders,0,34079 +385639,wolf_ears,0,33987 +582201,mob_cap,0,33982 +458482,short_twintails,0,33855 +13227,happy,0,33830 +534982,^^^,0,33829 +375476,soles,0,33804 +1303252,brown_footwear,0,33646 +9714,bent_over,0,33589 +575982,light_smile,0,33516 +487156,3boys,0,33312 +463179,no_shoes,0,33206 +5,fate/stay_night,3,33128 +1556,backpack,0,33124 +9114,antenna_hair,0,33039 +584749,pink_background,0,33004 +4188,crown,0,32959 +465810,squatting,0,32909 +415974,looking_down,0,32830 +4543,striped_panties,0,32715 +4232,tan,0,32546 +6128,cardigan,0,32542 +698161,holding_sword,0,32505 +11820,face,0,32386 +465525,crossed_legs,0,32325 +7525,final_fantasy,3,32312 +1441351,idolmaster_(classic),3,32277 +1345229,white_headwear,0,32182 +1324674,girls_frontline,3,32121 +464539,bug,0,32045 +709734,playboy_bunny,0,32026 +1267600,granblue_fantasy,3,31821 +389777,red_dress,0,31770 +464903,crying,0,31716 +4474,fire,0,31689 +459291,standing_on_one_leg,0,31617 +494102,multicolored,0,31468 +2328,polearm,0,31286 +480701,jojo_no_kimyou_na_bouken,3,31173 +10707,sisters,0,31021 
+517832,outstretched_arm,0,30902 +447919,cover_page,0,30833 +432696,on_stomach,0,30789 +534835,hair_intakes,0,30719 +688777,half-closed_eyes,0,30702 +219401,denim,0,30638 +464563,insect,0,30575 +2362,casual,0,30449 +3239,cellphone,0,30439 +358992,bike_shorts,0,30354 +4027,staff,0,30347 +5524,sunglasses,0,30280 +93927,pink_panties,0,30239 +429999,tiara,0,30138 +10229,suit,0,30103 +4123,wind,0,30088 +7455,tank_top,0,29975 +403577,pointing,0,29957 +539837,high_ponytail,0,29815 +592975,love_live!_school_idol_project,3,29774 +8068,demon_girl,0,29768 +395015,monster_girl,0,29765 +10833,bottle,0,29683 +405345,cherry_blossoms,0,29597 +553142,blurry_background,0,29398 +374979,all_fours,0,29383 +3949,ring,0,29381 +9123,alice_margatroid,4,29310 +388200,kochiya_sanae,4,29305 +403060,gauntlets,0,29289 +12247,patchouli_knowledge,4,29251 +628293,skirt_set,0,29143 +410004,crescent,0,29137 +487205,cloudy_sky,0,29101 +546609,blue_bow,0,29088 +6364,grass,0,29077 +584911,shaded_face,0,29022 +401601,loafers,0,28763 +383830,no_pants,0,28739 +468534,light_brown_hair,0,28721 +1019196,otoko_no_ko,0,28599 +9863,bikini_top,0,28535 +11879,blouse,0,28456 +465870,surprised,0,28356 +463115,pink_bow,0,28337 +6526,side-tie_panties,0,28316 +1369969,ground_vehicle,0,28204 +1710,towel,0,28151 +2755,lace,0,28117 +375459,looking_up,0,28032 +487562,wing_collar,0,28028 +448225,tokin_hat,0,27968 +408437,checkered,0,27960 +448202,peaked_cap,0,27814 +445388,bob_cut,0,27788 +1245245,admiral_(kantai_collection),4,27779 +471498,multiple_tails,0,27719 +1430291,artoria_pendragon_(all),4,27698 +14452,feathers,0,27561 +464573,plant,0,27472 +464713,head_wings,0,27432 +376002,genderswap,0,27426 +492544,frilled_dress,0,27368 +397117,curtains,0,27327 +314230,breast_press,0,27309 +1720,cirno,4,27291 +464565,knife,0,27290 +419309,thick_thighs,0,27141 +385882,third_eye,0,27017 +419429,frilled_skirt,0,26934 +464564,instrument,0,26933 +12246,yakumo_yukari,4,26895 +521420,slit_pupils,0,26870 +656169,hand_on_own_chest,0,26824 +12244,konpaku_youmu,4,26820 +511136,border,0,26802 +3046,kiss,0,26701 +465074,doujinshi,0,26678 +12667,child,0,26559 +520398,white_skirt,0,26513 +387991,abs,0,26512 +395533,sheath,0,26464 +82326,?,0,26456 +1337464,x_hair_ornament,0,26391 +389705,between_breasts,0,26348 +1318624,kemono_friends,3,26279 +1288957,white_footwear,0,26249 +643561,eyes_visible_through_hair,0,26244 +466654,wristband,0,26108 +611487,juliet_sleeves,0,26011 +2849,goggles,0,25823 +524399,hat_bow,0,25821 +5648,nature,0,25589 +613197,brown_legwear,0,25575 +498000,blue_ribbon,0,25550 +76,gundam,3,25498 +464542,butterfly,0,25434 +566918,adapted_costume,0,25378 +426598,bow_panties,0,25366 +456255,front-tie_top,0,25284 +446647,pale_skin,0,25276 +1297467,lifted_by_self,0,25183 +464568,musical_note,0,25138 +2252,candy,0,25073 +1227044,ribbon_trim,0,25030 +4526,cross,0,24918 +664258,no_headwear,0,24897 +376528,:<,0,24865 +401137,clenched_teeth,0,24741 +5746,box,0,24717 +379387,t-shirt,0,24664 +464540,building,0,24502 +589398,hood_down,0,24495 +1382794,black_choker,0,24474 +515302,green_skirt,0,24423 +2737,fan,0,24402 +1403814,blue_neckwear,0,24372 +1593554,light_purple_hair,0,24356 +609507,holding_food,0,24347 +390662,foreshortening,0,24301 +448007,drooling,0,24127 +4000,microphone,0,23995 +1585391,abyssal_ship,0,23977 +475152,k-on!,3,23943 +575322,clenched_hand,0,23887 +1393877,feet_out_of_frame,0,23856 +376491,wet_clothes,0,23853 +462808,tareme,0,23766 +533054,waist_apron,0,23638 +302,elf,0,23545 +610236,dark_skinned_male,0,23510 +450107,thick_eyebrows,0,23493 
+556011,lace_trim,0,23490 +4831,piercing,0,23472 +526340,letterboxed,0,23440 +594766,star_(sky),0,23435 +580738,black_hairband,0,23392 +3508,angry,0,23350 +566461,anger_vein,0,23310 +701697,white_bow,0,23286 +1288118,hair_flaps,0,23249 +427968,revealing_clothes,0,23226 +383851,twin_drills,0,23215 +12308,shameimaru_aya,4,23188 +423613,animal_print,0,23121 +488133,no_hat,0,23113 +378850,armband,0,23088 +401836,messy_hair,0,23057 +374845,mary_janes,0,23036 +415668,single_thighhigh,0,23014 +1479837,hololive,3,22953 +389402,upskirt,0,22947 +404507,sharp_teeth,0,22891 +392123,bunny_tail,0,22764 +466984,...,0,22600 +471601,black_bra,0,22550 +527593,lyrical_nanoha,3,22472 +404,christmas,0,22433 +561547,shoulder_armor,0,22419 +465152,china_dress,0,22392 +389108,glowing_eyes,0,22383 +1367103,upper_teeth,0,22300 +469125,jingle_bell,0,22273 +670638,clothes_writing,0,22250 +395223,scenery,0,22238 +12307,fujiwara_no_mokou,4,22212 +413908,spiked_hair,0,22193 +612641,black_shorts,0,22170 +1388799,blue_sailor_collar,0,22130 +549225,single_horn,0,22107 +389553,skin_tight,0,22045 +538298,strapless_dress,0,21994 +11374,hong_meiling,4,21891 +12464,brooch,0,21870 +4536,tsurime,0,21867 +374782,sneakers,0,21740 +166133,night_sky,0,21718 +77257,strap_slip,0,21711 +410328,hands,0,21708 +166531,pink_dress,0,21688 +589376,red_flower,0,21686 +4241,rope,0,21651 +10847,breath,0,21630 +643253,web_address,0,21623 +554980,motor_vehicle,0,21599 +1441885,semi-rimless_eyewear,0,21557 +15689,:p,0,21527 +592924,akemi_homura,4,21514 +422340,full_moon,0,21481 +471755,machinery,0,21304 +3540,hakama,0,21250 +15672,veil,0,21188 +561,mecha,0,21186 +12304,reisen_udongein_inaba,4,21184 +407678,wolf_tail,0,21109 +11485,persona,3,21085 +602295,hair_over_shoulder,0,21081 +427142,komeiji_koishi,4,21074 +683385,red_footwear,0,21058 +103483,fishnets,0,21020 +4275,steam,0,20943 +410734,carrying,0,20906 +397327,blue_shirt,0,20904 +382270,rifle,0,20880 +507741,frilled_sleeves,0,20851 +613601,torn_legwear,0,20840 +2863,broom,0,20838 +9522,suzumiya_haruhi_no_yuuutsu,3,20799 +2102,snow,0,20730 +4172,corset,0,20713 +514890,black_pants,0,20637 +5855,couch,0,20628 +368,dog_ears,0,20534 +417741,bridal_gauntlets,0,20503 +477288,white_ribbon,0,20499 +382111,breast_hold,0,20475 +379725,dual_persona,0,20439 +429369,finger_to_mouth,0,20318 +392810,vertical_stripes,0,20270 +380831,demon_tail,0,20234 +1302826,mole_under_mouth,0,20205 +10902,close-up,0,20136 +1379643,world_witches_series,3,20092 +1364406,genderswap_(mtf),0,20035 +489235,motion_lines,0,20026 +592925,kaname_madoka,4,20015 +468477,arms_behind_head,0,20008 +1464061,gen_1_pokemon,0,19993 +455456,santa_hat,0,19879 +396680,personification,0,19871 +479729,gem,0,19701 +11410,claws,0,19645 +569748,black_background,0,19493 +535373,cross-laced_footwear,0,19487 +1441874,yellow_neckwear,0,19481 +467493,handgun,0,19481 +10905,pendant,0,19478 +389814,side_braid,0,19445 +390681,headset,0,19398 +12245,saigyouji_yuyuko,4,19386 +576310,white_flower,0,19253 +665184,ribbed_sweater,0,19235 +4530,gift,0,19169 +427143,komeiji_satori,4,19158 +453340,santa_costume,0,19100 +146061,baseball_cap,0,19081 +149598,androgynous,0,19075 +398273,long_legs,0,19050 +531403,kemonomimi_mode,0,19047 +396969,seiza,0,19008 +2444,tanline,0,18982 +580232,white_jacket,0,18971 +375441,dress_lift,0,18965 +1238034,kaga_(kantai_collection),4,18955 +479146,sailor_dress,0,18925 +513317,fate/extra,3,18881 +619736,blue_jacket,0,18852 +592555,panties_under_pantyhose,0,18716 +1433237,arknights,3,18703 +1865,bandaid,0,18689 +448477,buckle,0,18648 
+634781,nontraditional_miko,0,18585 +635786,index_finger_raised,0,18531 +11813,lens_flare,0,18529 +662952,headwear_removed,0,18527 +395218,inubashiri_momiji,4,18513 +379489,side_slit,0,18502 +1008243,holding_gun,0,18477 +462583,between_legs,0,18450 +390257,eye_contact,0,18240 +12658,neon_genesis_evangelion,3,18195 +521477,half_updo,0,18165 +572753,6+boys,0,18108 +460502,dual_wielding,0,18053 +393879,alcohol,0,18012 +320292,cloak,0,18009 +4261,forest,0,18006 +434996,o_o,0,17954 +668635,pom_pom_(clothes),0,17830 +5474,ball,0,17829 +432529,sweater_vest,0,17818 +1373022,red-framed_eyewear,0,17767 +390594,red_shirt,0,17752 +689532,low-tied_long_hair,0,17702 +384552,wading,0,17697 +2907,desk,0,17662 +375519,armlet,0,17636 +613195,blue_legwear,0,17577 +4596,halloween,0,17528 +2447,camisole,0,17528 +593298,yellow_background,0,17514 +390596,purple_dress,0,17505 +4648,teacup,0,17452 +635112,light_particles,0,17448 +510802,french_braid,0,17417 +556132,tied_hair,0,17354 +645083,smartphone,0,17284 +571690,hair_scrunchie,0,17267 +15260,beard,0,17250 +452086,lolita_fashion,0,17243 +469517,doujin_cover,0,17240 +478557,straight_hair,0,17227 +491144,blue_bikini,0,17207 +8243,flying,0,17200 +6175,cuffs,0,17193 +376117,wide_hips,0,17170 +470790,demon_horns,0,17160 +395913,to_aru_majutsu_no_index,3,17086 +524897,interlocked_fingers,0,17081 +1316316,pelvic_curtain,0,17064 +392024,tray,0,17019 +553367,turret,0,16960 +609,fish,0,16907 +491544,hands_together,0,16900 +467811,knees_up,0,16870 +7450,gym_uniform,0,16660 +1255562,folded_ponytail,0,16607 +427674,epaulettes,0,16588 +2169,innertube,0,16561 +380540,legs_up,0,16540 +470314,pencil_skirt,0,16527 +428523,hooded_jacket,0,16514 +454379,=_=,0,16507 +465382,crossdressing,0,16456 +438458,clenched_hands,0,16454 +12250,yakumo_ran,4,16436 +656165,hand_on_own_face,0,16424 +1559,minigirl,0,16403 +685432,two-tone_background,0,16371 +457883,string_bikini,0,16360 +523327,starry_sky,0,16354 +617615,legs_apart,0,16350 +4145,spear,0,16347 +665452,facing_viewer,0,16303 +376594,spikes,0,16266 +613208,red_legwear,0,16251 +464578,skull,0,16241 +9311,zipper,0,16199 +492380,jitome,0,16147 +3943,smoke,0,16128 +534168,v_arms,0,16112 +584958,spoken_heart,0,16059 +602223,out_of_frame,0,16038 +382969,bow_(weapon),0,16013 +1651,rain,0,16007 +520850,bespectacled,0,15997 +2378,buruma,0,15991 +545992,high_heel_boots,0,15954 +10279,hair_rings,0,15950 +464538,bubble,0,15912 +494669,spot_color,0,15910 +563256,yellow_bow,0,15888 +497007,backlighting,0,15869 +10402,bowl,0,15852 +900563,own_hands_together,0,15810 +593109,miki_sayaka,4,15797 +464566,monster,0,15771 +631529,white_apron,0,15763 +669624,contrapposto,0,15731 +486327,faceless,0,15724 +520555,older,0,15713 +1441886,under-rim_eyewear,0,15689 +717047,page_number,0,15683 +494241,long_fingernails,0,15671 +643274,partially_submerged,0,15645 +1258089,ribbon-trimmed_sleeves,0,15624 +6,saber,4,15607 +106450,shield,0,15580 +7585,new_year,0,15573 +549356,arm_at_side,0,15548 +459933,drinking_glass,0,15493 +1916,cake,0,15466 +3284,forehead,0,15461 +646849,fate/apocrypha,3,15455 +375986,empty_eyes,0,15444 +658106,o-ring,0,15428 +491758,sleeves_rolled_up,0,15418 +394151,plate,0,15403 +593296,red_background,0,15401 +4091,bunny,0,15395 +416676,denim_shorts,0,15362 +704500,holding_cup,0,15348 +435433,red_gloves,0,15343 +448882,|_|,0,15299 +1292999,spoken_ellipsis,0,15246 +479955,4boys,0,15229 +384842,moriya_suwako,4,15229 +401968,red_nails,0,15218 +9831,teddy_bear,0,15194 +376923,black_wings,0,15190 +11527,reflection,0,15190 +1515358,blue_theme,0,15173 
+401582,kazami_yuuka,4,15163 +1233478,black_serafuku,0,15139 +1441877,eyewear_on_head,0,15135 +374849,school_bag,0,15134 +717927,short_hair_with_long_locks,0,15129 +615165,green_bow,0,15124 +390427,kagamine_rin,4,15119 +149799,happy_birthday,0,15104 +11797,strike_witches,3,15104 +479374,single_glove,0,15077 +463127,white_bra,0,15071 +2082,rumia,4,15060 +14442,walking,0,15043 +475720,short_ponytail,0,15032 +538012,hat_removed,0,15011 +2489,sunset,0,15004 +470798,pink_nails,0,14932 +4039,dog,0,14926 +728008,thighband_pantyhose,0,14922 +511594,arm_behind_back,0,14899 +650560,fishnet_legwear,0,14890 +547132,extra_ears,0,14854 +394305,curly_hair,0,14845 +399655,striped_bikini,0,14838 +466881,sun_hat,0,14838 +509884,impossible_clothes,0,14832 +427050,shiny_clothes,0,14808 +602257,konpaku_youmu_(ghost),4,14784 +381650,fate/zero,3,14775 +426491,hands_on_hips,0,14767 +819964,pink_flower,0,14728 +520397,pink_skirt,0,14705 +3474,portrait,0,14676 +1447087,jeanne_d'arc_(fate)_(all),4,14623 +1811,twins,0,14596 +406964,floating,0,14563 +670088,hand_on_another's_head,0,14540 +1303004,touken_ranbu,3,14538 +399,yukata,0,14529 +10538,ice,0,14529 +1240738,hibiki_(kantai_collection),4,14465 +483898,frilled_bikini,0,14460 +388067,demon_wings,0,14417 +1648,oekaki,0,14402 +1568,yaoi,0,14401 +1238036,shimakaze_(kantai_collection),4,14391 +486611,brown_gloves,0,14377 +412202,:q,0,14372 +529213,maid_apron,0,14371 +427184,reiuji_utsuho,4,14361 +1509969,hip_focus,0,14351 +12302,kamishirasawa_keine,4,14295 +474,chen,4,14293 +3059,street_fighter,3,14289 +451370,upside-down,0,14259 +473529,;),0,14256 +457726,track_jacket,0,14221 +6439,running,0,14206 +2268,popsicle,0,14201 +690177,tress_ribbon,0,14192 +501706,card_(medium),0,14174 +452445,+_+,0,14172 +559163,emphasis_lines,0,14118 +648056,paw_pose,0,14095 +506510,military_hat,0,14015 +15571,shawl,0,14007 +1314596,black_leotard,0,13972 +15261,freckles,0,13971 +391713,long_skirt,0,13948 +460324,red_bikini,0,13929 +572906,crying_with_eyes_open,0,13896 +670636,multicolored_clothes,0,13854 +1582,miko,0,13846 +658950,holding_book,0,13818 +646879,brown_background,0,13806 +427398,butt_crack,0,13774 +589430,sakura_kyouko,4,13730 +5953,snake,0,13719 +456193,outstretched_hand,0,13609 +616524,grey_skirt,0,13600 +383392,kawashiro_nitori,4,13551 +570718,fur_collar,0,13517 +534968,mini_hat,0,13504 +593108,tomoe_mami,4,13503 +1373029,black-framed_eyewear,0,13500 +1416353,red_headwear,0,13477 +655303,fox_girl,0,13463 +583857,oni_horns,0,13461 +674623,feathered_wings,0,13443 +133767,swimsuit_under_clothes,0,13435 +529447,arm_behind_head,0,13412 +16721,blue_panties,0,13410 +473327,tatara_kogasa,4,13405 +465046,cat_girl,0,13397 +14635,paws,0,13373 +400,pajamas,0,13356 +516211,tassel,0,13355 +583299,goggles_on_head,0,13333 +4010,halo,0,13328 +1260354,text_focus,0,13321 +522454,hand_in_hair,0,13291 +493843,head_rest,0,13290 +488864,red_jacket,0,13289 +626528,cropped_legs,0,13281 +447403,jumping,0,13275 +374533,creature,0,13266 +392585,contemporary,0,13256 +11882,watch,0,13239 +520991,pink_ribbon,0,13215 +634316,pectorals,0,13196 +536573,colored_sclera,0,13188 +405124,smirk,0,13187 +1309317,love_live!_sunshine!!,3,13173 +415326,hinanawi_tenshi,4,13166 +477354,sheathed,0,13150 +14946,eyeshadow,0,13124 +13282,pose,0,13120 +379820,aircraft,0,13109 +8565,sarashi,0,13108 +398889,circlet,0,13093 +417866,serious,0,13083 +508016,pocket,0,13080 +643898,purple_background,0,13068 +652293,legs_together,0,13039 +464562,injury,0,13023 +2467,ghost,0,13013 +515769,black_nails,0,13001 
+384087,angel_wings,0,12970 +16554,plump,0,12969 +613207,purple_legwear,0,12966 +3603,lollipop,0,12958 +513807,hug_from_behind,0,12949 +452112,tabard,0,12944 +15916,hair_down,0,12929 +1295739,overwatch,3,12905 +462546,hand_between_legs,0,12885 +427145,kaenbyou_rin,4,12819 +442474,competition_swimsuit,0,12816 +1352777,eighth_note,0,12802 +15770,top_hat,0,12782 +397215,outline,0,12780 +593297,green_background,0,12751 +643286,full-face_blush,0,12709 +4909,chopsticks,0,12701 +486,doll,0,12700 +379945,hairpin,0,12645 +1455296,sleeves_past_fingers,0,12644 +402062,!,0,12632 +2336,cigarette,0,12599 +1332796,round_teeth,0,12533 +16609,bloomers,0,12493 +586765,low_ponytail,0,12483 +16738,geta,0,12479 +9872,pauldrons,0,12421 +654772,knee_up,0,12417 +3661,oni,0,12372 +610272,purple_skirt,0,12361 +2967,jeans,0,12361 +671227,tearing_up,0,12345 +12306,houraisan_kaguya,4,12342 +538475,asymmetrical_hair,0,12317 +4357,chocolate,0,12316 +376102,realistic,0,12314 +1405752,meme_attire,0,12310 +1671,underwater,0,12309 +5705,ofuda,0,12292 +2312,valentine,0,12266 +544306,colored_eyelashes,0,12235 +467008,hoop_earrings,0,12227 +658682,knees_together_feet_apart,0,12201 +401340,nose,0,12200 +537668,poke_ball,0,12197 +461117,snowing,0,12180 +9434,wedding_dress,0,12178 +1238054,kongou_(kantai_collection),4,12177 +575806,league_of_legends,3,12171 +423620,:t,0,12167 +398209,the_legend_of_zelda,3,12128 +1799,koakuma,4,12118 +450779,emblem,0,12085 +662424,frog_hair_ornament,0,12075 +3714,cannon,0,12072 +600222,light_rays,0,12072 +571639,tales_of_(series),3,12043 +428330,brother_and_sister,0,11992 +537271,sword_art_online,3,11989 +6441,beads,0,11963 +1242707,shigure_(kantai_collection),4,11940 +467585,highleg_leotard,0,11912 +166757,green_dress,0,11897 +380572,dog_tail,0,11893 +1231245,kill_la_kill,3,11879 +446472,red_rose,0,11859 +403785,lowleg,0,11857 +611670,grey_shirt,0,11845 +1293269,blue_footwear,0,11812 +394759,hand_in_pocket,0,11808 +460642,spread_arms,0,11806 +10031,faceless_male,0,11788 +7532,stairs,0,11771 +391297,white_skin,0,11752 +695946,white_blouse,0,11741 +1238065,inazuma_(kantai_collection),4,11733 +8891,zoom_layer,0,11714 +491367,cropped_jacket,0,11694 +1569657,arrow_(projectile),0,11677 +442316,pouch,0,11671 +464582,strawberry,0,11649 +578153,drinking_straw,0,11629 +1414209,mash_kyrielight,4,11622 +383337,sailor_hat,0,11619 +6207,sportswear,0,11602 +11135,rock,0,11600 +387213,handbag,0,11577 +676924,anchor_symbol,0,11572 +464567,mountain,0,11558 +528471,mario_(series),3,11503 +521712,ear_piercing,0,11503 +1238031,akagi_(kantai_collection),4,11462 +8705,ice_cream,0,11454 +2562,camera,0,11445 +579793,frilled_apron,0,11445 +464536,bouquet,0,11425 +1487578,danganronpa_(series),3,11423 +403286,toned,0,11420 +448621,arm_warmers,0,11399 +405996,white_swimsuit,0,11396 +538768,height_difference,0,11378 +43263,anchor,0,11375 +12303,inaba_tewi,4,11372 +462209,thought_bubble,0,11370 +375144,nurse_cap,0,11370 +9351,gothic_lolita,0,11364 +475744,breastplate,0,11361 +12313,ibuki_suika,4,11335 +23249,everyone,0,11332 +3478,mirror,0,11327 +478640,hair_bell,0,11324 +8807,city,0,11315 +417888,age_difference,0,11311 +503349,hijiri_byakuren,4,11288 +610013,pink_lips,0,11263 +417883,striped_shirt,0,11260 +451332,bun_cover,0,11251 +397051,shirtless,0,11246 +456370,hands_in_pockets,0,11237 +7426,sign,0,11231 +499629,headpiece,0,11217 +2205,mouse_ears,0,11206 +447784,halter_top,0,11194 +4033,lantern,0,11170 +484456,back-to-back,0,11160 +659072,muneate,0,11159 +1462527,nijisanji,3,11147 +7446,mahou_shoujo_lyrical_nanoha,3,11144 
+5228,wand,0,11139 +9044,leg_lift,0,11125 +2809,spoon,0,11118 +421198,center_opening,0,11115 +407647,adjusting_hair,0,11115 +1491014,blue_headwear,0,11106 +1388800,black_sailor_collar,0,11100 +456585,pink_bra,0,11099 +7486,fighting_stance,0,11095 +165438,anklet,0,11086 +465444,antennae,0,11059 +712723,striped_bow,0,11045 +450702,naval_uniform,0,11034 +613205,pink_legwear,0,11032 +407056,unbuttoned,0,11030 +126800,code_geass,3,11012 +379178,lucky_star,3,10986 +498828,adjusting_clothes,0,10984 +1242758,blue_flower,0,10979 +3313,dragon,0,10974 +560,card,0,10971 +553015,open_book,0,10945 +1310829,princess_connect!,3,10943 +1295582,strapless_leotard,0,10922 +432198,arched_back,0,10920 +252271,mug,0,10905 +408040,hitodama,0,10899 +7783,pout,0,10886 +509,pocky,0,10885 +529493,jack-o'-lantern,0,10882 +381092,paper,0,10873 +1513844,pokemon_swsh,3,10872 +863718,idolmaster_million_live!,3,10862 +12300,mystia_lorelei,4,10850 +433182,!?,0,10811 +667463,visor_cap,0,10806 +484479,front-tie_bikini,0,10799 +663804,armored_dress,0,10795 +1354782,pokemon_sm,3,10787 +464560,frog,0,10779 +493026,tentacle_hair,0,10774 +446622,hime_cut,0,10765 +494245,red_scarf,0,10753 +376830,untied,0,10741 +1402258,fire_emblem_heroes,3,10735 +458728,waving,0,10732 +1238042,tenryuu_(kantai_collection),4,10731 +387050,sun,0,10705 +668121,monogatari_(series),3,10692 +15200,robe,0,10685 +613200,grey_legwear,0,10681 +397380,name_tag,0,10669 +1462684,princess_connect!_re:dive,3,10643 +10713,unzipped,0,10620 +1112079,starter_pokemon,0,10609 +514515,pink_shirt,0,10584 +467285,crystal,0,10580 +2367,blindfold,0,10541 +713203,crotch_seam,0,10530 +574399,blue_swimsuit,0,10500 +16139,mustache,0,10498 +11270,mittens,0,10497 +613885,brown_skirt,0,10456 +594859,backless_outfit,0,10450 +548703,sitting_on_person,0,10444 +1333464,re:zero_kara_hajimeru_isekai_seikatsu,3,10428 +464583,sunflower,0,10424 +17035,mahou_shoujo_lyrical_nanoha_strikers,3,10420 +515681,bishoujo_senshi_sailor_moon,3,10411 +524661,folding_fan,0,10400 +8418,fence,0,10393 +685353,photo_(medium),0,10392 +673911,hood_up,0,10343 +1416387,kita_high_school_uniform,0,10341 +662799,animal_hood,0,10332 +461529,apple,0,10318 +13027,fork,0,10308 +9260,blanket,0,10305 +401223,blazblue,3,10294 +1249693,borrowed_character,0,10292 +384841,yasaka_kanako,4,10280 +8822,eyeball,0,10270 +230,waitress,0,10270 +572346,wide-eyed,0,10257 +709959,inazuma_eleven_(series),3,10253 +397690,motion_blur,0,10249 +497607,blue_nails,0,10233 +815229,gochuumon_wa_usagi_desu_ka?,3,10232 +508272,belt_buckle,0,10223 +375026,headdress,0,10221 +15749,furry,0,10215 +13207,bandana,0,10213 +471090,hair_tie,0,10193 +1500438,splatoon_(series),3,10177 +483081,high_collar,0,10171 +537093,lace-up_boots,0,10166 +15395,clock,0,10166 +473267,nazrin,4,10154 +541716,yokozuwari,0,10145 +381163,petticoat,0,10143 +3846,robot,0,10099 +561624,water_drop,0,10099 +232297,gohei,0,10096 +11030,silhouette,0,10088 +11858,forehead_mark,0,10055 +378454,wind_lift,0,10049 +394992,shoulder_bag,0,10046 +12305,yagokoro_eirin,4,10039 +479794,number,0,10035 +548722,enmaided,0,10022 +384441,alternate_color,0,10007 +1757,music,0,9990 +1386163,patreon_username,0,9987 +1238045,ikazuchi_(kantai_collection),4,9982 +3242,airplane,0,9978 +406736,covering_mouth,0,9961 +664922,dragon_horns,0,9956 +507245,glint,0,9950 +1119509,high-waist_skirt,0,9921 +385182,pistol,0,9904 +393597,kagamine_len,4,9898 +407852,to_aru_kagaku_no_railgun,3,9896 +382800,bell_collar,0,9893 +1243463,orange_bow,0,9884 +464569,palm_tree,0,9876 +633528,alternate_breast_size,0,9867 
+3429,drink,0,9865 +503343,houjuu_nue,4,9861 +457810,megurine_luka,4,9850 +10440,door,0,9828 +1471829,naruto_(series),3,9806 +526218,bandeau,0,9806 +464995,dragon_ball,3,9804 +684620,puffy_long_sleeves,0,9794 +478262,double_v,0,9785 +558436,silent_comic,0,9785 +530083,kneepits,0,9772 +442898,skirt_hold,0,9769 +1258262,frilled_shirt_collar,0,9762 +280408,salute,0,9755 +1569656,arrow_(symbol),0,9751 +1764,guitar,0,9750 +2565,battle,0,9749 +594129,head_out_of_frame,0,9748 +378561,blue_skin,0,9737 +2898,basket,0,9723 +493832,military_vehicle,0,9699 +1314750,white_hairband,0,9677 +578430,floral_background,0,9664 +456150,akiyama_mio,4,9654 +420598,highleg_panties,0,9644 +1248878,yuudachi_(kantai_collection),4,9643 +640898,alternate_hair_length,0,9627 +375285,winter_clothes,0,9625 +1275600,bright_pupils,0,9612 +472727,wooden_floor,0,9606 +7508,straw_hat,0,9595 +1251593,holding_phone,0,9590 +644684,chestnut_mouth,0,9589 +409592,argyle,0,9582 +405195,persona_4,3,9549 +667171,holding_umbrella,0,9536 +475833,souryuu_asuka_langley,4,9530 +1256698,partially_fingerless_gloves,0,9527 +549355,arms_at_sides,0,9523 +713987,outside_border,0,9522 +574271,science_fiction,0,9506 +454489,no_nose,0,9506 +453768,fairy_wings,0,9502 +2802,earmuffs,0,9501 +524779,microskirt,0,9491 +6032,tabi,0,9484 +1464087,gen_3_pokemon,0,9481 +677856,star_hair_ornament,0,9479 +1463605,1other,0,9477 +377993,petting,0,9466 +401289,yellow_ribbon,0,9464 +11173,sundress,0,9459 +544390,>:),0,9457 +10801,android,0,9453 +11285,@_@,0,9441 +7580,mecha_musume,0,9425 +2319,fantasy,0,9420 +464544,can,0,9398 +1499655,fire_emblem:_three_houses,3,9392 +5267,labcoat,0,9383 +409425,bookshelf,0,9373 +4816,gakuran,0,9368 +152,ninja,0,9363 +615735,asymmetrical_legwear,0,9342 +8574,dragon_quest,3,9322 +531068,hakama_skirt,0,9315 +1263229,hand_on_headwear,0,9293 +464570,peach,0,9272 +1459169,idolmaster_shiny_colors,3,9251 +1292897,neptune_(series),3,9232 +1238044,akatsuki_(kantai_collection),4,9230 +465517,tengen_toppa_gurren_lagann,3,9227 +1222476,crescent_hair_ornament,0,9223 +626241,triangular_headpiece,0,9202 +390186,silver_eyes,0,9189 +617356,blue_shorts,0,9187 +383606,umineko_no_naku_koro_ni,3,9182 +645753,orange_background,0,9178 +639717,taut_clothes,0,9168 +2880,scythe,0,9159 +84427,reading,0,9155 +6226,rozen_maiden,3,9118 +492202,flat_cap,0,9113 +1247162,purple_bow,0,9107 +10280,flag,0,9095 +1441873,striped_neckwear,0,9086 +443915,yu-gi-oh!,3,9065 +15224,sand,0,9059 +656161,hand_on_own_cheek,0,9052 +444411,licking_lips,0,9047 +451767,food_on_face,0,9027 +1238872,zuikaku_(kantai_collection),4,9019 +469668,raised_eyebrows,0,9019 +1396724,white_sailor_collar,0,9017 +397045,railing,0,8980 +498413,jacket_on_shoulders,0,8965 +742715,adjusting_eyewear,0,8950 +5621,smoking,0,8944 +701042,bandaged_arm,0,8929 +413783,mizuhashi_parsee,4,8911 +375606,open_coat,0,8907 +505450,gift_box,0,8902 +375883,dragon_girl,0,8876 +620577,brown_jacket,0,8857 +472407,turtleneck_sweater,0,8855 +502047,blood_on_face,0,8850 +393578,beanie,0,8844 +668761,white_border,0,8834 +688713,snake_hair_ornament,0,8817 +374409,bad_anatomy,0,8811 +472943,black_sclera,0,8811 +1406114,pink_neckwear,0,8794 +624352,strap_gap,0,8793 +1358345,pink_footwear,0,8792 +642967,fate/extra_ccc,3,8757 +1505129,chinese_text,0,8736 +560167,character_doll,0,8725 +1894,succubus,0,8724 +457574,horizon,0,8724 +473301,bow_bra,0,8716 +3992,bucket,0,8673 +1321879,fire_emblem_fates,3,8637 +458,nurse,0,8634 +1464337,gen_4_pokemon,0,8631 +516299,style_parody,0,8627 +15987,logo,0,8622 +643257,white_leotard,0,8619 
+16704,glass,0,8617 +455932,size_difference,0,8614 +588702,vambraces,0,8610 +406034,arm_grab,0,8600 +2586,witch,0,8583 +450337,ankle_boots,0,8574 +494337,purple_ribbon,0,8572 +13814,morichika_rinnosuke,4,8562 +149791,drinking,0,8553 +441419,:>,0,8540 +1402579,green_neckwear,0,8526 +7441,fate_testarossa,4,8519 +494024,gym_leader,0,8498 +4308,pen,0,8491 +487236,long_dress,0,8488 +460323,pink_bikini,0,8470 +526693,holding_clothes,0,8453 +704492,armored_boots,0,8447 +872616,white_kimono,0,8437 +657790,smile_precure!,3,8427 +1271922,black_vest,0,8378 +1447377,tamamo_(fate)_(all),4,8371 +1391467,white_neckwear,0,8369 +607626,hair_stick,0,8365 +374921,sports_bra,0,8363 +1238032,fubuki_(kantai_collection),4,8361 +656170,hands_on_own_chest,0,8356 +1278425,rigging,0,8336 +12457,tatami,0,8336 +458819,talking,0,8335 +456270,tasuki,0,8332 +552306,criss-cross_halter,0,8313 +416906,bangle,0,8300 +2780,beachball,0,8298 +9466,suzumiya_haruhi,4,8283 +1335533,holding_staff,0,8270 +452205,stuffed_bunny,0,8265 +727448,touhou_(pc-98),3,8251 +1238043,nagato_(kantai_collection),4,8248 +1448617,abigail_williams_(fate/grand_order),4,8248 +8035,guilty_gear,3,8238 +472387,nakano_azusa,4,8235 +1501540,skin_fang,0,8222 +465977,hoshiguma_yuugi,4,8166 +522051,on_head,0,8166 +687736,short_eyebrows,0,8154 +9715,nagato_yuki,4,8149 +1238035,haruna_(kantai_collection),4,8144 +16149,sack,0,8121 +1464079,gen_2_pokemon,0,8114 +456688,multicolored_eyes,0,8111 +544230,asymmetrical_wings,0,8111 +513423,graphite_(medium),0,8109 +397948,road,0,8108 +469042,2koma,0,8107 +185924,key,0,8099 +374934,leg_hug,0,8096 +1574,magic,0,8085 +669033,triangle_mouth,0,8079 +2181,cheerleader,0,8075 +404958,pilot_suit,0,8073 +15020,hibiscus,0,8069 +375764,bara,0,8051 +547303,pokemon_(anime),3,8041 +1464481,gen_5_pokemon,0,8041 +728081,rwby,3,8021 +506,dark,0,8008 +7516,final_fantasy_vii,3,7986 +665407,hand_to_own_mouth,0,7980 +1308012,red_hakama,0,7976 +434704,string,0,7975 +7495,sarong,0,7973 +1256690,suspender_skirt,0,7969 +476461,the_king_of_fighters,3,7960 +525747,lolita_hairband,0,7953 +8038,higurashi_no_naku_koro_ni,3,7943 +200140,magic_circle,0,7936 +572684,white_scarf,0,7921 +1077303,underbust,0,7921 +1611664,oil-paper_umbrella,0,7912 +393840,purple_panties,0,7899 +638495,toyosatomimi_no_miko,4,7888 +15131,riding,0,7869 +15258,torpedo,0,7868 +15272,bridal_veil,0,7865 +599169,shanghai_doll,4,7859 +4960,parasol,0,7853 +394223,hands_clasped,0,7847 +1464362,gen_7_pokemon,0,7843 +509379,on_floor,0,7829 +1292950,boku_no_hero_academia,3,7811 +594664,green_shirt,0,7810 +408438,hair_up,0,7809 +558230,ice_wings,0,7807 +1269433,hamakaze_(kantai_collection),4,7802 +5104,shade,0,7797 +4026,pumpkin,0,7796 +387999,leg_garter,0,7787 +1781,car,0,7774 +408359,bare_back,0,7767 +525,macross,3,7764 +389456,holster,0,7759 +652707,domino_mask,0,7745 +599873,air_bubble,0,7740 +542666,flipped_hair,0,7732 +681331,heart_hair_ornament,0,7730 +448279,cabbie_hat,0,7729 +551848,polka_dot_background,0,7728 +3105,naruto,3,7727 +513625,asymmetrical_bangs,0,7726 +11870,manly,0,7723 +553797,sharp_fingernails,0,7718 +444095,tail_ornament,0,7713 +517935,purple_bikini,0,7708 +2276,pool,0,7698 +384197,kagiyama_hina,4,7695 +455424,hirasawa_yui,4,7685 +496617,marker_(medium),0,7685 +399786,wedding_band,0,7681 +568920,+++,0,7669 +1391754,fujimaru_ritsuka_(female),4,7656 +1340178,kashima_(kantai_collection),4,7655 +1528497,genshin_impact,3,7654 +480899,5boys,0,7645 +554098,mole_on_breast,0,7640 +416331,animal_costume,0,7631 +1447959,nero_claudius_(fate)_(all),4,7627 
+457877,leaning_back,0,7627 +197,garters,0,7627 +600200,kyubey,4,7600 +3468,classroom,0,7595 +4436,bench,0,7578 +523548,black_swimsuit,0,7576 +518422,covered_mouth,0,7573 +527256,green_ribbon,0,7570 +474598,tate_eboshi,0,7563 +581558,pokemon_bw,3,7548 +1244676,one-piece_tan,0,7530 +537886,shorts_under_skirt,0,7529 +1252945,scar_across_eye,0,7528 +482857,joints,0,7523 +415948,cravat,0,7522 +1575551,gap_(touhou),0,7509 +498201,mini_crown,0,7504 +465707,biting,0,7500 +11883,cityscape,0,7495 +409720,scabbard,0,7493 +459290,snowflakes,0,7483 +684289,spoken_question_mark,0,7483 +1369802,blurry_foreground,0,7481 +465646,tohsaka_rin,4,7472 +461492,green_jacket,0,7472 +374936,lamp,0,7470 +8180,drunk,0,7461 +481383,wristwatch,0,7444 +2905,bat,0,7429 +614271,lace-trimmed_legwear,0,7411 +375017,pom_poms,0,7406 +604625,shingeki_no_kyojin,3,7405 +12299,wriggle_nightbug,4,7405 +457114,dragon_tail,0,7404 +547893,indian_style,0,7387 +460911,tiles,0,7376 +1238060,ryuujou_(kantai_collection),4,7374 +546229,black_belt,0,7370 +386389,bracer,0,7361 +435262,stubble,0,7348 +391568,assault_rifle,0,7345 +13126,lineart,0,7338 +502548,yellow_shirt,0,7334 +1369967,watercraft,0,7332 +412037,open_fly,0,7329 +6318,space,0,7323 +1254756,kono_subarashii_sekai_ni_shukufuku_wo!,3,7292 +413878,electricity,0,7283 +576561,alternate_eye_color,0,7281 +464545,carrot,0,7281 +693118,absurdly_long_hair,0,7277 +466164,laughing,0,7275 +4572,candle,0,7275 +426594,lace-trimmed_panties,0,7269 +1256471,off-shoulder_shirt,0,7268 +613597,print_legwear,0,7264 +547289,drawstring,0,7262 +414765,hair_over_eyes,0,7260 +535691,butterfly_hair_ornament,0,7256 +670983,hand_on_another's_shoulder,0,7249 +469551,facepaint,0,7239 +646549,red_cape,0,7238 +672604,fire_emblem_awakening,3,7234 +1226391,yellow_flower,0,7233 +3102,greaves,0,7224 +4964,nosebleed,0,7215 +4358,plugsuit,0,7197 +572672,japanese_armor,0,7191 +399930,highleg_swimsuit,0,7185 +1447086,jeanne_d'arc_(alter)_(fate),4,7183 +613596,vertical-striped_legwear,0,7177 +404056,yukkuri_shiteitte_ne,0,7160 +463399,randoseru,0,7150 +13810,torii,0,7145 +613947,toeless_legwear,0,7139 +421188,against_wall,0,7137 +8875,one_piece,3,7135 +2231,fairy,0,7131 +145788,fur,0,7095 +461736,alternate_hair_color,0,7080 +12311,onozuka_komachi,4,7079 +8552,takamachi_nanoha,4,7068 +648077,heart_of_string,0,7067 +448185,garrison_cap,0,7060 +617243,tiger_&_bunny,3,7052 +693708,furrowed_eyebrows,0,7050 +477439,white_wings,0,7049 +412048,ribbon_choker,0,7041 +541854,self_shot,0,7039 +388908,raglan_sleeves,0,7031 +638499,mononobe_no_futo,4,7030 +609887,blue_gloves,0,7030 +15771,raccoon_ears,0,7029 +502136,whisker_markings,0,7028 +464543,cable,0,7028 +9481,teapot,0,7027 +381911,bikini_bottom,0,7021 +656166,hands_on_own_face,0,7006 +1336966,idolmaster_cinderella_girls_starlight_stage,3,6999 +2056,toenails,0,6997 +1333465,rem_(re:zero),4,6995 +494869,black_coat,0,6989 +610524,star-shaped_pupils,0,6963 +394136,red_panties,0,6956 +5214,overalls,0,6954 +543958,bags_under_eyes,0,6940 +1241714,korean_text,0,6939 +341,saki,3,6929 +468991,kicking,0,6922 +1564151,xenoblade_chronicles_(series),3,6916 +375526,short_kimono,0,6897 +3880,light,0,6895 +482679,facing_away,0,6885 +578643,striped_background,0,6879 +211409,wet_shirt,0,6877 +7493,daiyousei,4,6872 +645579,fake_tail,0,6859 +504750,princess_carry,0,6856 +428808,wagashi,0,6851 +843399,double-breasted,0,6848 +3444,computer,0,6835 +718360,polka_dot_bow,0,6821 +465688,shiki_eiki,4,6811 +1339995,green_headwear,0,6804 +1672,tea,0,6801 +596449,pigeon-toed,0,6800 
+519572,toenail_polish,0,6796 +456151,tainaka_ritsu,4,6789 +4787,axe,0,6787 +1409552,ooarai_school_uniform,0,6775 +466325,autumn_leaves,0,6771 +1244843,suzuya_(kantai_collection),4,6770 +581500,plaid_vest,0,6760 +411783,potted_plant,0,6759 +408248,transparent,0,6758 +5629,sample,0,6744 +393028,controller,0,6736 +1349206,cropped_torso,0,6730 +492174,ore_no_imouto_ga_konna_ni_kawaii_wake_ga_nai,3,6721 +503351,toramaru_shou,4,6716 +476771,bush,0,6714 +1394832,nier_(series),3,6705 +395918,misaka_mikoto,4,6700 +388149,ranguage,0,6699 +688711,white_sleeves,0,6692 +661340,holding_poke_ball,0,6678 +500472,evil_smile,0,6656 +622688,muscular_female,0,6655 +700405,nishizumi_miho,4,6646 +3748,slippers,0,6634 +1045681,white_shorts,0,6634 +501583,in_container,0,6628 +574178,o-ring_top,0,6623 +2414,vampire,0,6615 +9505,tubetop,0,6602 +384905,symmetrical_docking,0,6600 +513428,watercolor_(medium),0,6596 +670635,cross-laced_clothes,0,6590 +1363594,red_hairband,0,6579 +1372775,pinafore_dress,0,6577 +656167,hand_on_another's_face,0,6572 +430765,sunbeam,0,6571 +1240523,houshou_(kantai_collection),4,6546 +457806,artist_self-insert,0,6544 +1740,mermaid,0,6531 +400382,mouse_tail,0,6530 +684644,gold_trim,0,6527 +490556,final_fantasy_xiv,3,6526 +602796,star_print,0,6507 +555246,black_border,0,6504 +1307840,stud_earrings,0,6487 +1585358,northern_ocean_princess,4,6482 +1081309,mouth_mask,0,6477 +493887,food_in_mouth,0,6475 +1349592,blue_kimono,0,6462 +440369,multiple_persona,0,6461 +610698,hat_ornament,0,6460 +608932,inazuma_eleven_go,3,6452 +375461,belly,0,6440 +617355,grey_jacket,0,6440 +3898,tank,0,6440 +6313,link,4,6427 +481391,reaching_out,0,6419 +592977,nishikino_maki,4,6418 +1226390,purple_flower,0,6403 +470562,spaghetti_strap,0,6401 +394881,school_desk,0,6398 +530616,checkered_floor,0,6385 +42918,scales,0,6381 +383680,vampire_(game),3,6367 +1451201,draph,0,6366 +454128,head_wreath,0,6365 +9504,magatama,0,6363 +1485665,scathach_(fate)_(all),4,6358 +24,tsukihime,3,6351 +547199,bikini_skirt,0,6350 +484616,maple_leaf,0,6325 +1516549,pink_headwear,0,6317 +1571375,poke_ball_(basic),0,6310 +468789,paw_gloves,0,6308 +1262298,off-shoulder_dress,0,6307 +297241,smug,0,6303 +3473,giant,0,6301 +583983,black_cape,0,6299 +5843,stretch,0,6287 +1391753,fujimaru_ritsuka_(male),4,6274 +15060,persona_3,3,6268 +1388933,light_blush,0,6264 +10704,bamboo,0,6262 +461639,happy_new_year,0,6260 +4429,pencil,0,6256 +593046,sonoda_umi,4,6254 +699839,halloween_costume,0,6251 +684986,beige_background,0,6233 +3861,pikachu,4,6228 +388861,rockman,3,6227 +1285325,wrist_scrunchie,0,6223 +521761,voiceroid,3,6214 +1337483,nier_automata,3,6207 +11019,scared,0,6206 +408887,shoulder_pads,0,6204 +646363,company_name,0,6200 +75228,aura,0,6194 +665137,sandwiched,0,6186 +484924,heart_hands,0,6185 +522720,naked_towel,0,6178 +479549,rebuild_of_evangelion,3,6169 +1377868,bang_dream!,3,6155 +471181,eyeliner,0,6145 +516432,imminent_kiss,0,6133 +1328421,crown_braid,0,6131 +8027,to_heart_2,3,6129 +1332478,shrug_(clothing),0,6129 +421031,huge_weapon,0,6127 +1238051,murakumo_(kantai_collection),4,6116 +125420,bald,0,6099 +566116,scar_on_face,0,6092 +394722,wet_hair,0,6078 +8577,paintbrush,0,6076 +469422,paw_print,0,6076 +515909,asymmetrical_clothes,0,6071 +458210,bikini_armor,0,6069 +535186,breast_pocket,0,6068 +491943,dark_persona,0,6066 +2817,eyes,0,6065 +4965,dagger,0,6064 +158473,string_panties,0,6062 +478321,checkered_background,0,6061 +666816,hand_on_own_head,0,6058 +662185,shibuya_rin,4,6052 +419695,sakazuki,0,6048 +416486,photo_(object),0,6040 
+419116,tied_shirt,0,6022 +1329246,center_frills,0,6022 +12342,usami_renko,4,6019 +1231258,matoi_ryuuko,4,6018 +615562,toujou_nozomi,4,6017 +434799,0_0,0,6015 +592974,yazawa_nico,4,6006 +1405000,serval_(kemono_friends),4,6004 +375110,haori,0,6004 +13851,halftone,0,6001 +9983,dancing,0,6000 +450266,wine_glass,0,5998 +463475,iron_cross,0,5998 +1230896,horn_ornament,0,5994 +4065,explosion,0,5990 +1441883,round_eyewear,0,5985 +9331,falling,0,5975 +8404,pixel_art,0,5960 +1238041,atago_(kantai_collection),4,5958 +485607,side_bun,0,5957 +399932,pixiv_fantasia,3,5955 +8104,angel,0,5950 +563862,gameplay_mechanics,0,5948 +656064,high_school_dxd,3,5941 +2566,mushroom,0,5938 +1238052,ushio_(kantai_collection),4,5936 +12108,illyasviel_von_einzbern,4,5935 +593043,ayase_eli,4,5934 +544560,himekaidou_hatate,4,5934 +1233711,imaizumi_kagerou,4,5932 +495530,bikini_under_clothes,0,5914 +4633,singing,0,5905 +396470,arm_cannon,0,5902 +527682,object_hug,0,5896 +1424366,xenoblade_chronicles_2,3,5887 +613998,mismatched_legwear,0,5886 +1505172,engrish_text,0,5879 +492982,layered_dress,0,5879 +7929,shackles,0,5878 +473248,blue_bra,0,5859 +1240900,shoukaku_(kantai_collection),4,5858 +473218,lace-trimmed_bra,0,5855 +502731,hachimaki,0,5848 +409161,bakemonogatari,3,5845 +1277919,inkling,4,5842 +569558,fate/kaleid_liner_prisma_illya,3,5842 +550405,holding_flower,0,5828 +238471,babydoll,0,5822 +1257079,print_kimono,0,5815 +379504,3d,0,5813 +483919,single_earring,0,5810 +623418,seiyuu_connection,0,5805 +1264677,anchor_hair_ornament,0,5802 +1441872,purple_neckwear,0,5801 +124044,character_sheet,0,5799 +541903,purple_nails,0,5795 +547419,yuru_yuri,3,5790 +663180,senki_zesshou_symphogear,3,5789 +532949,trigger_discipline,0,5784 +399391,spiked_bracelet,0,5784 +246,nun,0,5780 +395583,mother_and_daughter,0,5778 +108765,broom_riding,0,5774 +608355,karakasa_obake,0,5774 +414957,confetti,0,5757 +2215,house,0,5756 +1516547,brown_headwear,0,5753 +1233477,crop_top_overhang,0,5744 +472197,white_pants,0,5743 +1424093,otonokizaka_school_uniform,0,5739 +443371,raccoon_tail,0,5739 +1260880,fang_out,0,5734 +394083,knee_pads,0,5733 +8433,thumbs_up,0,5732 +491150,^o^,0,5729 +1271614,amatsukaze_(kantai_collection),4,5723 +1238037,yukikaze_(kantai_collection),4,5723 +12724,field,0,5716 +394150,blue_rose,0,5713 +412952,arm_garter,0,5702 +468449,brown_dress,0,5702 +507586,yellow_skirt,0,5700 +506041,maribel_hearn,4,5698 +493064,over_shoulder,0,5694 +1359441,collared_dress,0,5693 +500740,very_short_hair,0,5692 +8028,ragnarok_online,3,5684 +1364407,genderswap_(ftm),0,5683 +2246,wall,0,5667 +374955,ruins,0,5665 +460159,body_blush,0,5657 +720637,chromatic_aberration,0,5654 +1435503,tamamo_no_mae_(fate),4,5648 +8327,ayanami_rei,4,5644 +1447648,sailor_senshi_uniform,0,5639 +413645,nagae_iku,4,5639 +521120,backless_dress,0,5627 +1393342,erune,0,5625 +16170,television,0,5620 +1343866,black_sleeves,0,5617 +503350,murasa_minamitsu,4,5615 +578891,shoulder_blades,0,5604 +471436,doughnut,0,5589 +616593,purple_gloves,0,5588 +16791,strap,0,5586 +1349171,food_themed_hair_ornament,0,5585 +416892,;o,0,5579 +1303436,red_choker,0,5578 +439891,d:,0,5577 +479176,bird_wings,0,5574 +423161,object_on_head,0,5568 +618117,legwear_under_shorts,0,5568 +1515536,official_alternate_costume,0,5551 +1708,sake,0,5538 +462974,squiggle,0,5535 +7447,mahou_shoujo_lyrical_nanoha_a's,3,5533 +10403,futon,0,5533 +562882,blue_pants,0,5530 +474717,front_ponytail,0,5521 +592643,navel_cutout,0,5502 +464998,dragon_ball_z,3,5499 +1406576,serval_ears,0,5496 +470019,one_knee,0,5488 
+1508967,purple_theme,0,5475 +526025,pleated_dress,0,5471 +507496,chess_piece,0,5451 +594404,hat_flower,0,5449 +401680,sweater_dress,0,5444 +6203,vines,0,5440 +1515359,pink_theme,0,5437 +433036,super_smash_bros.,3,5428 +384906,asymmetrical_docking,0,5420 +1439950,splatoon_1,3,5406 +2570,idol,0,5406 +1236938,kitakami_(kantai_collection),4,5405 +510068,splashing,0,5399 +1349338,red_leotard,0,5399 +474715,crescent_moon,0,5394 +638538,kaku_seiga,4,5391 +1239690,rensouhou-chan,4,5389 +613198,green_legwear,0,5389 +562575,green_bikini,0,5388 +472448,highleg_bikini,0,5385 +682963,claw_pose,0,5384 +1301111,white_sweater,0,5376 +438708,frilled_panties,0,5367 +1401122,yorha_no._2_type_b,4,5365 +547394,o-ring_bikini,0,5364 +469978,glowing_eye,0,5354 +451190,layered_skirt,0,5348 +288161,office_lady,0,5347 +1481027,okita_souji_(fate)_(all),4,5315 +2409,error,0,5311 +1316394,hip_vent,0,5305 +5857,leather,0,5304 +558829,animal_on_head,0,5295 +2258,gilgamesh,4,5291 +491645,head_fins,0,5278 +1328010,fur-trimmed_jacket,0,5277 +1372733,blue_leotard,0,5276 +500,fundoshi,0,5274 +5157,sad,0,5274 +462594,bobby_socks,0,5272 +8198,meme,0,5267 +202427,3:,0,5264 +396065,green_panties,0,5261 +473328,kumoi_ichirin,4,5251 +575497,starry_background,0,5241 +5626,antlers,0,5238 +610074,platform_footwear,0,5235 +1387418,back_bow,0,5232 +456152,kotobuki_tsumugi,4,5231 +376302,kamen_rider,3,5230 +695937,white_outline,0,5228 +511640,single_shoe,0,5225 +465836,cutoffs,0,5223 +606732,soaking_feet,0,5221 +101752,hoshii_miki,4,5221 +53988,fox_mask,0,5220 +395969,macross_frontier,3,5219 +546417,girl_sandwich,0,5213 +619321,senran_kagura,3,5208 +1304046,black_bodysuit,0,5207 +682686,spoken_exclamation_mark,0,5204 +660814,dot_nose,0,5203 +464551,cookie,0,5194 +474091,downblouse,0,5193 +498950,orange_skirt,0,5187 +413652,kurodani_yamame,4,5186 +1625,bleach,3,5177 +391251,breast_rest,0,5173 +672705,pink_kimono,0,5170 +507378,yellow_bikini,0,5165 +516029,shimenawa,0,5159 +680569,foot_focus,0,5158 +443796,under_covers,0,5155 +380423,goatee,0,5153 +376472,monster_hunter,3,5151 +420298,big_hair,0,5150 +385088,earphones,0,5149 +456515,tiger_print,0,5137 +551141,arm_ribbon,0,5134 +1269638,handheld_game_console,0,5122 +474471,little_busters!,3,5122 +1246596,akebono_(kantai_collection),4,5121 +387599,kaito,4,5117 +382074,hiiragi_kagami,4,5104 +484880,architecture,0,5104 +1296381,prinz_eugen_(kantai_collection),4,5103 +14461,open_kimono,0,5101 +1408636,jeanne_d'arc_(fate),4,5101 +514087,cross_necklace,0,5099 +713730,kafuu_chino,4,5098 +843857,red_kimono,0,5097 +376671,scroll,0,5093 +5006,rice,0,5091 +1503961,warship_girls_r,3,5089 +476548,pinky_out,0,5081 +449932,w,0,5071 +570051,ibaraki_kasen,4,5068 +1233709,hata_no_kokoro,4,5061 +1601823,retro_artstyle,0,5061 +420311,\m/,0,5057 +588956,untied_bikini,0,5051 +1238038,tatsuta_(kantai_collection),4,5047 +808104,persona_5,3,5045 +13859,perspective,0,5044 +496894,face-to-face,0,5041 +7438,matou_sakura,4,5037 +2298,watermelon,0,5026 +4924,camouflage,0,5024 +726933,clothes_around_waist,0,5020 +1551197,1990s_(style),0,5018 +558653,casual_one-piece_swimsuit,0,5018 +112,clannad,3,5017 +580906,low_wings,0,5015 +384569,gundam_00,3,5013 +483418,mini_top_hat,0,5011 +30851,3koma,0,5007 +1513842,gen_8_pokemon,0,5007 +703196,leaf_hair_ornament,0,5006 +453244,wolf_girl,0,5005 +716716,dokidoki!_precure,3,4994 +461530,balloon,0,4990 +488078,purple_jacket,0,4983 +5201,pipe,0,4982 +420828,sliding_doors,0,4979 +10802,bonnet,0,4978 +567329,limited_palette,0,4975 +646445,mask_on_head,0,4973 
+619180,kasodani_kyouko,4,4973 +378581,brothers,0,4972 +3138,bread,0,4965 +1441878,eyewear_removed,0,4960 +391915,print_panties,0,4955 +1468296,beamed_eighth_notes,0,4948 +485218,trading_card,0,4943 +6202,winter,0,4941 +538586,hat_feather,0,4941 +474306,headphones_around_neck,0,4940 +1340403,legendary_pokemon,0,4939 +388207,yin_yang,0,4931 +605892,pov_hands,0,4917 +642359,faulds,0,4914 +1243225,yamato_(kantai_collection),4,4913 +673253,pokemon_bw2,3,4902 +3531,bridge,0,4899 +1245652,female_admiral_(kantai_collection),4,4892 +392495,tiptoes,0,4885 +1490428,fringe_trim,0,4883 +11651,annoyed,0,4880 +418119,black_rock_shooter,3,4878 +9586,habit,0,4874 +465231,cardcaptor_sakura,3,4870 +621064,red_vest,0,4870 +1436498,darling_in_the_franxx,3,4865 +714822,red_theme,0,4860 +698860,hand_on_own_knee,0,4858 +1515356,green_theme,0,4850 +1441887,rimless_eyewear,0,4849 +569780,pink_rose,0,4848 +1492638,new_super_mario_bros._u_deluxe,3,4848 +3971,handcuffs,0,4844 +54494,kujo_jotaro,4,4839 +1492952,super_crown,0,4836 +390589,branch,0,4832 +593045,minami_kotori,4,4828 +396079,looking_afar,0,4827 +487745,sitting_on_lap,0,4825 +374961,no_pupils,0,4817 +406442,elsword,3,4811 +418817,axis_powers_hetalia,3,4807 +383707,pun,0,4803 +669725,holding_bag,0,4800 +11231,feeding,0,4799 +374606,giantess,0,4796 +525628,angel_beats!,3,4796 +572821,star_earrings,0,4795 +422720,uwabaki,0,4789 +8988,sniper_rifle,0,4784 +1600,beer,0,4783 +412080,sleeve_cuffs,0,4775 +720760,holding_tray,0,4771 +2100,summer,0,4759 +581546,bloody_clothes,0,4757 +16710,sepia,0,4748 +1283885,crossed_bangs,0,4747 +412376,stitches,0,4746 +638583,soga_no_tojiko,4,4734 +452549,!!,0,4726 +485014,white_rose,0,4724 +493164,forehead_jewel,0,4721 +391888,tifa_lockhart,4,4717 +490261,partially_colored,0,4713 +462735,old,0,4707 +549735,heartcatch_precure!,3,4704 +545740,taut_shirt,0,4703 +460907,one_eye_covered,0,4701 +622607,official_style,0,4698 +473284,frilled_bra,0,4695 +1238040,mutsu_(kantai_collection),4,4691 +8301,black_cat,0,4688 +612000,constricted_pupils,0,4684 +464547,cherry,0,4681 +392008,chalkboard,0,4677 +1410771,artist_logo,0,4674 +664517,kariginu,0,4666 +166945,yellow_dress,0,4648 +667628,pokemon_dppt,3,4644 +576693,tile_floor,0,4643 +1297657,zipper_pull_tab,0,4642 +619181,miyako_yoshika,4,4639 +623300,bubble_skirt,0,4638 +1447826,notice_lines,0,4635 +1339640,midriff_peek,0,4634 +1497479,eyebrows_behind_hair,0,4634 +10369,bedroom,0,4624 +487910,joseph_joestar_(young),4,4621 +547384,tail_raised,0,4618 +382075,izumi_konata,4,4612 +1616,kotatsu,0,4608 +3577,motorcycle,0,4600 +589024,arm_strap,0,4598 +464558,egg,0,4598 +410392,wrist_grab,0,4597 +440458,xd,0,4590 +1261046,ooyodo_(kantai_collection),4,4590 +534254,brick_wall,0,4584 +55663,danmaku,0,4583 +12286,badge,0,4579 +1328382,scathach_(fate/grand_order),4,4575 +172313,time_paradox,0,4574 +378899,bride,0,4570 +614277,frilled_legwear,0,4570 +593668,huge_ahoge,0,4567 +1465896,aikatsu!_(series),3,4563 +504234,punching,0,4559 +678558,hand_on_own_chin,0,4557 +438648,stand_(jojo),0,4551 +698622,puzzle_&_dragons,3,4549 +1405300,kaban_(kemono_friends),4,4546 +618119,atelier_(series),3,4540 +1441862,purple_footwear,0,4538 +390703,purple_skin,0,4534 +526806,tengu-geta,0,4532 +1372463,darjeeling_(girls_und_panzer),4,4524 +1243755,kijin_seija,4,4523 +1267925,bismarck_(kantai_collection),4,4521 +5086,whip,0,4519 +1363856,star_hat_ornament,0,4515 +460438,damaged,0,4514 +3904,rainbow,0,4504 +1406853,serval_print,0,4503 +3509,syringe,0,4501 +395658,christmas_tree,0,4501 +1256688,bandaged_leg,0,4500 
+5252,cow_ears,0,4495 +499324,red_shorts,0,4490 +1448304,astolfo_(fate),4,4489 +5011,horse,0,4483 +413872,breast_envy,0,4482 +94496,princess_zelda,4,4475 +9071,amami_haruka,4,4473 +9894,lap_pillow,0,4469 +658791,necktie_between_breasts,0,4465 +479764,shikinami_asuka_langley,4,4460 +385431,submachine_gun,0,4455 +466986,horn_ribbon,0,4453 +1898,hammer,0,4452 +2182,police,0,4447 +466669,c.c.,4,4444 +418177,ripples,0,4442 +604394,pointing_at_viewer,0,4438 +14297,cyborg,0,4424 +2103,meiko,4,4424 +703293,bunny_hair_ornament,0,4421 +410928,multiple_4koma,0,4420 +383424,aki_minoriko,4,4419 +705938,mahou_shoujo_madoka_magica_movie,3,4418 +1237417,ooi_(kantai_collection),4,4409 +376018,ribs,0,4405 +388762,sleeveless_turtleneck,0,4402 +407260,polka_dot_panties,0,4402 +675233,open_cardigan,0,4400 +665739,holding_gift,0,4399 +666125,holding_fruit,0,4397 +447060,spiked_collar,0,4395 +387233,paper_fan,0,4387 +1248945,wo-class_aircraft_carrier,4,4386 +473775,pointy_hair,0,4386 +381678,broken,0,4381 +1240785,sendai_(kantai_collection),4,4378 +399073,waistcoat,0,4377 +1374965,kimetsu_no_yaiba,3,4376 +510663,yoko_littner,4,4372 +1542129,skin-covered_horns,0,4372 +392034,dripping,0,4370 +1253894,holding_microphone,0,4370 +1250483,holding_fan,0,4361 +1308754,flower_knight_girl,3,4360 +432947,no_socks,0,4360 +400657,one-eyed,0,4359 +615659,blue_vest,0,4355 +668671,holding_knife,0,4354 +21,suigintou,4,4354 +10322,albino,0,4352 +590917,leaf_on_head,0,4350 +15410,phantasy_star,3,4350 +1433170,nero_claudius_(fate),4,4349 +1369298,shuten_douji_(fate/grand_order),4,4347 +1369674,lillie_(pokemon),4,4343 +493130,plaid_shirt,0,4341 +593044,kousaka_honoka,4,4333 +398297,shouting,0,4329 +374967,power_lines,0,4327 +394743,cushion,0,4327 +375185,doll_joints,0,4323 +560247,white_pupils,0,4323 +1328011,fur-trimmed_sleeves,0,4321 +12438,grapes,0,4318 +511662,arm_guards,0,4318 +462578,merry_christmas,0,4317 +395294,head_scarf,0,4307 +513605,colored_pencil_(medium),0,4298 +1262171,holding_bottle,0,4297 +1272336,akashi_(kantai_collection),4,4293 +516937,oversized_object,0,4289 +452021,hieda_no_akyuu,4,4286 +614988,pink_jacket,0,4283 +613923,argyle_legwear,0,4281 +376747,nitroplus,3,4280 +6188,dressing,0,4277 +557051,hair_feathers,0,4277 +555228,panty_&_stocking_with_garterbelt,3,4272 +396,archer,4,4268 +606296,dappled_sunlight,0,4268 +375915,animal_hat,0,4262 +527888,animalization,0,4253 +8354,reclining,0,4251 +397487,pillow_hug,0,4251 +416507,torn_shirt,0,4245 +375373,long_coat,0,4244 +1245189,asashio_(kantai_collection),4,4241 +578948,closed_umbrella,0,4239 +516350,white_coat,0,4239 +1314823,black_sweater,0,4234 +626633,spoken_musical_note,0,4233 +724764,hand_on_own_thigh,0,4223 +1246198,mechanical_halo,0,4221 +1464351,gen_6_pokemon,0,4221 +713720,nishizumi_maho,4,4219 +710372,orange_shirt,0,4218 +489853,megami_magazine,3,4215 +511571,zzz,0,4210 +6425,scissors,0,4208 +1339317,d.va_(overwatch),4,4203 +1238039,takao_(kantai_collection),4,4202 +424565,shijou_takane,4,4189 +452550,inazuma_eleven,3,4188 +54490,monitor,0,4186 +109153,saucer,0,4185 +1516550,purple_headwear,0,4180 +406340,oversized_clothes,0,4180 +455615,cow_print,0,4178 +9072,kisaragi_chihaya,4,4176 +1241527,shiranui_(kantai_collection),4,4174 +562151,short_over_long_sleeves,0,4174 +3569,sleepy,0,4172 +715545,duel_monster,0,4172 +238935,track_suit,0,4165 +414971,purple_shirt,0,4162 +596854,blue_scarf,0,4161 +464537,bruise,0,4159 +390148,green_skin,0,4158 +13108,chun-li,4,4155 +2648,nightgown,0,4146 +948186,koha-ace,3,4145 +396968,reverse_trap,0,4143 
+1239346,hiei_(kantai_collection),4,4133 +415942,jester_cap,0,4125 +724584,hooded_cloak,0,4118 +148527,stool,0,4113 +713721,itsumi_erika,4,4113 +481511,red_lips,0,4106 +681480,light_frown,0,4101 +413907,;p,0,4099 +389066,castle,0,4092 +580402,plaid_scarf,0,4088 +9078,minase_iori,4,4085 +1310698,curled_horns,0,4084 +7439,kinomoto_sakura,4,4080 +3860,cooking,0,4080 +1441864,aqua_neckwear,0,4079 +417892,shell,0,4072 +3516,river,0,4070 +606614,shoes_removed,0,4067 +574501,ringed_eyes,0,4064 +1305447,ro-500_(kantai_collection),4,4062 +1053124,sideways_glance,0,4061 +595503,facial_tattoo,0,4061 +383855,aki_shizuha,4,4057 +421520,no_eyes,0,4056 +615222,hoshizora_rin,4,4046 +623331,company_connection,0,4046 +487559,lamppost,0,4045 +412879,playing_games,0,4041 +465048,dog_girl,0,4041 +493465,playing_card,0,4037 +634063,flag_print,0,4037 +582261,unmoving_pattern,0,4036 +2786,loincloth,0,4033 +391704,pervert,0,4029 +485597,grey_dress,0,4027 +490606,afterimage,0,4026 +403779,navel_piercing,0,4024 +12295,letty_whiterock,4,4024 +1315939,brown_belt,0,4023 +2576,fat,0,4022 +638500,futatsuiwa_mamizou,4,4022 +423823,ship,0,4021 +374989,quiver,0,4020 +407913,bursting_breasts,0,4020 +379427,skeleton,0,4019 +7460,pot,0,4015 +1238066,kirishima_(kantai_collection),4,4015 +399834,forehead_protector,0,4013 +502301,tail_wagging,0,4012 +1242846,souryuu_(kantai_collection),4,4011 +468075,jiangshi,0,4009 +667849,bandaid_on_face,0,4006 +422767,gumi,4,4006 +410239,superhero,0,4006 +1532466,colored_inner_hair,0,4002 +1271756,black_scarf,0,4000 +10658,emiya_shirou,4,3998 +686321,half_gloves,0,3997 +2762,coffee,0,3987 +1540544,cu_chulainn_(fate)_(all),4,3986 +1441860,grey_footwear,0,3985 +495048,lily_(flower),0,3976 +1440438,idolmaster_1,3,3976 +1246961,verniy_(kantai_collection),4,3972 +465523,race_queen,0,3969 +482376,game_controller,0,3967 +417653,convenient_leg,0,3966 +421658,cardboard_box,0,3963 +9681,kyon,4,3961 +1441869,orange_neckwear,0,3961 +481812,teardrop,0,3958 +436870,:/,0,3956 +3288,penguin,0,3954 +494842,braided_ponytail,0,3947 +670055,watashi_ga_motenai_no_wa_dou_kangaetemo_omaera_ga_warui!,3,3945 +1242303,sazanami_(kantai_collection),4,3943 +11458,baseball_bat,0,3940 +1246753,senketsu,4,3936 +1837,disgaea,3,3935 +653480,yellow_jacket,0,3932 +460796,ex-keine,4,3931 +543207,east_asian_architecture,0,3931 +712121,trait_connection,0,3931 +482327,swimwear,0,3931 +482941,flight_deck,0,3925 +1462380,disposable_cup,0,3908 +643027,light_blue_hair,0,3907 +481547,holding_hair,0,3904 diff --git a/Generator/UpscalePred.py b/Generator/UpscalePred.py new file mode 100644 index 0000000..7050ef7 --- /dev/null +++ b/Generator/UpscalePred.py @@ -0,0 +1,95 @@ +import cv2 +import numpy as np + +from tensorflow.keras.utils import Sequence + + +def smart_imread(img_path, flag=cv2.IMREAD_UNCHANGED): + img = cv2.imdecode(np.fromfile(img_path, dtype=np.uint8), flag) + if img is None: + print("Error reading ", img_path) + return img + + +def make_square(img): + old_size = img.shape[:2] + desired_size = max(old_size) + + delta_w = desired_size - old_size[1] + delta_h = desired_size - old_size[0] + top, bottom = delta_h // 2, delta_h - (delta_h // 2) + left, right = delta_w // 2, delta_w - (delta_w // 2) + + color = [255, 255, 255] + new_im = cv2.copyMakeBorder( + img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color + ) + return new_im + + +class DataGenerator(Sequence): + "Generates data for Keras" + + def __init__(self, images_list, batch_size=16, dim=(48, 48), n_channels=3): + "Initialization" + self.dim = dim + 
self.images_list = images_list + self.batch_size = batch_size + self.n_channels = n_channels + + self.on_epoch_end() + + def __len__(self): + "Denotes the number of batches per epoch" + return int(np.ceil(len(self.images_list) / self.batch_size)) + + def __getitem__(self, index): + "Generate one batch of data" + # Generate indexes of the batch + indexes = self.indexes[index * self.batch_size : (index + 1) * self.batch_size] + + # Generate data + X = self.__data_generation(indexes) + + return (X,) + + def on_epoch_end(self): + "Updates indexes after each epoch" + # Generate all the indexes + self.indexes = np.arange(len(self.images_list)) + + def __data_generation(self, indexes): + "Generates data containing batch_size samples" + # Generate data + X = np.empty((len(indexes), self.dim[0], self.dim[1], self.n_channels)) + + # Find list of IDs + images_list_temp = [self.images_list[k] for k in indexes] + + for i, img_fullpath in enumerate(images_list_temp): + img = smart_imread(img_fullpath) + if img.dtype is np.dtype(np.uint16): + img = (img / 257).astype(np.uint8) + + if len(img.shape) == 2: + img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) + elif img.shape[2] == 4: + trans_mask = img[:, :, 3] == 0 + img[trans_mask] = [255, 255, 255, 255] + img = cv2.cvtColor(img, cv2.COLOR_BGRA2BGR) + + img = make_square(img) + if img.shape[0] > self.dim[0]: + img = cv2.resize( + img, (self.dim[0], self.dim[1]), interpolation=cv2.INTER_AREA + ) + elif img.shape[0] < self.dim[0]: + img = cv2.resize( + img, (self.dim[0], self.dim[1]), interpolation=cv2.INTER_CUBIC + ) + + X[i] = img + + X = X.astype(np.float32) / 255 + + return X diff --git a/Generator/Upscale_DB.py b/Generator/Upscale_DB.py new file mode 100644 index 0000000..5c0075b --- /dev/null +++ b/Generator/Upscale_DB.py @@ -0,0 +1,96 @@ +import sqlite3 + +import cv2 +import numpy as np +import tensorflow as tf + + +def rotate(image, angle, center=None, scale=1.0): + # grab the dimensions of the image + (h, w) = image.shape[:2] + + # if the center is None, initialize it as the center of + # the image + if center is None: + center = (w // 2, h // 2) + + # perform the rotation + M = cv2.getRotationMatrix2D(center, angle, scale) + rotated = cv2.warpAffine(image, M, (w, h), borderValue=(255, 255, 255)) + + # return the rotated image + return rotated + + +def crop(img, x, y, h, w): + crop_img = img[y : y + h, x : x + w] + return crop_img + + +class DataGenerator: + def __init__( + self, images_list, labels_list, noise_level=0, dim=(48, 48), n_channels=3 + ): + self.dim = dim + self.images_list = images_list + self.labels_list = labels_list + self.noise_level = noise_level + self.n_channels = n_channels + + def getLabels(self, filename): + db = sqlite3.connect(r"F:\MLArchives\danbooru2020\danbooru2020.db") + db_cursor = db.cursor() + + img_id = int( + filename.numpy().decode("utf-8").rsplit("/", 1)[1].rsplit(".", 1)[0] + ) + + query = "SELECT tag_id FROM imageTags WHERE image_id = ?" 
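+        # imageTags maps each image_id to its tag_ids (one row per tag on the post);
+        # the ids fetched here are multi-hot encoded against self.labels_list below.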
+ db_cursor.execute(query, (img_id,)) + tags = db_cursor.fetchall() + db.close() + + tags = [tag_id[0] for tag_id in tags] + encoded = np.isin(self.labels_list, tags).astype(np.float32) + return encoded + + def getImage(self, filename): + img_fullpath = r"F:\MLArchives\danbooru2020\512px\%s" % filename.numpy().decode( + "utf-8" + ) + img = cv2.imread(img_fullpath, cv2.IMREAD_COLOR) + + if self.noise_level >= 1: + if np.random.choice([0, 1]): + img = cv2.flip(img, 1) + + if np.random.choice([0, 1]) or self.noise_level >= 2: + factor = (1.0 - 0.87) * np.random.random_sample() + 0.87 + origSize = img.shape[0] + newSize = int(origSize * factor) + if factor < 1.0: + x = np.random.randint(0, img.shape[1] - newSize) + y = np.random.randint(0, img.shape[0] - newSize) + img = crop(img, x, y, newSize, newSize) + + if np.random.choice([0, 1]) or self.noise_level >= 2: + angle = np.random.randint(-45, 45) + img = rotate(img, angle) + + img = cv2.resize(img, (self.dim[0], self.dim[1]), interpolation=cv2.INTER_AREA) + img = img * np.array(1 / 255.0).astype(np.float32) + return img + + def wrap_func(self, filename): + [ + image, + ] = tf.py_function(self.getImage, [filename], [tf.float32]) + [ + image_labels, + ] = tf.py_function(self.getLabels, [filename], [tf.float32]) + image.set_shape((self.dim[0], self.dim[1], self.n_channels)) + image_labels.set_shape(len(self.labels_list)) + return image, image_labels + + def genDS(self): + return tf.data.Dataset.from_tensor_slices(self.images_list) diff --git a/Models/NFNet.py b/Models/NFNet.py new file mode 100644 index 0000000..c9dba3c --- /dev/null +++ b/Models/NFNet.py @@ -0,0 +1,335 @@ +import numpy as np +import tensorflow as tf +from tensorflow.keras import layers, regularizers +from tensorflow.keras.models import Model + + +def formatName(prefix, name): + return ( + ("%s_%s" % (prefix, name)) if prefix is not None and name is not None else None + ) + + +def SEBlock(x, in_ch): + squeeze = tf.reduce_mean(x, [1, 2], keepdims=False) + + attn = tf.keras.layers.Dense(filters=int(in_ch * 0.5), use_bias=True)(squeeze) + attn = tf.keras.layers.Relu()(attn) + attn = tf.keras.layers.Dense(filters=in_ch, use_bias=True)(attn) + + attn = tf.reshape(attn, (-1, 1, 1, in_ch)) + attn = tf.math.sigmoid(attn) + return attn + + +def ECABlock(x, in_ch, gamma=2, b=1): + t = int(np.abs((np.log2(in_ch) + b) / gamma)) + k_size = t if t % 2 else t + 1 + + squeeze = tf.reduce_mean(x, [1, 2], keepdims=True) + squeeze = tf.squeeze(squeeze, axis=2) + squeeze = tf.transpose(squeeze, [0, 2, 1]) + + w_init = tf.keras.initializers.VarianceScaling(1.0, "fan_in", "normal") + attn = tf.keras.layers.Conv1D( + filters=1, + kernel_size=k_size, + padding="same", + use_bias=False, + kernel_initializer=w_init, + )(squeeze) + + attn = tf.transpose(attn, [0, 2, 1]) + attn = tf.expand_dims(attn, axis=2) + attn = tf.math.sigmoid(attn) + return attn + + +class SReLU(tf.keras.layers.ReLU): + def build(self, input_shape): + super(SReLU, self).build(input_shape) + self.gamma = 1.7139588594436646 + self.built = True + + def compute_output_shape(self, input_shape): + return tf.TensorShape(input_shape) + + def call(self, x): + x = super(SReLU, self).call(x) + return x * self.gamma + + +class StochDepth(tf.keras.Model): + """Batchwise Dropout used in EfficientNet, optionally sans rescaling.""" + + def __init__(self, drop_rate, scale_by_keep=False, name=None): + super(StochDepth, self).__init__(name=name) + self.drop_rate = drop_rate + self.scale_by_keep = scale_by_keep + + def call(self, x, training): + if not 
training: + return x + + batch_size = tf.shape(x)[0] + r = tf.random.uniform(shape=[batch_size, 1, 1, 1], dtype=x.dtype) + keep_prob = 1.0 - self.drop_rate + binary_tensor = tf.floor(keep_prob + r) + if self.scale_by_keep: + x = x / keep_prob + return x * binary_tensor + + +class SkipInit(tf.keras.layers.Layer): + def build(self, input_shape): + super(SkipInit, self).build(input_shape) + + self.skip = self.add_weight( + name="skip", + shape=(), + initializer="zeros", + dtype="float32", + trainable=True, + ) + + self.built = True + + def compute_output_shape(self, input_shape): + return tf.TensorShape(input_shape) + + def call(self, x): + return x * self.skip + + +class ScaledWSConv2d(tf.keras.layers.Conv2D): + """Implements the abs/2101.08692 technique. + You can simply replace any Conv2D with this one to use re-parametrized + convolution operation in which the kernels are standardized before conv. + """ + + def build(self, input_shape): + super(ScaledWSConv2d, self).build(input_shape) + + self.fan_in = self.kernel.shape[0] * self.kernel.shape[1] * self.kernel.shape[2] + self.gain = self.add_weight( + name="gain", + shape=(self.filters,), + initializer="ones", + dtype="float32", + trainable=True, + ) + + self.built = True + + def convolution_op(self, inputs, kernel): + # Kernel has shape HWIO, normalize over HWI + mean, var = tf.nn.moments(kernel, axes=[0, 1, 2], keepdims=True) + + # Manually fused normalization, eq. to (w - mean) * gain / sqrt(N * var) + scale = tf.math.rsqrt(tf.math.maximum(var * self.fan_in, 1e-4)) * self.gain + shift = mean * scale + return super().convolution_op(inputs, kernel * scale - shift) + + +def HeConv2D( + x, + filters=64, + kernel_size=(3, 3), + strides=(1, 1), + use_bias=True, + groups=1, + name=None, +): + if kernel_size >= 3: + padding_name = ("%s_padding" % name) if name is not None else name + x = layers.ZeroPadding2D( + padding=(int(kernel_size / 2), int(kernel_size / 2)), name=padding_name + )(x) + w_init = tf.keras.initializers.VarianceScaling(1.0, "fan_in", "normal") + x = ScaledWSConv2d( + filters, + kernel_size, + strides, + padding="valid", + use_bias=use_bias, + groups=groups, + kernel_initializer=w_init, + kernel_regularizer=regularizers.l2(0.00005), + name=name, + )(x) + return x + + +def NFBlock( + x, + out_filters=64, + alpha=1.0, + beta=1.0, + strides=1, + group_size=128, + expansion=0.5, + use_eca=True, + stochdepth_rate=0.0, + prefix=None, +): + in_channels = x.shape[-1] + + in_filters = int(out_filters * expansion) + groups = in_filters // group_size + in_filters = groups * group_size + + out = SReLU(name=formatName(prefix, "relu_01"))(x) * beta + + if strides > 1 or in_channels != out_filters: + if strides > 1: + shortcut = layers.AveragePooling2D( + padding="same", name=formatName(prefix, "averagepooling2d_shortcut") + )(out) + else: + shortcut = out + shortcut = HeConv2D( + shortcut, + out_filters, + kernel_size=1, + name=formatName(prefix, "conv2d_shortcut"), + ) + else: + shortcut = x + + out = HeConv2D(out, in_filters, kernel_size=1, name=formatName(prefix, "conv2d_01")) + + out = SReLU(name=formatName(prefix, "relu_02"))(out) + out = HeConv2D( + out, + in_filters, + kernel_size=3, + strides=strides, + groups=groups, + name=formatName(prefix, "conv2d_02"), + ) + + out = SReLU(name=formatName(prefix, "relu_03"))(out) + out = HeConv2D( + out, + in_filters, + kernel_size=3, + groups=groups, + name=formatName(prefix, "conv2d_03"), + ) + + out = SReLU(name=formatName(prefix, "relu_04"))(out) + out = HeConv2D( + out, out_filters, 
kernel_size=1, name=formatName(prefix, "conv2d_04") + ) + if use_eca: + out = 2 * ECABlock(out, out_filters) * out # Multiply by 2 for rescaling + + if stochdepth_rate > 0.0: + out = StochDepth(drop_rate=stochdepth_rate)(out) + + out = SkipInit()(out) + + return out * alpha + shortcut + + +# Lx variants params from TIMM: "experimental 'light' versions of NFNet-F that are little leaner" +definitions = { + "F0": { + "blocks": [1, 2, 6, 3], + "filters": [256, 512, 1536, 1536], + "group_size": 128, + "drop_rate": 0.2, + "bneck_expansion": 0.5, + "final_expansion": 2, + }, + "L0": { + "blocks": [1, 2, 6, 3], + "filters": [256, 512, 1536, 1536], + "group_size": 64, + "drop_rate": 0.2, + "bneck_expansion": 0.25, + "final_expansion": 1.5, + }, + "L1": { + "blocks": [2, 4, 12, 6], + "filters": [256, 512, 1536, 1536], + "group_size": 64, + "drop_rate": 0.3, + "bneck_expansion": 0.25, + "final_expansion": 2, + }, +} + + +def NFNetV1( + in_shape=(320, 320, 3), out_classes=2000, definition_name="L0", use_eca=True +): + alpha = 0.2 + width = 1.0 + stochdepth_rate = 0.1 + + definition = definitions[definition_name] + strides = [1, 2, 2, 2] + + num_blocks = sum(definition["blocks"]) + + img_input = layers.Input(shape=in_shape) + + # Root block / "stem" + ch = definition["filters"][0] // 2 + x = HeConv2D(img_input, filters=16, kernel_size=3, strides=2, name="root_conv2d_01") + x = SReLU(name="root_relu_01")(x) + x = HeConv2D(x, filters=32, kernel_size=3, strides=1, name="root_conv2d_02") + x = SReLU(name="root_relu_02")(x) + x = HeConv2D(x, filters=64, kernel_size=3, strides=1, name="root_conv2d_03") + x = SReLU(name="root_relu_03")(x) + x = HeConv2D(x, filters=ch, kernel_size=3, strides=2, name="root_conv2d_04") + + index = 0 + full_index = 0 + expected_std = 1.0 + for stage_depth, block_width, stride in zip( + definition["blocks"], definition["filters"], strides + ): + for block_index in range(stage_depth): + beta = 1.0 / expected_std + block_stochdepth_rate = stochdepth_rate * full_index / num_blocks + out_ch = int(block_width * width) + x = NFBlock( + x, + out_ch, + alpha, + beta, + strides=stride if block_index == 0 else 1, + group_size=definition["group_size"], + expansion=definition["bneck_expansion"], + use_eca=use_eca, + stochdepth_rate=block_stochdepth_rate, + prefix="block%d_cell%d" % (index, block_index), + ) + + ch = out_ch + if block_index == 0: + expected_std = 1.0 + expected_std = np.sqrt(expected_std ** 2 + alpha ** 2) + full_index += 1 + index += 1 + + # Classification block + x = HeConv2D( + x, + int(ch * definition["final_expansion"]), + kernel_size=1, + name="predictions_conv2d", + ) + x = SReLU(name="predictions_relu")(x) + x = layers.GlobalAveragePooling2D(name="predictions_globalavgpooling")(x) + x = layers.Dropout(definition["drop_rate"])(x) + + x = layers.Dense(out_classes, kernel_initializer="zeros", name="predictions_dense")( + x + ) + x = layers.Activation("sigmoid", name="predictions_sigmoid")(x) + + model = Model(img_input, x, name="NFNet%sV1" % definition_name) + return model diff --git a/Models/NFResNet.py b/Models/NFResNet.py new file mode 100644 index 0000000..752d7ae --- /dev/null +++ b/Models/NFResNet.py @@ -0,0 +1,225 @@ +import numpy as np +import tensorflow as tf +from tensorflow.keras import layers, regularizers +from tensorflow.keras.models import Model + + +def formatName(prefix, name): + return ( + ("%s_%s" % (prefix, name)) if prefix is not None and name is not None else None + ) + + +class SReLU(tf.keras.layers.ReLU): + def build(self, input_shape): + 
super(SReLU, self).build(input_shape) + self.gamma = 1.7139588594436646 + self.built = True + + def compute_output_shape(self, input_shape): + return tf.TensorShape(input_shape) + + def call(self, x): + x = super(SReLU, self).call(x) + return x * self.gamma + + +class StochDepth(tf.keras.Model): + """Batchwise Dropout used in EfficientNet, optionally sans rescaling.""" + + def __init__(self, drop_rate, scale_by_keep=False, name=None): + super(StochDepth, self).__init__(name=name) + self.drop_rate = drop_rate + self.scale_by_keep = scale_by_keep + + def call(self, x, training): + if not training: + return x + + batch_size = tf.shape(x)[0] + r = tf.random.uniform(shape=[batch_size, 1, 1, 1], dtype=x.dtype) + keep_prob = 1.0 - self.drop_rate + binary_tensor = tf.floor(keep_prob + r) + if self.scale_by_keep: + x = x / keep_prob + return x * binary_tensor + + +class SkipInit(tf.keras.layers.Layer): + def build(self, input_shape): + super(SkipInit, self).build(input_shape) + + self.skip = self.add_weight( + name="skip", + shape=(), + initializer="zeros", + dtype="float32", + trainable=True, + ) + + self.built = True + + def compute_output_shape(self, input_shape): + return tf.TensorShape(input_shape) + + def call(self, x): + return x * self.skip + + +class ScaledWSConv2d(tf.keras.layers.Conv2D): + """Implements the abs/2101.08692 technique. + You can simply replace any Conv2D with this one to use re-parametrized + convolution operation in which the kernels are standardized before conv. + """ + + def build(self, input_shape): + super(ScaledWSConv2d, self).build(input_shape) + + self.fan_in = self.kernel.shape[0] * self.kernel.shape[1] * self.kernel.shape[2] + self.gain = self.add_weight( + name="gain", + shape=(self.filters,), + initializer="ones", + dtype="float32", + trainable=True, + ) + + self.built = True + + def convolution_op(self, inputs, kernel): + # Kernel has shape HWIO, normalize over HWI + mean, var = tf.nn.moments(kernel, axes=[0, 1, 2], keepdims=True) + + # Manually fused normalization, eq. 
to (w - mean) * gain / sqrt(N * var) + scale = tf.math.rsqrt(tf.math.maximum(var * self.fan_in, 1e-4)) * self.gain + shift = mean * scale + return super().convolution_op(inputs, kernel * scale - shift) + + +def HeConv2D( + x, filters=64, kernel_size=(3, 3), strides=(1, 1), use_bias=True, name=None +): + if kernel_size >= 3: + padding_name = ("%s_padding" % name) if name is not None else name + x = layers.ZeroPadding2D( + padding=(int(kernel_size / 2), int(kernel_size / 2)), name=padding_name + )(x) + w_init = tf.keras.initializers.VarianceScaling(1.0, "fan_in", "normal") + x = ScaledWSConv2d( + filters, + kernel_size, + strides, + padding="valid", + use_bias=use_bias, + kernel_initializer=w_init, + kernel_regularizer=regularizers.l2(0.00005), + name=name, + )(x) + return x + + +def NFBlock( + x, filters=64, alpha=1.0, beta=1.0, strides=1, stochdepth_rate=0.0, prefix=None +): + in_channels = x.shape[-1] + + out = SReLU(name=formatName(prefix, "relu_01"))(x) * beta + + if strides > 1 or in_channels != filters * 4: + if strides > 1: + shortcut = layers.AveragePooling2D( + padding="same", name=formatName(prefix, "averagepooling2d_shortcut") + )(out) + else: + shortcut = out + shortcut = HeConv2D( + shortcut, + filters * 4, + kernel_size=1, + name=formatName(prefix, "conv2d_shortcut"), + ) + else: + shortcut = x + + out = HeConv2D(out, filters, kernel_size=1, name=formatName(prefix, "conv2d_01")) + + out = SReLU(name=formatName(prefix, "relu_02"))(out) + out = HeConv2D( + out, + filters, + kernel_size=3, + strides=strides, + name=formatName(prefix, "conv2d_02"), + ) + + out = SReLU(name=formatName(prefix, "relu_03"))(out) + out = HeConv2D( + out, filters * 4, kernel_size=1, name=formatName(prefix, "conv2d_03") + ) + + if stochdepth_rate > 0.0: + out = StochDepth(drop_rate=stochdepth_rate)(out) + + out = SkipInit()(out) + + return out * alpha + shortcut + + +def NFResNet50V1(in_shape=(320, 320, 3), out_classes=2000): + alpha = 0.2 + stochdepth_rate = 0.1 + definition = {"blocks": [3, 4, 6, 3], "filters": [64, 128, 256, 512]} + + num_blocks = sum(definition["blocks"]) + + img_input = layers.Input(shape=in_shape) + + # Root block / "stem" + x = HeConv2D( + img_input, + filters=64, + kernel_size=7, + strides=2, + use_bias=False, + name="root_conv2d_01", + ) + x = layers.ZeroPadding2D(padding=(1, 1), name="root_maxpooling2d_01_pad")(x) + x = layers.MaxPooling2D( + (3, 3), strides=(2, 2), padding="valid", name="root_maxpooling2d_01" + )(x) + + index = 0 + full_index = 0 + expected_std = 1.0 + for stage_depth, block_width in zip(definition["blocks"], definition["filters"]): + for block_index in range(stage_depth): + beta = 1.0 / expected_std + block_stochdepth_rate = stochdepth_rate * full_index / num_blocks + x = NFBlock( + x, + block_width, + alpha, + beta, + strides=2 if (block_index == 0 and index > 0) else 1, + stochdepth_rate=block_stochdepth_rate, + prefix="block%d_cell%d" % (index, block_index), + ) + + if block_index == 0: + expected_std = 1.0 + expected_std = np.sqrt(expected_std ** 2 + alpha ** 2) + full_index += 1 + index += 1 + + # Classification block + x = SReLU(name="predictions_relu")(x) + x = layers.GlobalAveragePooling2D(name="predictions_globalavgpooling")(x) + x = layers.Dropout(0.25)(x) + + x = layers.Dense(out_classes, kernel_initializer="zeros", name="predictions_dense")( + x + ) + x = layers.Activation("sigmoid", name="predictions_sigmoid")(x) + + model = Model(img_input, x, name="NFResNet50V1") + return model diff --git a/Models/ResNet.py b/Models/ResNet.py new file mode 
100644 index 0000000..27a2b4c --- /dev/null +++ b/Models/ResNet.py @@ -0,0 +1,151 @@ +from tensorflow.keras import layers, regularizers +from tensorflow.keras.models import Model + + +def formatName(prefix, name): + return ( + ("%s_%s" % (prefix, name)) if prefix is not None and name is not None else None + ) + + +def HeConv2D(x, filters=64, kernel_size=(3, 3), strides=(1, 1), name=None): + if kernel_size >= 3: + padding_name = ("%s_padding" % name) if name is not None else name + x = layers.ZeroPadding2D( + padding=(int(kernel_size / 2), int(kernel_size / 2)), name=padding_name + )(x) + x = layers.Conv2D( + filters, + kernel_size, + strides, + padding="valid", + use_bias=False, + kernel_initializer="he_normal", + kernel_regularizer=regularizers.l2(0.00005), + name=name, + )(x) + return x + + +def ResBlockV2(x, filters=64, first=False, prefix=None): + x1 = layers.BatchNormalization( + momentum=0.9, epsilon=0.00001, axis=3, name=formatName(prefix, "batchnorm_01") + )(x) + x1 = layers.ReLU(name=formatName(prefix, "relu_01"))(x1) + + x2 = x1 + + x1 = HeConv2D(x1, filters, kernel_size=1, name=formatName(prefix, "conv2d_01")) + + x1 = layers.BatchNormalization( + momentum=0.9, epsilon=0.00001, axis=3, name=formatName(prefix, "batchnorm_02") + )(x1) + x1 = layers.ReLU(name=formatName(prefix, "relu_02"))(x1) + x1 = HeConv2D(x1, filters, kernel_size=3, name=formatName(prefix, "conv2d_02")) + + x1 = layers.BatchNormalization( + momentum=0.9, epsilon=0.00001, axis=3, name=formatName(prefix, "batchnorm_03") + )(x1) + x1 = layers.ReLU(name=formatName(prefix, "relu_03"))(x1) + x1 = HeConv2D(x1, filters * 4, kernel_size=1, name=formatName(prefix, "conv2d_03")) + + if first: + x2 = HeConv2D( + x2, filters * 4, kernel_size=1, name=formatName(prefix, "conv2d_shortcut") + ) + x = x2 + x1 + else: + x = x + x1 + return x + + +def DownBlockV2(x, filters=64, prefix=None): + x = layers.BatchNormalization( + momentum=0.9, epsilon=0.00001, axis=3, name=formatName(prefix, "batchnorm_01") + )(x) + x = layers.ReLU(name=formatName(prefix, "relu_01"))(x) + + x2 = x + + x1 = HeConv2D(x, filters, kernel_size=1, name=formatName(prefix, "conv2d_01")) + + x1 = layers.BatchNormalization( + momentum=0.9, epsilon=0.00001, axis=3, name=formatName(prefix, "batchnorm_02") + )(x1) + x1 = layers.ReLU(name=formatName(prefix, "relu_02"))(x1) + x1 = HeConv2D( + x1, filters, kernel_size=3, strides=2, name=formatName(prefix, "conv2d_02") + ) + + x1 = layers.BatchNormalization( + momentum=0.9, epsilon=0.00001, axis=3, name=formatName(prefix, "batchnorm_03") + )(x1) + x1 = layers.ReLU(name=formatName(prefix, "relu_03"))(x1) + x1 = HeConv2D(x1, filters * 4, kernel_size=1, name=formatName(prefix, "conv2d_03")) + + x2 = layers.AveragePooling2D( + padding="same", name=formatName(prefix, "averagepooling2d_shortcut") + )(x2) + x2 = HeConv2D( + x2, filters * 4, kernel_size=1, name=formatName(prefix, "conv2d_shortcut") + ) + + x = x2 + x1 + return x + + +def ResNet50V4(in_shape=(320, 320, 3), out_classes=2000): + img_input = layers.Input(shape=in_shape) + + # Root block / "stem" + x = layers.BatchNormalization( + momentum=0.9, epsilon=0.00001, axis=3, name="root_batchnorm_01" + )(img_input) + x = HeConv2D(x, filters=64, kernel_size=7, strides=2, name="root_conv2d_01") + x = layers.BatchNormalization( + momentum=0.9, epsilon=0.00001, axis=3, name="root_batchnorm_02" + )(x) + x = layers.ReLU(name="root_relu_01")(x) + x = layers.ZeroPadding2D(padding=(1, 1), name="root_maxpooling2d_01_pad")(x) + x = layers.MaxPooling2D( + (3, 3), strides=(2, 2), 
padding="valid", name="root_maxpooling2d_01" + )(x) + + # Block 1 + x = ResBlockV2(x, filters=64, first=True, prefix="block01_cell01") + x = ResBlockV2(x, filters=64, prefix="block01_cell02") + x = ResBlockV2(x, filters=64, prefix="block01_cell03") + + # Block 2 + x = DownBlockV2(x, filters=128, prefix="block02_cell01") + x = ResBlockV2(x, filters=128, prefix="block02_cell02") + x = ResBlockV2(x, filters=128, prefix="block02_cell03") + x = ResBlockV2(x, filters=128, prefix="block02_cell04") + + # Block 3 + x = DownBlockV2(x, filters=256, prefix="block03_cell01") + x = ResBlockV2(x, filters=256, prefix="block03_cell02") + x = ResBlockV2(x, filters=256, prefix="block03_cell03") + x = ResBlockV2(x, filters=256, prefix="block03_cell04") + x = ResBlockV2(x, filters=256, prefix="block03_cell05") + x = ResBlockV2(x, filters=256, prefix="block03_cell06") + + # Block 4 + x = DownBlockV2(x, filters=512, prefix="block04_cell01") + x = ResBlockV2(x, filters=512, prefix="block04_cell02") + x = ResBlockV2(x, filters=512, prefix="block04_cell03") + + # Classification block + x = layers.BatchNormalization( + momentum=0.9, epsilon=0.00001, axis=3, name="predictions_batchnorm" + )(x) + x = layers.ReLU(name="predictions_relu")(x) + x = layers.GlobalAveragePooling2D(name="predictions_globalavgpooling")(x) + + x = layers.Dense( + out_classes, kernel_initializer="he_normal", name="predictions_dense" + )(x) + x = layers.Activation("sigmoid", name="predictions_sigmoid")(x) + + model = Model(img_input, x, name="ResNet50V4") + return model diff --git a/SFW_cleanup/calcstats.py b/SFW_cleanup/calcstats.py new file mode 100644 index 0000000..86b73dc --- /dev/null +++ b/SFW_cleanup/calcstats.py @@ -0,0 +1,44 @@ +import sqlite3 + +import numpy as np +import pandas as pd + +all_files = open("danboorufiles.txt", "r").readlines() +top_tags = pd.read_csv("purged.csv") +top_counts = list(top_tags["count"]) +top_ids = list(top_tags["tag_id"]) + +db = sqlite3.connect(r"F:\MLArchives\danbooru2020\danbooru2020.db") +db_cursor = db.cursor() + +count = 0 +totalTags = 0 +tagCounts = dict.fromkeys(top_ids, 0) +for img in all_files: + img_id = int(img.rsplit("/", 1)[1].rsplit(".", 1)[0]) + query = "SELECT tag_id FROM imageTags WHERE image_id = ?" 
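+    # Look up this image's tags and tally them against the selected tag list;
+    # after the loop, tags whose observed count falls below half the expected
+    # ratio (or below 600) are pruned from purged.csv.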
+ db_cursor.execute(query, (img_id,)) + tags = db_cursor.fetchall() + tags = [tag_id[0] for tag_id in tags] + top_labels = np.intersect1d(top_ids, tags) + totalTags += len(top_labels) + count += 1 + for elem in top_labels: + tagCounts[elem] += 1 + +db.close() + +tagRatios = dict.fromkeys(top_ids, 0) +ratio = len(all_files) / 4226544 +for top_id, top_count in zip(top_ids, top_counts): + tagRatios[top_id] = int(top_count * ratio) + +stuff = sorted(tagCounts, key=lambda x: tagCounts[x], reverse=True) +for elem in stuff: + if tagCounts[elem] < tagRatios[elem] * 0.5 or tagCounts[elem] < 600: + top_tags = top_tags[top_tags.tag_id != elem] + print(elem, tagCounts[elem], tagRatios[elem]) + +top_tags.to_csv("purged.csv", index=False) + +print(totalTags / count) diff --git a/SFW_cleanup/notes.txt b/SFW_cleanup/notes.txt new file mode 100644 index 0000000..9f79f46 --- /dev/null +++ b/SFW_cleanup/notes.txt @@ -0,0 +1,7 @@ +Query: +--- +SELECT * FROM tags +WHERE category IN (0, 3, 4) +AND count >= 200 +ORDER BY count DESC +--- diff --git a/SFW_cleanup/whittle.py b/SFW_cleanup/whittle.py new file mode 100644 index 0000000..0ff3216 --- /dev/null +++ b/SFW_cleanup/whittle.py @@ -0,0 +1,27 @@ +import sqlite3 + +import numpy as np +import pandas as pd + +all_files = open("danboorufiles.txt", "r").readlines() +top_labels = list(pd.read_csv("purged.csv")["tag_id"]) + +db = sqlite3.connect(r"F:\MLArchives\danbooru2020\danbooru2020.db") +db_cursor = db.cursor() + +accepted = [] +for img in all_files: + img_id = int(img.rsplit("/", 1)[1].replace(".jpg", "")) + query = "SELECT tag_id FROM imageTags WHERE image_id = ?" + db_cursor.execute(query, (img_id,)) + tags = db_cursor.fetchall() + tags = [tag_id[0] for tag_id in tags] + top_tags = np.intersect1d(top_labels, tags) + if len(top_tags) >= 15: + accepted.append(img) + +db.close() + +with open("danboorufiles.txt", "w") as f: + for line in accepted: + f.write(line) diff --git a/Utils/agc.py b/Utils/agc.py new file mode 100644 index 0000000..02c0ffb --- /dev/null +++ b/Utils/agc.py @@ -0,0 +1,51 @@ +""" An implementation of Adaptive Gradient Clipping +@article{brock2021high, + author={Andrew Brock and Soham De and Samuel L. Smith and Karen Simonyan}, + title={High-Performance Large-Scale Image Recognition Without Normalization}, + journal={arXiv preprint arXiv:}, + year={2021} +} +Code references: + * Official JAX implementation (paper authors): https://github.com/deepmind/deepmind-research/tree/master/nfnets + * Ross Wightman's implementation https://github.com/rwightman/pytorch-image-models/blob/master/timm/utils/agc.py +""" + +import tensorflow as tf + + +def compute_norm(x, axis, keepdims): + return tf.math.reduce_sum(x ** 2, axis=axis, keepdims=keepdims) ** 0.5 + + +def unitwise_norm(x): + if len(x.get_shape()) <= 1: # Scalars and vectors + axis = None + keepdims = False + elif len(x.get_shape()) in [2, 3]: # Linear layers of shape IO or multihead linear + axis = 0 + keepdims = True + elif len(x.get_shape()) == 4: # Conv kernels of shape HWIO + axis = [ + 0, + 1, + 2, + ] + keepdims = True + else: + raise ValueError(f"Got a parameter with shape not in [1, 2, 4]! 
{x}") + return compute_norm(x, axis, keepdims) + + +def adaptive_clip_grad(parameters, gradients, clip_factor=0.01, eps=1e-3): + new_grads = [] + for (params, grads) in zip(parameters, gradients): + if "predictions_dense" not in params.name: + p_norm = unitwise_norm(params) + max_norm = tf.math.maximum(p_norm, eps) * clip_factor + grad_norm = unitwise_norm(grads) + clipped_grad = grads * (max_norm / tf.math.maximum(grad_norm, 1e-6)) + new_grad = tf.where(grad_norm < max_norm, grads, clipped_grad) + else: + new_grad = grads + new_grads.append(new_grad) + return new_grads diff --git a/Utils/dbimutils.py b/Utils/dbimutils.py new file mode 100644 index 0000000..332b25b --- /dev/null +++ b/Utils/dbimutils.py @@ -0,0 +1,54 @@ +# DanBooru IMage Utility functions + +import cv2 +import numpy as np +from PIL import Image + + +def smart_imread(img, flag=cv2.IMREAD_UNCHANGED): + if img.endswith(".gif"): + img = Image.open(img) + img = img.convert("RGB") + img = cv2.cvtColor(np.array(img), cv2.COLOR_RGB2BGR) + else: + img = cv2.imread(img, flag) + return img + + +def smart_24bit(img): + if img.dtype is np.dtype(np.uint16): + img = (img / 257).astype(np.uint8) + + if len(img.shape) == 2: + img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) + elif img.shape[2] == 4: + trans_mask = img[:, :, 3] == 0 + img[trans_mask] = [255, 255, 255, 255] + img = cv2.cvtColor(img, cv2.COLOR_BGRA2BGR) + return img + + +def make_square(img, target_size): + old_size = img.shape[:2] + desired_size = max(old_size) + desired_size = max(desired_size, target_size) + + delta_w = desired_size - old_size[1] + delta_h = desired_size - old_size[0] + top, bottom = delta_h // 2, delta_h - (delta_h // 2) + left, right = delta_w // 2, delta_w - (delta_w // 2) + + color = [255, 255, 255] + new_im = cv2.copyMakeBorder( + img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color + ) + return new_im + + +def smart_resize(img, size): + # Assumes the image has already gone through make_square + if img.shape[0] > size: + img = cv2.resize(img, (size, size), interpolation=cv2.INTER_AREA) + elif img.shape[0] < size: + img = cv2.resize(img, (size, size), interpolation=cv2.INTER_CUBIC) + return img diff --git a/Utils/mixup.py b/Utils/mixup.py new file mode 100644 index 0000000..61e8a21 --- /dev/null +++ b/Utils/mixup.py @@ -0,0 +1,44 @@ +import tensorflow as tf + + +def sample_beta_distribution(size, concentration_0=0.2, concentration_1=0.2): + gamma_1_sample = tf.random.gamma(shape=[size], alpha=concentration_1) + gamma_2_sample = tf.random.gamma(shape=[size], alpha=concentration_0) + return gamma_1_sample / (gamma_1_sample + gamma_2_sample) + + +def mix_up(ds_one, ds_two, alpha=0.2): + # Unpack two datasets + images_one, labels_one = ds_one + images_two, labels_two = ds_two + batch_size = tf.shape(images_one)[0] + + # Sample lambda and reshape it to do the mixup + l = sample_beta_distribution(batch_size, alpha, alpha) + x_l = tf.reshape(l, (batch_size, 1, 1, 1)) + y_l = tf.reshape(l, (batch_size, 1)) + + # Perform mixup on both images and labels by combining a pair of images/labels + # (one from each dataset) into one image/label + images = images_one * x_l + images_two * (1 - x_l) + labels = labels_one * y_l + labels_two * (1 - y_l) + return (images, labels) + + +def mix_up_single(images, labels, alpha=0.2): + # Unpack one dataset, generate a second by reversing the input one on the batch axis + images_one, labels_one = images, labels + images_two = tf.reverse(images_one, axis=[0]) + labels_two = tf.reverse(labels_one, axis=[0]) + batch_size = 
tf.shape(images_one)[0] + + # Sample lambda and reshape it to do the mixup + l = sample_beta_distribution(batch_size, alpha, alpha) + x_l = tf.reshape(l, (batch_size, 1, 1, 1)) + y_l = tf.reshape(l, (batch_size, 1)) + + # Perform mixup on both images and labels by combining a pair of images/labels + # (one from each dataset) into one image/label + images = images_one * x_l + images_two * (1 - x_l) + labels = labels_one * y_l + labels_two * (1 - y_l) + return (images, labels) diff --git a/analyze_metrics.py b/analyze_metrics.py new file mode 100644 index 0000000..dce109f --- /dev/null +++ b/analyze_metrics.py @@ -0,0 +1,98 @@ +import numpy as np +from matplotlib import pyplot as plt + + +def find_factor(img_tags, img_probs, start=0.3, end=0.9, points=6001): + prec = [] + rec = [] + f1s = [] + f2s = [] + clip_points = [] + first_time = False + yz = (img_tags > 0).astype(np.uint) + for x in np.linspace(start, end, points): + pos = (img_probs > x).astype(np.uint) + pct = pos + 2 * yz + + TN = np.sum(pct == 0).astype(np.float32) + FP = np.sum(pct == 1).astype(np.float32) + FN = np.sum(pct == 2).astype(np.float32) + TP = np.sum(pct == 3).astype(np.float32) + + recall = TP / (TP + FN) + precision = TP / (TP + FP) + accuracy = (TP + TN) / (TP + TN + FP + FN) + + if precision >= recall and first_time == False: + factor = round(x, 4) + break + print(x) + first_time = True + + F1 = 2 * (precision * recall) / (precision + recall) + F2 = 5 * (precision * recall) / ((4 * precision) + recall) + + clip_points.append(x) + prec.append(precision) + rec.append(recall) + f1s.append(F1) + f2s.append(F2) + + # plt.figure(figsize=(1920 / 96, 1080 / 96), dpi=96) + # plt.plot(clip_points, prec, label="precision") + # plt.plot(clip_points, rec, label="recall") + # plt.plot(clip_points, f1s, label="F1") + # plt.plot(clip_points, f2s, label="F2") + # plt.legend() + # plt.show() + return factor + + +img_probs = np.load("tags_probs_NFNetL1V1-100-0.57141.npy") +img_tags = np.load("2020_0000_0599/encoded_tags_test.npy") + +""" +factor_end = 0.9 +factor_start = 0.3 +points = factor_end * 100 - factor_start * 100 + 1 +factor = find_factor(img_tags, img_probs, factor_start, factor_end, int(points)) +factor_start_step = factor * 100 - 1 +factor_start = factor_start_step / 100 +points = factor_end * 1000 - factor_start * 1000 + 1 +factor = find_factor(img_tags, img_probs, factor_start, factor_end, int(points)) +factor_start_step = factor * 1000 - 1 +factor_start = factor_start_step / 1000 +points = factor_end * 10000 - factor_start * 10000 + 1 +factor = find_factor(img_tags, img_probs, factor_start, factor_end, int(points)) +""" + +factor_L1V1_100L = 0.3485 +factor = factor_L1V1_100L +pos = (img_probs > factor).astype(np.uint) +yz = (img_tags > 0).astype(np.uint) +pct = pos + 2 * yz + +TN = np.sum(pct == 0).astype(np.float32) +FP = np.sum(pct == 1).astype(np.float32) +FN = np.sum(pct == 2).astype(np.float32) +TP = np.sum(pct == 3).astype(np.float32) + +recall = TP / (TP + FN) +precision = TP / (TP + FP) +accuracy = (TP + TN) / (TP + TN + FP + FN) + +F1 = 2 * (precision * recall) / (precision + recall) +F2 = 5 * (precision * recall) / ((4 * precision) + recall) + +MCC = ((TP * TN) - (FP * FN)) / np.sqrt((TP + FP) * (TP + FN) * (TN + FP) * (TN + FN)) + +d = { + "thres": factor, + "F1": round(F1, 4), + "F2": round(F2, 4), + "MCC": round(MCC, 4), + "A": round(accuracy, 4), + "R": round(recall, 4), + "P": round(precision, 4), +} +print(d) diff --git a/analyze_metrics_top.py b/analyze_metrics_top.py new file mode 100644 index 
0000000..f1463db --- /dev/null +++ b/analyze_metrics_top.py @@ -0,0 +1,101 @@ +import numpy as np +from matplotlib import pyplot as plt + + +def find_factor(img_tags, img_probs, start=0.3, end=0.9, points=6001): + prec = [] + rec = [] + f1s = [] + f2s = [] + clip_points = [] + first_time = False + yz = (img_tags > 0).astype(np.uint) + for x in np.linspace(start, end, points): + pos = (img_probs > x).astype(np.uint) + pct = pos + 2 * yz + + TN = np.sum(pct == 0).astype(np.float32) + FP = np.sum(pct == 1).astype(np.float32) + FN = np.sum(pct == 2).astype(np.float32) + TP = np.sum(pct == 3).astype(np.float32) + + recall = TP / (TP + FN) + precision = TP / (TP + FP) + accuracy = (TP + TN) / (TP + TN + FP + FN) + + if precision >= recall and first_time == False: + factor = round(x, 4) + break + print(x) + first_time = True + + F1 = 2 * (precision * recall) / (precision + recall) + F2 = 5 * (precision * recall) / ((4 * precision) + recall) + + clip_points.append(x) + prec.append(precision) + rec.append(recall) + f1s.append(F1) + f2s.append(F2) + + # plt.figure(figsize=(1920/96, 1080/96), dpi=96) + # plt.plot(clip_points, prec, label='precision') + # plt.plot(clip_points, rec, label='recall') + # plt.plot(clip_points, f1s, label='F1') + # plt.plot(clip_points, f2s, label='F2') + # plt.legend() + # plt.show() + return factor + + +img_probs = np.load("tags_probs_NFNetL1V1-100-0.57141.npy") +img_tags = np.load("2020_0000_0599/encoded_tags_test.npy") + +img_probs = img_probs[:, 1092:] +img_tags = img_tags[:, 1092:] + +""" +factor_end = 0.9 +factor_start = 0.3 +points = factor_end*100 - factor_start*100 + 1 +factor = find_factor(img_tags, img_probs, factor_start, factor_end, int(points)) +factor_start_step = factor * 100 - 1 +factor_start = factor_start_step / 100 +points = factor_end*1000 - factor_start*1000 + 1 +factor = find_factor(img_tags, img_probs, factor_start, factor_end, int(points)) +factor_start_step = factor * 1000 - 1 +factor_start = factor_start_step / 1000 +points = factor_end*10000 - factor_start*10000 + 1 +factor = find_factor(img_tags, img_probs, factor_start, factor_end, int(points)) +""" + +factor_L1V1_100L = 0.3485 +factor = factor_L1V1_100L +pos = (img_probs > factor).astype(np.uint) +yz = (img_tags > 0).astype(np.uint) +pct = pos + 2 * yz + +TN = np.sum(pct == 0).astype(np.float32) +FP = np.sum(pct == 1).astype(np.float32) +FN = np.sum(pct == 2).astype(np.float32) +TP = np.sum(pct == 3).astype(np.float32) + +recall = TP / (TP + FN) +precision = TP / (TP + FP) +accuracy = (TP + TN) / (TP + TN + FP + FN) + +F1 = 2 * (precision * recall) / (precision + recall) +F2 = 5 * (precision * recall) / ((4 * precision) + recall) + +MCC = ((TP * TN) - (FP * FN)) / np.sqrt((TP + FP) * (TP + FN) * (TN + FP) * (TN + FN)) + +d = { + "thres": factor, + "F1": round(F1, 4), + "F2": round(F2, 4), + "MCC": round(MCC, 4), + "A": round(accuracy, 4), + "R": round(recall, 4), + "P": round(precision, 4), +} +print(d) diff --git a/bench_res_onnx.py b/bench_res_onnx.py new file mode 100644 index 0000000..6535b09 --- /dev/null +++ b/bench_res_onnx.py @@ -0,0 +1,21 @@ +from time import perf_counter + +import numpy as np +import onnxruntime as rt + +dim = 320 +model = rt.InferenceSession("networks/NFNetL1V1-100-0.57141.onnx") + +img = np.random.rand(1, dim, dim, 3).astype(np.float32) + +input_name = model.get_inputs()[0].name +label_name = model.get_outputs()[0].name +probs = model.run([label_name], {input_name: img})[0] + +runs = 100 +times = [] +for _ in range(runs): + start = perf_counter() + _ = 
model.run([label_name], {input_name: img}) + times.append(perf_counter() - start) +print((sum(times) * 1000) / runs) diff --git a/calc_stats.py b/calc_stats.py new file mode 100644 index 0000000..f778086 --- /dev/null +++ b/calc_stats.py @@ -0,0 +1,45 @@ +import sqlite3 + +import numpy as np +import pandas as pd + +all_files = open("2020_0000_0599/trainlist.txt", "r").readlines() +top_tags = pd.read_csv("2020_0000_0599/selected_tags.csv") +top_counts = list(top_tags["count"]) +top_ids = list(top_tags["tag_id"]) + +db = sqlite3.connect(r"F:\MLArchives\danbooru2020\danbooru2020.db") +db_cursor = db.cursor() + +count = 0 +totalTags = 0 +tagCounts = dict.fromkeys(top_ids, 0) +query = "SELECT tag_id FROM imageTags WHERE image_id = ?" +for img in all_files: + img_id = int(img.rsplit("/", 1)[1].rsplit(".", 1)[0]) + db_cursor.execute(query, (img_id,)) + tags = db_cursor.fetchall() + tags = [tag_id[0] for tag_id in tags] + top_tags = np.intersect1d(top_ids, tags) + totalTags += len(top_tags) + count += 1 + for elem in top_tags: + tagCounts[elem] += 1 + +db.close() + +tagRatios = dict.fromkeys(top_ids, 0) +ratio = len(all_files) / 4226544 +for top_id, top_count in zip(top_ids, top_counts): + tagRatios[top_id] = int(top_count * ratio) + +stuff = sorted(tagCounts, key=lambda x: tagCounts[x], reverse=True) +for elem in stuff: + print( + elem, + tagCounts[elem], + tagRatios[elem], + "delete" if tagCounts[elem] < tagRatios[elem] * 0.5 else "", + ) + +print(totalTags / count) diff --git a/check_images.py b/check_images.py new file mode 100644 index 0000000..46f642e --- /dev/null +++ b/check_images.py @@ -0,0 +1,21 @@ +# Check that training images aren't truncated or damaged +# Log the ones that are to a text file + +from PIL import Image + +images_list = open("2020_0000_0599/trainlist.txt").readlines() +images_list = [x.rstrip() for x in images_list] + +for filename in images_list: + try: + img = Image.open(filename) # open the image file + img.verify() # verify that it is a good image, without decoding it.. 
quite fast + img.close() + img = Image.open(filename) # open the image file + img.transpose( + Image.FLIP_LEFT_RIGHT + ) # apply a simple transform to trigger all the other checks + img.close() + except Exception as e: + with open("error_images.log", "w") as outfile: + outfile.write("%s: %s\n" % (filename, str(e))) diff --git a/collect_tag_images.py b/collect_tag_images.py new file mode 100644 index 0000000..70eaeb6 --- /dev/null +++ b/collect_tag_images.py @@ -0,0 +1,18 @@ +import numpy as np +import pandas as pd + +threshold = 0.3485 +tag_name = "virtual_youtuber" +path_prefix = r"D:\Images\danbooru2020\original" + +files = [x.rstrip() for x in open("2020_0000_0599/origlist.txt").readlines()] +arr = np.load("2020_0000_0599/encoded_tags_test.npy", allow_pickle=True) +df = pd.read_csv("2020_0000_0599/selected_tags.csv") + +index = np.where(df["name"] == tag_name)[0][0] + +images_indexes = np.where(arr[:, index] > threshold) +image_paths = [files[x] for x in images_indexes[0]] + +for partial_path in image_paths: + print('cp "%s\\%s" test' % (path_prefix, partial_path.replace("/", "\\"))) diff --git a/encode_tags.py b/encode_tags.py new file mode 100644 index 0000000..358cf90 --- /dev/null +++ b/encode_tags.py @@ -0,0 +1,29 @@ +import numpy as np +import pandas as pd +import sqlite3 + +from tqdm import tqdm + +df = pd.read_csv("2020_0000_0599/selected_tags.csv") +labels = df["tag_id"].tolist() + +db = sqlite3.connect(r"F:\MLArchives\danbooru2020\danbooru2020.db") +db_cursor = db.cursor() + +images_list = open("2020_0000_0599/testlist.txt").readlines() +img_ids = [int(image.rsplit("/", 1)[1].rsplit(".", 1)[0]) for image in images_list] + +query = "SELECT tag_id FROM imageTags WHERE image_id = ?" +img_tags = np.empty((len(img_ids), len(labels)), dtype=np.uint8) +for index, img_id in enumerate(tqdm(img_ids)): + db_cursor.execute(query, (img_id,)) + tags = db_cursor.fetchall() + tags = [tag_id[0] for tag_id in tags] + if len(tags) == 0: + print("%s: found 0 tags" % image) + continue + encoded = np.isin(labels, tags).astype(np.uint8) + img_tags[index] = encoded + +db.close() +np.save("2020_0000_0599/encoded_tags_test.npy", img_tags) diff --git a/gen_hydrus.py b/gen_hydrus.py new file mode 100644 index 0000000..ba0e32c --- /dev/null +++ b/gen_hydrus.py @@ -0,0 +1,64 @@ +import os + +import cv2 +import numpy as np +import pandas as pd + +use_GPU = True +if use_GPU == False: + os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152 + os.environ["CUDA_VISIBLE_DEVICES"] = "-1" + +os.environ["TF_CPP_MIN_LOG_LEVEL"] = "1" + +import tensorflow as tf + +from Generator.UpscalePred import DataGenerator +from Models.NFNet import NFNetV1 + +gpus = tf.config.experimental.list_physical_devices("GPU") +if gpus: + try: + for gpu in gpus: + tf.config.experimental.set_memory_growth(gpu, True) + except RuntimeError as e: + print(e) + +dim = 320 +thresh = 0.3485 +images_folder = r"C:\images" + +label_names = pd.read_csv("2020_0000_0599/selected_tags.csv") + +model = NFNetV1( + in_shape=(dim, dim, 3), out_classes=2380, definition_name="L1", use_eca=False +) +model.load_weights("networks/NFNetL1V1-100-0.57141/variables/variables") +model.trainable = False + +images_list = [] +for r, d, f in os.walk(images_folder): + for file in f: + if file.endswith(".png") or file.endswith(".jpg") or file.endswith(".jpeg"): + images_list.append(os.path.join(r, file)) + +generator = DataGenerator(images_list, batch_size=4, dim=(dim, dim)) + +probs = model.predict(generator, verbose=1, use_multiprocessing=False, workers=7) + +# 
Surely there must be a better way - and don't call me Shirley +indexes = [np.where(probs[x, :] > thresh)[0] for x in range(probs.shape[0])] + +for image, index_list in zip(images_list, indexes): + labels_list = [] + extracted = label_names.iloc[index_list][["name", "category"]] + for index, pair in extracted.iterrows(): + if pair["category"] == 0: + labels_list.append(pair["name"]) + # elif pair['category'] == 3: + # labels_list.append('series:%s' % pair['name']) + # elif pair['category'] == 4: + # labels_list.append('character:%s' % pair['name']) + labels_list = "\n".join(labels_list) + with open("%s.txt" % image, "w") as f: + f.writelines(labels_list) diff --git a/img_stats.py b/img_stats.py new file mode 100644 index 0000000..b780a5e --- /dev/null +++ b/img_stats.py @@ -0,0 +1,33 @@ +import pathlib + +import cv2 +import numpy as np + +from Utils import dbimutils + + +def representative_dataset_gen(): + dim = 320 + images = open("2020_0000_0599/origlist.txt", "r").readlines() + images = [r"D:\Images\danbooru2020\original\%s" % x.rstrip() for x in images] + for i in range(8375): + target_img = images[i] + img = dbimutils.smart_imread(target_img) + img = dbimutils.smart_24bit(img) + img = dbimutils.make_square(img, dim) + img = dbimutils.smart_resize(img, dim) + img = img.astype(np.float64) + yield img + + +means = np.zeros((3,)) +stds = np.zeros((3,)) +img_gen = representative_dataset_gen() +for img in img_gen: + means += img.mean(axis=(0, 1)) + stds += img.std(axis=(0, 1)) +print(np.around((means / 8375) / 255, 3), np.around((stds / 8375) / 255, 3)) + +# output: +# mean = np.array([0.747, 0.752, 0.792]) +# std = np.array([0.262, 0.265, 0.249]) diff --git a/make_report.py b/make_report.py new file mode 100644 index 0000000..b331bf5 --- /dev/null +++ b/make_report.py @@ -0,0 +1,40 @@ +import numpy as np +import pandas as pd + +tags = pd.read_csv("2020_0000_0599/selected_tags.csv") +label = np.load("2020_0000_0599/encoded_tags_test.npy") + +factor = 0.3485 +eps = np.finfo(np.float32).eps +preds = np.load("tags_probs_NFNetL1V1-100-0.57141.npy") +preds = (preds > factor).astype(np.uint8) + +actual_freq = np.sum(label, axis=0) / label.shape[0] +predicted_freq = np.sum(preds, axis=0) / label.shape[0] + +pct = preds + 2 * label +TN = np.sum(pct == 0, axis=0).astype(np.float32) +FP = np.sum(pct == 1, axis=0).astype(np.float32) +FN = np.sum(pct == 2, axis=0).astype(np.float32) +TP = np.sum(pct == 3, axis=0).astype(np.float32) + +recall = TP / (TP + FN) +precision = TP / ((TP + FP) + eps) + +F1 = 2 * (precision * recall) / ((precision + recall) + eps) + +df = pd.DataFrame() +df["Tag"] = tags["name"] +df["Category"] = tags["category"] +df["Actual_posts"] = np.sum(label, axis=0) +df["Predicted_posts"] = np.sum(preds, axis=0) +df["Correct_predictions"] = TP.astype(np.uint) +df["Actual_frequency"] = actual_freq +df["Predicted_frequency"] = predicted_freq +df["Precision"] = precision +df["Recall"] = recall +df["F_score"] = F1 + +df = df.sort_values("Actual_posts", ascending=False) +df.to_csv("report.csv", index=False, float_format="%.02f") +df.to_html("report.html", index=False, float_format="%.02f") diff --git a/notes_temp_misc.txt b/notes_temp_misc.txt new file mode 100644 index 0000000..930f992 --- /dev/null +++ b/notes_temp_misc.txt @@ -0,0 +1,29 @@ +All classes, std thresh: +NFNetL1V1-100-0.57141: {'thres': 0.3485, 'F1': 0.6133, 'F2': 0.6133, 'MCC': 0.6094, 'A': 0.9923, 'R': 0.6133, 'P': 0.6133} +NFResNet50V1-50-0.58250: {'thres': 0.3385, 'F1': 0.5941, 'F2': 0.5940, 'MCC': 0.5900, 'A': 0.9919, 'R': 
0.5940, 'P': 0.5942} +ResNet50V4-30-0.60595: {'thres': 0.3520, 'F1': 0.5934, 'F2': 0.5934, 'MCC': 0.5893, 'A': 0.9919, 'R': 0.5934, 'P': 0.5934} +NFNetL0V1-50-0.58678: {'thres': 0.3395, 'F1': 0.5923, 'F2': 0.5922, 'MCC': 0.5882, 'A': 0.9919, 'R': 0.5922, 'P': 0.5924} + +Bottom classes, std thresh: +NFNetL1V1-100-0.57141: {'thres': 0.3485, 'F1': 0.4437, 'F2': 0.3764, 'MCC': 0.4642, 'A': 0.9986, 'R': 0.3419, 'P': 0.6319} +NFResNet50V1-50-0.58250: {'thres': 0.3385, 'F1': 0.4086, 'F2': 0.3391, 'MCC': 0.4341, 'A': 0.9986, 'R': 0.3046, 'P': 0.6203} +ResNet50V4-30-0.60595: {'thres': 0.3520, 'F1': 0.4057, 'F2': 0.3483, 'MCC': 0.4212, 'A': 0.9985, 'R': 0.3183, 'P': 0.5591} +NFNetL0V1-50-0.58678: {'thres': 0.3395, 'F1': 0.3979, 'F2': 0.3283, 'MCC': 0.4247, 'A': 0.9985, 'R': 0.2940, 'P': 0.6151} + +All classes, P >= 0.75: +NFNetL1V1-100-0.57141: {'thres': 0.4351, 'F1': 0.5855, 'F2': 0.5174, 'MCC': 0.5970, 'A': 0.9932, 'R': 0.4801, 'P': 0.7501} +NFResNet50V1-50-0.58250: {'thres': 0.4262, 'F1': 0.5614, 'F2': 0.4878, 'MCC': 0.5770, 'A': 0.9930, 'R': 0.4486, 'P': 0.7500} +ResNet50V4-30-0.60595: {'thres': 0.4465, 'F1': 0.5581, 'F2': 0.4838, 'MCC': 0.5742, 'A': 0.9930, 'R': 0.4444, 'P': 0.7500} +NFNetL0V1-50-0.58678: {'thres': 0.4273, 'F1': 0.5599, 'F2': 0.4860, 'MCC': 0.5757, 'A': 0.9930, 'R': 0.4466, 'P': 0.7501} + +Bottom classes, P >= 0.75: +NFNetL1V1-100-0.57141: {'thres': 0.4159, 'F1': 0.4071, 'F2': 0.3195, 'MCC': 0.4573, 'A': 0.9987, 'R': 0.2794, 'P': 0.7500} +NFResNet50V1-50-0.58250: {'thres': 0.4062, 'F1': 0.3689, 'F2': 0.2827, 'MCC': 0.4279, 'A': 0.9986, 'R': 0.2446, 'P': 0.7502} +ResNet50V4-30-0.60595: {'thres': 0.4583, 'F1': 0.3527, 'F2': 0.2676, 'MCC': 0.4154, 'A': 0.9986, 'R': 0.2305, 'P': 0.7502} +NFNetL0V1-50-0.58678: {'thres': 0.4059, 'F1': 0.3577, 'F2': 0.2722, 'MCC': 0.4192, 'A': 0.9986, 'R': 0.2348, 'P': 0.7500} + +All classes, max F2: +NFNetL1V1-100-0.57141: {'thres': 0.2550, 'F1': 0.5541, 'F2': 0.6572, 'MCC': 0.5687, 'A': 0.9880, 'R': 0.7503, 'P': 0.4392} +NFResNet50V1-50-0.58250: {'thres': 0.2563, 'F1': 0.5366, 'F2': 0.6363, 'MCC': 0.5503, 'A': 0.9876, 'R': 0.7261, 'P': 0.4256} +ResNet50V4-30-0.60595: {'thres': 0.2640, 'F1': 0.5342, 'F2': 0.6360, 'MCC': 0.5487, 'A': 0.9874, 'R': 0.7285, 'P': 0.4218} +NFNetL0V1-50-0.58678: {'thres': 0.2561, 'F1': 0.5329, 'F2': 0.6343, 'MCC': 0.5473, 'A': 0.9874, 'R': 0.7264, 'P': 0.4209} diff --git a/notes_temp_train.txt b/notes_temp_train.txt new file mode 100644 index 0000000..99758ca --- /dev/null +++ b/notes_temp_train.txt @@ -0,0 +1,90 @@ +NFResNet50V1: +01: {'thres': 0.3060, 'F1': 0.4320, 'F2': 0.4319, 'MCC': 0.4263, 'A': 0.9887, 'R': 0.4318, 'P': 0.4322} +02: {'thres': 0.3008, 'F1': 0.4750, 'F2': 0.4750, 'MCC': 0.4697, 'A': 0.9896, 'R': 0.4750, 'P': 0.4750} +03: {'thres': 0.3043, 'F1': 0.4986, 'F2': 0.4986, 'MCC': 0.4936, 'A': 0.9900, 'R': 0.4986, 'P': 0.4987} +04: {'thres': 0.3153, 'F1': 0.5096, 'F2': 0.5095, 'MCC': 0.5047, 'A': 0.9903, 'R': 0.5094, 'P': 0.5098} +05: {'thres': 0.3115, 'F1': 0.5178, 'F2': 0.5178, 'MCC': 0.5130, 'A': 0.9904, 'R': 0.5177, 'P': 0.5179} +06: {'thres': 0.3226, 'F1': 0.5227, 'F2': 0.5226, 'MCC': 0.5179, 'A': 0.9905, 'R': 0.5225, 'P': 0.5229} +07: {'thres': 0.3201, 'F1': 0.5267, 'F2': 0.5267, 'MCC': 0.5220, 'A': 0.9906, 'R': 0.5267, 'P': 0.5267} +08: {'thres': 0.3319, 'F1': 0.5430, 'F2': 0.5430, 'MCC': 0.5385, 'A': 0.9909, 'R': 0.5429, 'P': 0.5432} +09: {'thres': 0.3312, 'F1': 0.5472, 'F2': 0.5472, 'MCC': 0.5427, 'A': 0.9910, 'R': 0.5471, 'P': 0.5473} +10: {'thres': 0.3344, 'F1': 0.5483, 'F2': 0.5482, 'MCC': 0.5437, 
'A': 0.9910, 'R': 0.5481, 'P': 0.5484} +11: {'thres': 0.3309, 'F1': 0.5546, 'F2': 0.5545, 'MCC': 0.5501, 'A': 0.9912, 'R': 0.5545, 'P': 0.5547} +12: {'thres': 0.3335, 'F1': 0.5496, 'F2': 0.5495, 'MCC': 0.5451, 'A': 0.9911, 'R': 0.5494, 'P': 0.5498} +13: {'thres': 0.3354, 'F1': 0.5620, 'F2': 0.5619, 'MCC': 0.5576, 'A': 0.9913, 'R': 0.5618, 'P': 0.5621} +14: {'thres': 0.3442, 'F1': 0.5658, 'F2': 0.5657, 'MCC': 0.5614, 'A': 0.9914, 'R': 0.5656, 'P': 0.5660} +15: {'thres': 0.3375, 'F1': 0.5732, 'F2': 0.5731, 'MCC': 0.5689, 'A': 0.9915, 'R': 0.5731, 'P': 0.5734} +16: {'thres': 0.3454, 'F1': 0.5754, 'F2': 0.5753, 'MCC': 0.5711, 'A': 0.9916, 'R': 0.5753, 'P': 0.5755} +17: {'thres': 0.3490, 'F1': 0.5758, 'F2': 0.5757, 'MCC': 0.5715, 'A': 0.9916, 'R': 0.5757, 'P': 0.5759} +18: {'thres': 0.3493, 'F1': 0.5877, 'F2': 0.5877, 'MCC': 0.5836, 'A': 0.9918, 'R': 0.5877, 'P': 0.5877} +19: {'thres': 0.3499, 'F1': 0.5894, 'F2': 0.5894, 'MCC': 0.5853, 'A': 0.9918, 'R': 0.5893, 'P': 0.5895} +20: {'thres': 0.3491, 'F1': 0.5899, 'F2': 0.5899, 'MCC': 0.5858, 'A': 0.9919, 'R': 0.5899, 'P': 0.5899} +21: {'thres': 0.3491, 'F1': 0.5913, 'F2': 0.5912, 'MCC': 0.5872, 'A': 0.9919, 'R': 0.5912, 'P': 0.5913} +22: {'thres': 0.3510, 'F1': 0.5909, 'F2': 0.5909, 'MCC': 0.5868, 'A': 0.9919, 'R': 0.5909, 'P': 0.5909} +23: {'thres': 0.3483, 'F1': 0.5927, 'F2': 0.5927, 'MCC': 0.5886, 'A': 0.9919, 'R': 0.5927, 'P': 0.5927} +24: {'thres': 0.3495, 'F1': 0.5911, 'F2': 0.5911, 'MCC': 0.5870, 'A': 0.9919, 'R': 0.5911, 'P': 0.5912} +25: {'thres': 0.3520, 'F1': 0.5922, 'F2': 0.5921, 'MCC': 0.5881, 'A': 0.9919, 'R': 0.5921, 'P': 0.5922} +26: {'thres': 0.3512, 'F1': 0.5926, 'F2': 0.5926, 'MCC': 0.5885, 'A': 0.9919, 'R': 0.5926, 'P': 0.5926} +31: {'thres': 0.3274, 'F1': 0.5637, 'F2': 0.5636, 'MCC': 0.5593, 'A': 0.9913, 'R': 0.5635, 'P': 0.5638} +38: {'thres': 0.3272, 'F1': 0.5797, 'F2': 0.5797, 'MCC': 0.5755, 'A': 0.9917, 'R': 0.5796, 'P': 0.5799} +44: {'thres': 0.3363, 'F1': 0.5929, 'F2': 0.5929, 'MCC': 0.5889, 'A': 0.9919, 'R': 0.5929, 'P': 0.5929} +50: {'thres': 0.3385, 'F1': 0.5940, 'F2': 0.5940, 'MCC': 0.5900, 'A': 0.9919, 'R': 0.5940, 'P': 0.5941} + +ResNet50V4: +08: {'thres': 0.3278, 'F1': 0.5366, 'F2': 0.5366, 'MCC': 0.5320, 'A': 0.9908, 'R': 0.5365, 'P': 0.5367} +12: {'thres': 0.3388, 'F1': 0.5433, 'F2': 0.5432, 'MCC': 0.5387, 'A': 0.9909, 'R': 0.5431, 'P': 0.5435} +18: {'thres': 0.3485, 'F1': 0.5716, 'F2': 0.5715, 'MCC': 0.5673, 'A': 0.9915, 'R': 0.5714, 'P': 0.5717} +25: {'thres': 0.3401, 'F1': 0.5768, 'F2': 0.5767, 'MCC': 0.5725, 'A': 0.9916, 'R': 0.5767, 'P': 0.5768} +30: {'thres': 0.3520, 'F1': 0.5933, 'F2': 0.5933, 'MCC': 0.5893, 'A': 0.9919, 'R': 0.5933, 'P': 0.5934} + +NFNetL0V1: +01: {'thres': 0.3037, 'F1': 0.4455, 'F2': 0.4455, 'MCC': 0.4400, 'A': 0.9890, 'R': 0.4455, 'P': 0.4456} +02: {'thres': 0.3047, 'F1': 0.4859, 'F2': 0.4859, 'MCC': 0.4807, 'A': 0.9898, 'R': 0.4859, 'P': 0.4859} +03: {'thres': 0.3213, 'F1': 0.5037, 'F2': 0.5037, 'MCC': 0.4987, 'A': 0.9901, 'R': 0.5036, 'P': 0.5038} +04: {'thres': 0.3111, 'F1': 0.5185, 'F2': 0.5184, 'MCC': 0.5137, 'A': 0.9904, 'R': 0.5183, 'P': 0.5187} +05: {'thres': 0.3197, 'F1': 0.5245, 'F2': 0.5243, 'MCC': 0.5197, 'A': 0.9906, 'R': 0.5243, 'P': 0.5246} +06: {'thres': 0.3161, 'F1': 0.5373, 'F2': 0.5373, 'MCC': 0.5326, 'A': 0.9908, 'R': 0.5373, 'P': 0.5373} +07: {'thres': 0.3224, 'F1': 0.5405, 'F2': 0.5405, 'MCC': 0.5359, 'A': 0.9909, 'R': 0.5405, 'P': 0.5405} +08: {'thres': 0.3271, 'F1': 0.5413, 'F2': 0.5413, 'MCC': 0.5367, 'A': 0.9909, 'R': 0.5413, 'P': 0.5413} +09: {'thres': 
0.3270, 'F1': 0.5483, 'F2': 0.5483, 'MCC': 0.5438, 'A': 0.9910, 'R': 0.5483, 'P': 0.5484} +10: {'thres': 0.3251, 'F1': 0.5498, 'F2': 0.5498, 'MCC': 0.5453, 'A': 0.9911, 'R': 0.5498, 'P': 0.5498} +11: {'thres': 0.3303, 'F1': 0.5538, 'F2': 0.5538, 'MCC': 0.5493, 'A': 0.9911, 'R': 0.5538, 'P': 0.5538} +12: {'thres': 0.3290, 'F1': 0.5568, 'F2': 0.5567, 'MCC': 0.5523, 'A': 0.9912, 'R': 0.5567, 'P': 0.5568} +13: {'thres': 0.3292, 'F1': 0.5584, 'F2': 0.5584, 'MCC': 0.5540, 'A': 0.9912, 'R': 0.5583, 'P': 0.5585} +14: {'thres': 0.3285, 'F1': 0.5627, 'F2': 0.5627, 'MCC': 0.5583, 'A': 0.9913, 'R': 0.5627, 'P': 0.5627} +15: {'thres': 0.3305, 'F1': 0.5631, 'F2': 0.5631, 'MCC': 0.5587, 'A': 0.9913, 'R': 0.5631, 'P': 0.5631} +17: {'thres': 0.3345, 'F1': 0.5772, 'F2': 0.5771, 'MCC': 0.5729, 'A': 0.9916, 'R': 0.5771, 'P': 0.5772} +20: {'thres': 0.3357, 'F1': 0.5799, 'F2': 0.5798, 'MCC': 0.5757, 'A': 0.9917, 'R': 0.5798, 'P': 0.5800} +26: {'thres': 0.3392, 'F1': 0.5807, 'F2': 0.5807, 'MCC': 0.5765, 'A': 0.9917, 'R': 0.5806, 'P': 0.5808} +30: {'thres': 0.3367, 'F1': 0.5828, 'F2': 0.5827, 'MCC': 0.5786, 'A': 0.9917, 'R': 0.5827, 'P': 0.5829} +36: {'thres': 0.3298, 'F1': 0.5761, 'F2': 0.5760, 'MCC': 0.5718, 'A': 0.9916, 'R': 0.5760, 'P': 0.5762} +42: {'thres': 0.3396, 'F1': 0.5891, 'F2': 0.5891, 'MCC': 0.5850, 'A': 0.9918, 'R': 0.5890, 'P': 0.5892} +50: {'thres': 0.3395, 'F1': 0.5923, 'F2': 0.5922, 'MCC': 0.5882, 'A': 0.9919, 'R': 0.5922, 'P': 0.5924} + +NFNetL1V1: +04: {'thres': 0.3212, 'F1': 0.5225, 'F2': 0.5224, 'MCC': 0.5177, 'A': 0.9905, 'R': 0.5223, 'P': 0.5226} +08: {'thres': 0.3280, 'F1': 0.5540, 'F2': 0.5540, 'MCC': 0.5495, 'A': 0.9911, 'R': 0.5539, 'P': 0.5541} +12: {'thres': 0.3308, 'F1': 0.5669, 'F2': 0.5669, 'MCC': 0.5625, 'A': 0.9914, 'R': 0.5669, 'P': 0.5669} +15: {'thres': 0.3385, 'F1': 0.5721, 'F2': 0.5720, 'MCC': 0.5678, 'A': 0.9915, 'R': 0.5720, 'P': 0.5722} +19: {'thres': 0.3358, 'F1': 0.5806, 'F2': 0.5805, 'MCC': 0.5764, 'A': 0.9917, 'R': 0.5805, 'P': 0.5806} +24: {'thres': 0.3400, 'F1': 0.5873, 'F2': 0.5873, 'MCC': 0.5832, 'A': 0.9918, 'R': 0.5873, 'P': 0.5873} +30: {'thres': 0.3431, 'F1': 0.5935, 'F2': 0.5935, 'MCC': 0.5895, 'A': 0.9919, 'R': 0.5934, 'P': 0.5936} +33: {'thres': 0.3359, 'F1': 0.5956, 'F2': 0.5956, 'MCC': 0.5916, 'A': 0.9920, 'R': 0.5956, 'P': 0.5956} +36: {'thres': 0.3459, 'F1': 0.6036, 'F2': 0.6036, 'MCC': 0.5996, 'A': 0.9921, 'R': 0.6036, 'P': 0.6036} +41: {'thres': 0.3480, 'F1': 0.6061, 'F2': 0.6060, 'MCC': 0.6021, 'A': 0.9922, 'R': 0.6059, 'P': 0.6062} +45: {'thres': 0.3458, 'F1': 0.6082, 'F2': 0.6082, 'MCC': 0.6043, 'A': 0.9922, 'R': 0.6082, 'P': 0.6083} +48: {'thres': 0.3473, 'F1': 0.6080, 'F2': 0.6080, 'MCC': 0.6041, 'A': 0.9922, 'R': 0.6079, 'P': 0.6081} +52: {'thres': 0.3485, 'F1': 0.6081, 'F2': 0.6081, 'MCC': 0.6042, 'A': 0.9922, 'R': 0.6080, 'P': 0.6082} +58: {'thres': 0.3474, 'F1': 0.6097, 'F2': 0.6097, 'MCC': 0.6058, 'A': 0.9922, 'R': 0.6097, 'P': 0.6097} +60: {'thres': 0.3485, 'F1': 0.6097, 'F2': 0.6096, 'MCC': 0.6058, 'A': 0.9923, 'R': 0.6096, 'P': 0.6098} +66: {'thres': 0.3358, 'F1': 0.6000, 'F2': 0.5999, 'MCC': 0.5960, 'A': 0.9921, 'R': 0.5998, 'P': 0.6001} +68: {'thres': 0.3423, 'F1': 0.6000, 'F2': 0.6000, 'MCC': 0.5960, 'A': 0.9921, 'R': 0.6000, 'P': 0.6001} +71: {'thres': 0.3471, 'F1': 0.6099, 'F2': 0.6098, 'MCC': 0.6060, 'A': 0.9923, 'R': 0.6097, 'P': 0.6100} +75: {'thres': 0.3489, 'F1': 0.6106, 'F2': 0.6105, 'MCC': 0.6067, 'A': 0.9923, 'R': 0.6104, 'P': 0.6107} +77: {'thres': 0.3489, 'F1': 0.5965, 'F2': 0.5965, 'MCC': 0.5925, 'A': 0.9920, 'R': 
0.5965, 'P': 0.5966} +81: {'thres': 0.3398, 'F1': 0.5992, 'F2': 0.5992, 'MCC': 0.5952, 'A': 0.9920, 'R': 0.5991, 'P': 0.5993} +83: {'thres': 0.3469, 'F1': 0.5977, 'F2': 0.5977, 'MCC': 0.5937, 'A': 0.9920, 'R': 0.5977, 'P': 0.5977} +88: {'thres': 0.3471, 'F1': 0.6014, 'F2': 0.6013, 'MCC': 0.5974, 'A': 0.9921, 'R': 0.6013, 'P': 0.6015} +92: {'thres': 0.3482, 'F1': 0.6108, 'F2': 0.6108, 'MCC': 0.6069, 'A': 0.9923, 'R': 0.6108, 'P': 0.6108} +96: {'thres': 0.3487, 'F1': 0.6128, 'F2': 0.6128, 'MCC': 0.6089, 'A': 0.9923, 'R': 0.6128, 'P': 0.6128} +100:{'thres': 0.3485, 'F1': 0.6133, 'F2': 0.6133, 'MCC': 0.6094, 'A': 0.9923, 'R': 0.6133, 'P': 0.6133} diff --git a/tags_server.py b/tags_server.py new file mode 100644 index 0000000..5180eda --- /dev/null +++ b/tags_server.py @@ -0,0 +1,80 @@ +import base64 +import json +import os + +import cv2 +import numpy as np +import pandas as pd +from flask import Flask, jsonify, request + +from Utils import dbimutils + +use_GPU = False +if use_GPU == False: + os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152 + os.environ["CUDA_VISIBLE_DEVICES"] = "-1" + os.environ["TF_ENABLE_ONEDNN_OPTS"] = "1" + +import tensorflow as tf + +from Models.NFNet import NFNetV1 + + +class JitModel: + def __init__(self, model): + self.model = model + + @tf.function + def predict(self, x): + return self.model(x, training=False) + + +app = Flask(__name__) + + +@app.route("/api/gettags/", methods=["POST"]) +def gettags(): + """ + Function run at each API call + No need to re-load the model + """ + + request_json = request.get_json() + + thresh = 0.3485 if "thresh" not in request_json else float(request_json["thresh"]) + img = request_json["image"] + txt_img = base64.b64decode(img) + np_img = np.frombuffer(txt_img, np.uint8) + img = cv2.imdecode(np_img, flags=cv2.IMREAD_UNCHANGED) + img = dbimutils.smart_24bit(img) + img = dbimutils.make_square(img, dim) + img = dbimutils.smart_resize(img, dim) + img = img.astype(np.float32) / 255 + img = np.expand_dims(img, 0) + probs = model.predict(img).numpy() + + label_names["probs"] = probs[0] + found_tags = label_names[label_names["probs"] > thresh][["name", "category"]] + + labels_list = [] + for index, pair in found_tags.iterrows(): + if pair["category"] == 0: + labels_list.append(pair["name"]) + elif pair["category"] == 3: + labels_list.append("series:%s" % pair["name"]) + elif pair["category"] == 4: + labels_list.append("character:%s" % pair["name"]) + + return jsonify(labels_list) + + +if __name__ == "__main__": + # Model is loaded when the API is launched + dim = 320 + model = NFNetV1((dim, dim, 3), 2380, "L1", use_eca=False) + model.load_weights(r"networks\NFNetL1V1-100-0.57141\variables\variables") + model = JitModel(model) + + label_names = pd.read_csv("2020_0000_0599/selected_tags.csv") + + app.run(debug=False) diff --git a/test_batch_savedmodel.py b/test_batch_savedmodel.py new file mode 100644 index 0000000..6dc4c1c --- /dev/null +++ b/test_batch_savedmodel.py @@ -0,0 +1,45 @@ +import os + +import cv2 +import numpy as np + +use_GPU = True +if use_GPU == False: + os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152 + os.environ["CUDA_VISIBLE_DEVICES"] = "-1" + +os.environ["TF_CPP_MIN_LOG_LEVEL"] = "1" + +import tensorflow as tf + +from Generator.UpscalePred import DataGenerator +from Models.NFNet import NFNetV1 + +gpus = tf.config.experimental.list_physical_devices("GPU") +if gpus: + try: + for gpu in gpus: + tf.config.experimental.set_memory_growth(gpu, True) + except RuntimeError as e: + print(e) + +dim = 320 
+images_basepath = r"F:\MLArchives\danbooru2020\512px" + +imagesList = open("2020_0000_0599/testlist.txt").readlines() +imagesList = [x.rstrip() for x in imagesList] +imagesList = ["%s/%s" % (images_basepath, x) for x in imagesList] + +model = NFNetV1( + in_shape=(dim, dim, 3), out_classes=2380, definition_name="L1", use_eca=False +) + +for modelNum in [100]: + model.load_weights("trial/NFNetL1V1-rmc-%02d/variables/variables" % modelNum) + model.trainable = False + + generator = DataGenerator(imagesList, batch_size=32, dim=(dim, dim)) + + probs = model.predict(generator, verbose=1, use_multiprocessing=False, workers=7) + + np.save("tags_probs_%dL.npy" % modelNum, probs) diff --git a/test_res_onnx.py b/test_res_onnx.py new file mode 100644 index 0000000..4b93b1a --- /dev/null +++ b/test_res_onnx.py @@ -0,0 +1,33 @@ +import os +import sys + +import numpy as np +import onnxruntime as rt +import pandas as pd + +from Utils import dbimutils + +pd.set_option("display.max_rows", 1000) + +dim = 320 +thresh = 0.3485 +model = rt.InferenceSession("networks/NFNetL1V1-100-0.57141.onnx") +label_names = pd.read_csv("2020_0000_0599/selected_tags.csv") + +target_img = "82148729_p0.jpg" if len(sys.argv) < 2 else sys.argv[1] + +img = dbimutils.smart_imread(target_img) +img = dbimutils.smart_24bit(img) +img = dbimutils.make_square(img, dim) +img = dbimutils.smart_resize(img, dim) +img = img.astype(np.float32) / 255 +img = np.expand_dims(img, 0) + +input_name = model.get_inputs()[0].name +label_name = model.get_outputs()[0].name +probs = model.run([label_name], {input_name: img})[0] + +label_names["probs"] = probs[0] +found_tags = label_names[label_names["probs"] > thresh][["tag_id", "name", "probs"]] + +print(found_tags) diff --git a/test_res_savedmodel.py b/test_res_savedmodel.py new file mode 100644 index 0000000..b2c6487 --- /dev/null +++ b/test_res_savedmodel.py @@ -0,0 +1,43 @@ +import os +import sys + +import numpy as np +import pandas as pd + +from Utils import dbimutils + +pd.set_option("display.max_rows", 1000) + +use_GPU = True +if use_GPU == False: + os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152 + os.environ["CUDA_VISIBLE_DEVICES"] = "-1" + +os.environ["TF_CPP_MIN_LOG_LEVEL"] = "1" + +from tensorflow.keras.models import load_model + +from Models.NFNet import NFNetV1 + +dim = 320 +thresh = 0.3485 +# model = NFNetV1(in_shape=(dim, dim, 3), out_classes=2380, definition_name="L1", use_eca=False) +# model.load_weights("networks/NFNetL1V1-100-0.57141/variables/variables") +model = load_model("networks/NFNetL1V1-100-0.57141") +label_names = pd.read_csv("2020_0000_0599/selected_tags.csv") + +target_img = "82148729_p0.jpg" if len(sys.argv) < 2 else sys.argv[1] + +img = dbimutils.smart_imread(target_img) +img = dbimutils.smart_24bit(img) +img = dbimutils.make_square(img, dim) +img = dbimutils.smart_resize(img, dim) +img = img.astype(np.float32) / 255 +img = np.expand_dims(img, 0) + +probs = model.predict(img) + +label_names["probs"] = probs[0] +found_tags = label_names[label_names["probs"] > thresh][["tag_id", "name", "probs"]] + +print(found_tags) diff --git a/tflite_bench.py b/tflite_bench.py new file mode 100644 index 0000000..b2114e1 --- /dev/null +++ b/tflite_bench.py @@ -0,0 +1,54 @@ +import cv2 +import numpy as np +import pandas as pd +import tensorflow as tf +from tqdm import tqdm + +from Utils import dbimutils + + +def representative_dataset_gen(images=None): + dim = 320 + + if images is None: + images = open("2020_0000_0599/origlist.txt", "r").readlines() + images = 
[r"D:\Images\danbooru2020\original\%s" % x.rstrip() for x in images] + + for image_path in images: + img = dbimutils.smart_imread(image_path) + img = dbimutils.smart_24bit(img) + img = dbimutils.make_square(img, dim) + img = dbimutils.smart_resize(img, dim) + img = img.astype(np.float32) / 255 + yield img + + +# Helper function to run inference on a TFLite model +def run_tflite_model(tflite_file, test_images_list): + # Initialize the interpreter + interpreter = tf.lite.Interpreter(model_path=str(tflite_file), num_threads=8) + interpreter.allocate_tensors() + + input_details = interpreter.get_input_details()[0] + output_details = interpreter.get_output_details()[0] + + predictions = [] + generator = representative_dataset_gen(test_images_list) + for test_image in tqdm(generator): + # Check if the input type is quantized, then rescale input data to uint8 + if input_details["dtype"] == np.uint8: + input_scale, input_zero_point = input_details["quantization"] + output_scale, output_zero_point = output_details["quantization"] + test_image = test_image / input_scale + input_zero_point + + test_image = np.expand_dims(test_image, axis=0).astype(input_details["dtype"]) + interpreter.set_tensor(input_details["index"], test_image) + interpreter.invoke() + output = interpreter.get_tensor(output_details["index"])[0] + predictions.append(output * output_scale + output_zero_point) + + return np.array(predictions, dtype=np.float32) + + +probs = run_tflite_model("networks_tflite/NFNetL1V1-100-0.57141_u08.tflite", None) +np.save("tflite_preds.npy", probs) diff --git a/tflite_pred.py b/tflite_pred.py new file mode 100644 index 0000000..288a43d --- /dev/null +++ b/tflite_pred.py @@ -0,0 +1,69 @@ +import cv2 +import numpy as np +import pandas as pd +import tensorflow as tf + +from Utils import dbimutils + + +def representative_dataset_gen(images=None): + dim = 320 + + if images is None: + images = open("2020_0000_0599/origlist.txt", "r").readlines() + images = [r"D:\Images\danbooru2020\original\%s" % x.rstrip() for x in images] + + for image_path in images: + img = dbimutils.smart_imread(image_path) + img = dbimutils.smart_24bit(img) + img = dbimutils.make_square(img, dim) + img = dbimutils.smart_resize(img, dim) + img = img.astype(np.float32) / 255 + yield img + + +# Helper function to run inference on a TFLite model +def run_tflite_model(tflite_file, test_images_list): + global test_images + + # Initialize the interpreter + interpreter = tf.lite.Interpreter(model_path=str(tflite_file), num_threads=8) + interpreter.allocate_tensors() + + input_details = interpreter.get_input_details()[0] + output_details = interpreter.get_output_details()[0] + + predictions = [] + generator = representative_dataset_gen(test_images_list) + for test_image in generator: + # Check if the input type is quantized, then rescale input data to uint8 + if input_details["dtype"] == np.uint8: + input_scale, input_zero_point = input_details["quantization"] + output_scale, output_zero_point = output_details["quantization"] + test_image = test_image / input_scale + input_zero_point + + test_image = np.expand_dims(test_image, axis=0).astype(input_details["dtype"]) + interpreter.set_tensor(input_details["index"], test_image) + interpreter.invoke() + output = interpreter.get_tensor(output_details["index"])[0] + predictions.append(output * output_scale + output_zero_point) + + return np.array(predictions) + + +dim = 320 +thresh = 0.3485 +test_images = [] + +# images = open('2020_0000_0599/origlist.txt', 'r').readlines() +# images = 
['D:\\Images\\danbooru2020\\original\\%s' % x.rstrip() for x in images] +images = ["82148729_p0.jpg"] + +label_names = pd.read_csv("2020_0000_0599/selected_tags.csv") + +probs = run_tflite_model("networks_tflite/NFNetL1V1-100-0.57141_u08.tflite", images) + +label_names["probs"] = probs[0] +found_tags = label_names[label_names["probs"] > thresh][["tag_id", "name", "probs"]] + +print(found_tags) diff --git a/tflite_quant.py b/tflite_quant.py new file mode 100644 index 0000000..023d659 --- /dev/null +++ b/tflite_quant.py @@ -0,0 +1,42 @@ +import pathlib + +import cv2 +import numpy as np +import tensorflow as tf + +from Utils import dbimutils + + +def representative_dataset_gen(): + dim = 320 + images = open("2020_0000_0599/origlist.txt", "r").readlines() + images = [r"D:\Images\danbooru2020\original\%s" % x.rstrip() for x in images] + + rng = np.random.default_rng(1249) + rng.shuffle(images) + for i in range(1000): + target_img = images[i] + img = dbimutils.smart_imread(target_img) + img = dbimutils.smart_24bit(img) + img = dbimutils.make_square(img, dim) + img = dbimutils.smart_resize(img, dim) + img = img.astype(np.float32) / 255 + img = np.expand_dims(img, 0) + yield [img.astype(np.float32)] + + +converter = tf.lite.TFLiteConverter.from_saved_model("checkpoints/openvino-tflite") +converter.optimizations = [tf.lite.Optimize.DEFAULT] +converter.representative_dataset = representative_dataset_gen +converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8] +converter.inference_input_type = tf.uint8 +converter.inference_output_type = tf.uint8 + +tflite_model_quant = converter.convert() + +tflite_models_dir = pathlib.Path("networks_tflite") +tflite_models_dir.mkdir(parents=True, exist_ok=True) + +# Save the quantized model: +tflite_model_quant_file = tflite_models_dir / "NFResNet50V1-50-0.58250_u08.tflite" +tflite_model_quant_file.write_bytes(tflite_model_quant) diff --git a/topkimages.py b/topkimages.py new file mode 100644 index 0000000..665988d --- /dev/null +++ b/topkimages.py @@ -0,0 +1,15 @@ +import numpy as np +import pandas as pd + +lines = open("2020_0000_0599/testlist.txt").readlines() +lines = [x.rstrip() for x in lines] + +stats = np.load("tags_probs_NFNetL1V1-100-0.57141.npy") + +args = np.argmax(stats, axis=0) +top1 = [lines[x] for x in args] + +df = pd.read_csv("2020_0000_0599/selected_tags.csv") +df["top1"] = top1 + +df.to_csv("top2k.csv", index=False) diff --git a/train_nfnetL0V1_focal.py b/train_nfnetL0V1_focal.py new file mode 100644 index 0000000..1c6c9fc --- /dev/null +++ b/train_nfnetL0V1_focal.py @@ -0,0 +1,121 @@ +import numpy as np +import pandas as pd +import tensorflow as tf +from tensorflow.keras import backend as K +from tensorflow.keras.optimizers import SGD +from tensorflow_addons.losses import SigmoidFocalCrossEntropy +from tensorflow_addons.metrics import F1Score + +from Generator.Upscale_DB import DataGenerator +from Models.NFNet import NFNetV1 +from Utils import agc + + +def scheduler(epoch, lr): + if epoch == 40: + return lr * 0.1 + if epoch == 47: + return lr * 0.1 + else: + return lr + + +class AGCModel(tf.keras.Model): + def __init__(self, inner_model, clip_factor=0.02, eps=1e-3): + super(AGCModel, self).__init__() + self.inner_model = inner_model + self.clip_factor = clip_factor + self.eps = eps + + def train_step(self, data): + images, labels = data + + with tf.GradientTape() as tape: + predictions = self.inner_model(images, training=True) + loss = self.compiled_loss(labels, predictions) + trainable_params = 
self.inner_model.trainable_weights + gradients = tape.gradient(loss, trainable_params) + agc_gradients = agc.adaptive_clip_grad( + trainable_params, gradients, clip_factor=self.clip_factor, eps=self.eps + ) + self.optimizer.apply_gradients(zip(agc_gradients, trainable_params)) + + self.compiled_metrics.update_state(labels, predictions) + return {m.name: m.result() for m in self.metrics} + + def test_step(self, data): + images, labels = data + predictions = self.inner_model(images, training=False) + loss = self.compiled_loss(labels, predictions) + self.compiled_metrics.update_state(labels, predictions) + return {m.name: m.result() for m in self.metrics} + + def save_weights(self, filepath, *args, **kwargs): + super(AGCModel, self).save_weights(filepath=filepath + "_train") + self.inner_model.save(filepath=filepath) + + def call(self, inputs, *args, **kwargs): + return self.inner_model(inputs) + + +if __name__ == "__main__": + dim = 320 + miniBatch = 64 + f1 = F1Score(2380, "micro", 0.4) + model = NFNetV1( + in_shape=(dim, dim, 3), out_classes=2380, definition_name="L0", use_eca=False + ) + model.load_weights( + "checkpoints/NFNetL0V1-rmc-30-0.56855/NFNetL0V1-rmc-30-0.56855/variables/variables" + ) + + model = AGCModel(model) + + loss = SigmoidFocalCrossEntropy( + reduction=tf.keras.losses.Reduction.SUM_OVER_BATCH_SIZE + ) + + opt = SGD(learning_rate=0.5, momentum=0.9, nesterov=True) + model.compile(optimizer=opt, loss=loss, metrics=[f1]) + + arg = np.random.rand(miniBatch, dim, dim, 3) + model.predict(arg) + + # resumeModel = 'NFNetL0V1-rmc-30-0.56855' + # if resumeModel != '': + # model.load_weights('checkpoints/%s/%s_train' % (resumeModel, resumeModel)) + # K.set_value(model.optimizer.lr, 0.5) + + print("Number of parameters: %d" % model.count_params()) + + trainList = open("2020_0000_0599/trainlist.txt", "r").readlines() + trainList = [x.rstrip() for x in trainList] + + labels_list = pd.read_csv("2020_0000_0599/selected_tags.csv")["tag_id"].tolist() + + training_generator = DataGenerator( + trainList, labels_list, noise_level=2, dim=(dim, dim) + ) + + training_dataset = training_generator.genDS() + training_dataset = training_dataset.shuffle(len(trainList)) + training_dataset = training_dataset.map( + training_generator.wrap_func, num_parallel_calls=tf.data.AUTOTUNE + ) + training_dataset = training_dataset.batch(miniBatch, drop_remainder=True) + training_dataset = training_dataset.prefetch(tf.data.AUTOTUNE) + + sched = tf.keras.callbacks.LearningRateScheduler(scheduler, verbose=True) + rmc = tf.keras.callbacks.ModelCheckpoint( + "checkpoints/NFNetL0V1-rmc-{epoch:02d}-{f1_score:.5f}/NFNetL0V1-rmc-{epoch:02d}-{f1_score:.5f}", + save_best_only=False, + save_freq="epoch", + ) + + model.fit( + training_dataset, + validation_data=None, + initial_epoch=30, + epochs=50, + callbacks=[sched, rmc], + ) diff --git a/train_nfnetL1V1_focal.py b/train_nfnetL1V1_focal.py new file mode 100644 index 0000000..056ebb7 --- /dev/null +++ b/train_nfnetL1V1_focal.py @@ -0,0 +1,130 @@ +import numpy as np +import pandas as pd +import tensorflow as tf +from tensorflow.keras import backend as K +from tensorflow.keras.optimizers import SGD +from tensorflow_addons.losses import SigmoidFocalCrossEntropy +from tensorflow_addons.metrics import F1Score + +from Generator.Upscale_DB import DataGenerator +from Models.NFNet import NFNetV1 +from Utils import agc, mixup + + +def scheduler(epoch, lr): + if epoch == 35: + return lr * 0.1 + if epoch == 53: + return lr * 0.1 + if epoch == 68: + return lr * 0.1 + if epoch == 73: + 
return lr * 0.1 + if epoch == 90: + return lr * 0.1 + if epoch == 97: + return lr * 0.1 + else: + return lr + + +class AGCModel(tf.keras.Model): + def __init__(self, inner_model, clip_factor=0.02, eps=1e-3): + super(AGCModel, self).__init__() + self.inner_model = inner_model + self.clip_factor = clip_factor + self.eps = eps + + def train_step(self, data): + images, labels = data + + with tf.GradientTape() as tape: + predictions = self.inner_model(images, training=True) + loss = self.compiled_loss(labels, predictions) + trainable_params = self.inner_model.trainable_weights + gradients = tape.gradient(loss, trainable_params) + agc_gradients = agc.adaptive_clip_grad( + trainable_params, gradients, clip_factor=self.clip_factor, eps=self.eps + ) + self.optimizer.apply_gradients(zip(agc_gradients, trainable_params)) + + self.compiled_metrics.update_state(labels, predictions) + return {m.name: m.result() for m in self.metrics} + + def test_step(self, data): + images, labels = data + predictions = self.inner_model(images, training=False) + loss = self.compiled_loss(labels, predictions) + self.compiled_metrics.update_state(labels, predictions) + return {m.name: m.result() for m in self.metrics} + + def save_weights(self, filepath, *args, **kwargs): + super(AGCModel, self).save_weights(filepath=filepath + "_train") + self.inner_model.save(filepath=filepath) + + def call(self, inputs, *args, **kwargs): + return self.inner_model(inputs) + + +if __name__ == "__main__": + dim = 320 + miniBatch = 32 + f1 = F1Score(2380, "micro", 0.4) + model = NFNetV1( + in_shape=(dim, dim, 3), out_classes=2380, definition_name="L1", use_eca=False + ) + model.load_weights( + "checkpoints/NFNetL1V1-rmc-83-0.54438/NFNetL1V1-rmc-83-0.54438/variables/variables" + ) + + model = AGCModel(model) + + loss = SigmoidFocalCrossEntropy( + reduction=tf.keras.losses.Reduction.SUM_OVER_BATCH_SIZE + ) + + opt = SGD(learning_rate=0.3, momentum=0.9, nesterov=True) + model.compile(optimizer=opt, loss=loss, metrics=[f1]) + + arg = np.random.rand(miniBatch, dim, dim, 3) + model.predict(arg) + + print("Number of parameters: %d" % model.count_params()) + + trainList = open("2020_0000_0599/trainlist.txt", "r").readlines() + trainList = [x.rstrip() for x in trainList] + + labels_list = pd.read_csv("2020_0000_0599/selected_tags.csv")["tag_id"].tolist() + + training_generator = DataGenerator( + trainList, labels_list, noise_level=2, dim=(dim, dim) + ) + + training_dataset = training_generator.genDS() + training_dataset = training_dataset.shuffle(len(trainList)) + training_dataset = training_dataset.map( + training_generator.wrap_func, num_parallel_calls=tf.data.AUTOTUNE + ) + training_dataset = training_dataset.batch(miniBatch, drop_remainder=True) + + training_dataset_mu = training_dataset.map( + lambda images, labels: mixup.mix_up_single(images, labels, alpha=0.3), + num_parallel_calls=tf.data.AUTOTUNE, + ) + training_dataset_mu = training_dataset_mu.prefetch(tf.data.AUTOTUNE) + + sched = tf.keras.callbacks.LearningRateScheduler(scheduler, verbose=True) + rmc = tf.keras.callbacks.ModelCheckpoint( + "checkpoints/NFNetL1V1-rmc-{epoch:02d}-{f1_score:.5f}/NFNetL1V1-rmc-{epoch:02d}-{f1_score:.5f}", + save_best_only=False, + save_freq="epoch", + save_weights_only=True + ) + + model.fit( + training_dataset_mu, + validation_data=None, + initial_epoch=83, + epochs=100, + callbacks=[sched, rmc], + ) diff --git a/train_nfresnet50V1_focal.py b/train_nfresnet50V1_focal.py new file mode 100644 index 0000000..85467fc --- /dev/null +++ 
b/train_nfresnet50V1_focal.py @@ -0,0 +1,110 @@ +import numpy as np +import pandas as pd +import tensorflow as tf +from tensorflow.keras import backend as K +from tensorflow.keras.optimizers import SGD +from tensorflow_addons.losses import SigmoidFocalCrossEntropy +from tensorflow_addons.metrics import F1Score + +from Generator.Upscale_DB import DataGenerator +from Models.NFResNet import NFResNet50V1 +from Utils import agc + + +def scheduler(epoch, lr): + if epoch == 40: + return lr * 0.1 + if epoch == 47: + return lr * 0.1 + else: + return lr + + +class AGCModel(tf.keras.Model): + def __init__(self, inner_model, clip_factor=0.02, eps=1e-3): + super(AGCModel, self).__init__() + self.inner_model = inner_model + self.clip_factor = clip_factor + self.eps = eps + + def train_step(self, data): + images, labels = data + + with tf.GradientTape() as tape: + predictions = self.inner_model(images, training=True) + loss = self.compiled_loss(labels, predictions) + trainable_params = self.inner_model.trainable_weights + gradients = tape.gradient(loss, trainable_params) + agc_gradients = agc.adaptive_clip_grad( + trainable_params, gradients, clip_factor=self.clip_factor, eps=self.eps + ) + self.optimizer.apply_gradients(zip(agc_gradients, trainable_params)) + + self.compiled_metrics.update_state(labels, predictions) + return {m.name: m.result() for m in self.metrics} + + def test_step(self, data): + images, labels = data + predictions = self.inner_model(images, training=False) + loss = self.compiled_loss(labels, predictions) + self.compiled_metrics.update_state(labels, predictions) + return {m.name: m.result() for m in self.metrics} + + def save_weights(self, filepath, *args, **kwargs): + super(AGCModel, self).save_weights(filepath=filepath + "_train") + self.inner_model.save(filepath=filepath) + + def call(self, inputs, *args, **kwargs): + return self.inner_model(inputs) + + +if __name__ == "__main__": + dim = 320 + miniBatch = 64 + f1 = F1Score(2380, "micro", 0.4) + model = NFResNet50V1(in_shape=(dim, dim, 3), out_classes=2380) + + model = AGCModel(model) + + loss = SigmoidFocalCrossEntropy( + reduction=tf.keras.losses.Reduction.SUM_OVER_BATCH_SIZE + ) + + opt = SGD(learning_rate=0.25, momentum=0.9, nesterov=True) + model.compile(optimizer=opt, loss=loss, metrics=[f1]) + + arg = np.random.rand(miniBatch, dim, dim, 3) + model.predict(arg) + + resumeModel = "NFResNet50V1-rmc-26-0.61612" + if resumeModel != "": + model.load_weights("checkpoints/%s/%s_train" % (resumeModel, resumeModel)) + K.set_value(model.optimizer.lr, 0.5) + + print("Number of parameters: %d" % model.count_params()) + + trainList = open("2020_0000_0599/trainlist.txt", "r").readlines() + trainList = [x.rstrip() for x in trainList] + + labels_list = pd.read_csv("2020_0000_0599/selected_tags.csv")["tag_id"].tolist() + + training_generator = DataGenerator( + trainList, labels_list, noise_level=2, dim=(dim, dim), batch_size=miniBatch + ) + + sched = tf.keras.callbacks.LearningRateScheduler(scheduler, verbose=True) + rmc = tf.keras.callbacks.ModelCheckpoint( + "checkpoints/NFResNet50V1-rmc-{epoch:02d}-{f1_score:.5f}/NFResNet50V1-rmc-{epoch:02d}-{f1_score:.5f}", + save_best_only=False, + save_freq="epoch", + ) + + model.fit_generator( + generator=training_generator, + validation_data=None, + initial_epoch=26, + epochs=50, + use_multiprocessing=False, + workers=8, + callbacks=[sched, rmc], + ) diff --git a/train_resnet50V4_focal.py b/train_resnet50V4_focal.py new file mode 100644 index 0000000..dbca29b --- /dev/null +++ b/train_resnet50V4_focal.py @@ -0,0 +1,68 @@ 
+import numpy as np +import pandas as pd +import tensorflow as tf +from tensorflow.keras import backend as K +from tensorflow.keras.optimizers import SGD +from tensorflow_addons.losses import SigmoidFocalCrossEntropy +from tensorflow_addons.metrics import F1Score + +from Generator.Upscale_DB import DataGenerator +from Models.ResNet import ResNet50V4 + + +def scheduler(epoch, lr): + if epoch == 15: + return lr * 0.1 + if epoch == 26: + return lr * 0.1 + else: + return lr + + +if __name__ == "__main__": + dim = 320 + miniBatch = 64 + f1 = F1Score(2380, "micro", 0.4) + model = ResNet50V4(in_shape=(dim, dim, 3), out_classes=2380) + + loss = SigmoidFocalCrossEntropy( + reduction=tf.keras.losses.Reduction.SUM_OVER_BATCH_SIZE + ) + + opt = SGD(learning_rate=0.025, momentum=0.9, nesterov=True) + model.compile(optimizer=opt, loss=loss, metrics=[f1]) + + resumeModel = "ResNet50V4-rmc-12-0.52158" + if resumeModel != "": + model.load_weights( + "checkpoints/%s/%s/variables/variables" % (resumeModel, resumeModel) + ) + K.set_value(model.optimizer.lr, 0.05) + + print("Number of parameters: %d" % model.count_params()) + + trainList = open("2020_0000_0599/trainlist.txt", "r").readlines() + trainList = [x.rstrip() for x in trainList] + + labels_list = pd.read_csv("2020_0000_0599/selected_tags.csv")["tag_id"].tolist() + + training_generator = DataGenerator( + trainList, labels_list, noise_level=2, dim=(dim, dim), batch_size=miniBatch + ) + + sched = tf.keras.callbacks.LearningRateScheduler(scheduler, verbose=True) + rmc = tf.keras.callbacks.ModelCheckpoint( + "checkpoints/ResNet50V4-rmc-{epoch:02d}-{f1_score:.5f}/ResNet50V4-rmc-{epoch:02d}-{f1_score:.5f}", + save_best_only=False, + save_freq="epoch", + ) + + model.fit_generator( + generator=training_generator, + validation_data=None, + initial_epoch=12, + epochs=30, + use_multiprocessing=False, + workers=8, + callbacks=[sched, rmc], + )
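
Example client call for the /api/gettags/ endpoint defined in tags_server.py above: a minimal sketch, assuming the server is running locally on Flask's default port 5000, that the requests package is available, and that the image path and thresh value are placeholders rather than anything required by the repo.

import base64

import requests

# Read an image, base64-encode it, and POST it as JSON; "thresh" is optional
# and falls back to the server-side default (0.3485) when omitted.
with open("82148729_p0.jpg", "rb") as f:
    payload = {
        "image": base64.b64encode(f.read()).decode("ascii"),
        "thresh": 0.3485,
    }

resp = requests.post("http://127.0.0.1:5000/api/gettags/", json=payload)
print(resp.json())  # JSON list of tag names, e.g. ["1girl", "solo", ...]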