Codibook Recommender System Project - 2. Image Processing

Image Processing

from PIL import Image
import cv2
import numpy as np
import pandas as pd
import pickle
import os
from itertools import repeat, chain
from glob import glob
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import Dataset, DataLoader, random_split
from torch import optim
from torch.optim.lr_scheduler import ReduceLROnPlateau
from torchvision import transforms, models
from torchsummary import summary
import copy
import time
from sklearn.metrics import accuracy_score, confusion_matrix
from tqdm import tqdm

from sklearn.manifold import TSNE
from sklearn.cluster import KMeans
import seaborn as sns
import matplotlib.pyplot as plt
from numpy.linalg import norm
from gensim.models import Word2Vec
import torchvision
torchvision.__version__
'0.8.2+cu101'

0. Loading test

Loading an image

np.array(Image.open('data/img/codi_1/상의.png')).shape
(716, 600, 4)
Image.open('data/img/codi_1/상의.png')

(rendered image: data/img/codi_1/상의.png)

Loading the hashtag dictionary

with open('data/tags.dat', 'rb') as f:
    hashtags = pickle.load(f)
hashtags
{1: ['심플코디', '2021autunm', '코디', '무채색'],
 2: ['깔끔', '편안함', '간단', '캐주얼룩'],
 3: ['데일리룩', '간단', '간편', '가을코디', '깔끔'],
 4: ['코디', '간단', '귀여운', '프레피룩'],
 5: ['20대룩', '심플'],
 6: ['누구나', '가을', '심플룩', '간편하게', '20대코디', '대학생', '자켓'],
 7: ['오오티디', '로퍼', '20대코디'],
 8: ['일상', '20대코디', '프레피룩'],
 9: ['깔끔', '모던시크', '간단'],
 10: ['가을코디', '베이직', '심플', '깔끔'],
 11: ['일상', '깔끔', '대학생', '편하게', '캐주얼'],
 12: ['심플', '숄더백', '오피스룩', '20대코디', '깔끔'],
 13: ['모던', '프레피룩', '블랙'],
 14: ['간편하게', '편안함', '일상', '캐주얼룩'],
 15: ['일상', '데일리', '일상적', '캐주얼'],
 16: ['심플베이직', '코디', '데일리룩', '심플룩'],
 17: ['대학생룩', '블라우스', '심플룩'],
 18: ['데님', '가디건', '심플룩', '데일리룩'],
 19: ['가을', '데이트룩', '깔끔'],
 20: ['간편', '20대코디', '가을데일리', '오오티디'],
 21: ['블라우스', '데이트', '20대코디', '데일리'],
 22: ['심플룩', '일상적', '심플베이직'],
 23: ['데일리룩', '심플코디', '예쁨', '자켓', '팬츠'],
 24: ['패션', '프레피룩', '20대코디'],
 25: ['20대', '스커트', '데일리룩'],
 26: ['팬츠', '시크룩', '일상', '20대코디', '프레피룩'],
 27: ['20대코디', '프레피룩', '브라운코디', '아가일패턴'],
 28: ['20대코디', '가을', '프레피룩'],
 29: ['블랙', '블라우스', '프레피룩', '20대룩', '러블리'],
 30: ['예쁨', '가을', '20대', '프레피룩'],
 31: ['프레피룩', '가을', '브라운코디', '데일리'],
 32: ['양말', '깔끔', '프레피룩'],
 33: ['양말', '일상', '20대코디', '프레피룩', '모자'],
 34: ['니트', '20대룩', '간편'],
 35: ['스커트', '20대코디', '핑크'],
 36: ['오오티디', '가을', '플렛', '20대코디', '프레피룩'],
 37: ['20대룩', '가을', '프레피룩', '깔끔단정'],
 38: ['20대룩', '프레피룩', '오피스룩'],
 39: ['일상', '20대코디', '가디건&베스트', '오오티디'],
 40: ['일상룩', '심플룩', '모던'],
 41: ['모던', '프레피룩', '러블리룩', '오오티디'],
 42: ['브라운코디', '프레피룩', '유니크룩'],
 43: ['시크', '모던시크', '프레피룩'],
 44: ['간편하게', '꾸안꾸', '모던'],
 45: ['숄더백', '20대코디', '일상'],
 46: ['여름', '꾸안꾸', '심플룩', '캐주얼', '간편하게'],
 47: ['네크리스', '20대', '오오티디', '데이트룩'],
 48: ['20대코디', '편안함', '꾸안꾸', '가디건'],
 49: ['20대코디', '베이직', '일상룩', '심플룩'],
 50: ['대학생룩', '꾸안꾸', '20대', '심플코디'],
 51: ['꾸안꾸', '일상룩', '심플', '20대코디'],
 52: ['반바지', '20대코디', '유니크룩', '깅엄체크'],
 53: ['일상룩', '대학생룩', '스니커즈', '데님'],
 54: ['블랙', '모노톤', '유니크룩', '깅엄체크'],
 55: ['깔끔단정', '일상적', '캐주얼룩', '20대코디'],
 56: ['패션', '코디', '간단', '대학생'],
 57: ['숄더백', '여름', '20대코디', '유니크'],
 58: ['일상적', '코디', '20대코디', '미리가을'],
 59: ['심플룩', '블랙', '유니크룩'],
 60: ['꾸안꾸', '예쁨', '심플베이직', '누구나', '데일리'],
 61: ['캐주얼룩', '여름룩', '편안함'],
 62: ['하이틴', '유니크', '20대코디', '블랙', '깅엄체크'],
 63: ['간편', '일상', '20대', '오오티디', '심플코디'],
 64: ['꾸안꾸', '간편하게', '데일리', '심플코디'],
 65: ['심플코디', '베이직', '20대코디'],
 66: ['심플코디', '20대코디', '꾸안꾸', '간편하게'],
 67: ['코디', '예쁨', '심플', '20대'],
 68: ['여름룩', '패션', '20대룩', '캐주얼룩', '캠퍼스룩'],
 69: ['심플룩', '대학생', '코디', '아가일베스트', '오오티디'],
 70: ['사진', '간단', '예쁨', '오피스룩', '20대코디'],
 71: ['일상적', '대학생룩', '로퍼', '꾸안꾸'],
 72: ['깔끔', '코디', '일상룩', '린넨'],
 73: ['데이트', '깔끔단정', '이쁨', '20대코디'],
 74: ['샌들&슬리퍼', '심플', '간편'],
 75: ['폰케이스', '심플룩', '20대코디', '데일리'],
 76: ['심플베이직', '모던', '예쁨', '일상적', '심플'],
 77: ['코디', '심플코디', '일상'],
 78: ['오오티디', '심플룩', '여름코디', '간편하게'],
 79: ['스니커즈', '폰케이스', '깔끔단정'],
 80: ['20대룩', '일상', '귀여운', '대학생룩', '캐주얼'],
 81: ['일상', '누구나', '개강룩'],
 82: ['간편하게', '20대코디', '귀여운'],
 83: ['여성스러운', '20대룩', '숄더백', '개강룩'],
 84: ['코디', '패션', '스커트', '20대룩'],
 85: ['대학생', '캐쥬얼룩', '일상', '데일리', '심플베이직'],
 86: ['예쁨', '샌들&슬리퍼', '깔끔', '심플룩', '데일리', '20대코디'],
 87: ['유니크', '블랙', '러블리그런지룩'],
 88: ['20대', '블라우스', '데이트룩'],
 89: ['20대룩', '간편하게', '모던'],
 90: ['20대코디', '하이틴룩'],
 91: ['대학생', '일상적', '20대코디', '간단'],
 92: ['심플', '20대코디', '가디건&베스트'],
 93: ['20대코디', '예쁨', '여행'],
 94: ['깔끔단정', '심플', '깔끔'],
 95: ['패션', '20대코디', '여름코디', '네크리스'],
 96: ['20대룩', '간단', '데님스커트', '볼레로'],
 97: ['여름코디', '누구나', '일상적', '가디건&베스트'],
 98: ['간편', '여름룩', '심플'],
 99: ['간편하게', '심플룩', '모노톤'],
 100: ['간단', '샌들&슬리퍼', '여름'],
 101: ['오오티디', '간단', '20대코디'],
 102: ['오오티디', '슬리브리스', '시크룩', '블랙', '편안함'],
 103: ['원마일룩', '캐주얼룩', '심플룩', '20대코디'],
 104: ['편하게', '편안함', '심플코디', '팬츠'],
 105: ['팬츠', '데이트', '데일리코디', '일상룩'],
 106: ['일상', '누구나', '대학생'],
 107: ['오오티디', '캐쥬얼', '캐쥬얼룩', '심플'],
 108: ['깔끔단정', '간편하게', '일상', '가디건&베스트'],
 109: ['심플코디', '누구나', '일상적', '패션'],
 110: ['깔끔', '20대코디', '모던'],
 111: ['시원', '스트라이프', '심플'],
 112: ['간편하게', '캐주얼룩', '간편', '일상룩'],
 113: ['간단', '일상', '샌들&슬리퍼', '심플룩'],
 114: ['간편하게', '20대코디', '여름'],
 115: ['심플코디', '20대룩', '오오티디', '심플룩'],
 116: ['코디', '20대코디', '유니크', '여름'],
 117: ['숄더백', '일상', '20대코디', '모던'],
 118: ['꾸안꾸', '간편', '블랙&화이트'],
 119: ['데이트', '편하게', '20대코디'],
 120: ['데이트룩', '슬리브리스', '20대코디'],
 121: ['패션', '20대', '여행'],
 122: ['20대룩', '반팔티', '오오티디'],
 123: ['여름코디', '여행룩', '20대코디'],
 124: ['오오티디', '누구나'],
 125: ['꾸안꾸', '일상', '반바지', '캐쥬얼', '간단'],
 126: ['20대룩', '누구나', '볼레로', '샌들&슬리퍼'],
 127: ['오오티디', '20대', '패션', '심플룩'],
 128: ['오오티디', '여름', '니트조끼', '걸리쉬'],
 129: ['반팔티', '캐주얼룩', '심플룩', '톤온톤', '여름', '숄더백'],
 130: ['20대룩', '편안함', '여행'],
 131: ['블랙', '모노톤', '오피스룩'],
 132: ['20대', '모던', '20대코디'],
 133: ['일상적', '데이트룩', '오피스룩'],
 134: ['여름룩', '친구랑놀러갈때', '모던'],
 135: ['일상적', '코디', '누구나'],
 136: ['심플', '20대코디', '코디'],
 137: ['20대코디', '팬츠', '심플베이직', '캔버스백&에코백'],
 138: ['오오티디', '슬리브리스', '여행룩'],
 139: ['일상', '여행', '캐주얼'],
 140: ['간편하게', '일상룩', '20대코디', '오피스룩'],
 141: ['누구나', '오오티디', '간편', '20대코디'],
 142: ['일상룩', '깔끔', '심플룩', '블랙'],
 143: ['모던', '20대코디', '심플코디'],
 144: ['일상룩', '심플코디', '일상적', '캐주얼룩'],
 145: ['간편', '블랙', '일상룩', '코디', '하이틴룩'],
 146: ['데이트룩', '일상적', '20대코디'],
 147: ['편하게', '대학생', '캐쥬얼', '일상', '누구나'],
 148: ['데이트', '블라우스', '여름코디', '팬츠'],
 149: ['여행', '귀여운', '상큼'],
 150: ['오오티디', '친구랑놀러갈때', '블랙', '캐주얼룩'],
 151: ['일상룩', '예쁨', '간편', '모던', '캐주얼룩'],
 152: ['반팔티', '캐쥬얼룩', '대학생', '간단'],
 153: ['패션', '데일리룩', '20대코디', '깔끔'],
 154: ['심플룩', '팬츠', '심플코디'],
 155: ['심플코디', '깔끔단정', '오피스룩', '블라우스'],
 156: ['여름코디', '심플룩', '캔버스백&에코백', '간단'],
 157: ['심플베이직', '데이트코디', '깔끔', '베이직'],
 158: ['패션', '20대코디', '데일리', '모던'],
 159: ['하이틴', '예쁨', '귀여운', '러블리'],
 160: ['블랙', '친구랑놀러갈때', '페이즐리'],
 161: ['편안함', '캐주얼룩', '간단', '심플룩', '데일리룩'],
 162: ['일상룩', '심플룩', '캐주얼', '심플코디'],
 163: ['간편', '편안함', '20대룩'],
 164: ['심플', '패션', '오오티디', '코디', '모던시크'],
 165: ['심플룩', '여름코디', '베이직'],
 166: ['귀여운', '스니커즈', '20대코디', '귀여운', '스커트'],
 167: ['20대', '심플룩', '숄더백', '간편하게'],
 168: ['20대코디', '캐쥬얼룩', '20대코디', '심플룩'],
 169: ['모던시크', '20대코디', '간단', '샌들&슬리퍼'],
 170: ['심플코디', '간편하게', '러블리데일리룩', '일상적', '간단'],
 171: ['심플', '여름코디', '패션', '20대', '예쁨'],
 172: ['블랙', '스커트', '여행'],
 173: ['러블리룩', '20대코디', '모던', '톤온톤'],
 174: ['샌들&슬리퍼', '대학생', '반바지', '일상적'],
 175: ['여름', '여행', '패턴스커트'],
 176: ['대학생', '20대코디', '원피스'],
 177: ['깔끔', '간편', '숄더백'],
 178: ['오오티디', '20대코디', '데일리'],
 179: ['간편', '20대', '심플코디', '여행'],
 180: ['간편하게', '하이틴', '캐주얼', '여름룩'],
 181: ['유니크', '20대룩', '여행'],
 182: ['오오티디', '20대코디', '모던', '깔끔단정', '데일리'],
 183: ['일상룩', '간편', '대학생', '누구나'],
 184: ['꾸안꾸', '편하게', '깔끔', '일상'],
 185: ['데일리룩', '베이직', '반바지', '일상룩', '캔버스백&에코백'],
 186: ['베이직', '코디', '20대룩', '간편하게'],
 187: ['심플코디', '20대코디'],
 188: ['간단', '깔끔', '여름코디', '하객룩'],
 189: ['코디', '일상적', '데일리', '데일리룩', '깔끔단정'],
 190: ['일상적', '시크룩', '하객룩'],
 191: ['여대생룩', '여름코디', '코디', '하객룩'],
 192: ['하객룩', '일상적', '팬츠', '깔끔', '편안함'],
 193: ['심플코디', '데일리룩', '하객룩'],
 194: ['누구나', '심플코디', '20대코디', '하객룩'],
 195: ['일상적', '힐', '20대', '여름', '여름코디'],
 196: ['일상적', '사랑스러운', '20대코디', '하객룩'],
 197: ['팬츠', '시크룩', '20대코디', '하객룩'],
 198: ['깔끔', '여름코디', '패션', '시크룩'],
 199: ['심플코디', '간편하게', '심플룩', '여름', '블라우스'],
 200: ['누구나', '일상적', '블라우스'],
 201: ['누구나', '일상적', '패션', '하객룩'],
 202: ['코디', '간편하게', '심플코디', '데이트'],
 203: ['깔끔단정', '반바지', '여름코디', '베이직'],
 204: ['여대생룩', '20대코디', '일상룩', '데님'],
 205: ['이어링', '일상', '시크룩', '진주백'],
 206: ['코디', '캐주얼룩', '여름코디', '니트조끼'],
 207: ['패션', '캐주얼룩', '대학생룩'],
 208: ['모던', '간단', '꾸안꾸', '패션'],
 209: ['오오티디', '여름코디', '일상'],
 210: ['데이트룩', '하객룩', '심플룩'],
 211: ['시크', '20대코디', '데일리', '하객룩'],
 212: ['누구나', '간편하게', '모던시크', '하객룩'],
 213: ['모던시크', '심플', '20대'],
 214: ['데일리', '일상', '모던'],
 215: ['데일리', '샌들&슬리퍼', '심플룩', '캐주얼'],
 216: ['간단', '샌들&슬리퍼', '빈티지'],
 217: ['스커트', '하이틴', '블랙'],
 218: ['예쁨', '패션', '슬랙스', '여행', '심플'],
 219: ['가디건&베스트', '심플룩', '심플코디'],
 220: ['20대', '블랙', '패션'],
 221: ['러블리', '패션', '스커트'],
 222: ['데이트룩', '심플', '팬츠'],
 223: ['귀여운', '일상', '패션'],
 224: ['샌들&슬리퍼', '데이트룩', '20대코디', '귀여운'],
 225: ['캐주얼룩', '베이직', '심플코디', '캠퍼스룩'],
 226: ['간편', '편안함', '여름'],
 227: ['오오티디', '오렌지', '상큼'],
 228: ['20대코디', '간편', '블랙&화이트'],
 229: ['간편', '귀여운', '여름코디', '패션', '꾸안꾸'],
 230: ['캐쥬얼', '20대룩', '일상', '편안함', 'ootd'],
 231: ['20대코디', '네크리스', '캐주얼', '20대코디'],
 232: ['간편', '모던', '깔끔', '편하게'],
 233: ['편하게', '오오티디', '심플', '캐주얼룩'],
 234: ['꾸안꾸', '심플룩', '모던', '여름', '일상룩'],
 235: ['데이트', '대학생룩', '20대코디', '러블리룩'],
 236: ['일상적', '간편하게', '20대코디', '포인트룩'],
 237: ['깔끔단정', '대학생', '여름코디', '일상'],
 238: ['일상적', '블랙', '20대코디', '빈티지'],
 239: ['간편', '캐주얼', '예쁨'],
 240: ['깔끔단정', '편안함', '미니원피스', '누구나'],
 241: ['꾸안꾸', '블라우스'],
 242: ['샌들&슬리퍼', '누구나', '일상룩'],
 243: ['심플룩', '데님', '캐주얼', '반바지', '일상룩'],
 244: ['데이트코디', '심플코디', '샌들&슬리퍼'],
 245: ['오오티디', '모던시크', '일상'],
 246: ['심플코디', '일상적', '일상', '편안함'],
 247: ['자켓', '반바지', '모던', '일상', '시크'],
 248: ['ootd', '20대코디', '심플룩'],
 249: ['20대', '간단', '깔끔단정', '누구나'],
 250: ['시크', '심플코디', '누구나', '일상룩'],
 251: ['오오티디', '20대룩', '플라워스커트'],
 252: ['꾸안꾸', '깔끔', '간단', '캠퍼스'],
 253: ['꾸안꾸', '데이트코디', '간편하게', '예쁨'],
 254: ['일상적', '깔끔'],
 255: ['여름', '블라우스', '간편하게', '오오티디'],
 256: ['20대코디', '깔끔단정'],
 257: ['20대', '편안함', '20대코디'],
 258: ['심플코디', '니트', '네크리스'],
 259: ['일상적', '간편', '오오티디'],
 260: ['일상룩', '시크', '심플코디', '누구나'],
 261: ['20대', '데님', '심플룩'],
 262: ['심플코디', '꾸안꾸', '러블리데일리룩', '모던시크', '심플'],
 263: ['심플베이직', '여름', '데일리룩'],
 264: ['심플코디', '핑크', '20대코디'],
 265: ['配飾', '無袖', '裙子'],
 266: ['牛仔褲', '腰帶', '球鞋/布鞋'],
 267: ['靴子', '裙子', '針織外套'],
 268: ['短袖上衣', '短褲', '項鍊'],
 269: ['戒指', '襯衫', '長洋裝'],
 270: ['帆布包', '短褲', '長袖上衣'],
 271: ['襪子', '戒指', '針織衫'],
 272: ['夾克', '無袖', '跟鞋'],
 273: ['無袖', '裙子', '夾克'],
 274: ['photo', '長袖上衣', '牛仔褲'],
 275: ['長褲', '長袖上衣', '針織外套'],
 276: ['及膝洋裝', '夾克', '襪子'],
 277: ['牛仔褲', '針織衫', '襯衫'],
 278: ['photo', '長袖上衣', '裙子'],
 279: ['牛仔褲', '襯衫', '無袖'],
 280: ['襪子', '裙子', '針織衫'],
 281: ['手錶', '大衣', '針織衫'],
 282: ['拉鍊外套', '襯衫', '裙子'],
 283: ['牛仔褲', '針織衫', '腰帶'],
 284: ['短褲', '長袖上衣', '球鞋/布鞋'],
 285: ['裙子', '無袖', '襯衫'],
 286: ['襪子', '牛仔褲', '針織衫'],
 287: ['針織外套', '迷你短洋裝', '平底鞋'],
 288: ['photo', '長褲', '針織外套'],
 289: ['無袖', '裙子', '針織外套'],
 290: ['套裝', '項鍊', '靴子'],
 291: ['長褲', '無袖', '襯衫'],
 292: ['短袖上衣', '牛仔褲', '襯衫'],
 293: ['針織外套', '襯衫', '牛仔褲'],
 294: ['長袖上衣', '裙子', '針織外套'],
 295: ['襪子', '迷你短洋裝', '夾克'],
 296: ['無袖', '牛仔褲', '夾克'],
 297: ['長袖上衣', '長褲', '拉鍊外套'],
 298: ['長袖上衣', '裙子', '項鍊'],
 299: ['夾克', '針織衫', '裙子'],
 300: ['針織衫', '長褲', '腰帶'],
 301: ['長袖上衣', '牛仔褲', '靴子'],
 302: ['夾克', '針織衫', '裙子'],
 303: ['項鍊', '襯衫', '短褲'],
 304: ['針織外套', '牛仔褲', '腰帶'],
 305: ['長袖上衣', '長褲', '靴子'],
 306: ['長袖上衣', '牛仔褲', '項鍊'],
 307: ['長袖上衣', '裙子', '肩背包'],
 308: ['夾克', '襯衫', '短褲'],
 309: ['夾克', '長洋裝', '項鍊'],
 310: ['無袖', '裙子', '針織外套'],
 311: ['帽子', '短袖上衣', '長褲'],
 312: ['迷你短洋裝', '針織外套', '靴子'],
 313: ['配飾', '襯衫', '短褲'],
 314: ['針織衫', '牛仔褲', '涼鞋'],
 315: ['針織外套', '長洋裝', '項鍊'],
 316: ['夾克', '長洋裝', '靴子'],
 317: ['牛仔褲', '襯衫', '球鞋/布鞋'],
 318: ['針織外套', '托特包', '無袖'],
 319: ['針織外套', '襯衫', '牛仔褲'],
 320: ['短袖上衣', '短褲', '配飾'],
 321: ['裙子', '針織衫', '夾克'],
 322: ['無袖', '裙子', '針織外套'],
 323: ['photo', '裙子', '短袖上衣'],
 324: ['短褲', '針織衫', '平底鞋'],
 325: ['襪子', '襯衫', '裙子'],
 326: ['長袖上衣', '裙子', '襪子'],
 327: ['迷你短洋裝', '針織外套', '涼鞋'],
 328: ['牛仔褲', '針織外套', '樂福鞋'],
 329: ['大衣', '針織衫', '裙子'],
 330: ['球鞋/布鞋', '裙子', '短袖上衣'],
 331: ['牛仔褲', '針織外套', '球鞋/布鞋'],
 332: ['長褲', '短袖上衣', '針織外套'],
 333: ['photo', '襯衫', '短褲'],
 334: ['針織衫', '裙子', '夾克'],
 335: ['photo', '長袖上衣', '牛仔褲'],
 336: ['長褲', '無袖', '手錶'],
 337: ['長褲', '針織衫', '球鞋/布鞋'],
 338: ['項鍊', '長袖上衣', '短褲'],
 339: ['夾克', '長洋裝', '涼鞋'],
 340: ['針織外套', '牛仔褲', '靴子'],
 341: ['手機殼', '配飾', '襯衫'],
 342: ['無袖', '裙子', '針織外套'],
 343: ['裙子', '短袖上衣', '針織衫'],
 344: ['大衣', '無袖', '牛仔褲'],
 345: ['photo', '針織衫', '裙子'],
 346: ['牛仔褲', '針織衫', '球鞋/布鞋'],
 347: ['無袖', '牛仔褲', '夾克'],
 348: ['樂福鞋', 'photo', '針織衫'],
 349: ['眼鏡', '長褲', '襯衫'],
 350: ['photo', '短袖上衣', '短褲'],
 351: ['配飾', '裙子', '襯衫'],
 352: ['裙子', '短袖上衣', '針織外套'],
 353: ['長褲', '無袖', '耳環'],
 354: ['針織外套', '配飾', '長洋裝'],
 355: ['襯衫', '無袖', '牛仔褲'],
 356: ['襯衫', '牛仔褲', '及膝洋裝'],
 357: ['裙子', '襯衫', '平底鞋'],
 358: ['長袖上衣', '裙子', '夾克'],
 359: ['夾克', '無袖', '長褲'],
 360: ['photo', '襯衫', '短褲'],
 361: ['迷你短洋裝', '針織外套', '涼鞋'],
 362: ['長褲', '襯衫', '手鐲'],
 363: ['無袖', '牛仔褲', '針織外套'],
 364: ['長褲', '短袖上衣', '涼鞋'],
 365: ['photo', '帽子', '長袖上衣'],
 366: ['photo', '長袖上衣', '裙子'],
 367: ['無袖', '托特包', '針織外套'],
 368: ['裙子', '無袖', '夾克'],
 369: ['photo', '短袖上衣', '牛仔褲'],
 370: ['photo', '襯衫', '短褲'],
 371: ['photo', '襯衫', '牛仔褲'],
 372: ['襯衫', '無袖', '裙子'],
 373: ['photo', '夾克', '長洋裝'],
 374: ['夾克', '無袖', '牛仔褲'],
 375: ['迷你短洋裝', '針織外套', '涼鞋'],
 376: ['針織外套', '及膝洋裝', '肩背包'],
 377: ['長褲', '短袖上衣', '針織外套'],
 378: ['戒指', '無袖', '針織外套'],
 379: ['涼鞋', 'photo', '針織外套'],
 380: ['photo', '針織衫', '牛仔褲'],
 381: ['photo', '裙子', '針織外套'],
 382: ['photo', '長袖上衣', '牛仔褲'],
 383: ['夾克', '套裝', '涼鞋'],
 384: ['無袖', '裙子', '靴子'],
 385: ['長褲', '短袖上衣', '球鞋/布鞋'],
 386: ['針織外套', '長洋裝', '項鍊'],
 387: ['夾克', '短袖上衣', '裙子'],
 388: ['長袖上衣', '牛仔褲', '平底鞋'],
 389: ['背包', '打底褲', '短袖上衣'],
 390: ['戒指', '針織衫', '裙子'],
 391: ['短袖上衣', '短褲', '涼鞋'],
 392: ['symbols&text', '長褲', '長袖上衣'],
 393: ['photo', '襯衫', '牛仔褲'],
 394: ['夾克', '及膝洋裝', '靴子'],
 395: ['裙子', '針織衫', '襪子'],
 396: ['靴子', '針織外套', '針織衫'],
 397: ['裙子', '長袖上衣', '平底鞋'],
 398: ['photo', '短袖上衣', '迷你短洋裝'],
 399: ['襯衫', '無袖', '短褲'],
 400: ['短袖上衣', '長褲', '針織外套'],
 401: ['photo', '及膝洋裝', '跟鞋'],
 402: ['牛仔褲', '無袖', '針織外套'],
 403: ['襯衫', '牛仔褲', '球鞋/布鞋'],
 404: ['肩背包', '無袖', '短褲'],
 405: ['及膝洋裝', '帽子', '肩背包'],
 406: ['手機殼', '裙子', '針織外套'],
 407: ['戒指', '裙子', '襯衫'],
 408: ['拉鍊外套', '短袖上衣', '短褲'],
 409: ['無袖', '牛仔褲', '針織外套'],
 410: ['無袖', '短褲', '肩背包'],
 411: ['長褲', '針織外套', '球鞋/布鞋'],
 412: ['短袖上衣', '短褲', '帽子'],
 413: ['photo', '裙子', '長袖上衣'],
 414: ['夾克', '迷你短洋裝', '涼鞋'],
 415: ['針織衫', '帆布包', '短褲'],
 416: ['牛仔褲', '球鞋/布鞋', '針織外套'],
 417: ['襯衫', '項鍊', '裙子'],
 418: ['長褲', '針織外套', '球鞋/布鞋'],
 419: ['photo', '短褲', '腰帶'],
 420: ['襯衫', '無袖', '牛仔褲'],
 421: ['photo', '針織衫', '裙子'],
 422: ['短袖上衣', '短褲', '拉鍊外套'],
 423: ['photo', '襯衫', '裙子'],
 424: ['針織衫', '牛仔褲', '樂福鞋'],
 425: ['長袖上衣', '配飾', '長洋裝'],
 426: ['photo', '短褲', '襯衫'],
 427: ['短袖上衣', '短褲', '腰帶'],
 428: ['短袖上衣', '短褲', '帽子'],
 429: ['無袖', '裙子', '涼鞋'],
 430: ['戒指', '針織衫', '長褲'],
 431: ['photo', '襯衫', '無袖'],
 432: ['美體美髮/美甲', '配飾', '短褲'],
 433: ['襯衫', '戒指', 'photo'],
 434: ['短袖上衣', '裙子', '耳環'],
 435: ['手鐲', '針織外套', '裙子'],
 436: ['無袖', '裙子', '針織外套'],
 437: ['長袖上衣', '牛仔褲', '球鞋/布鞋'],
 438: ['長袖上衣', '短褲', '項鍊'],
 439: ['photo', '針織衫', '牛仔褲'],
 440: ['拉鍊外套', '無袖', '裙子'],
 441: ['配飾', '針織衫', '長褲'],
 442: ['夾克', '無袖', '及膝洋裝'],
 443: ['耳環', '無袖', '牛仔褲'],
 444: ['針織衫', '牛仔褲', '項鍊'],
 445: ['襯衫', '及膝洋裝', '涼鞋'],
 446: ['針織外套', '長褲', '球鞋/布鞋'],
 447: ['夾克', '涼鞋', '襯衫'],
 448: ['photo', '襯衫', '短褲'],
 449: ['無袖', '夾克', '短褲'],
 450: ['photo', '迷你短洋裝', '跟鞋'],
 451: ['耳環', '短袖上衣', '牛仔褲'],
 452: ['配飾', '及膝洋裝', '夾克'],
 453: ['牛仔褲', '襯衫', '涼鞋'],
 454: ['裙子', '短袖上衣', '平底鞋'],
 455: ['photo', '跟鞋', '耳環'],
 456: ['牛仔褲', '短袖上衣', '涼鞋'],
 457: ['襯衫', '裙子', '夾克'],
 458: ['牛仔褲', '腰帶', '短袖上衣'],
 459: ['短袖上衣', '裙子', '夾克'],
 460: ['短袖上衣', '短褲', '拉鍊外套'],
 461: ['夾克', '長洋裝', '樂福鞋'],
 462: ['牛仔褲', '短袖上衣', '帽子'],
 463: ['短袖上衣', '裙子', '靴子'],
 464: ['photo', '無袖', '長褲'],
 465: ['夾克', '及膝洋裝', '樂福鞋'],
 466: ['針織衫', '牛仔褲', '樂福鞋'],
 467: ['長褲', '針織外套', '托特包'],
 468: ['photo', '長袖上衣', '牛仔褲'],
 469: ['手機殼', '襯衫', '裙子'],
 470: ['photo', '短袖上衣', '牛仔褲'],
 471: ['photo', '襯衫', '短褲'],
 472: ['短袖上衣', '襯衫', '裙子'],
 473: ['牛仔褲', '襯衫', '球鞋/布鞋'],
 474: ['牛仔褲', '針織外套', '涼鞋'],
 475: ['肩背包', '牛仔褲', '無袖'],
 476: ['帽子', '針織外套', '平底鞋'],
 477: ['牛仔褲', '手鐲', '短袖上衣'],
 478: ['photo', '襯衫', '長洋裝'],
 479: ['配飾', '短褲', '無袖'],
 480: ['photo', '及膝洋裝', '肩背包'],
 481: ['photo', '無袖', '長褲'],
 482: ['針織衫', '牛仔褲', '夾克'],
 483: ['針織衫', '裙子', '樂福鞋'],
 484: ['短袖上衣', '短褲', '肩背包'],
 485: ['短褲', '襯衫', '針織外套'],
 486: ['襯衫', '裙子', '托特包'],
 487: ['項鍊', '襯衫', '長褲'],
 488: ['大衣外套', '襯衫', '短褲'],
 489: ['襯衫', '牛仔褲', '跟鞋'],
 490: ['夾克外套', '迷你短洋裝', '靴子'],
 491: ['長袖上衣', '短褲', '拉鍊外套'],
 492: ['裙子', '開襟衫', '跟鞋'],
 493: ['無袖', '短褲', '涼鞋'],
 494: ['牛仔褲', '針織衫', '帽子'],
 495: ['短袖上衣', '裙子', '夾克外套'],
 496: ['長袖上衣', '襪子', '樂福鞋'],
 497: ['針織衫', '無袖', '牛仔褲'],
 498: ['襯衫', '長褲', '夾克外套'],
 499: ['photo', '套裝', '靴子'],
 500: ['裙子', '襯衫', '平底鞋'],
 501: ['無袖', '牛仔褲', '襯衫'],
 502: ['短袖上衣', '牛仔褲', '拉鍊外套'],
 503: ['大衣外套', '裙子', '針織衫'],
 504: ['牛仔褲', '襯衫', '平底鞋'],
 505: ['牛仔褲', '長袖上衣', '球鞋/布鞋'],
 506: ['戒指', '開襟衫', '短褲'],
 507: ['短袖上衣', '裙子', '夾克外套'],
 508: ['長褲', '開襟衫', '球鞋/布鞋'],
 509: ['長褲', '短袖上衣', '開襟衫'],
 510: ['夾克外套', '及膝洋裝', '涼鞋'],
 511: ['開襟衫', '裙子', '戒指'],
 512: ['夾克外套', '及膝洋裝', '項鍊'],
 513: ['裙子', '長袖上衣', '靴子'],
 514: ['夾克外套', '短袖上衣', '裙子'],
 515: ['長袖上衣', '長褲', '夾克外套'],
 516: ['及膝洋裝', '開襟衫', '涼鞋'],
 517: ['短袖上衣', '牛仔褲', '開襟衫'],
 518: ['開襟衫', '牛仔褲', '球鞋/布鞋'],
 519: ['夾克外套', '裙子', '襯衫'],
 520: ['迷你短洋裝', '開襟衫', '靴子'],
 521: ['夾克外套', '襯衫', '裙子'],
 522: ['針織衫', '牛仔褲', '球鞋/布鞋'],
 523: ['開襟衫', '大衣外套', '牛仔褲'],
 524: ['戒指', '襯衫', '短褲'],
 525: ['夾克外套', '開襟衫', '牛仔褲'],
 526: ['大衣外套', '襯衫', '裙子'],
 527: ['夾克外套', '開襟衫', '牛仔褲'],
 528: ['大衣外套', '長袖上衣', '裙子'],
 529: ['모던시크'],
 530: ['대학생'],
 531: ['기타악세서리'],
 532: ['귀여운'],
 533: ['긴팔티'],
 534: ['코디'],
 535: ['10~20대'],
 536: ['데님'],
 537: ['슬리브리스'],
 538: ['심플'],
 539: ['가을코디'],
 540: ['자켓', '슬리브리스', '스커트'],
 541: ['모자'],
 542: ['예쁨'],
 543: ['오오티디', '오오티디'],
 544: ['가을코디'],
 545: ['牛仔褲'],
 546: ['스커트'],
 547: ['패션'],
 548: ['니트', '팬츠', '샌들&슬리퍼'],
 549: ['10대~20대'],
 550: ['デニムパンツ'],
 551: ['반팔티'],
 552: ['가디건&베스트', '슬리브리스', '스커트'],
 553: ['미디원피스'],
 554: ['양말'],
 555: ['배경'],
 556: ['無袖'],
 557: ['니트'],
 558: ['반바지'],
 559: ['일상룩'],
 560: ['미디원피스'],
 561: ['네크리스'],
 562: ['데이트'],
 563: ['로퍼'],
 564: ['캐주얼'],
 565: ['대학생룩'],
 566: ['短褲'],
 567: ['여름코디'],
 568: ['심플베이직'],
 569: ['대학생'],
 570: ['오오티디'],
 571: ['편안함'],
 572: ['미니원피스'],
 573: ['토트백'],
 574: ['토트백'],
 575: ['일상적'],
 576: ['데님'],
 577: ['러블리', '러블리'],
 578: ['러블리'],
 579: ['スカート'],
 580: ['롱원피스'],
 581: ['편안함'],
 582: ['룩'],
 583: ['깔끔'],
 584: ['短裙'],
 585: ['러블리'],
 586: ['短裙'],
 587: ['간단'],
 588: ['심플코디'],
 589: ['스커트'],
 590: ['데이트코디'],
 591: ['캐주얼'],
 592: ['슬리브리스'],
 593: ['스니커즈'],
 594: ['오오티디'],
 595: ['이어링'],
 596: ['귀여운'],
 597: ['꾸안꾸'],
 598: ['사진'],
 599: ['깔끔'],
 600: ['스니커즈'],
 601: ['세트'],
 602: ['캔버스백&에코백'],
 603: ['블랙'],
 604: ['캐쥬얼룩'],
 605: ['꾸안꾸'],
 606: ['일상룩'],
 607: ['블라우스'],
 608: ['데이트'],
 609: ['여름코디'],
 610: ['스니커즈'],
 611: ['캐주얼'],
 612: ['폰케이스'],
 613: ['여름룩'],
 614: ['여름'],
 615: ['靴子'],
 616: ['여름'],
 617: ['牛仔褲'],
 618: ['힐'],
 619: ['パンツ'],
 620: ['샌들&슬리퍼'],
 621: ['캐쥬얼룩'],
 622: ['간단'],
 623: ['양말'],
 624: ['平底鞋'],
 625: ['반바지'],
 626: ['裙子'],
 627: ['봄코디'],
 628: ['일상룩'],
 629: ['일상룩'],
 630: ['룩'],
 631: ['심플코디'],
 632: ['러블리룩'],
 633: ['봄데일리룩'],
 634: ['봄'],
 635: ['여름코디'],
 636: ['예쁨'],
 637: ['반바지'],
 638: ['부츠'],
 639: ['10대'],
 640: ['블랙'],
 641: ['사랑스러운'],
 642: ['반바지'],
 643: ['시크'],
 644: ['10대~20대'],
 645: ['靴子'],
 646: ['토트백'],
 647: ['집업&점퍼'],
 648: ['短褲'],
 649: ['일상룩'],
 650: ['스커트'],
 651: ['하객룩'],
 652: ['봄'],
 653: ['베이직'],
 654: ['무채색'],
 655: ['하객룩'],
 656: ['캐주얼'],
 657: ['20대'],
 658: ['플렛'],
 659: ['코트'],
 660: ['가디건'],
 661: ['편안함'],
 662: ['사랑스러운'],
 663: ['폰케이스'],
 664: ['여름코디'],
 665: ['심플룩'],
 666: ['심플룩'],
 667: ['가디건&베스트'],
 668: ['모던시크'],
 669: ['사랑스러운'],
 670: ['가디건&베스트'],
 671: ['10~20대'],
 672: ['캐쥬얼룩'],
 673: ['심플베이직'],
 674: ['여성스러운'],
 675: ['데이트'],
 676: ['가디건'],
 677: ['러블리'],
 678: ['코디'],
 679: ['데이트'],
 680: ['세트'],
 681: ['시크룩'],
 682: ['이어링'],
 683: ['친구랑놀러갈때'],
 684: ['유니크'],
 685: ['가디건&베스트', '미니원피스', '부츠'],
 686: ['여성스러운'],
 687: ['시크'],
 688: ['가디건'],
 689: ['가디건&베스트'],
 690: ['귀여운'],
 691: ['캔버스백&에코백'],
 692: ['로퍼'],
 693: ['모던시크'],
 694: ['여름코디'],
 695: ['러블리데일리룩'],
 696: ['이어링'],
 697: ['데이트코디'],
 698: ['캠퍼스'],
 699: ['부츠'],
 700: ['간편'],
 701: ['일상룩'],
 702: ['봄데이트룩'],
 703: ['배경'],
 704: ['팬츠'],
 705: ['팬츠'],
 706: ['데이트'],
 707: ['여대생룩'],
 708: ['스커트'],
 709: ['캠퍼스'],
 710: ['캔버스백&에코백'],
 711: ['심플코디'],
 712: ['플렛'],
 713: ['봄코디'],
 714: ['10~20대'],
 715: ['팬츠'],
 716: ['캐쥬얼룩'],
 717: ['봄데이트룩'],
 718: ['심플베이직'],
 719: ['러블리데일리룩'],
 720: ['깔끔단정'],
 721: ['10대~20대'],
 722: ['심플코디'],
 723: ['러블리룩'],
 724: ['캐주얼'],
 725: ['러블리룩'],
 726: ['모던'],
 727: ['スカート'],
 728: ['플렛'],
 729: ['세트'],
 730: ['친구랑놀러갈때'],
 731: ['폰케이스'],
 732: ['심플베이직'],
 733: ['심플베이직'],
 734: ['캐주얼룩'],
 735: ['집업&점퍼'],
 736: ['데일리'],
 737: ['니트'],
 738: ['니트'],
 739: ['반팔티'],
 740: ['니트'],
 741: ['꾸꾸꾸'],
 742: ['심플코디'],
 743: ['デニムパンツ'],
 744: ['캐주얼룩'],
 745: ['간편'],
 746: ['여성스러운'],
 747: ['니트'],
 748: ['가디건'],
 749: ['일상적'],
 750: ['러블리룩'],
 751: ['대학생'],
 752: ['데이트코디'],
 753: ['ブラウス'],
 754: ['컬러'],
 755: ['숄더백'],
 756: ['유니크'],
 757: ['ブーツ'],
 758: ['스커트'],
 759: ['데이트코디'],
 760: ['10~20대'],
 761: ['ブーツ'],
 762: ['코트'],
 763: ['모던'],
 764: ['간단'],
 765: ['예쁨'],
 766: ['데이트룩'],
 767: ['꾸안꾸'],
 768: ['편하게'],
 769: ['심플룩'],
 770: ['여대생룩'],
 771: ['친구랑놀러갈때'],
 772: ['대학생룩'],
 773: ['베이직'],
 774: ['베이직'],
 775: ['데이트룩'],
 776: ['デニムパンツ'],
 777: ['심플룩'],
 778: ['데일리룩'],
 779: ['부츠'],
 780: ['집업&점퍼'],
 781: ['데이트코디'],
 782: ['캐쥬얼'],
 783: ['로맨틱'],
 784: ['가디건'],
 785: ['오오티디'],
 786: ['10대~20대'],
 787: ['학생룩'],
 788: ['스커트'],
 789: ['꾸안꾸'],
 790: ['모던시크'],
 791: ['하이틴룩'],
 792: ['일상적'],
 793: ['숄더백', '스커트', '긴팔티'],
 794: ['니트', '데님', '벨트'],
 795: ['데님', '니트', '스니커즈'],
 796: ['데님', '집업&점퍼', '스니커즈'],
 797: ['반팔티', '데님', '집업&점퍼'],
 798: ['가디건&베스트', '팬츠', '자켓'],
 799: ['반팔티', '팬츠', '집업&점퍼'],
 800: ['팬츠', '긴팔티', '모자'],
 801: ['스니커즈', '팬츠', '긴팔티'],
 802: ['데님', '니트', '스니커즈'],
 803: ['양말', '데님', '로퍼'],
 804: ['블라우스', '데님', '부츠'],
 805: ['슬리브리스', '데님', '가디건&베스트'],
 806: ['팬츠', '집업&점퍼', '스니커즈'],
 807: ['팬츠', '블라우스', '스니커즈'],
 808: ['데님', '긴팔티', '모자'],
 809: ['반팔티', '데님', '스니커즈'],
 810: ['반팔티', '팬츠', '스니커즈'],
 811: ['반팔티', '팬츠', '자켓'],
 812: ['반팔티', '반바지', '자켓'],
 813: ['팬츠', '자켓', '모자'],
 814: ['반팔티', '데님', '자켓'],
 815: ['팬츠', '반팔티', '니트'],
 816: ['팬츠', '니트', '스니커즈'],
 817: ['슬리브리스', '팬츠', '블라우스'],
 818: ['스커트', '긴팔티', '숄더백'],
 819: ['니트', '팬츠', '자켓'],
 820: ['스커트', '긴팔티', '스니커즈'],
 821: ['backtoschool'],
 822: ['니트', '데님', '자켓'],
 823: ['팬츠', '집업&점퍼', '스니커즈'],
 824: ['팬츠', '자켓', '로퍼'],
 825: ['기타악세서리', '팬츠', '스니커즈'],
 826: ['데님', '집업&점퍼', '스니커즈'],
 827: ['긴팔티', '데님', '집업&점퍼'],
 828: ['반팔티', '팬츠', '가디건&베스트'],
 829: ['팬츠', '스니커즈', '벨트'],
 830: ['반팔티', '반바지', '가디건&베스트'],
 831: ['데님', '가디건&베스트', '스니커즈'],
 832: ['반팔티', '스커트', '자켓'],
 833: ['반팔티', '가디건&베스트', '스커트'],
 834: ['데님', '벨트', '스니커즈'],
 835: ['데님', '가디건&베스트', '긴팔티', '꾸안꾸', '그린'],
 836: ['숄더백', '반바지', '긴팔티', '스웨트셔츠', '맨투맨'],
 837: ['반팔티', '스커트', '자켓'],
 838: ['긴팔티', '팬츠', '자켓'],
 839: ['가디건&베스트', '스커트', '숄더백'],
 840: ['토트백', '니트', '스커트'],
 841: ['숄더백', '니트', '데님'],
 842: ['블라우스', '팬츠', '로퍼'],
 843: ['긴팔티', '팬츠', '집업&점퍼'],
 844: ['블라우스', '데님', '로퍼'],
 845: ['데님', '자켓', '모자'],
 846: ['로퍼', '블라우스', '데님'],
 847: ['토트백', '블라우스', '팬츠'],
 848: ['블라우스', '팬츠', '로퍼'],
 849: ['숄더백', '니트', '스커트'],
 850: ['반바지', '모자', '니트'],
 851: ['반팔티', '팬츠', '로퍼'],
 852: ['숄더백', '긴팔티', '롱원피스'],
 853: ['캔버스백&에코백', '긴팔티', '팬츠'],
 854: ['캔버스백&에코백', '긴팔티', '데님', '베레모', 'beret'],
 855: ['팬츠', '블라우스', '모자'],
 856: ['팬츠', '로퍼', '가디건&베스트'],
 857: ['스커트', '가디건&베스트', '모자'],
 858: ['스커트', '가디건&베스트', '숄더백', '베레모', 'beret'],
 859: ['데님', '자켓', '스니커즈'],
 860: ['데님', '스니커즈', '긴팔티'],
 861: ['팬츠', '긴팔티', '플렛'],
 862: ['스커트', '집업&점퍼', '양말'],
 863: ['스커트', '집업&점퍼', '모자'],
 864: ['스커트', '집업&점퍼', '모자'],
 865: ['데님', '집업&점퍼', '스니커즈'],
 866: ['스커트', '긴팔티', '스니커즈'],
 867: ['스커트', '숄더백', '긴팔티'],
 868: ['니트', '스커트', '자켓'],
 869: ['반바지', '가디건&베스트', '숄더백'],
 870: ['숄더백', '블라우스', '미디원피스'],
 871: ['반팔티', '스커트', '블라우스'],
 872: ['긴팔티', '데님', '벨트'],
 873: ['셔츠'],
 874: ['스커트', '긴팔티', '부츠'],
 875: ['팬츠', '블라우스', '스니커즈'],
 876: ['데님', '블라우스', '스니커즈', '꾸안꾸', 'simple', 'neutral'],
 877: ['반바지', '긴팔티', '숄더백'],
 878: ['스커트', '집업&점퍼', '부츠'],
 879: ['가디건&베스트', '데님', '로퍼'],
 880: ['데님', '블라우스', '로퍼'],
 881: ['데님', '가디건&베스트', '벨트'],
 882: ['반팔티', '데님', '가디건&베스트'],
 883: ['팬츠', '니트', '스니커즈'],
 884: ['슬리브리스', '팬츠', '블라우스'],
 885: ['슬리브리스', '데님', '블라우스'],
 886: ['슬리브리스', '데님', '집업&점퍼'],
 887: ['세트', '스니커즈', '팬츠'],
 888: ['팬츠', '긴팔티', '집업&점퍼'],
 889: ['반팔티', '스커트', '자켓'],
 890: ['반팔티', '자켓', '데님'],
 891: ['블라우스', '팬츠', '백팩'],
 892: ['긴팔티', '팬츠', '자켓'],
 893: ['반팔티', '자켓', '팬츠'],
 894: ['슬리브리스', '팬츠', '가디건&베스트'],
 895: ['니트', '스커트', '숄더백'],
 896: ['긴팔티', '스커트', '자켓'],
 897: ['블라우스', '스커트', '자켓'],
 898: ['팬츠', '긴팔티', '자켓'],
 899: ['반팔티', '팬츠', '자켓'],
 900: ['데님', '반팔티', '자켓'],
 901: ['팬츠', '반팔티', '집업&점퍼'],
 902: ['긴팔티', '팬츠', '집업&점퍼'],
 903: ['슬리브리스', '팬츠', '블라우스'],
 904: ['슬리브리스', '팬츠', '가디건&베스트'],
 905: ['미니원피스', '니트', '부츠'],
 906: ['양말', '스커트', '블라우스'],
 907: ['니트', '팬츠', '집업&점퍼'],
 908: ['팬츠', '가디건&베스트', '숄더백'],
 909: ['반팔티',
  '팬츠',
  '스니커즈',
  '가죽재킷',
  'plaid',
  '체크',
  '체크바지',
  'plaidpants',
  '90s',
  '90년대',
  'leatherjacket'],
 910: ['반팔티', '데님', '집업&점퍼'],
 911: ['슬리브리스', '반바지', '블라우스'],
 912: ['숄더백', '스커트', '긴팔티'],
 913: ['블라우스', '스커트', '자켓'],
 914: ['긴팔티', '스커트', '부츠'],
 915: ['긴팔티', '스커트', '집업&점퍼'],
 916: ['레깅스', '긴팔티', '모자'],
 917: ['스니커즈', '팬츠', '가디건&베스트'],
 918: ['니트', '데님', '로퍼'],
 919: ['반팔티', '팬츠', '집업&점퍼'],
 920: ['롱원피스', '가디건&베스트', '로퍼'],
 921: ['긴팔티', '데님', '집업&점퍼'],
 922: ['스커트', '긴팔티', '스니커즈'],
 923: ['팬츠', '긴팔티', '스니커즈'],
 924: ['스커트', '스니커즈', '반팔티'],
 925: ['팬츠', '니트', '스니커즈'],
 926: ['반팔티', '팬츠', '집업&점퍼'],
 927: ['반팔티', '롱원피스', '가디건&베스트'],
 928: ['스커트', '가디건&베스트', '플렛'],
 929: ['팬츠', '반팔티', '가디건&베스트'],
 930: ['반팔티', '가디건&베스트', '스커트'],
 931: ['팬츠', '가디건&베스트', '로퍼'],
 932: ['니트', '숄더백', '반바지', '꾸안꾸'],
 933: ['데님', '가디건&베스트', '스니커즈'],
 934: ['슬리브리스', '팬츠', '가디건&베스트'],
 935: ['팬츠', '가디건&베스트', '스니커즈'],
 936: ['슬리브리스', '데님', '가디건&베스트'],
 937: ['슬리브리스', '데님', '가디건&베스트'],
 938: ['스커트', '니트', '숄더백'],
 939: ['데님', '반팔티', '가디건&베스트'],
 940: ['데님', '니트', '로퍼'],
 941: ['숄더백', '양말', '스커트'],
 942: ['데님', '가디건&베스트', '스니커즈'],
 943: ['팬츠', '가디건&베스트', '스니커즈'],
 944: ['숄더백', '스커트', '니트'],
 945: ['데일리', '니트', '꾸안꾸', '간절기', 'simple', '스웨터', '청바지'],
 946: ['슬리브리스', '데님', '니트'],
 947: ['슬리브리스', '데님', '샌들&슬리퍼', '간절기니트'],
 948: ['스커트', '니트', '부츠'],
 949: ['반팔티', '스커트', '집업&점퍼'],
 950: ['스커트', '블라우스', '로퍼'],
 951: ['블라우스', '스커트', '벨트'],
 952: ['팬츠', '가디건&베스트', '로퍼', 'office'],
 953: ['숄더백', '반팔티', '가디건&베스트'],
 954: ['팬츠', '블라우스', '로퍼'],
 955: ['office', '오피스룩'],
 956: ['캔버스백&에코백', '롱원피스', '가디건&베스트', '롱원피스'],
 957: ['데님', '블라우스', '숄더백'],
 958: ['가디건&베스트', '스커트', '샌들&슬리퍼'],
 959: ['긴팔티', '팬츠', '샌들&슬리퍼', 'office'],
 960: ['슬리브리스', '스커트', '가디건&베스트'],
 961: ['슬리브리스', '반바지', '가디건&베스트'],
 962: ['office', '오피스룩'],
 963: ['블라우스', '데님', '부츠'],
 964: ['팬츠', '반팔티', '스니커즈'],
 965: ['팬츠', '긴팔티', '스니커즈'],
 966: ['슬리브리스', '팬츠', '가디건&베스트', '꾸안꾸'],
 967: ['팬츠', '긴팔티', '가디건&베스트'],
 968: ['팬츠', '긴팔티', '스니커즈'],
 969: ['팬츠', '반팔티', '스니커즈', '꾸안꾸'],
 970: ['팬츠', '스니커즈', '가디건&베스트', 'backtoschool'],
 971: ['숄더백', '니트', '팬츠'],
 972: ['기타악세서리', '니트', '팬츠'],
 973: ['가디건&베스트', '슬리브리스', '팬츠'],
 974: ['팬츠', '가디건&베스트', '기타악세서리', '꾸안꾸'],
 975: ['반팔티', '스커트', '가디건&베스트'],
 976: ['스커트', '반팔티', '숄더백'],
 977: ['팬츠', '반팔티', '스니커즈', 'school'],
 978: ['숄더백', '양말', '스커트'],
 979: ['스커트', '반팔티', '숄더백'],
 980: ['스커트', '니트', '숄더백'],
 981: ['스커트', '반팔티', '샌들&슬리퍼'],
 982: ['반팔티', '팬츠', '자켓'],
 983: ['inbetweenseasons'],
 984: ['숄더백', '스커트', '긴팔티', 'fall', 'Autmn', '가을', '맨투맨', 'sweatshirt'],
 985: ['숄더백', '스커트', '니트', 'fall', '가을', 'autmn'],
 986: ['fall', 'Autmn', '가을'],
 987: ['팬츠', '블라우스', '숄더백'],
 988: ['숄더백', '블라우스', '반바지'],
 989: ['팬츠', '블라우스', '샌들&슬리퍼'],
 990: ['데님', '숄더백', '블라우스'],
 991: ['블라우스', '스니커즈', '캔버스백&에코백'],
 992: ['반팔티', '가디건&베스트', '데님'],
 993: ['팬츠', '가디건&베스트', '스니커즈'],
 994: ['팬츠', '반팔티', '가디건&베스트'],
 995: ['데님', '반팔티', '가디건&베스트'],
 996: ['반팔티', '데님', '가디건&베스트'],
 997: ['샌들&슬리퍼', '스커트', '반팔티'],
 998: ['반팔티', '가디건&베스트', '스커트'],
 999: ['반팔티', '가디건&베스트', '팬츠'],
 1000: ['숄더백', '가디건&베스트', '데님', 'university'],
 ...}
len(hashtags)
4494

1. Preprocessing the collected outfit image data

Checking the total number of outfits

img_length = len([x for x in os.listdir('data/img') if not x.startswith('.')])
img_length
772

Building the dataset for tops, bottoms, shoes & their labels

Tops, bottoms, and shoes are each stored as image arrays of size 256 * 256 with 3 channels (R, G, B).

x_top = np.zeros((img_length, 256, 256, 3), dtype=np.int32)
x_bottom = np.zeros((img_length, 256, 256, 3), dtype=np.int32)
x_shoes = np.zeros((img_length, 256, 256, 3), dtype=np.int32)
# Repeat 0, 1, 2 (top, bottom, shoes respectively) img_length times each and chain them into a label array of length img_length * 3
y_label = np.array(list(chain.from_iterable((repeat(number, img_length) for number in [0, 1, 2]))))
# Convert the whole label array with torch's one-hot encoding function
y_label = F.one_hot(torch.tensor(y_label), num_classes=3)
y_label
tensor([[1, 0, 0],
        [1, 0, 0],
        [1, 0, 0],
        ...,
        [0, 0, 1],
        [0, 0, 1],
        [0, 0, 1]])
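
As a side note, the chain/repeat construction above is equivalent to np.repeat; a minimal check (a sketch, not in the original notebook):

alt_label = np.repeat([0, 1, 2], img_length)
assert (alt_label == np.array(list(chain.from_iterable(repeat(n, img_length) for n in [0, 1, 2])))).all()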

Checking the full outfit list

# Exclude names starting with '.' so .DS_Store is not included, and sort the list by the numeric index after 'codi_'
codi_list = sorted([x for x in os.listdir('data/img') if not x.startswith('.')], key=lambda x: int(x.split('_')[1]))

Building the image arrays for tops, bottoms, and shoes

# Index variable used when inserting the top, bottom, and shoe images into their respective arrays inside the loop
count = 0
# enumerate also returns the outfit index so that images with mismatched channel counts can be traced back
for i, codi in enumerate(codi_list):
    # Build the list of relative paths to this outfit's top/bottom/shoe images with glob, then iterate over the items one at a time
    for item in glob('data/img/' + codi + '/*.*'):
        # Load the image at the item path with PIL's Image.open and convert it to an array
        item_arr = np.array(Image.open(item))
        
        # If the image has 4 channels (RGBA) instead of 3, the last channel is alpha (transparency)
        if item_arr.shape[2] == 4:
            # Extract only the alpha channel
            alpha = item_arr[:, :, 3]
            # Mark the positions where alpha equals 0 as True
            mask = (alpha == 0)
            # Where alpha is 0, set red, green, blue, and alpha all to 255 (white)
            item_arr[:, :, :4][mask] = [255, 255, 255, 255]
        # Resize to 256 * 256 with cv2.resize; the [:, :, :3] slice keeps at most the first 3 channels
        resized_item = cv2.resize(item_arr[:, :, :3], dsize=(256, 256), interpolation=cv2.INTER_LINEAR)
        
        # Extract the category string from the png file path
        category = item.split('/')[-1].split('.')[0]
        
        # If the image has fewer than 3 channels, print the exception and continue
        if resized_item.shape[2] < 3:
            print(i, category, resized_item.shape)
            continue
        
        # Insert the image into the array for its category (상의 = top, 하의 = bottom, 신발 = shoes), using count as the index
        if category == '상의':
            x_top[count, :, :, :] = resized_item
        elif category == '하의':
            x_bottom[count, :, :, :] = resized_item
        else:
            x_shoes[count, :, :, :] = resized_item
        
    # Move on to the next index by incrementing count
    count += 1
93 하의 (256, 256, 2)
102 상의 (256, 256, 2)
124 신발 (256, 256, 2)
150 신발 (256, 256, 2)
254 하의 (256, 256, 2)
278 하의 (256, 256, 2)
327 신발 (256, 256, 2)
348 하의 (256, 256, 2)
449 신발 (256, 256, 2)
458 신발 (256, 256, 2)
538 상의 (256, 256, 2)
553 하의 (256, 256, 2)
555 하의 (256, 256, 2)
605 하의 (256, 256, 2)
669 하의 (256, 256, 2)
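
The alpha == 0 mask above whitens only fully transparent pixels; a hedged alternative (a sketch, not in the original notebook) that also flattens semi-transparent pixels onto white via PIL's alpha compositing:

# Flatten an RGBA item image onto an opaque white background; the path reuses the example from above
item = Image.open('data/img/codi_1/상의.png').convert('RGBA')
background = Image.new('RGBA', item.size, (255, 255, 255, 255))
flattened = Image.alpha_composite(background, item).convert('RGB')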

List of outfit indices to exclude as exceptions (these images have only 2 channels, grayscale + alpha, so they cannot be used as RGB)

except_list = [93, 102, 124, 150, 254, 278, 327, 348, 449, 458, 538, 553, 555, 605, 669]
len(except_list)
15
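
Rather than transcribing the indices from the printed output, they could be collected automatically; a sketch (not in the original notebook) that mirrors the detection logic above:

except_set = set()
for i, codi in enumerate(codi_list):
    for item in glob('data/img/' + codi + '/*.*'):
        arr = np.array(Image.open(item))
        # Flag the outfit when any of its images has fewer than 3 channels (e.g. grayscale + alpha)
        if arr.ndim < 3 or arr.shape[2] < 3:
            except_set.add(i)
except_list = sorted(except_set)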

Rebuilding the dataset for tops, bottoms, shoes & their labels (with the exceptions removed)

x_top = np.zeros((img_length - 15, 256, 256, 3), dtype=np.int32)
x_bottom = np.zeros((img_length - 15, 256, 256, 3), dtype=np.int32)
x_shoes = np.zeros((img_length - 15, 256, 256, 3), dtype=np.int32)
# Repeat 0, 1, 2 (top, bottom, shoes respectively) (img_length - 15) times each and chain them into a label array of length (img_length - 15) * 3
y_label = np.array(list(chain.from_iterable((repeat(number, img_length - 15) for number in [0, 1, 2]))))
# One-hot encoding with torch is left commented out this time; the integer labels are kept
# y_label = F.one_hot(torch.tensor(y_label), num_classes=3)

Rebuilding the image arrays for tops, bottoms, and shoes (skipping the exceptions)

# Index variable used when inserting the top, bottom, and shoe images into their respective arrays inside the loop
count = 0
# enumerate also returns the outfit index so that images with mismatched channel counts can be traced back
for i, codi in enumerate(codi_list):
    # Skip outfits whose index is in the exception list
    if i in except_list:
        continue
    
    # Build the list of relative paths to this outfit's top/bottom/shoe images with glob, then iterate over the items one at a time
    for item in glob('data/img/' + codi + '/*.*'):
        # Load the image at the item path with PIL's Image.open and convert it to an array
        item_arr = np.array(Image.open(item))
        
        # If the image has 4 channels (RGBA) instead of 3, the last channel is alpha (transparency)
        if item_arr.shape[2] == 4:
            # Extract only the alpha channel
            alpha = item_arr[:, :, 3]
            # Mark the positions where alpha equals 0 as True
            mask = (alpha == 0)
            # Where alpha is 0, set red, green, blue, and alpha all to 255 (white)
            item_arr[:, :, :4][mask] = [255, 255, 255, 255]
        # Resize to 256 * 256 with cv2.resize; the [:, :, :3] slice keeps at most the first 3 channels
        resized_item = cv2.resize(item_arr[:, :, :3], dsize=(256, 256), interpolation=cv2.INTER_LINEAR)
        
        # Extract the category string from the png file path
        category = item.split('/')[-1].split('.')[0]
        
        # If the image has fewer than 3 channels, print the exception and continue
        if resized_item.shape[2] < 3:
            print(i, category, resized_item.shape)
            continue
        
        # Insert the image into the array for its category (상의 = top, 하의 = bottom, 신발 = shoes), using count as the index
        if category == '상의':
            x_top[count, :, :, :] = resized_item
        elif category == '하의':
            x_bottom[count, :, :, :] = resized_item
        else:
            x_shoes[count, :, :, :] = resized_item
        
    # Move on to the next index by incrementing count
    count += 1

Checking the final dataset shapes

print(x_top.shape, x_bottom.shape, x_shoes.shape)
(757, 256, 256, 3) (757, 256, 256, 3) (757, 256, 256, 3)

Concatenating the final top, bottom, and shoe datasets

# The array type must be uint8 so the data can be loaded as a PIL Image and converted via transforms
x_data = np.vstack((x_top, x_bottom, x_shoes)).astype('uint8')
# Conversion to torch.tensor
# transforms.ToTensor() is used instead, so the tensor is laid out as (channels, height, width)
# x_data = torch.tensor(x_data)
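
For reference, a minimal sketch (not in the original notebook) of what transforms.ToTensor() does to one item: an (H, W, C) uint8 array in [0, 255] becomes a (C, H, W) float32 tensor in [0.0, 1.0].

sample = transforms.ToTensor()(Image.fromarray(x_data[0]))
print(sample.shape, sample.dtype)  # torch.Size([3, 256, 256]) torch.float32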

Checking the shapes of the concatenated dataset and the labels

# Loading code for training on a GPU-enabled server after moving only the data there
# with open('data/x_data.pickle', 'rb') as f:
#     x_data = pickle.load(f)

# with open('data/y_label.pickle', 'rb') as f:
#     y_label = pickle.load(f)
print(x_data.shape, y_label.shape)
(2271, 256, 256, 3) (2271,)
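
The commented block above loads the pickles on the GPU server; the matching save step on the local machine would be a sketch like this (file names assumed to mirror the load paths):

with open('data/x_data.pickle', 'wb') as f:
    pickle.dump(x_data, f, protocol=pickle.HIGHEST_PROTOCOL)

with open('data/y_label.pickle', 'wb') as f:
    pickle.dump(y_label, f, protocol=pickle.HIGHEST_PROTOCOL)
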
x_data[0]
array([[[255, 255, 255],
        [255, 255, 255],
        [255, 255, 255],
        ...,
        [255, 255, 255],
        [255, 255, 255],
        [255, 255, 255]],

       [[255, 255, 255],
        [255, 255, 255],
        [255, 255, 255],
        ...,
        [255, 255, 255],
        [255, 255, 255],
        [255, 255, 255]],

       [[255, 255, 255],
        [255, 255, 255],
        [255, 255, 255],
        ...,
        [255, 255, 255],
        [255, 255, 255],
        [255, 255, 255]],

       ...,

       [[255, 255, 255],
        [255, 255, 255],
        [255, 255, 255],
        ...,
        [255, 255, 255],
        [255, 255, 255],
        [255, 255, 255]],

       [[255, 255, 255],
        [255, 255, 255],
        [255, 255, 255],
        ...,
        [255, 255, 255],
        [255, 255, 255],
        [255, 255, 255]],

       [[255, 255, 255],
        [255, 255, 255],
        [255, 255, 255],
        ...,
        [255, 255, 255],
        [255, 255, 255],
        [255, 255, 255]]], dtype=uint8)

Building a PyTorch Dataset

class codiDataset(Dataset):
    def __init__(self, transform, x_array, y_tensor):
        # x_array: (N, 256, 256, 3) uint8 image array; y_tensor: integer class labels
        self.x = x_array
        self.y = y_tensor
        self.transform = transform
    
    def __len__(self):
        return len(self.x)
    
    def __getitem__(self, idx):
        # Convert the uint8 array back to a PIL image, then apply the transform
        image = self.transform(Image.fromarray(self.x[idx]))
        return image, self.y[idx]

Defining the transformation that converts a PIL image into a PyTorch tensor

data_transformer = transforms.Compose([transforms.ToTensor()])

Building the tensor dataset

codi_dataset = codiDataset(data_transformer, x_data, y_label)
len(codi_dataset)
2271

Checking a sample

img, label = codi_dataset[0]
print(img.shape)
img
torch.Size([3, 256, 256])

tensor([[[1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.],
         ...,
         [1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.]],

        [[1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.],
         ...,
         [1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.]],

        [[1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.],
         ...,
         [1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.],
         [1., 1., 1.,  ..., 1., 1., 1.]]])
tp = transforms.ToPILImage()
img_t = tp(img)
img_t

(rendered image: the first dataset item converted back to a PIL image)

# print(label.shape)
# label
label
0

2. Solving a 3-class image classification problem with a pre-trained model

Splitting into training and validation datasets

len_codi = len(codi_dataset)
len_train = int(0.8 * len_codi)
len_val = len_codi - len_train
train_ds, val_ds = random_split(codi_dataset, [len_train, len_val])
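
For reproducible experiments, the split can be seeded; a sketch (not in the original notebook, assuming PyTorch >= 1.6 where random_split accepts a generator):

g = torch.Generator().manual_seed(42)
train_ds, val_ds = random_split(codi_dataset, [len_train, len_val], generator=g)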

Checking samples

for x, y in train_ds:
    print(x.shape, y)
    break
torch.Size([3, 256, 256]) 0
for x, y in val_ds:
    print(x.shape, y)
    break
torch.Size([3, 256, 256]) 2

Wrapping in DataLoaders

BATCH_SIZE = 64
train_dl = DataLoader(train_ds, batch_size=BATCH_SIZE, shuffle=True)
val_dl = DataLoader(val_ds, batch_size=BATCH_SIZE, shuffle=True)
for x, y in train_dl:
    print(x.shape)
    print(y.shape)
    break
torch.Size([64, 3, 256, 256])
torch.Size([64])
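
One note on the setup above: shuffling the validation loader has no effect on the computed metrics, so it is conventionally disabled; a sketch of the more typical configuration (not what the notebook runs):

val_dl = DataLoader(val_ds, batch_size=BATCH_SIZE, shuffle=False, pin_memory=True)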

Loading a PyTorch pretrained model

Building base_model from torchvision's resnet152

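Since resnet152(num_classes=3) initializes every weight from scratch with a 3-class output head, the usual transfer-learning variant loads the ImageNet weights first and then swaps in a fresh head; a sketch under the torchvision 0.8 API (not what the cell below runs):

pretrained_model = torchvision.models.resnet152(pretrained=True)
# Replace the final fully connected layer with a new 3-class output head
pretrained_model.fc = nn.Linear(pretrained_model.fc.in_features, 3)
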
base_model = torchvision.models.resnet152(num_classes=3)
# base_model = torch.hub.load('pytorch/vision:v0.8.2', 'resnet152', pretrained=True)
base_model
ResNet(
  (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (relu): ReLU(inplace=True)
  (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
  (layer1): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer2): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (3): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (4): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (5): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (6): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (7): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer3): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (3): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (4): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (5): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (6): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (7): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (8): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (9): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (10): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (11): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (12): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (13): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (14): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (15): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (16): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (17): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (18): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (19): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (20): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (21): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (22): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (23): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (24): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (25): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (26): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (27): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (28): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (29): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (30): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (31): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (32): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (33): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (34): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (35): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer4): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))
  (fc): Linear(in_features=2048, out_features=3, bias=True)
)
# use the GPU when available; `device` is also needed by loss_epoch below
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
base_model = base_model.to(device)

Modifying the output layer

Set the output of the final linear layer to 3 classes (num_classes was set up front when loading the model from torchvision.models, so the head does not need to be replaced manually).
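Since num_classes is passed at construction time, a minimal sketch of the equivalent call is shown below. ResNet-152 is inferred here from the 36-block layer3 and the 58,149,955-parameter summary; note that with a non-default num_classes, ImageNet-pretrained weights cannot be loaded, so this variant starts from random initialization.

# sketch: let torchvision build the final fc layer with 3 outputs directly
# (ResNet-152 backbone assumed from the printout above)
base_model = models.resnet152(num_classes=3)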

# NUM_CLASSES = 3
# NUM_FTRS = base_model.fc.in_features
# base_model.fc = nn.Linear(NUM_FTRS, NUM_CLASSES)
summary(base_model, input_size=(3, 256, 256))
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Conv2d-1         [-1, 64, 128, 128]           9,408
       BatchNorm2d-2         [-1, 64, 128, 128]             128
              ReLU-3         [-1, 64, 128, 128]               0
         MaxPool2d-4           [-1, 64, 64, 64]               0
            Conv2d-5           [-1, 64, 64, 64]           4,096
       BatchNorm2d-6           [-1, 64, 64, 64]             128
              ReLU-7           [-1, 64, 64, 64]               0
            Conv2d-8           [-1, 64, 64, 64]          36,864
       BatchNorm2d-9           [-1, 64, 64, 64]             128
             ReLU-10           [-1, 64, 64, 64]               0
           Conv2d-11          [-1, 256, 64, 64]          16,384
      BatchNorm2d-12          [-1, 256, 64, 64]             512
           Conv2d-13          [-1, 256, 64, 64]          16,384
      BatchNorm2d-14          [-1, 256, 64, 64]             512
             ReLU-15          [-1, 256, 64, 64]               0
       Bottleneck-16          [-1, 256, 64, 64]               0
           Conv2d-17           [-1, 64, 64, 64]          16,384
      BatchNorm2d-18           [-1, 64, 64, 64]             128
             ReLU-19           [-1, 64, 64, 64]               0
           Conv2d-20           [-1, 64, 64, 64]          36,864
      BatchNorm2d-21           [-1, 64, 64, 64]             128
             ReLU-22           [-1, 64, 64, 64]               0
           Conv2d-23          [-1, 256, 64, 64]          16,384
      BatchNorm2d-24          [-1, 256, 64, 64]             512
             ReLU-25          [-1, 256, 64, 64]               0
       Bottleneck-26          [-1, 256, 64, 64]               0
           Conv2d-27           [-1, 64, 64, 64]          16,384
      BatchNorm2d-28           [-1, 64, 64, 64]             128
             ReLU-29           [-1, 64, 64, 64]               0
           Conv2d-30           [-1, 64, 64, 64]          36,864
      BatchNorm2d-31           [-1, 64, 64, 64]             128
             ReLU-32           [-1, 64, 64, 64]               0
           Conv2d-33          [-1, 256, 64, 64]          16,384
      BatchNorm2d-34          [-1, 256, 64, 64]             512
             ReLU-35          [-1, 256, 64, 64]               0
       Bottleneck-36          [-1, 256, 64, 64]               0
           Conv2d-37          [-1, 128, 64, 64]          32,768
      BatchNorm2d-38          [-1, 128, 64, 64]             256
             ReLU-39          [-1, 128, 64, 64]               0
           Conv2d-40          [-1, 128, 32, 32]         147,456
      BatchNorm2d-41          [-1, 128, 32, 32]             256
             ReLU-42          [-1, 128, 32, 32]               0
           Conv2d-43          [-1, 512, 32, 32]          65,536
      BatchNorm2d-44          [-1, 512, 32, 32]           1,024
           Conv2d-45          [-1, 512, 32, 32]         131,072
      BatchNorm2d-46          [-1, 512, 32, 32]           1,024
             ReLU-47          [-1, 512, 32, 32]               0
       Bottleneck-48          [-1, 512, 32, 32]               0
           Conv2d-49          [-1, 128, 32, 32]          65,536
      BatchNorm2d-50          [-1, 128, 32, 32]             256
             ReLU-51          [-1, 128, 32, 32]               0
           Conv2d-52          [-1, 128, 32, 32]         147,456
      BatchNorm2d-53          [-1, 128, 32, 32]             256
             ReLU-54          [-1, 128, 32, 32]               0
           Conv2d-55          [-1, 512, 32, 32]          65,536
      BatchNorm2d-56          [-1, 512, 32, 32]           1,024
             ReLU-57          [-1, 512, 32, 32]               0
       Bottleneck-58          [-1, 512, 32, 32]               0
           Conv2d-59          [-1, 128, 32, 32]          65,536
      BatchNorm2d-60          [-1, 128, 32, 32]             256
             ReLU-61          [-1, 128, 32, 32]               0
           Conv2d-62          [-1, 128, 32, 32]         147,456
      BatchNorm2d-63          [-1, 128, 32, 32]             256
             ReLU-64          [-1, 128, 32, 32]               0
           Conv2d-65          [-1, 512, 32, 32]          65,536
      BatchNorm2d-66          [-1, 512, 32, 32]           1,024
             ReLU-67          [-1, 512, 32, 32]               0
       Bottleneck-68          [-1, 512, 32, 32]               0
           Conv2d-69          [-1, 128, 32, 32]          65,536
      BatchNorm2d-70          [-1, 128, 32, 32]             256
             ReLU-71          [-1, 128, 32, 32]               0
           Conv2d-72          [-1, 128, 32, 32]         147,456
      BatchNorm2d-73          [-1, 128, 32, 32]             256
             ReLU-74          [-1, 128, 32, 32]               0
           Conv2d-75          [-1, 512, 32, 32]          65,536
      BatchNorm2d-76          [-1, 512, 32, 32]           1,024
             ReLU-77          [-1, 512, 32, 32]               0
       Bottleneck-78          [-1, 512, 32, 32]               0
           Conv2d-79          [-1, 128, 32, 32]          65,536
      BatchNorm2d-80          [-1, 128, 32, 32]             256
             ReLU-81          [-1, 128, 32, 32]               0
           Conv2d-82          [-1, 128, 32, 32]         147,456
      BatchNorm2d-83          [-1, 128, 32, 32]             256
             ReLU-84          [-1, 128, 32, 32]               0
           Conv2d-85          [-1, 512, 32, 32]          65,536
      BatchNorm2d-86          [-1, 512, 32, 32]           1,024
             ReLU-87          [-1, 512, 32, 32]               0
       Bottleneck-88          [-1, 512, 32, 32]               0
           Conv2d-89          [-1, 128, 32, 32]          65,536
      BatchNorm2d-90          [-1, 128, 32, 32]             256
             ReLU-91          [-1, 128, 32, 32]               0
           Conv2d-92          [-1, 128, 32, 32]         147,456
      BatchNorm2d-93          [-1, 128, 32, 32]             256
             ReLU-94          [-1, 128, 32, 32]               0
           Conv2d-95          [-1, 512, 32, 32]          65,536
      BatchNorm2d-96          [-1, 512, 32, 32]           1,024
             ReLU-97          [-1, 512, 32, 32]               0
       Bottleneck-98          [-1, 512, 32, 32]               0
           Conv2d-99          [-1, 128, 32, 32]          65,536
     BatchNorm2d-100          [-1, 128, 32, 32]             256
            ReLU-101          [-1, 128, 32, 32]               0
          Conv2d-102          [-1, 128, 32, 32]         147,456
     BatchNorm2d-103          [-1, 128, 32, 32]             256
            ReLU-104          [-1, 128, 32, 32]               0
          Conv2d-105          [-1, 512, 32, 32]          65,536
     BatchNorm2d-106          [-1, 512, 32, 32]           1,024
            ReLU-107          [-1, 512, 32, 32]               0
      Bottleneck-108          [-1, 512, 32, 32]               0
          Conv2d-109          [-1, 128, 32, 32]          65,536
     BatchNorm2d-110          [-1, 128, 32, 32]             256
            ReLU-111          [-1, 128, 32, 32]               0
          Conv2d-112          [-1, 128, 32, 32]         147,456
     BatchNorm2d-113          [-1, 128, 32, 32]             256
            ReLU-114          [-1, 128, 32, 32]               0
          Conv2d-115          [-1, 512, 32, 32]          65,536
     BatchNorm2d-116          [-1, 512, 32, 32]           1,024
            ReLU-117          [-1, 512, 32, 32]               0
      Bottleneck-118          [-1, 512, 32, 32]               0
          Conv2d-119          [-1, 256, 32, 32]         131,072
     BatchNorm2d-120          [-1, 256, 32, 32]             512
            ReLU-121          [-1, 256, 32, 32]               0
          Conv2d-122          [-1, 256, 16, 16]         589,824
     BatchNorm2d-123          [-1, 256, 16, 16]             512
            ReLU-124          [-1, 256, 16, 16]               0
          Conv2d-125         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-126         [-1, 1024, 16, 16]           2,048
          Conv2d-127         [-1, 1024, 16, 16]         524,288
     BatchNorm2d-128         [-1, 1024, 16, 16]           2,048
            ReLU-129         [-1, 1024, 16, 16]               0
      Bottleneck-130         [-1, 1024, 16, 16]               0
          Conv2d-131          [-1, 256, 16, 16]         262,144
     BatchNorm2d-132          [-1, 256, 16, 16]             512
            ReLU-133          [-1, 256, 16, 16]               0
          Conv2d-134          [-1, 256, 16, 16]         589,824
     BatchNorm2d-135          [-1, 256, 16, 16]             512
            ReLU-136          [-1, 256, 16, 16]               0
          Conv2d-137         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-138         [-1, 1024, 16, 16]           2,048
            ReLU-139         [-1, 1024, 16, 16]               0
      Bottleneck-140         [-1, 1024, 16, 16]               0
          Conv2d-141          [-1, 256, 16, 16]         262,144
     BatchNorm2d-142          [-1, 256, 16, 16]             512
            ReLU-143          [-1, 256, 16, 16]               0
          Conv2d-144          [-1, 256, 16, 16]         589,824
     BatchNorm2d-145          [-1, 256, 16, 16]             512
            ReLU-146          [-1, 256, 16, 16]               0
          Conv2d-147         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-148         [-1, 1024, 16, 16]           2,048
            ReLU-149         [-1, 1024, 16, 16]               0
      Bottleneck-150         [-1, 1024, 16, 16]               0
          Conv2d-151          [-1, 256, 16, 16]         262,144
     BatchNorm2d-152          [-1, 256, 16, 16]             512
            ReLU-153          [-1, 256, 16, 16]               0
          Conv2d-154          [-1, 256, 16, 16]         589,824
     BatchNorm2d-155          [-1, 256, 16, 16]             512
            ReLU-156          [-1, 256, 16, 16]               0
          Conv2d-157         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-158         [-1, 1024, 16, 16]           2,048
            ReLU-159         [-1, 1024, 16, 16]               0
      Bottleneck-160         [-1, 1024, 16, 16]               0
          Conv2d-161          [-1, 256, 16, 16]         262,144
     BatchNorm2d-162          [-1, 256, 16, 16]             512
            ReLU-163          [-1, 256, 16, 16]               0
          Conv2d-164          [-1, 256, 16, 16]         589,824
     BatchNorm2d-165          [-1, 256, 16, 16]             512
            ReLU-166          [-1, 256, 16, 16]               0
          Conv2d-167         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-168         [-1, 1024, 16, 16]           2,048
            ReLU-169         [-1, 1024, 16, 16]               0
      Bottleneck-170         [-1, 1024, 16, 16]               0
          Conv2d-171          [-1, 256, 16, 16]         262,144
     BatchNorm2d-172          [-1, 256, 16, 16]             512
            ReLU-173          [-1, 256, 16, 16]               0
          Conv2d-174          [-1, 256, 16, 16]         589,824
     BatchNorm2d-175          [-1, 256, 16, 16]             512
            ReLU-176          [-1, 256, 16, 16]               0
          Conv2d-177         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-178         [-1, 1024, 16, 16]           2,048
            ReLU-179         [-1, 1024, 16, 16]               0
      Bottleneck-180         [-1, 1024, 16, 16]               0
          Conv2d-181          [-1, 256, 16, 16]         262,144
     BatchNorm2d-182          [-1, 256, 16, 16]             512
            ReLU-183          [-1, 256, 16, 16]               0
          Conv2d-184          [-1, 256, 16, 16]         589,824
     BatchNorm2d-185          [-1, 256, 16, 16]             512
            ReLU-186          [-1, 256, 16, 16]               0
          Conv2d-187         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-188         [-1, 1024, 16, 16]           2,048
            ReLU-189         [-1, 1024, 16, 16]               0
      Bottleneck-190         [-1, 1024, 16, 16]               0
          Conv2d-191          [-1, 256, 16, 16]         262,144
     BatchNorm2d-192          [-1, 256, 16, 16]             512
            ReLU-193          [-1, 256, 16, 16]               0
          Conv2d-194          [-1, 256, 16, 16]         589,824
     BatchNorm2d-195          [-1, 256, 16, 16]             512
            ReLU-196          [-1, 256, 16, 16]               0
          Conv2d-197         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-198         [-1, 1024, 16, 16]           2,048
            ReLU-199         [-1, 1024, 16, 16]               0
      Bottleneck-200         [-1, 1024, 16, 16]               0
          Conv2d-201          [-1, 256, 16, 16]         262,144
     BatchNorm2d-202          [-1, 256, 16, 16]             512
            ReLU-203          [-1, 256, 16, 16]               0
          Conv2d-204          [-1, 256, 16, 16]         589,824
     BatchNorm2d-205          [-1, 256, 16, 16]             512
            ReLU-206          [-1, 256, 16, 16]               0
          Conv2d-207         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-208         [-1, 1024, 16, 16]           2,048
            ReLU-209         [-1, 1024, 16, 16]               0
      Bottleneck-210         [-1, 1024, 16, 16]               0
          Conv2d-211          [-1, 256, 16, 16]         262,144
     BatchNorm2d-212          [-1, 256, 16, 16]             512
            ReLU-213          [-1, 256, 16, 16]               0
          Conv2d-214          [-1, 256, 16, 16]         589,824
     BatchNorm2d-215          [-1, 256, 16, 16]             512
            ReLU-216          [-1, 256, 16, 16]               0
          Conv2d-217         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-218         [-1, 1024, 16, 16]           2,048
            ReLU-219         [-1, 1024, 16, 16]               0
      Bottleneck-220         [-1, 1024, 16, 16]               0
          Conv2d-221          [-1, 256, 16, 16]         262,144
     BatchNorm2d-222          [-1, 256, 16, 16]             512
            ReLU-223          [-1, 256, 16, 16]               0
          Conv2d-224          [-1, 256, 16, 16]         589,824
     BatchNorm2d-225          [-1, 256, 16, 16]             512
            ReLU-226          [-1, 256, 16, 16]               0
          Conv2d-227         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-228         [-1, 1024, 16, 16]           2,048
            ReLU-229         [-1, 1024, 16, 16]               0
      Bottleneck-230         [-1, 1024, 16, 16]               0
          Conv2d-231          [-1, 256, 16, 16]         262,144
     BatchNorm2d-232          [-1, 256, 16, 16]             512
            ReLU-233          [-1, 256, 16, 16]               0
          Conv2d-234          [-1, 256, 16, 16]         589,824
     BatchNorm2d-235          [-1, 256, 16, 16]             512
            ReLU-236          [-1, 256, 16, 16]               0
          Conv2d-237         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-238         [-1, 1024, 16, 16]           2,048
            ReLU-239         [-1, 1024, 16, 16]               0
      Bottleneck-240         [-1, 1024, 16, 16]               0
          Conv2d-241          [-1, 256, 16, 16]         262,144
     BatchNorm2d-242          [-1, 256, 16, 16]             512
            ReLU-243          [-1, 256, 16, 16]               0
          Conv2d-244          [-1, 256, 16, 16]         589,824
     BatchNorm2d-245          [-1, 256, 16, 16]             512
            ReLU-246          [-1, 256, 16, 16]               0
          Conv2d-247         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-248         [-1, 1024, 16, 16]           2,048
            ReLU-249         [-1, 1024, 16, 16]               0
      Bottleneck-250         [-1, 1024, 16, 16]               0
          Conv2d-251          [-1, 256, 16, 16]         262,144
     BatchNorm2d-252          [-1, 256, 16, 16]             512
            ReLU-253          [-1, 256, 16, 16]               0
          Conv2d-254          [-1, 256, 16, 16]         589,824
     BatchNorm2d-255          [-1, 256, 16, 16]             512
            ReLU-256          [-1, 256, 16, 16]               0
          Conv2d-257         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-258         [-1, 1024, 16, 16]           2,048
            ReLU-259         [-1, 1024, 16, 16]               0
      Bottleneck-260         [-1, 1024, 16, 16]               0
          Conv2d-261          [-1, 256, 16, 16]         262,144
     BatchNorm2d-262          [-1, 256, 16, 16]             512
            ReLU-263          [-1, 256, 16, 16]               0
          Conv2d-264          [-1, 256, 16, 16]         589,824
     BatchNorm2d-265          [-1, 256, 16, 16]             512
            ReLU-266          [-1, 256, 16, 16]               0
          Conv2d-267         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-268         [-1, 1024, 16, 16]           2,048
            ReLU-269         [-1, 1024, 16, 16]               0
      Bottleneck-270         [-1, 1024, 16, 16]               0
          Conv2d-271          [-1, 256, 16, 16]         262,144
     BatchNorm2d-272          [-1, 256, 16, 16]             512
            ReLU-273          [-1, 256, 16, 16]               0
          Conv2d-274          [-1, 256, 16, 16]         589,824
     BatchNorm2d-275          [-1, 256, 16, 16]             512
            ReLU-276          [-1, 256, 16, 16]               0
          Conv2d-277         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-278         [-1, 1024, 16, 16]           2,048
            ReLU-279         [-1, 1024, 16, 16]               0
      Bottleneck-280         [-1, 1024, 16, 16]               0
          Conv2d-281          [-1, 256, 16, 16]         262,144
     BatchNorm2d-282          [-1, 256, 16, 16]             512
            ReLU-283          [-1, 256, 16, 16]               0
          Conv2d-284          [-1, 256, 16, 16]         589,824
     BatchNorm2d-285          [-1, 256, 16, 16]             512
            ReLU-286          [-1, 256, 16, 16]               0
          Conv2d-287         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-288         [-1, 1024, 16, 16]           2,048
            ReLU-289         [-1, 1024, 16, 16]               0
      Bottleneck-290         [-1, 1024, 16, 16]               0
          Conv2d-291          [-1, 256, 16, 16]         262,144
     BatchNorm2d-292          [-1, 256, 16, 16]             512
            ReLU-293          [-1, 256, 16, 16]               0
          Conv2d-294          [-1, 256, 16, 16]         589,824
     BatchNorm2d-295          [-1, 256, 16, 16]             512
            ReLU-296          [-1, 256, 16, 16]               0
          Conv2d-297         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-298         [-1, 1024, 16, 16]           2,048
            ReLU-299         [-1, 1024, 16, 16]               0
      Bottleneck-300         [-1, 1024, 16, 16]               0
          Conv2d-301          [-1, 256, 16, 16]         262,144
     BatchNorm2d-302          [-1, 256, 16, 16]             512
            ReLU-303          [-1, 256, 16, 16]               0
          Conv2d-304          [-1, 256, 16, 16]         589,824
     BatchNorm2d-305          [-1, 256, 16, 16]             512
            ReLU-306          [-1, 256, 16, 16]               0
          Conv2d-307         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-308         [-1, 1024, 16, 16]           2,048
            ReLU-309         [-1, 1024, 16, 16]               0
      Bottleneck-310         [-1, 1024, 16, 16]               0
          Conv2d-311          [-1, 256, 16, 16]         262,144
     BatchNorm2d-312          [-1, 256, 16, 16]             512
            ReLU-313          [-1, 256, 16, 16]               0
          Conv2d-314          [-1, 256, 16, 16]         589,824
     BatchNorm2d-315          [-1, 256, 16, 16]             512
            ReLU-316          [-1, 256, 16, 16]               0
          Conv2d-317         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-318         [-1, 1024, 16, 16]           2,048
            ReLU-319         [-1, 1024, 16, 16]               0
      Bottleneck-320         [-1, 1024, 16, 16]               0
          Conv2d-321          [-1, 256, 16, 16]         262,144
     BatchNorm2d-322          [-1, 256, 16, 16]             512
            ReLU-323          [-1, 256, 16, 16]               0
          Conv2d-324          [-1, 256, 16, 16]         589,824
     BatchNorm2d-325          [-1, 256, 16, 16]             512
            ReLU-326          [-1, 256, 16, 16]               0
          Conv2d-327         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-328         [-1, 1024, 16, 16]           2,048
            ReLU-329         [-1, 1024, 16, 16]               0
      Bottleneck-330         [-1, 1024, 16, 16]               0
          Conv2d-331          [-1, 256, 16, 16]         262,144
     BatchNorm2d-332          [-1, 256, 16, 16]             512
            ReLU-333          [-1, 256, 16, 16]               0
          Conv2d-334          [-1, 256, 16, 16]         589,824
     BatchNorm2d-335          [-1, 256, 16, 16]             512
            ReLU-336          [-1, 256, 16, 16]               0
          Conv2d-337         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-338         [-1, 1024, 16, 16]           2,048
            ReLU-339         [-1, 1024, 16, 16]               0
      Bottleneck-340         [-1, 1024, 16, 16]               0
          Conv2d-341          [-1, 256, 16, 16]         262,144
     BatchNorm2d-342          [-1, 256, 16, 16]             512
            ReLU-343          [-1, 256, 16, 16]               0
          Conv2d-344          [-1, 256, 16, 16]         589,824
     BatchNorm2d-345          [-1, 256, 16, 16]             512
            ReLU-346          [-1, 256, 16, 16]               0
          Conv2d-347         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-348         [-1, 1024, 16, 16]           2,048
            ReLU-349         [-1, 1024, 16, 16]               0
      Bottleneck-350         [-1, 1024, 16, 16]               0
          Conv2d-351          [-1, 256, 16, 16]         262,144
     BatchNorm2d-352          [-1, 256, 16, 16]             512
            ReLU-353          [-1, 256, 16, 16]               0
          Conv2d-354          [-1, 256, 16, 16]         589,824
     BatchNorm2d-355          [-1, 256, 16, 16]             512
            ReLU-356          [-1, 256, 16, 16]               0
          Conv2d-357         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-358         [-1, 1024, 16, 16]           2,048
            ReLU-359         [-1, 1024, 16, 16]               0
      Bottleneck-360         [-1, 1024, 16, 16]               0
          Conv2d-361          [-1, 256, 16, 16]         262,144
     BatchNorm2d-362          [-1, 256, 16, 16]             512
            ReLU-363          [-1, 256, 16, 16]               0
          Conv2d-364          [-1, 256, 16, 16]         589,824
     BatchNorm2d-365          [-1, 256, 16, 16]             512
            ReLU-366          [-1, 256, 16, 16]               0
          Conv2d-367         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-368         [-1, 1024, 16, 16]           2,048
            ReLU-369         [-1, 1024, 16, 16]               0
      Bottleneck-370         [-1, 1024, 16, 16]               0
          Conv2d-371          [-1, 256, 16, 16]         262,144
     BatchNorm2d-372          [-1, 256, 16, 16]             512
            ReLU-373          [-1, 256, 16, 16]               0
          Conv2d-374          [-1, 256, 16, 16]         589,824
     BatchNorm2d-375          [-1, 256, 16, 16]             512
            ReLU-376          [-1, 256, 16, 16]               0
          Conv2d-377         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-378         [-1, 1024, 16, 16]           2,048
            ReLU-379         [-1, 1024, 16, 16]               0
      Bottleneck-380         [-1, 1024, 16, 16]               0
          Conv2d-381          [-1, 256, 16, 16]         262,144
     BatchNorm2d-382          [-1, 256, 16, 16]             512
            ReLU-383          [-1, 256, 16, 16]               0
          Conv2d-384          [-1, 256, 16, 16]         589,824
     BatchNorm2d-385          [-1, 256, 16, 16]             512
            ReLU-386          [-1, 256, 16, 16]               0
          Conv2d-387         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-388         [-1, 1024, 16, 16]           2,048
            ReLU-389         [-1, 1024, 16, 16]               0
      Bottleneck-390         [-1, 1024, 16, 16]               0
          Conv2d-391          [-1, 256, 16, 16]         262,144
     BatchNorm2d-392          [-1, 256, 16, 16]             512
            ReLU-393          [-1, 256, 16, 16]               0
          Conv2d-394          [-1, 256, 16, 16]         589,824
     BatchNorm2d-395          [-1, 256, 16, 16]             512
            ReLU-396          [-1, 256, 16, 16]               0
          Conv2d-397         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-398         [-1, 1024, 16, 16]           2,048
            ReLU-399         [-1, 1024, 16, 16]               0
      Bottleneck-400         [-1, 1024, 16, 16]               0
          Conv2d-401          [-1, 256, 16, 16]         262,144
     BatchNorm2d-402          [-1, 256, 16, 16]             512
            ReLU-403          [-1, 256, 16, 16]               0
          Conv2d-404          [-1, 256, 16, 16]         589,824
     BatchNorm2d-405          [-1, 256, 16, 16]             512
            ReLU-406          [-1, 256, 16, 16]               0
          Conv2d-407         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-408         [-1, 1024, 16, 16]           2,048
            ReLU-409         [-1, 1024, 16, 16]               0
      Bottleneck-410         [-1, 1024, 16, 16]               0
          Conv2d-411          [-1, 256, 16, 16]         262,144
     BatchNorm2d-412          [-1, 256, 16, 16]             512
            ReLU-413          [-1, 256, 16, 16]               0
          Conv2d-414          [-1, 256, 16, 16]         589,824
     BatchNorm2d-415          [-1, 256, 16, 16]             512
            ReLU-416          [-1, 256, 16, 16]               0
          Conv2d-417         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-418         [-1, 1024, 16, 16]           2,048
            ReLU-419         [-1, 1024, 16, 16]               0
      Bottleneck-420         [-1, 1024, 16, 16]               0
          Conv2d-421          [-1, 256, 16, 16]         262,144
     BatchNorm2d-422          [-1, 256, 16, 16]             512
            ReLU-423          [-1, 256, 16, 16]               0
          Conv2d-424          [-1, 256, 16, 16]         589,824
     BatchNorm2d-425          [-1, 256, 16, 16]             512
            ReLU-426          [-1, 256, 16, 16]               0
          Conv2d-427         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-428         [-1, 1024, 16, 16]           2,048
            ReLU-429         [-1, 1024, 16, 16]               0
      Bottleneck-430         [-1, 1024, 16, 16]               0
          Conv2d-431          [-1, 256, 16, 16]         262,144
     BatchNorm2d-432          [-1, 256, 16, 16]             512
            ReLU-433          [-1, 256, 16, 16]               0
          Conv2d-434          [-1, 256, 16, 16]         589,824
     BatchNorm2d-435          [-1, 256, 16, 16]             512
            ReLU-436          [-1, 256, 16, 16]               0
          Conv2d-437         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-438         [-1, 1024, 16, 16]           2,048
            ReLU-439         [-1, 1024, 16, 16]               0
      Bottleneck-440         [-1, 1024, 16, 16]               0
          Conv2d-441          [-1, 256, 16, 16]         262,144
     BatchNorm2d-442          [-1, 256, 16, 16]             512
            ReLU-443          [-1, 256, 16, 16]               0
          Conv2d-444          [-1, 256, 16, 16]         589,824
     BatchNorm2d-445          [-1, 256, 16, 16]             512
            ReLU-446          [-1, 256, 16, 16]               0
          Conv2d-447         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-448         [-1, 1024, 16, 16]           2,048
            ReLU-449         [-1, 1024, 16, 16]               0
      Bottleneck-450         [-1, 1024, 16, 16]               0
          Conv2d-451          [-1, 256, 16, 16]         262,144
     BatchNorm2d-452          [-1, 256, 16, 16]             512
            ReLU-453          [-1, 256, 16, 16]               0
          Conv2d-454          [-1, 256, 16, 16]         589,824
     BatchNorm2d-455          [-1, 256, 16, 16]             512
            ReLU-456          [-1, 256, 16, 16]               0
          Conv2d-457         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-458         [-1, 1024, 16, 16]           2,048
            ReLU-459         [-1, 1024, 16, 16]               0
      Bottleneck-460         [-1, 1024, 16, 16]               0
          Conv2d-461          [-1, 256, 16, 16]         262,144
     BatchNorm2d-462          [-1, 256, 16, 16]             512
            ReLU-463          [-1, 256, 16, 16]               0
          Conv2d-464          [-1, 256, 16, 16]         589,824
     BatchNorm2d-465          [-1, 256, 16, 16]             512
            ReLU-466          [-1, 256, 16, 16]               0
          Conv2d-467         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-468         [-1, 1024, 16, 16]           2,048
            ReLU-469         [-1, 1024, 16, 16]               0
      Bottleneck-470         [-1, 1024, 16, 16]               0
          Conv2d-471          [-1, 256, 16, 16]         262,144
     BatchNorm2d-472          [-1, 256, 16, 16]             512
            ReLU-473          [-1, 256, 16, 16]               0
          Conv2d-474          [-1, 256, 16, 16]         589,824
     BatchNorm2d-475          [-1, 256, 16, 16]             512
            ReLU-476          [-1, 256, 16, 16]               0
          Conv2d-477         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-478         [-1, 1024, 16, 16]           2,048
            ReLU-479         [-1, 1024, 16, 16]               0
      Bottleneck-480         [-1, 1024, 16, 16]               0
          Conv2d-481          [-1, 512, 16, 16]         524,288
     BatchNorm2d-482          [-1, 512, 16, 16]           1,024
            ReLU-483          [-1, 512, 16, 16]               0
          Conv2d-484            [-1, 512, 8, 8]       2,359,296
     BatchNorm2d-485            [-1, 512, 8, 8]           1,024
            ReLU-486            [-1, 512, 8, 8]               0
          Conv2d-487           [-1, 2048, 8, 8]       1,048,576
     BatchNorm2d-488           [-1, 2048, 8, 8]           4,096
          Conv2d-489           [-1, 2048, 8, 8]       2,097,152
     BatchNorm2d-490           [-1, 2048, 8, 8]           4,096
            ReLU-491           [-1, 2048, 8, 8]               0
      Bottleneck-492           [-1, 2048, 8, 8]               0
          Conv2d-493            [-1, 512, 8, 8]       1,048,576
     BatchNorm2d-494            [-1, 512, 8, 8]           1,024
            ReLU-495            [-1, 512, 8, 8]               0
          Conv2d-496            [-1, 512, 8, 8]       2,359,296
     BatchNorm2d-497            [-1, 512, 8, 8]           1,024
            ReLU-498            [-1, 512, 8, 8]               0
          Conv2d-499           [-1, 2048, 8, 8]       1,048,576
     BatchNorm2d-500           [-1, 2048, 8, 8]           4,096
            ReLU-501           [-1, 2048, 8, 8]               0
      Bottleneck-502           [-1, 2048, 8, 8]               0
          Conv2d-503            [-1, 512, 8, 8]       1,048,576
     BatchNorm2d-504            [-1, 512, 8, 8]           1,024
            ReLU-505            [-1, 512, 8, 8]               0
          Conv2d-506            [-1, 512, 8, 8]       2,359,296
     BatchNorm2d-507            [-1, 512, 8, 8]           1,024
            ReLU-508            [-1, 512, 8, 8]               0
          Conv2d-509           [-1, 2048, 8, 8]       1,048,576
     BatchNorm2d-510           [-1, 2048, 8, 8]           4,096
            ReLU-511           [-1, 2048, 8, 8]               0
      Bottleneck-512           [-1, 2048, 8, 8]               0
AdaptiveAvgPool2d-513           [-1, 2048, 1, 1]               0
          Linear-514                    [-1, 3]           6,147
================================================================
Total params: 58,149,955
Trainable params: 58,149,955
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.75
Forward/backward pass size (MB): 792.27
Params size (MB): 221.82
Estimated Total Size (MB): 1014.84
----------------------------------------------------------------

Defining the loss function and optimizer

loss_func = nn.CrossEntropyLoss(reduction='sum')
opt = optim.Adam(base_model.parameters(), lr=0.001)

Defining a function that returns the current learning rate

# get learning rate
def get_lr(opt):
    for param_group in opt.param_groups:
        return param_group['lr']

Test

current_lr = get_lr(opt)
print('current lr={}'.format(current_lr))
current lr=0.001

Creating the learning rate scheduler

# define learning rate scheduler
lr_scheduler = ReduceLROnPlateau(opt, mode='min', factor=0.1, patience=10, verbose=1)

Test

for i in range(100):
    lr_scheduler.step(1)
Epoch    12: reducing learning rate of group 0 to 1.0000e-04.
Epoch    23: reducing learning rate of group 0 to 1.0000e-05.
Epoch    34: reducing learning rate of group 0 to 1.0000e-06.
Epoch    45: reducing learning rate of group 0 to 1.0000e-07.
Epoch    56: reducing learning rate of group 0 to 1.0000e-08.
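Because step(1) above was actually applied, the optimizer is left at lr=1e-08 after this test. A minimal sketch of restoring the training setup (the sanity check below starts from lr=0.001 again):

# re-create the optimizer and scheduler so training starts at lr=0.001
opt = optim.Adam(base_model.parameters(), lr=0.001)
lr_scheduler = ReduceLROnPlateau(opt, mode='min', factor=0.1, patience=10, verbose=1)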

Defining functions to compute the loss and metric per mini-batch

# function to calculate metric per mini-batch
def metrics_batch(output, target):
    # get output class
    pred = output.argmax(dim=1, keepdim=True)
    # compare output class with target class
    corrects = pred.eq(target.view_as(pred)).sum().item()
    return corrects
# function to calculate loss per mini-batch
def loss_batch(loss_func, output, target, opt=None):
    loss = loss_func(output, target)
    with torch.no_grad():
        metric_b = metrics_batch(output, target)
    if opt is not None:
        opt.zero_grad()
        loss.backward()
        opt.step()
    return loss.item(), metric_b
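A tiny worked check of metrics_batch with hypothetical tensors (3 samples, 3 classes; rows 0 and 1 are classified correctly, row 2 is not):

output = torch.tensor([[2.0, 0.1, 0.3], [0.2, 1.5, 0.1], [0.9, 0.2, 0.4]])
target = torch.tensor([0, 1, 2])
print(metrics_batch(output, target))  # 2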

Defining a function to compute the loss and metric per epoch. Since the loss function above uses reduction='sum', dividing the accumulated loss by the dataset length yields the average per-sample loss.

# function to calculate loss and metric per epoch
def loss_epoch(model, loss_func, dataset_dl, sanity_check=False, opt=None):
    running_loss = 0.0
    running_metric = 0.0
    len_data = len(dataset_dl.dataset)
    
    for xb, yb in dataset_dl:
        # move batch to device
        xb = xb.to(device)
        yb = yb.to(device)
        # get model output
        output = model(xb)
        # get loss per batch
        loss_b, metric_b = loss_batch(loss_func, output, yb, opt)
        
        # update running loss
        running_loss += loss_b
        # update running metric
        if metric_b is not None:
            running_metric += metric_b
            
        # break the loop in case of sanity check
        if sanity_check is True:
            break
    
    # average loss value
    loss = running_loss/float(len_data)
    # average metric value
    metric = running_metric/float(len_data)
    return loss, metric
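A minimal usage sketch, assuming the val_dl DataLoader and device defined earlier in the notebook:

# evaluate once on the validation set without tracking gradients
base_model.eval()
with torch.no_grad():
    val_loss, val_metric = loss_epoch(base_model, loss_func, val_dl)
print('val loss: %.6f, accuracy: %.2f%%' % (val_loss, 100 * val_metric))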

Defining the training function

train_val runs the train/validation loop for num_epochs, keeps a deep copy of the weights with the best validation loss (also saving them to path2weights), and reloads those best weights whenever the scheduler reduces the learning rate.

# function to start training
def train_val(model, params):
    # extract model parameters
    num_epochs = params['num_epochs']
    loss_func = params['loss_func']
    opt = params['optimizer']
    train_dl = params['train_dl']
    val_dl = params['val_dl']
    sanity_check = params['sanity_check']
    lr_scheduler = params['lr_scheduler']
    path2weights = params['path2weights']
    
    # history of loss values in each epoch
    loss_history = {
        'train': [],
        'val': []
    }
    # history of metric values in each epoch
    metric_history = {
        'train': [],
        'val': []
    }
    
    # a deep copy of weights for the best performing model
    best_model_wts = copy.deepcopy(model.state_dict())
    
    # initialize the best loss to an infinite value
    best_loss = float('inf')
    
    # define a loop that will calculate the training loss over an epoch
    for epoch in range(num_epochs):
        # get current learning rate
        current_lr = get_lr(opt)
        print('Epoch {}/{}, current lr={}'.format(epoch, num_epochs - 1, current_lr))
        
        # train_model on training dataset
        model.train()
        train_loss, train_metric = loss_epoch(model, loss_func, train_dl, sanity_check, opt)
        
        # collect loss and metric for training dataset
        loss_history['train'].append(train_loss)
        metric_history['train'].append(train_metric)
    
        # evaluate the model on the validation dataset
        model.eval()
        with torch.no_grad():
            val_loss, val_metric = loss_epoch(model, loss_func, val_dl, sanity_check)
        
        # collect loss and metric for validation dataset
        loss_history['val'].append(val_loss)
        metric_history['val'].append(val_metric)
        
        # store best model
        if val_loss < best_loss:
            best_loss = val_loss
            best_model_wts = copy.deepcopy(model.state_dict())
            # store weights into a local file
            torch.save(model.state_dict(), path2weights)
            print('Copied best model weights!')
        
        # update the learning rate if needed
        # learning rate schedule
        lr_scheduler.step(val_loss)
        if current_lr != get_lr(opt):
            print('Loading best model weights!')
            model.load_state_dict(best_model_wts)
        
        # print the loss and accuracy values and return the trained model
        print('train loss: %.6f, dev loss: %.6f, accuracy: %.2f' %(train_loss, val_loss, 100 * val_metric))
        print('-' * 10)
    
    # load best model weights
    model.load_state_dict(best_model_wts)
    return model, loss_history, metric_history
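The returned histories record one loss/metric value per epoch, so convergence can be inspected directly. A quick plotting sketch using the matplotlib import from the top of the notebook, once train_val has been run as in the sanity check below:

# plot train/val loss per epoch from the history returned by train_val
plt.plot(loss_hist['train'], label='train')
plt.plot(loss_hist['val'], label='val')
plt.xlabel('epoch')
plt.ylabel('loss')
plt.legend()
plt.show()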

Sanity Check

Note that with sanity_check=True, loss_epoch breaks after the first mini-batch but still divides by the full dataset length, so the losses and accuracies below are scaled down by roughly the number of batches; this run only verifies that the pipeline executes end to end, not real performance.

# define parameters
params_train = {
    'num_epochs': 100,
    'optimizer': opt,
    'loss_func': loss_func,
    'train_dl': train_dl,
    'val_dl': val_dl,
    'sanity_check': True,
    'lr_scheduler': lr_scheduler,
    'path2weights': './models/weights.pt'
}
# train and validate the model
cnn_model, loss_hist, metric_hist = train_val(base_model, params_train)
Epoch 0/99, current lr=0.001
Copied best model weights!
train loss: 0.038285, dev loss: 0.535744, accuracy: 4.40
----------
Epoch 1/99, current lr=0.001
train loss: 0.058171, dev loss: 0.603408, accuracy: 4.62
----------
Epoch 2/99, current lr=0.001
Copied best model weights!
train loss: 0.156985, dev loss: 0.378114, accuracy: 5.49
----------
Epoch 3/99, current lr=0.001
Copied best model weights!
train loss: 0.068155, dev loss: 0.290053, accuracy: 4.18
----------
Epoch 4/99, current lr=0.001
Copied best model weights!
train loss: 0.042587, dev loss: 0.249838, accuracy: 5.49
----------
Epoch 5/99, current lr=0.001
Copied best model weights!
train loss: 0.033151, dev loss: 0.221175, accuracy: 4.84
----------
Epoch 6/99, current lr=0.001
Copied best model weights!
train loss: 0.070909, dev loss: 0.173743, accuracy: 4.18
----------
Epoch 7/99, current lr=0.001
train loss: 0.074510, dev loss: 0.180010, accuracy: 4.84
----------
Epoch 8/99, current lr=0.001
train loss: 0.038432, dev loss: 0.195137, accuracy: 5.49
----------
Epoch 9/99, current lr=0.001
train loss: 0.035852, dev loss: 0.208255, accuracy: 5.05
----------
Epoch 10/99, current lr=0.001
train loss: 0.031206, dev loss: 0.222006, accuracy: 4.18
----------
Epoch 11/99, current lr=0.001
Copied best model weights!
train loss: 0.033303, dev loss: 0.170638, accuracy: 5.49
----------
Epoch 12/99, current lr=0.001
Copied best model weights!
train loss: 0.030360, dev loss: 0.169874, accuracy: 5.71
----------
Epoch 13/99, current lr=0.001
train loss: 0.039237, dev loss: 0.173054, accuracy: 5.27
----------
Epoch 14/99, current lr=0.001
train loss: 0.028433, dev loss: 0.186821, accuracy: 5.27
----------
Epoch 15/99, current lr=0.001
train loss: 0.026914, dev loss: 0.259674, accuracy: 5.05
----------
Epoch 16/99, current lr=0.001
train loss: 0.039021, dev loss: 0.263071, accuracy: 3.30
----------
Epoch 17/99, current lr=0.001
train loss: 0.022763, dev loss: 0.381885, accuracy: 3.74
----------
Epoch 18/99, current lr=0.001
train loss: 0.027691, dev loss: 0.318268, accuracy: 4.40
----------
Epoch 19/99, current lr=0.001
train loss: 0.026593, dev loss: 0.205193, accuracy: 6.81
----------
Epoch 20/99, current lr=0.001
train loss: 0.030593, dev loss: 0.206823, accuracy: 5.93
----------
Epoch 21/99, current lr=0.001
Copied best model weights!
train loss: 0.026307, dev loss: 0.165531, accuracy: 7.69
----------
Epoch 22/99, current lr=0.001
train loss: 0.022024, dev loss: 0.191377, accuracy: 7.25
----------
Epoch 23/99, current lr=0.001
train loss: 0.023931, dev loss: 0.181972, accuracy: 6.15
----------
Epoch 24/99, current lr=0.001
train loss: 0.022137, dev loss: 0.227168, accuracy: 5.93
----------
Epoch 25/99, current lr=0.001
train loss: 0.020255, dev loss: 0.231113, accuracy: 6.15
----------
Epoch 26/99, current lr=0.001
train loss: 0.021998, dev loss: 0.234696, accuracy: 5.27
----------
Epoch 27/99, current lr=0.001
train loss: 0.026090, dev loss: 0.260081, accuracy: 5.05
----------
Epoch 28/99, current lr=0.001
train loss: 0.020479, dev loss: 0.207592, accuracy: 6.37
----------
Epoch 29/99, current lr=0.001
train loss: 0.021636, dev loss: 0.237372, accuracy: 6.37
----------
Epoch 30/99, current lr=0.001
train loss: 0.021528, dev loss: 0.234974, accuracy: 5.93
----------
Epoch 31/99, current lr=0.001
train loss: 0.015733, dev loss: 0.288375, accuracy: 7.03
----------
Epoch 32/99, current lr=0.001
Epoch    33: reducing learning rate of group 0 to 1.0000e-04.
Loading best model weights!
train loss: 0.023228, dev loss: 0.196872, accuracy: 7.47
----------
Epoch 33/99, current lr=0.0001
train loss: 0.023593, dev loss: 0.205578, accuracy: 5.05
----------
Epoch 34/99, current lr=0.0001
train loss: 0.025515, dev loss: 0.186584, accuracy: 6.37
----------
Epoch 35/99, current lr=0.0001
train loss: 0.027581, dev loss: 0.177441, accuracy: 6.15
----------
Epoch 36/99, current lr=0.0001
train loss: 0.021844, dev loss: 0.166960, accuracy: 7.03
----------
Epoch 37/99, current lr=0.0001
train loss: 0.023319, dev loss: 0.174608, accuracy: 6.59
----------
Epoch 38/99, current lr=0.0001
Copied best model weights!
train loss: 0.024916, dev loss: 0.148145, accuracy: 6.81
----------
Epoch 39/99, current lr=0.0001
train loss: 0.022475, dev loss: 0.155646, accuracy: 7.25
----------
Epoch 40/99, current lr=0.0001
train loss: 0.019483, dev loss: 0.155375, accuracy: 9.01
----------
Epoch 41/99, current lr=0.0001
Copied best model weights!
train loss: 0.020123, dev loss: 0.117868, accuracy: 9.01
----------
Epoch 42/99, current lr=0.0001
train loss: 0.024514, dev loss: 0.129248, accuracy: 9.01
----------
Epoch 43/99, current lr=0.0001
train loss: 0.019085, dev loss: 0.130672, accuracy: 8.79
----------
Epoch 44/99, current lr=0.0001
train loss: 0.024114, dev loss: 0.124281, accuracy: 8.35
----------
Epoch 45/99, current lr=0.0001
train loss: 0.020937, dev loss: 0.117870, accuracy: 8.57
----------
Epoch 46/99, current lr=0.0001
Copied best model weights!
train loss: 0.020020, dev loss: 0.113654, accuracy: 9.01
----------
Epoch 47/99, current lr=0.0001
Copied best model weights!
train loss: 0.018060, dev loss: 0.092398, accuracy: 9.01
----------
Epoch 48/99, current lr=0.0001
train loss: 0.016410, dev loss: 0.102796, accuracy: 9.01
----------
Epoch 49/99, current lr=0.0001
train loss: 0.017570, dev loss: 0.115363, accuracy: 8.35
----------
Epoch 50/99, current lr=0.0001
train loss: 0.017828, dev loss: 0.103880, accuracy: 9.01
----------
Epoch 51/99, current lr=0.0001
train loss: 0.018108, dev loss: 0.098669, accuracy: 9.67
----------
Epoch 52/99, current lr=0.0001
train loss: 0.015096, dev loss: 0.111911, accuracy: 9.23
----------
Epoch 53/99, current lr=0.0001
train loss: 0.012213, dev loss: 0.116494, accuracy: 9.01
----------
Epoch 54/99, current lr=0.0001
train loss: 0.020667, dev loss: 0.102831, accuracy: 9.23
----------
Epoch 55/99, current lr=0.0001
train loss: 0.010766, dev loss: 0.100796, accuracy: 9.45
----------
Epoch 56/99, current lr=0.0001
train loss: 0.016036, dev loss: 0.106449, accuracy: 9.45
----------
Epoch 57/99, current lr=0.0001
Copied best model weights!
train loss: 0.012234, dev loss: 0.089830, accuracy: 10.77
----------
Epoch 58/99, current lr=0.0001
Copied best model weights!
train loss: 0.017174, dev loss: 0.071427, accuracy: 11.65
----------
Epoch 59/99, current lr=0.0001
train loss: 0.015056, dev loss: 0.089822, accuracy: 10.99
----------
Epoch 60/99, current lr=0.0001
train loss: 0.012007, dev loss: 0.093106, accuracy: 10.33
----------
Epoch 61/99, current lr=0.0001
train loss: 0.013091, dev loss: 0.101184, accuracy: 9.45
----------
Epoch 62/99, current lr=0.0001
train loss: 0.015485, dev loss: 0.090287, accuracy: 10.33
----------
Epoch 63/99, current lr=0.0001
train loss: 0.014864, dev loss: 0.110771, accuracy: 9.23
----------
Epoch 64/99, current lr=0.0001
train loss: 0.021146, dev loss: 0.099709, accuracy: 10.11
----------
Epoch 65/99, current lr=0.0001
train loss: 0.010261, dev loss: 0.092079, accuracy: 10.99
----------
Epoch 66/99, current lr=0.0001
train loss: 0.020413, dev loss: 0.087317, accuracy: 10.33
----------
Epoch 67/99, current lr=0.0001
train loss: 0.015217, dev loss: 0.092870, accuracy: 9.89
----------
Epoch 68/99, current lr=0.0001
Copied best model weights!
train loss: 0.013696, dev loss: 0.054639, accuracy: 12.09
----------
Epoch 69/99, current lr=0.0001
train loss: 0.014132, dev loss: 0.066439, accuracy: 11.87
----------
Epoch 70/99, current lr=0.0001
train loss: 0.016805, dev loss: 0.068510, accuracy: 11.43
----------
Epoch 71/99, current lr=0.0001
train loss: 0.016143, dev loss: 0.055108, accuracy: 12.31
----------
Epoch 72/99, current lr=0.0001
train loss: 0.015156, dev loss: 0.060666, accuracy: 11.65
----------
Epoch 73/99, current lr=0.0001
train loss: 0.011835, dev loss: 0.062364, accuracy: 10.77
----------
Epoch 74/99, current lr=0.0001
train loss: 0.012554, dev loss: 0.066319, accuracy: 11.43
----------
Epoch 75/99, current lr=0.0001
Copied best model weights!
train loss: 0.013126, dev loss: 0.048302, accuracy: 12.53
----------
Epoch 76/99, current lr=0.0001
train loss: 0.010819, dev loss: 0.070582, accuracy: 10.77
----------
Epoch 77/99, current lr=0.0001
train loss: 0.012550, dev loss: 0.061255, accuracy: 11.21
----------
Epoch 78/99, current lr=0.0001
train loss: 0.009415, dev loss: 0.061891, accuracy: 11.87
----------
Epoch 79/99, current lr=0.0001
Copied best model weights!
train loss: 0.013712, dev loss: 0.045361, accuracy: 12.97
----------
Epoch 80/99, current lr=0.0001
Copied best model weights!
train loss: 0.012509, dev loss: 0.044886, accuracy: 12.09
----------
Epoch 81/99, current lr=0.0001
Copied best model weights!
train loss: 0.013161, dev loss: 0.036875, accuracy: 12.53
----------
Epoch 82/99, current lr=0.0001
train loss: 0.015139, dev loss: 0.043923, accuracy: 12.09
----------
Epoch 83/99, current lr=0.0001
train loss: 0.012107, dev loss: 0.084444, accuracy: 10.33
----------
Epoch 84/99, current lr=0.0001
train loss: 0.009243, dev loss: 0.060064, accuracy: 11.21
----------
Epoch 85/99, current lr=0.0001
train loss: 0.013419, dev loss: 0.053784, accuracy: 11.43
----------
Epoch 86/99, current lr=0.0001
train loss: 0.019480, dev loss: 0.072486, accuracy: 12.31
----------
Epoch 87/99, current lr=0.0001
train loss: 0.010694, dev loss: 0.038685, accuracy: 12.09
----------
Epoch 88/99, current lr=0.0001
Copied best model weights!
train loss: 0.011376, dev loss: 0.035829, accuracy: 12.53
----------
Epoch 89/99, current lr=0.0001
train loss: 0.014024, dev loss: 0.069788, accuracy: 10.99
----------
Epoch 90/99, current lr=0.0001
train loss: 0.016960, dev loss: 0.054966, accuracy: 11.87
----------
Epoch 91/99, current lr=0.0001
train loss: 0.010635, dev loss: 0.055664, accuracy: 11.87
----------
Epoch 92/99, current lr=0.0001
train loss: 0.006357, dev loss: 0.056445, accuracy: 11.87
----------
Epoch 93/99, current lr=0.0001
train loss: 0.009935, dev loss: 0.064379, accuracy: 11.21
----------
Epoch 94/99, current lr=0.0001
train loss: 0.013995, dev loss: 0.045181, accuracy: 12.31
----------
Epoch 95/99, current lr=0.0001
train loss: 0.007741, dev loss: 0.047700, accuracy: 12.09
----------
Epoch 96/99, current lr=0.0001
train loss: 0.018070, dev loss: 0.078851, accuracy: 11.65
----------
Epoch 97/99, current lr=0.0001
train loss: 0.008296, dev loss: 0.079951, accuracy: 11.21
----------
Epoch 98/99, current lr=0.0001
train loss: 0.009320, dev loss: 0.053806, accuracy: 12.09
----------
Epoch 99/99, current lr=0.0001
Epoch   100: reducing learning rate of group 0 to 1.0000e-05.
Loading best model weights!
train loss: 0.012811, dev loss: 0.061093, accuracy: 11.87
----------
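The numbers above come from a sanity-check run: the sanity_check flag in the train_val helper defined earlier presumably cuts each epoch short after a single batch, which would explain why the reported accuracy stays in the single digits (only a handful of dev images are ever scored) while the losses look deceptively small. A minimal sketch of that pattern, where loss_batch is a hypothetical stand-in for the real per-batch step inside train_val:

# sketch of a sanity_check short-circuit inside an epoch loop;
# loss_batch is a hypothetical stand-in for train_val's actual per-batch step
for xb, yb in train_dl:
    loss, metric = loss_batch(loss_func, model(xb.to(device)), yb.to(device), opt)
    if sanity_check:
        break  # stop after one batch per epoch: verifies the pipeline, not the model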

Actual Training with the Sanity Check Turned Off

loss_func = nn.CrossEntropyLoss(reduction='sum')
opt = optim.Adam(base_model.parameters(), lr=0.001)
lr_scheduler = ReduceLROnPlateau(opt, mode='min', factor=0.1, patience=10, verbose=1)
# define parameters
params_train = {
    'num_epochs': 100,
    'optimizer': opt,
    'loss_func': loss_func,
    'train_dl': train_dl,
    'val_dl': val_dl,
    'sanity_check': False,
    'lr_scheduler': lr_scheduler,
    'path2weights': './models/weights.pt'
}
# train and validate the model
cnn_model, loss_hist, metric_hist = train_val(base_model, params_train)
Epoch 0/99, current lr=0.001
Copied best model weights!
train loss: 1.063725, dev loss: 50.934386, accuracy: 35.60
----------
Epoch 1/99, current lr=0.001
Copied best model weights!
train loss: 0.648684, dev loss: 1.248598, accuracy: 57.14
----------
Epoch 2/99, current lr=0.001
train loss: 0.523237, dev loss: 2.914959, accuracy: 40.88
----------
Epoch 3/99, current lr=0.001
Copied best model weights!
train loss: 0.490257, dev loss: 1.176852, accuracy: 65.49
----------
Epoch 4/99, current lr=0.001
Copied best model weights!
train loss: 0.327827, dev loss: 0.751535, accuracy: 80.22
----------
Epoch 5/99, current lr=0.001
Copied best model weights!
train loss: 0.137471, dev loss: 0.217770, accuracy: 92.97
----------
Epoch 6/99, current lr=0.001
train loss: 0.115594, dev loss: 1.109533, accuracy: 65.93
----------
Epoch 7/99, current lr=0.001
train loss: 0.072951, dev loss: 0.777101, accuracy: 74.51
----------
Epoch 8/99, current lr=0.001
train loss: 0.073822, dev loss: 2.600806, accuracy: 67.25
----------
Epoch 9/99, current lr=0.001
train loss: 0.098573, dev loss: 2.829926, accuracy: 62.42
----------
Epoch 10/99, current lr=0.001
train loss: 0.139809, dev loss: 0.652236, accuracy: 80.88
----------
Epoch 11/99, current lr=0.001
Copied best model weights!
train loss: 0.045700, dev loss: 0.177329, accuracy: 96.48
----------
Epoch 12/99, current lr=0.001
train loss: 0.041638, dev loss: 1.182801, accuracy: 63.30
----------
Epoch 13/99, current lr=0.001
Copied best model weights!
train loss: 0.025651, dev loss: 0.066901, accuracy: 97.14
----------
Epoch 14/99, current lr=0.001
train loss: 0.031757, dev loss: 0.477224, accuracy: 93.85
----------
Epoch 15/99, current lr=0.001
train loss: 0.083838, dev loss: 0.619970, accuracy: 79.56
----------
Epoch 16/99, current lr=0.001
train loss: 0.061330, dev loss: 0.112186, accuracy: 97.58
----------
Epoch 17/99, current lr=0.001
Copied best model weights!
train loss: 0.039040, dev loss: 0.056857, accuracy: 99.12
----------
Epoch 18/99, current lr=0.001
train loss: 0.019259, dev loss: 0.142087, accuracy: 95.82
----------
Epoch 19/99, current lr=0.001
Copied best model weights!
train loss: 0.006945, dev loss: 0.022868, accuracy: 99.34
----------
Epoch 20/99, current lr=0.001
train loss: 0.016362, dev loss: 0.035986, accuracy: 98.68
----------
Epoch 21/99, current lr=0.001
train loss: 0.019238, dev loss: 0.116251, accuracy: 96.48
----------
Epoch 22/99, current lr=0.001
train loss: 0.018869, dev loss: 0.047536, accuracy: 98.90
----------
Epoch 23/99, current lr=0.001
train loss: 0.015293, dev loss: 0.059683, accuracy: 98.24
----------
Epoch 24/99, current lr=0.001
train loss: 0.006489, dev loss: 0.094880, accuracy: 98.46
----------
Epoch 25/99, current lr=0.001
train loss: 0.017233, dev loss: 0.138154, accuracy: 94.95
----------
Epoch 26/99, current lr=0.001
train loss: 0.019401, dev loss: 0.174015, accuracy: 96.92
----------
Epoch 27/99, current lr=0.001
train loss: 0.021397, dev loss: 0.079164, accuracy: 98.24
----------
Epoch 28/99, current lr=0.001
train loss: 0.032176, dev loss: 1.668480, accuracy: 69.01
----------
Epoch 29/99, current lr=0.001
train loss: 0.025031, dev loss: 0.089971, accuracy: 98.24
----------
Epoch 30/99, current lr=0.001
Epoch    31: reducing learning rate of group 0 to 1.0000e-04.
Loading best model weights!
train loss: 0.036581, dev loss: 0.513292, accuracy: 91.21
----------
Epoch 31/99, current lr=0.0001
train loss: 0.004847, dev loss: 0.030376, accuracy: 99.12
----------
Epoch 32/99, current lr=0.0001
train loss: 0.003592, dev loss: 0.032560, accuracy: 99.12
----------
Epoch 33/99, current lr=0.0001
train loss: 0.001799, dev loss: 0.031518, accuracy: 99.12
----------
Epoch 34/99, current lr=0.0001
Copied best model weights!
train loss: 0.000909, dev loss: 0.022572, accuracy: 99.12
----------
Epoch 35/99, current lr=0.0001
train loss: 0.000814, dev loss: 0.025076, accuracy: 99.12
----------
Epoch 36/99, current lr=0.0001
Copied best model weights!
train loss: 0.001457, dev loss: 0.020085, accuracy: 99.34
----------
Epoch 37/99, current lr=0.0001
train loss: 0.001352, dev loss: 0.021613, accuracy: 99.34
----------
Epoch 38/99, current lr=0.0001
train loss: 0.000732, dev loss: 0.029947, accuracy: 99.12
----------
Epoch 39/99, current lr=0.0001
train loss: 0.000704, dev loss: 0.030590, accuracy: 99.12
----------
Epoch 40/99, current lr=0.0001
train loss: 0.001058, dev loss: 0.029086, accuracy: 99.12
----------
Epoch 41/99, current lr=0.0001
train loss: 0.000976, dev loss: 0.030713, accuracy: 99.12
----------
Epoch 42/99, current lr=0.0001
train loss: 0.000788, dev loss: 0.026295, accuracy: 99.34
----------
Epoch 43/99, current lr=0.0001
train loss: 0.001429, dev loss: 0.024838, accuracy: 99.12
----------
Epoch 44/99, current lr=0.0001
train loss: 0.000737, dev loss: 0.028411, accuracy: 99.12
----------
Epoch 45/99, current lr=0.0001
train loss: 0.001406, dev loss: 0.029470, accuracy: 99.12
----------
Epoch 46/99, current lr=0.0001
train loss: 0.000755, dev loss: 0.021801, accuracy: 99.34
----------
Epoch 47/99, current lr=0.0001
Epoch    48: reducing learning rate of group 0 to 1.0000e-05.
Loading best model weights!
train loss: 0.000551, dev loss: 0.027326, accuracy: 99.34
----------
Epoch 48/99, current lr=1e-05
train loss: 0.001068, dev loss: 0.027265, accuracy: 99.12
----------
Epoch 49/99, current lr=1e-05
train loss: 0.001054, dev loss: 0.027513, accuracy: 99.12
----------
Epoch 50/99, current lr=1e-05
train loss: 0.001088, dev loss: 0.026932, accuracy: 99.12
----------
Epoch 51/99, current lr=1e-05
train loss: 0.001033, dev loss: 0.022976, accuracy: 99.12
----------
Epoch 52/99, current lr=1e-05
train loss: 0.000461, dev loss: 0.024057, accuracy: 99.12
----------
Epoch 53/99, current lr=1e-05
train loss: 0.000880, dev loss: 0.023172, accuracy: 99.12
----------
Epoch 54/99, current lr=1e-05
train loss: 0.000928, dev loss: 0.025957, accuracy: 99.12
----------
Epoch 55/99, current lr=1e-05
train loss: 0.000870, dev loss: 0.028422, accuracy: 99.12
----------
Epoch 56/99, current lr=1e-05
train loss: 0.001074, dev loss: 0.029834, accuracy: 99.12
----------
Epoch 57/99, current lr=1e-05
train loss: 0.001241, dev loss: 0.027780, accuracy: 99.12
----------
Epoch 58/99, current lr=1e-05
Epoch    59: reducing learning rate of group 0 to 1.0000e-06.
Loading best model weights!
train loss: 0.000882, dev loss: 0.027985, accuracy: 99.12
----------
Epoch 59/99, current lr=1.0000000000000002e-06
train loss: 0.001308, dev loss: 0.023412, accuracy: 99.12
----------
Epoch 60/99, current lr=1.0000000000000002e-06
train loss: 0.001230, dev loss: 0.024225, accuracy: 99.12
----------
Epoch 61/99, current lr=1.0000000000000002e-06
train loss: 0.000580, dev loss: 0.026208, accuracy: 99.12
----------
Epoch 62/99, current lr=1.0000000000000002e-06
train loss: 0.000863, dev loss: 0.021885, accuracy: 99.34
----------
Epoch 63/99, current lr=1.0000000000000002e-06
train loss: 0.000766, dev loss: 0.027367, accuracy: 99.12
----------
Epoch 64/99, current lr=1.0000000000000002e-06
train loss: 0.001665, dev loss: 0.027444, accuracy: 99.12
----------
Epoch 65/99, current lr=1.0000000000000002e-06
train loss: 0.001356, dev loss: 0.024005, accuracy: 99.12
----------
Epoch 66/99, current lr=1.0000000000000002e-06
train loss: 0.000889, dev loss: 0.020607, accuracy: 99.12
----------
Epoch 67/99, current lr=1.0000000000000002e-06
train loss: 0.000783, dev loss: 0.024565, accuracy: 99.12
----------
Epoch 68/99, current lr=1.0000000000000002e-06
train loss: 0.001142, dev loss: 0.021362, accuracy: 99.12
----------
Epoch 69/99, current lr=1.0000000000000002e-06
Epoch    70: reducing learning rate of group 0 to 1.0000e-07.
Loading best model weights!
train loss: 0.000967, dev loss: 0.021639, accuracy: 99.12
----------
Epoch 70/99, current lr=1.0000000000000002e-07
train loss: 0.001739, dev loss: 0.024227, accuracy: 99.12
----------
Epoch 71/99, current lr=1.0000000000000002e-07
train loss: 0.001108, dev loss: 0.024610, accuracy: 99.12
----------
Epoch 72/99, current lr=1.0000000000000002e-07
train loss: 0.001682, dev loss: 0.022040, accuracy: 99.34
----------
Epoch 73/99, current lr=1.0000000000000002e-07
train loss: 0.000924, dev loss: 0.025615, accuracy: 99.12
----------
Epoch 74/99, current lr=1.0000000000000002e-07
train loss: 0.000731, dev loss: 0.027892, accuracy: 99.12
----------
Epoch 75/99, current lr=1.0000000000000002e-07
train loss: 0.000984, dev loss: 0.025725, accuracy: 99.12
----------
Epoch 76/99, current lr=1.0000000000000002e-07
train loss: 0.000600, dev loss: 0.022158, accuracy: 99.12
----------
Epoch 77/99, current lr=1.0000000000000002e-07
train loss: 0.000869, dev loss: 0.026473, accuracy: 99.12
----------
Epoch 78/99, current lr=1.0000000000000002e-07
train loss: 0.000969, dev loss: 0.023402, accuracy: 99.12
----------
Epoch 79/99, current lr=1.0000000000000002e-07
train loss: 0.000612, dev loss: 0.021823, accuracy: 99.12
----------
Epoch 80/99, current lr=1.0000000000000002e-07
Epoch    81: reducing learning rate of group 0 to 1.0000e-08.
Loading best model weights!
train loss: 0.001039, dev loss: 0.026091, accuracy: 99.12
----------
Epoch 81/99, current lr=1.0000000000000004e-08
train loss: 0.000693, dev loss: 0.024535, accuracy: 99.12
----------
Epoch 82/99, current lr=1.0000000000000004e-08
train loss: 0.000806, dev loss: 0.025269, accuracy: 99.12
----------
Epoch 83/99, current lr=1.0000000000000004e-08
train loss: 0.001845, dev loss: 0.027764, accuracy: 99.12
----------
Epoch 84/99, current lr=1.0000000000000004e-08
train loss: 0.000847, dev loss: 0.022448, accuracy: 99.12
----------
Epoch 85/99, current lr=1.0000000000000004e-08
train loss: 0.001023, dev loss: 0.024722, accuracy: 99.12
----------
Epoch 86/99, current lr=1.0000000000000004e-08
train loss: 0.000798, dev loss: 0.024166, accuracy: 99.12
----------
Epoch 87/99, current lr=1.0000000000000004e-08
train loss: 0.000687, dev loss: 0.025535, accuracy: 99.12
----------
Epoch 88/99, current lr=1.0000000000000004e-08
train loss: 0.000808, dev loss: 0.025783, accuracy: 99.12
----------
Epoch 89/99, current lr=1.0000000000000004e-08
train loss: 0.001142, dev loss: 0.023205, accuracy: 99.12
----------
Epoch 90/99, current lr=1.0000000000000004e-08
train loss: 0.000685, dev loss: 0.024432, accuracy: 99.12
----------
Epoch 91/99, current lr=1.0000000000000004e-08
train loss: 0.000557, dev loss: 0.024314, accuracy: 99.12
----------
Epoch 92/99, current lr=1.0000000000000004e-08
train loss: 0.000695, dev loss: 0.027475, accuracy: 99.12
----------
Epoch 93/99, current lr=1.0000000000000004e-08
train loss: 0.001205, dev loss: 0.023824, accuracy: 99.12
----------
Epoch 94/99, current lr=1.0000000000000004e-08
train loss: 0.001091, dev loss: 0.023968, accuracy: 99.12
----------
Epoch 95/99, current lr=1.0000000000000004e-08
train loss: 0.001040, dev loss: 0.023387, accuracy: 99.34
----------
Epoch 96/99, current lr=1.0000000000000004e-08
train loss: 0.001033, dev loss: 0.024697, accuracy: 99.12
----------
Epoch 97/99, current lr=1.0000000000000004e-08
train loss: 0.001517, dev loss: 0.020699, accuracy: 99.12
----------
Epoch 98/99, current lr=1.0000000000000004e-08
train loss: 0.001358, dev loss: 0.020788, accuracy: 99.12
----------
Epoch 99/99, current lr=1.0000000000000004e-08
train loss: 0.001281, dev loss: 0.020703, accuracy: 99.12
----------
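Training converges to roughly 99% dev accuracy, with ReduceLROnPlateau stepping the learning rate down (and the best weights being reloaded) whenever the dev loss stalls for 10 epochs. The histories returned by train_val can be plotted to inspect the curves; a small sketch, assuming loss_hist and metric_hist are dicts of per-epoch lists keyed 'train' and 'val':

# sketch: plot the loss/accuracy histories returned by train_val
# (assumes both are dicts of per-epoch lists keyed 'train' and 'val')
epochs = range(1, params_train['num_epochs'] + 1)
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 4))
ax1.plot(epochs, loss_hist['train'], label='train')
ax1.plot(epochs, loss_hist['val'], label='dev')
ax1.set_xlabel('epoch')
ax1.set_ylabel('loss')
ax1.legend()
ax2.plot(epochs, metric_hist['train'], label='train')
ax2.plot(epochs, metric_hist['val'], label='dev')
ax2.set_xlabel('epoch')
ax2.set_ylabel('accuracy')
ax2.legend()
plt.show()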

Model Deployment

resnet_model = torchvision.models.resnet152(num_classes=3)
# load the saved state_dict
path2weights = './models/weights.pt'
resnet_model.load_state_dict(torch.load(path2weights))
<All keys matched successfully>
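If the weights later need to be loaded on a machine without a GPU, torch.load takes a map_location argument that remaps CUDA tensors onto the CPU:

# load GPU-trained weights on a CPU-only machine
state_dict = torch.load(path2weights, map_location=torch.device('cpu'))
resnet_model.load_state_dict(state_dict)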
# switch the model to eval mode (fixes batch-norm statistics and disables dropout)
resnet_model.eval()
ResNet(
  (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (relu): ReLU(inplace=True)
  (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
  (layer1): Sequential(...)  # 3 Bottleneck blocks, 64 -> 256 channels
  (layer2): Sequential(...)  # 8 Bottleneck blocks, 256 -> 512 channels
  (layer3): Sequential(...)  # 36 Bottleneck blocks, 512 -> 1024 channels
  (layer4): Sequential(...)  # 3 Bottleneck blocks, 1024 -> 2048 channels
  (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))
  (fc): Linear(in_features=2048, out_features=3, bias=True)
)
# move the model to the device (fall back to CPU so that `device` is always defined)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
resnet_model = resnet_model.to(device)
# define the deploy model function
def deploy_model(model, dataset, device, num_classes=3, sanity_check=False):
    len_data = len(dataset)
    # initialize output tensor on CPU: due to GPU memory limits
    y_out = torch.zeros(len_data, num_classes)
    # initialize ground truth on CPU: due to GPU memory limits
    y_gt = np.zeros((len_data), dtype='uint8')
    # move model to device
    model = model.to(device)
    
    elapsed_times = []
    with torch.no_grad():
        for i in range(len_data):
            x, y = dataset[i]
            y_gt[i] = y
            start = time.time()
            y_out[i] = model(x.unsqueeze(0).to(device))
            elapsed = time.time() - start
            elapsed_times.append(elapsed)
            if sanity_check:
                break
    
    inference_time = np.mean(elapsed_times) * 1000
    print('average inference time per image on %s: %.2f ms ' %(device, inference_time))
    
    return y_out.numpy(), y_gt
# deploy model
y_out, y_gt = deploy_model(resnet_model, val_ds, device=device, sanity_check=False)
print(y_out.shape, y_gt.shape)
average inference time per image on cuda: 48.09 ms 
(455, 3) (455,)
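The 48 ms figure is per single-image forward pass; since deploy_model feeds images one at a time, per-call overhead dominates. Running whole batches through the model via the existing val_dl would amortize that overhead; a sketch, assuming val_dl yields (image batch, integer label batch) pairs:

# batched inference sketch: push entire DataLoader batches through the model
y_out_list, y_gt_list = [], []
with torch.no_grad():
    for xb, yb in val_dl:
        y_out_list.append(resnet_model(xb.to(device)).cpu())
        y_gt_list.append(yb)
y_out_batched = torch.cat(y_out_list).numpy()
y_gt_batched = torch.cat(y_gt_list).numpy()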

Computing Accuracy

# get predictions
y_pred = np.argmax(y_out, axis=1)
print(y_pred.shape, y_gt.shape)
(455,) (455,)
# compute accuracy
acc = accuracy_score(y_pred, y_gt)
print('accuracy: %.2f' %acc)
accuracy: 0.99
# confusion matrix (with this argument order, rows are predictions and columns are ground truth;
# sklearn's documented convention is confusion_matrix(y_true, y_pred))
confusion_matrix(y_pred, y_gt)
array([[156,   0,   0],
       [  1, 162,   0],
       [  2,   0, 134]])
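Since seaborn is already imported, the same matrix reads more easily as an annotated heatmap. A small sketch, this time passing the arguments in sklearn's (y_true, y_pred) order so that ground truth sits on the rows:

# confusion matrix as a heatmap; (y_true, y_pred) order puts
# ground truth on the rows and predictions on the columns
cm = confusion_matrix(y_gt, y_pred)
sns.heatmap(cm, annot=True, fmt='d', cmap='Blues')
plt.xlabel('predicted')
plt.ylabel('actual')
plt.show()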

3. Dimensionality Reduction and Visualization of the Model's Upper-Layer Features (t-SNE)

Loading the trained weights into the model and extracting the features just before the final fc layer

# drop the final fc layer; the remaining stack (conv1 ... avgpool) yields 2048-d features
extract_model = nn.Sequential(*(list(resnet_model.children())[:-1]))
extract_model
Sequential(
  (0): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
  (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (2): ReLU(inplace=True)
  (3): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
  (4): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (5): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (3): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (4): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (5): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (6): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (7): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (6): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (3): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (4): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (5): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (6): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (7): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (8): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (9): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (10): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (11): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (12): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (13): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (14): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (15): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (16): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (17): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (18): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (19): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (20): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (21): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (22): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (23): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (24): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (25): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (26): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (27): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (28): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (29): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (30): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (31): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (32): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (33): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (34): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (35): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (7): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (8): AdaptiveAvgPool2d(output_size=(1, 1))
)
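The printout above is torchvision's ResNet-152 with its final fully-connected layer removed, so a forward pass ends at the AdaptiveAvgPool2d and returns a (N, 2048, 1, 1) feature map. The total parameter count below (58,143,808) matches resnet152 minus its 2,049,000-parameter fc head, which supports that reading. A minimal sketch of how such a truncated backbone can be built (the actual construction appears earlier in the notebook; the pretrained resnet152 here is an assumption):

# Sketch: keep every child module of resnet152 except the final fc layer,
# so the model ends at AdaptiveAvgPool2d and emits 2048-d feature maps.
from torchvision import models
import torch.nn as nn

resnet = models.resnet152(pretrained=True)
extract_model = nn.Sequential(*list(resnet.children())[:-1])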
summary(extract_model, input_size=(3, 256, 256))
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Conv2d-1         [-1, 64, 128, 128]           9,408
       BatchNorm2d-2         [-1, 64, 128, 128]             128
              ReLU-3         [-1, 64, 128, 128]               0
         MaxPool2d-4           [-1, 64, 64, 64]               0
            Conv2d-5           [-1, 64, 64, 64]           4,096
       BatchNorm2d-6           [-1, 64, 64, 64]             128
              ReLU-7           [-1, 64, 64, 64]               0
            Conv2d-8           [-1, 64, 64, 64]          36,864
       BatchNorm2d-9           [-1, 64, 64, 64]             128
             ReLU-10           [-1, 64, 64, 64]               0
           Conv2d-11          [-1, 256, 64, 64]          16,384
      BatchNorm2d-12          [-1, 256, 64, 64]             512
           Conv2d-13          [-1, 256, 64, 64]          16,384
      BatchNorm2d-14          [-1, 256, 64, 64]             512
             ReLU-15          [-1, 256, 64, 64]               0
       Bottleneck-16          [-1, 256, 64, 64]               0
           Conv2d-17           [-1, 64, 64, 64]          16,384
      BatchNorm2d-18           [-1, 64, 64, 64]             128
             ReLU-19           [-1, 64, 64, 64]               0
           Conv2d-20           [-1, 64, 64, 64]          36,864
      BatchNorm2d-21           [-1, 64, 64, 64]             128
             ReLU-22           [-1, 64, 64, 64]               0
           Conv2d-23          [-1, 256, 64, 64]          16,384
      BatchNorm2d-24          [-1, 256, 64, 64]             512
             ReLU-25          [-1, 256, 64, 64]               0
       Bottleneck-26          [-1, 256, 64, 64]               0
           Conv2d-27           [-1, 64, 64, 64]          16,384
      BatchNorm2d-28           [-1, 64, 64, 64]             128
             ReLU-29           [-1, 64, 64, 64]               0
           Conv2d-30           [-1, 64, 64, 64]          36,864
      BatchNorm2d-31           [-1, 64, 64, 64]             128
             ReLU-32           [-1, 64, 64, 64]               0
           Conv2d-33          [-1, 256, 64, 64]          16,384
      BatchNorm2d-34          [-1, 256, 64, 64]             512
             ReLU-35          [-1, 256, 64, 64]               0
       Bottleneck-36          [-1, 256, 64, 64]               0
           Conv2d-37          [-1, 128, 64, 64]          32,768
      BatchNorm2d-38          [-1, 128, 64, 64]             256
             ReLU-39          [-1, 128, 64, 64]               0
           Conv2d-40          [-1, 128, 32, 32]         147,456
      BatchNorm2d-41          [-1, 128, 32, 32]             256
             ReLU-42          [-1, 128, 32, 32]               0
           Conv2d-43          [-1, 512, 32, 32]          65,536
      BatchNorm2d-44          [-1, 512, 32, 32]           1,024
           Conv2d-45          [-1, 512, 32, 32]         131,072
      BatchNorm2d-46          [-1, 512, 32, 32]           1,024
             ReLU-47          [-1, 512, 32, 32]               0
       Bottleneck-48          [-1, 512, 32, 32]               0
           Conv2d-49          [-1, 128, 32, 32]          65,536
      BatchNorm2d-50          [-1, 128, 32, 32]             256
             ReLU-51          [-1, 128, 32, 32]               0
           Conv2d-52          [-1, 128, 32, 32]         147,456
      BatchNorm2d-53          [-1, 128, 32, 32]             256
             ReLU-54          [-1, 128, 32, 32]               0
           Conv2d-55          [-1, 512, 32, 32]          65,536
      BatchNorm2d-56          [-1, 512, 32, 32]           1,024
             ReLU-57          [-1, 512, 32, 32]               0
       Bottleneck-58          [-1, 512, 32, 32]               0
           Conv2d-59          [-1, 128, 32, 32]          65,536
      BatchNorm2d-60          [-1, 128, 32, 32]             256
             ReLU-61          [-1, 128, 32, 32]               0
           Conv2d-62          [-1, 128, 32, 32]         147,456
      BatchNorm2d-63          [-1, 128, 32, 32]             256
             ReLU-64          [-1, 128, 32, 32]               0
           Conv2d-65          [-1, 512, 32, 32]          65,536
      BatchNorm2d-66          [-1, 512, 32, 32]           1,024
             ReLU-67          [-1, 512, 32, 32]               0
       Bottleneck-68          [-1, 512, 32, 32]               0
           Conv2d-69          [-1, 128, 32, 32]          65,536
      BatchNorm2d-70          [-1, 128, 32, 32]             256
             ReLU-71          [-1, 128, 32, 32]               0
           Conv2d-72          [-1, 128, 32, 32]         147,456
      BatchNorm2d-73          [-1, 128, 32, 32]             256
             ReLU-74          [-1, 128, 32, 32]               0
           Conv2d-75          [-1, 512, 32, 32]          65,536
      BatchNorm2d-76          [-1, 512, 32, 32]           1,024
             ReLU-77          [-1, 512, 32, 32]               0
       Bottleneck-78          [-1, 512, 32, 32]               0
           Conv2d-79          [-1, 128, 32, 32]          65,536
      BatchNorm2d-80          [-1, 128, 32, 32]             256
             ReLU-81          [-1, 128, 32, 32]               0
           Conv2d-82          [-1, 128, 32, 32]         147,456
      BatchNorm2d-83          [-1, 128, 32, 32]             256
             ReLU-84          [-1, 128, 32, 32]               0
           Conv2d-85          [-1, 512, 32, 32]          65,536
      BatchNorm2d-86          [-1, 512, 32, 32]           1,024
             ReLU-87          [-1, 512, 32, 32]               0
       Bottleneck-88          [-1, 512, 32, 32]               0
           Conv2d-89          [-1, 128, 32, 32]          65,536
      BatchNorm2d-90          [-1, 128, 32, 32]             256
             ReLU-91          [-1, 128, 32, 32]               0
           Conv2d-92          [-1, 128, 32, 32]         147,456
      BatchNorm2d-93          [-1, 128, 32, 32]             256
             ReLU-94          [-1, 128, 32, 32]               0
           Conv2d-95          [-1, 512, 32, 32]          65,536
      BatchNorm2d-96          [-1, 512, 32, 32]           1,024
             ReLU-97          [-1, 512, 32, 32]               0
       Bottleneck-98          [-1, 512, 32, 32]               0
           Conv2d-99          [-1, 128, 32, 32]          65,536
     BatchNorm2d-100          [-1, 128, 32, 32]             256
            ReLU-101          [-1, 128, 32, 32]               0
          Conv2d-102          [-1, 128, 32, 32]         147,456
     BatchNorm2d-103          [-1, 128, 32, 32]             256
            ReLU-104          [-1, 128, 32, 32]               0
          Conv2d-105          [-1, 512, 32, 32]          65,536
     BatchNorm2d-106          [-1, 512, 32, 32]           1,024
            ReLU-107          [-1, 512, 32, 32]               0
      Bottleneck-108          [-1, 512, 32, 32]               0
          Conv2d-109          [-1, 128, 32, 32]          65,536
     BatchNorm2d-110          [-1, 128, 32, 32]             256
            ReLU-111          [-1, 128, 32, 32]               0
          Conv2d-112          [-1, 128, 32, 32]         147,456
     BatchNorm2d-113          [-1, 128, 32, 32]             256
            ReLU-114          [-1, 128, 32, 32]               0
          Conv2d-115          [-1, 512, 32, 32]          65,536
     BatchNorm2d-116          [-1, 512, 32, 32]           1,024
            ReLU-117          [-1, 512, 32, 32]               0
      Bottleneck-118          [-1, 512, 32, 32]               0
          Conv2d-119          [-1, 256, 32, 32]         131,072
     BatchNorm2d-120          [-1, 256, 32, 32]             512
            ReLU-121          [-1, 256, 32, 32]               0
          Conv2d-122          [-1, 256, 16, 16]         589,824
     BatchNorm2d-123          [-1, 256, 16, 16]             512
            ReLU-124          [-1, 256, 16, 16]               0
          Conv2d-125         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-126         [-1, 1024, 16, 16]           2,048
          Conv2d-127         [-1, 1024, 16, 16]         524,288
     BatchNorm2d-128         [-1, 1024, 16, 16]           2,048
            ReLU-129         [-1, 1024, 16, 16]               0
      Bottleneck-130         [-1, 1024, 16, 16]               0
          Conv2d-131          [-1, 256, 16, 16]         262,144
     BatchNorm2d-132          [-1, 256, 16, 16]             512
            ReLU-133          [-1, 256, 16, 16]               0
          Conv2d-134          [-1, 256, 16, 16]         589,824
     BatchNorm2d-135          [-1, 256, 16, 16]             512
            ReLU-136          [-1, 256, 16, 16]               0
          Conv2d-137         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-138         [-1, 1024, 16, 16]           2,048
            ReLU-139         [-1, 1024, 16, 16]               0
      Bottleneck-140         [-1, 1024, 16, 16]               0
          Conv2d-141          [-1, 256, 16, 16]         262,144
     BatchNorm2d-142          [-1, 256, 16, 16]             512
            ReLU-143          [-1, 256, 16, 16]               0
          Conv2d-144          [-1, 256, 16, 16]         589,824
     BatchNorm2d-145          [-1, 256, 16, 16]             512
            ReLU-146          [-1, 256, 16, 16]               0
          Conv2d-147         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-148         [-1, 1024, 16, 16]           2,048
            ReLU-149         [-1, 1024, 16, 16]               0
      Bottleneck-150         [-1, 1024, 16, 16]               0
          Conv2d-151          [-1, 256, 16, 16]         262,144
     BatchNorm2d-152          [-1, 256, 16, 16]             512
            ReLU-153          [-1, 256, 16, 16]               0
          Conv2d-154          [-1, 256, 16, 16]         589,824
     BatchNorm2d-155          [-1, 256, 16, 16]             512
            ReLU-156          [-1, 256, 16, 16]               0
          Conv2d-157         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-158         [-1, 1024, 16, 16]           2,048
            ReLU-159         [-1, 1024, 16, 16]               0
      Bottleneck-160         [-1, 1024, 16, 16]               0
          Conv2d-161          [-1, 256, 16, 16]         262,144
     BatchNorm2d-162          [-1, 256, 16, 16]             512
            ReLU-163          [-1, 256, 16, 16]               0
          Conv2d-164          [-1, 256, 16, 16]         589,824
     BatchNorm2d-165          [-1, 256, 16, 16]             512
            ReLU-166          [-1, 256, 16, 16]               0
          Conv2d-167         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-168         [-1, 1024, 16, 16]           2,048
            ReLU-169         [-1, 1024, 16, 16]               0
      Bottleneck-170         [-1, 1024, 16, 16]               0
          Conv2d-171          [-1, 256, 16, 16]         262,144
     BatchNorm2d-172          [-1, 256, 16, 16]             512
            ReLU-173          [-1, 256, 16, 16]               0
          Conv2d-174          [-1, 256, 16, 16]         589,824
     BatchNorm2d-175          [-1, 256, 16, 16]             512
            ReLU-176          [-1, 256, 16, 16]               0
          Conv2d-177         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-178         [-1, 1024, 16, 16]           2,048
            ReLU-179         [-1, 1024, 16, 16]               0
      Bottleneck-180         [-1, 1024, 16, 16]               0
          Conv2d-181          [-1, 256, 16, 16]         262,144
     BatchNorm2d-182          [-1, 256, 16, 16]             512
            ReLU-183          [-1, 256, 16, 16]               0
          Conv2d-184          [-1, 256, 16, 16]         589,824
     BatchNorm2d-185          [-1, 256, 16, 16]             512
            ReLU-186          [-1, 256, 16, 16]               0
          Conv2d-187         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-188         [-1, 1024, 16, 16]           2,048
            ReLU-189         [-1, 1024, 16, 16]               0
      Bottleneck-190         [-1, 1024, 16, 16]               0
          Conv2d-191          [-1, 256, 16, 16]         262,144
     BatchNorm2d-192          [-1, 256, 16, 16]             512
            ReLU-193          [-1, 256, 16, 16]               0
          Conv2d-194          [-1, 256, 16, 16]         589,824
     BatchNorm2d-195          [-1, 256, 16, 16]             512
            ReLU-196          [-1, 256, 16, 16]               0
          Conv2d-197         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-198         [-1, 1024, 16, 16]           2,048
            ReLU-199         [-1, 1024, 16, 16]               0
      Bottleneck-200         [-1, 1024, 16, 16]               0
          Conv2d-201          [-1, 256, 16, 16]         262,144
     BatchNorm2d-202          [-1, 256, 16, 16]             512
            ReLU-203          [-1, 256, 16, 16]               0
          Conv2d-204          [-1, 256, 16, 16]         589,824
     BatchNorm2d-205          [-1, 256, 16, 16]             512
            ReLU-206          [-1, 256, 16, 16]               0
          Conv2d-207         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-208         [-1, 1024, 16, 16]           2,048
            ReLU-209         [-1, 1024, 16, 16]               0
      Bottleneck-210         [-1, 1024, 16, 16]               0
          Conv2d-211          [-1, 256, 16, 16]         262,144
     BatchNorm2d-212          [-1, 256, 16, 16]             512
            ReLU-213          [-1, 256, 16, 16]               0
          Conv2d-214          [-1, 256, 16, 16]         589,824
     BatchNorm2d-215          [-1, 256, 16, 16]             512
            ReLU-216          [-1, 256, 16, 16]               0
          Conv2d-217         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-218         [-1, 1024, 16, 16]           2,048
            ReLU-219         [-1, 1024, 16, 16]               0
      Bottleneck-220         [-1, 1024, 16, 16]               0
          Conv2d-221          [-1, 256, 16, 16]         262,144
     BatchNorm2d-222          [-1, 256, 16, 16]             512
            ReLU-223          [-1, 256, 16, 16]               0
          Conv2d-224          [-1, 256, 16, 16]         589,824
     BatchNorm2d-225          [-1, 256, 16, 16]             512
            ReLU-226          [-1, 256, 16, 16]               0
          Conv2d-227         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-228         [-1, 1024, 16, 16]           2,048
            ReLU-229         [-1, 1024, 16, 16]               0
      Bottleneck-230         [-1, 1024, 16, 16]               0
          Conv2d-231          [-1, 256, 16, 16]         262,144
     BatchNorm2d-232          [-1, 256, 16, 16]             512
            ReLU-233          [-1, 256, 16, 16]               0
          Conv2d-234          [-1, 256, 16, 16]         589,824
     BatchNorm2d-235          [-1, 256, 16, 16]             512
            ReLU-236          [-1, 256, 16, 16]               0
          Conv2d-237         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-238         [-1, 1024, 16, 16]           2,048
            ReLU-239         [-1, 1024, 16, 16]               0
      Bottleneck-240         [-1, 1024, 16, 16]               0
          Conv2d-241          [-1, 256, 16, 16]         262,144
     BatchNorm2d-242          [-1, 256, 16, 16]             512
            ReLU-243          [-1, 256, 16, 16]               0
          Conv2d-244          [-1, 256, 16, 16]         589,824
     BatchNorm2d-245          [-1, 256, 16, 16]             512
            ReLU-246          [-1, 256, 16, 16]               0
          Conv2d-247         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-248         [-1, 1024, 16, 16]           2,048
            ReLU-249         [-1, 1024, 16, 16]               0
      Bottleneck-250         [-1, 1024, 16, 16]               0
          Conv2d-251          [-1, 256, 16, 16]         262,144
     BatchNorm2d-252          [-1, 256, 16, 16]             512
            ReLU-253          [-1, 256, 16, 16]               0
          Conv2d-254          [-1, 256, 16, 16]         589,824
     BatchNorm2d-255          [-1, 256, 16, 16]             512
            ReLU-256          [-1, 256, 16, 16]               0
          Conv2d-257         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-258         [-1, 1024, 16, 16]           2,048
            ReLU-259         [-1, 1024, 16, 16]               0
      Bottleneck-260         [-1, 1024, 16, 16]               0
          Conv2d-261          [-1, 256, 16, 16]         262,144
     BatchNorm2d-262          [-1, 256, 16, 16]             512
            ReLU-263          [-1, 256, 16, 16]               0
          Conv2d-264          [-1, 256, 16, 16]         589,824
     BatchNorm2d-265          [-1, 256, 16, 16]             512
            ReLU-266          [-1, 256, 16, 16]               0
          Conv2d-267         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-268         [-1, 1024, 16, 16]           2,048
            ReLU-269         [-1, 1024, 16, 16]               0
      Bottleneck-270         [-1, 1024, 16, 16]               0
          Conv2d-271          [-1, 256, 16, 16]         262,144
     BatchNorm2d-272          [-1, 256, 16, 16]             512
            ReLU-273          [-1, 256, 16, 16]               0
          Conv2d-274          [-1, 256, 16, 16]         589,824
     BatchNorm2d-275          [-1, 256, 16, 16]             512
            ReLU-276          [-1, 256, 16, 16]               0
          Conv2d-277         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-278         [-1, 1024, 16, 16]           2,048
            ReLU-279         [-1, 1024, 16, 16]               0
      Bottleneck-280         [-1, 1024, 16, 16]               0
          Conv2d-281          [-1, 256, 16, 16]         262,144
     BatchNorm2d-282          [-1, 256, 16, 16]             512
            ReLU-283          [-1, 256, 16, 16]               0
          Conv2d-284          [-1, 256, 16, 16]         589,824
     BatchNorm2d-285          [-1, 256, 16, 16]             512
            ReLU-286          [-1, 256, 16, 16]               0
          Conv2d-287         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-288         [-1, 1024, 16, 16]           2,048
            ReLU-289         [-1, 1024, 16, 16]               0
      Bottleneck-290         [-1, 1024, 16, 16]               0
          Conv2d-291          [-1, 256, 16, 16]         262,144
     BatchNorm2d-292          [-1, 256, 16, 16]             512
            ReLU-293          [-1, 256, 16, 16]               0
          Conv2d-294          [-1, 256, 16, 16]         589,824
     BatchNorm2d-295          [-1, 256, 16, 16]             512
            ReLU-296          [-1, 256, 16, 16]               0
          Conv2d-297         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-298         [-1, 1024, 16, 16]           2,048
            ReLU-299         [-1, 1024, 16, 16]               0
      Bottleneck-300         [-1, 1024, 16, 16]               0
          Conv2d-301          [-1, 256, 16, 16]         262,144
     BatchNorm2d-302          [-1, 256, 16, 16]             512
            ReLU-303          [-1, 256, 16, 16]               0
          Conv2d-304          [-1, 256, 16, 16]         589,824
     BatchNorm2d-305          [-1, 256, 16, 16]             512
            ReLU-306          [-1, 256, 16, 16]               0
          Conv2d-307         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-308         [-1, 1024, 16, 16]           2,048
            ReLU-309         [-1, 1024, 16, 16]               0
      Bottleneck-310         [-1, 1024, 16, 16]               0
          Conv2d-311          [-1, 256, 16, 16]         262,144
     BatchNorm2d-312          [-1, 256, 16, 16]             512
            ReLU-313          [-1, 256, 16, 16]               0
          Conv2d-314          [-1, 256, 16, 16]         589,824
     BatchNorm2d-315          [-1, 256, 16, 16]             512
            ReLU-316          [-1, 256, 16, 16]               0
          Conv2d-317         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-318         [-1, 1024, 16, 16]           2,048
            ReLU-319         [-1, 1024, 16, 16]               0
      Bottleneck-320         [-1, 1024, 16, 16]               0
          Conv2d-321          [-1, 256, 16, 16]         262,144
     BatchNorm2d-322          [-1, 256, 16, 16]             512
            ReLU-323          [-1, 256, 16, 16]               0
          Conv2d-324          [-1, 256, 16, 16]         589,824
     BatchNorm2d-325          [-1, 256, 16, 16]             512
            ReLU-326          [-1, 256, 16, 16]               0
          Conv2d-327         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-328         [-1, 1024, 16, 16]           2,048
            ReLU-329         [-1, 1024, 16, 16]               0
      Bottleneck-330         [-1, 1024, 16, 16]               0
          Conv2d-331          [-1, 256, 16, 16]         262,144
     BatchNorm2d-332          [-1, 256, 16, 16]             512
            ReLU-333          [-1, 256, 16, 16]               0
          Conv2d-334          [-1, 256, 16, 16]         589,824
     BatchNorm2d-335          [-1, 256, 16, 16]             512
            ReLU-336          [-1, 256, 16, 16]               0
          Conv2d-337         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-338         [-1, 1024, 16, 16]           2,048
            ReLU-339         [-1, 1024, 16, 16]               0
      Bottleneck-340         [-1, 1024, 16, 16]               0
          Conv2d-341          [-1, 256, 16, 16]         262,144
     BatchNorm2d-342          [-1, 256, 16, 16]             512
            ReLU-343          [-1, 256, 16, 16]               0
          Conv2d-344          [-1, 256, 16, 16]         589,824
     BatchNorm2d-345          [-1, 256, 16, 16]             512
            ReLU-346          [-1, 256, 16, 16]               0
          Conv2d-347         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-348         [-1, 1024, 16, 16]           2,048
            ReLU-349         [-1, 1024, 16, 16]               0
      Bottleneck-350         [-1, 1024, 16, 16]               0
          Conv2d-351          [-1, 256, 16, 16]         262,144
     BatchNorm2d-352          [-1, 256, 16, 16]             512
            ReLU-353          [-1, 256, 16, 16]               0
          Conv2d-354          [-1, 256, 16, 16]         589,824
     BatchNorm2d-355          [-1, 256, 16, 16]             512
            ReLU-356          [-1, 256, 16, 16]               0
          Conv2d-357         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-358         [-1, 1024, 16, 16]           2,048
            ReLU-359         [-1, 1024, 16, 16]               0
      Bottleneck-360         [-1, 1024, 16, 16]               0
          Conv2d-361          [-1, 256, 16, 16]         262,144
     BatchNorm2d-362          [-1, 256, 16, 16]             512
            ReLU-363          [-1, 256, 16, 16]               0
          Conv2d-364          [-1, 256, 16, 16]         589,824
     BatchNorm2d-365          [-1, 256, 16, 16]             512
            ReLU-366          [-1, 256, 16, 16]               0
          Conv2d-367         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-368         [-1, 1024, 16, 16]           2,048
            ReLU-369         [-1, 1024, 16, 16]               0
      Bottleneck-370         [-1, 1024, 16, 16]               0
          Conv2d-371          [-1, 256, 16, 16]         262,144
     BatchNorm2d-372          [-1, 256, 16, 16]             512
            ReLU-373          [-1, 256, 16, 16]               0
          Conv2d-374          [-1, 256, 16, 16]         589,824
     BatchNorm2d-375          [-1, 256, 16, 16]             512
            ReLU-376          [-1, 256, 16, 16]               0
          Conv2d-377         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-378         [-1, 1024, 16, 16]           2,048
            ReLU-379         [-1, 1024, 16, 16]               0
      Bottleneck-380         [-1, 1024, 16, 16]               0
          Conv2d-381          [-1, 256, 16, 16]         262,144
     BatchNorm2d-382          [-1, 256, 16, 16]             512
            ReLU-383          [-1, 256, 16, 16]               0
          Conv2d-384          [-1, 256, 16, 16]         589,824
     BatchNorm2d-385          [-1, 256, 16, 16]             512
            ReLU-386          [-1, 256, 16, 16]               0
          Conv2d-387         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-388         [-1, 1024, 16, 16]           2,048
            ReLU-389         [-1, 1024, 16, 16]               0
      Bottleneck-390         [-1, 1024, 16, 16]               0
          Conv2d-391          [-1, 256, 16, 16]         262,144
     BatchNorm2d-392          [-1, 256, 16, 16]             512
            ReLU-393          [-1, 256, 16, 16]               0
          Conv2d-394          [-1, 256, 16, 16]         589,824
     BatchNorm2d-395          [-1, 256, 16, 16]             512
            ReLU-396          [-1, 256, 16, 16]               0
          Conv2d-397         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-398         [-1, 1024, 16, 16]           2,048
            ReLU-399         [-1, 1024, 16, 16]               0
      Bottleneck-400         [-1, 1024, 16, 16]               0
          Conv2d-401          [-1, 256, 16, 16]         262,144
     BatchNorm2d-402          [-1, 256, 16, 16]             512
            ReLU-403          [-1, 256, 16, 16]               0
          Conv2d-404          [-1, 256, 16, 16]         589,824
     BatchNorm2d-405          [-1, 256, 16, 16]             512
            ReLU-406          [-1, 256, 16, 16]               0
          Conv2d-407         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-408         [-1, 1024, 16, 16]           2,048
            ReLU-409         [-1, 1024, 16, 16]               0
      Bottleneck-410         [-1, 1024, 16, 16]               0
          Conv2d-411          [-1, 256, 16, 16]         262,144
     BatchNorm2d-412          [-1, 256, 16, 16]             512
            ReLU-413          [-1, 256, 16, 16]               0
          Conv2d-414          [-1, 256, 16, 16]         589,824
     BatchNorm2d-415          [-1, 256, 16, 16]             512
            ReLU-416          [-1, 256, 16, 16]               0
          Conv2d-417         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-418         [-1, 1024, 16, 16]           2,048
            ReLU-419         [-1, 1024, 16, 16]               0
      Bottleneck-420         [-1, 1024, 16, 16]               0
          Conv2d-421          [-1, 256, 16, 16]         262,144
     BatchNorm2d-422          [-1, 256, 16, 16]             512
            ReLU-423          [-1, 256, 16, 16]               0
          Conv2d-424          [-1, 256, 16, 16]         589,824
     BatchNorm2d-425          [-1, 256, 16, 16]             512
            ReLU-426          [-1, 256, 16, 16]               0
          Conv2d-427         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-428         [-1, 1024, 16, 16]           2,048
            ReLU-429         [-1, 1024, 16, 16]               0
      Bottleneck-430         [-1, 1024, 16, 16]               0
          Conv2d-431          [-1, 256, 16, 16]         262,144
     BatchNorm2d-432          [-1, 256, 16, 16]             512
            ReLU-433          [-1, 256, 16, 16]               0
          Conv2d-434          [-1, 256, 16, 16]         589,824
     BatchNorm2d-435          [-1, 256, 16, 16]             512
            ReLU-436          [-1, 256, 16, 16]               0
          Conv2d-437         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-438         [-1, 1024, 16, 16]           2,048
            ReLU-439         [-1, 1024, 16, 16]               0
      Bottleneck-440         [-1, 1024, 16, 16]               0
          Conv2d-441          [-1, 256, 16, 16]         262,144
     BatchNorm2d-442          [-1, 256, 16, 16]             512
            ReLU-443          [-1, 256, 16, 16]               0
          Conv2d-444          [-1, 256, 16, 16]         589,824
     BatchNorm2d-445          [-1, 256, 16, 16]             512
            ReLU-446          [-1, 256, 16, 16]               0
          Conv2d-447         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-448         [-1, 1024, 16, 16]           2,048
            ReLU-449         [-1, 1024, 16, 16]               0
      Bottleneck-450         [-1, 1024, 16, 16]               0
          Conv2d-451          [-1, 256, 16, 16]         262,144
     BatchNorm2d-452          [-1, 256, 16, 16]             512
            ReLU-453          [-1, 256, 16, 16]               0
          Conv2d-454          [-1, 256, 16, 16]         589,824
     BatchNorm2d-455          [-1, 256, 16, 16]             512
            ReLU-456          [-1, 256, 16, 16]               0
          Conv2d-457         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-458         [-1, 1024, 16, 16]           2,048
            ReLU-459         [-1, 1024, 16, 16]               0
      Bottleneck-460         [-1, 1024, 16, 16]               0
          Conv2d-461          [-1, 256, 16, 16]         262,144
     BatchNorm2d-462          [-1, 256, 16, 16]             512
            ReLU-463          [-1, 256, 16, 16]               0
          Conv2d-464          [-1, 256, 16, 16]         589,824
     BatchNorm2d-465          [-1, 256, 16, 16]             512
            ReLU-466          [-1, 256, 16, 16]               0
          Conv2d-467         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-468         [-1, 1024, 16, 16]           2,048
            ReLU-469         [-1, 1024, 16, 16]               0
      Bottleneck-470         [-1, 1024, 16, 16]               0
          Conv2d-471          [-1, 256, 16, 16]         262,144
     BatchNorm2d-472          [-1, 256, 16, 16]             512
            ReLU-473          [-1, 256, 16, 16]               0
          Conv2d-474          [-1, 256, 16, 16]         589,824
     BatchNorm2d-475          [-1, 256, 16, 16]             512
            ReLU-476          [-1, 256, 16, 16]               0
          Conv2d-477         [-1, 1024, 16, 16]         262,144
     BatchNorm2d-478         [-1, 1024, 16, 16]           2,048
            ReLU-479         [-1, 1024, 16, 16]               0
      Bottleneck-480         [-1, 1024, 16, 16]               0
          Conv2d-481          [-1, 512, 16, 16]         524,288
     BatchNorm2d-482          [-1, 512, 16, 16]           1,024
            ReLU-483          [-1, 512, 16, 16]               0
          Conv2d-484            [-1, 512, 8, 8]       2,359,296
     BatchNorm2d-485            [-1, 512, 8, 8]           1,024
            ReLU-486            [-1, 512, 8, 8]               0
          Conv2d-487           [-1, 2048, 8, 8]       1,048,576
     BatchNorm2d-488           [-1, 2048, 8, 8]           4,096
          Conv2d-489           [-1, 2048, 8, 8]       2,097,152
     BatchNorm2d-490           [-1, 2048, 8, 8]           4,096
            ReLU-491           [-1, 2048, 8, 8]               0
      Bottleneck-492           [-1, 2048, 8, 8]               0
          Conv2d-493            [-1, 512, 8, 8]       1,048,576
     BatchNorm2d-494            [-1, 512, 8, 8]           1,024
            ReLU-495            [-1, 512, 8, 8]               0
          Conv2d-496            [-1, 512, 8, 8]       2,359,296
     BatchNorm2d-497            [-1, 512, 8, 8]           1,024
            ReLU-498            [-1, 512, 8, 8]               0
          Conv2d-499           [-1, 2048, 8, 8]       1,048,576
     BatchNorm2d-500           [-1, 2048, 8, 8]           4,096
            ReLU-501           [-1, 2048, 8, 8]               0
      Bottleneck-502           [-1, 2048, 8, 8]               0
          Conv2d-503            [-1, 512, 8, 8]       1,048,576
     BatchNorm2d-504            [-1, 512, 8, 8]           1,024
            ReLU-505            [-1, 512, 8, 8]               0
          Conv2d-506            [-1, 512, 8, 8]       2,359,296
     BatchNorm2d-507            [-1, 512, 8, 8]           1,024
            ReLU-508            [-1, 512, 8, 8]               0
          Conv2d-509           [-1, 2048, 8, 8]       1,048,576
     BatchNorm2d-510           [-1, 2048, 8, 8]           4,096
            ReLU-511           [-1, 2048, 8, 8]               0
      Bottleneck-512           [-1, 2048, 8, 8]               0
AdaptiveAvgPool2d-513           [-1, 2048, 1, 1]               0
================================================================
Total params: 58,143,808
Trainable params: 58,143,808
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.75
Forward/backward pass size (MB): 792.27
Params size (MB): 221.80
Estimated Total Size (MB): 1014.82
----------------------------------------------------------------
# run the feature extractor over every item in a dataset
def predict_model(model, dataset, device, sanity_check=False):
    len_data = len(dataset)
    y_out = torch.zeros(len_data, 2048, 1, 1)
    # move model to device and make sure BatchNorm layers run in inference mode
    model = model.to(device)
    model.eval()
    
    with torch.no_grad():
        for i in tqdm(range(len_data)):
            x, _ = dataset[i]
            y_out[i] = model(x.unsqueeze(0).to(device))
            if sanity_check is True:
                break
    
    return y_out.squeeze().numpy()
# prediction
y_out = predict_model(extract_model, codi_dataset, device=device, sanity_check=False)
100%|██████████| 2271/2271 [01:53<00:00, 19.94it/s]
y_out.shape
(2271, 2048)

t-SNE dimensionality reduction

tsne = TSNE(n_components=2, verbose=1, perplexity=40, n_iter=300)
tsne_results = tsne.fit_transform(y_out)
[t-SNE] Computing 121 nearest neighbors...
[t-SNE] Indexed 2271 samples in 0.006s...
[t-SNE] Computed neighbors for 2271 samples in 4.618s...
[t-SNE] Computed conditional probabilities for sample 1000 / 2271
[t-SNE] Computed conditional probabilities for sample 2000 / 2271
[t-SNE] Computed conditional probabilities for sample 2271 / 2271
[t-SNE] Mean sigma: 0.000001
[t-SNE] KL divergence after 250 iterations with early exaggeration: 59.221241
[t-SNE] KL divergence after 300 iterations: 0.909193
tsne_results.shape
(2271, 2)

Building the features DataFrame

features_df = pd.DataFrame(y_out)
features_df['tsne-2d-one'] = tsne_results[:, 0]
features_df['tsne-2d-two'] = tsne_results[:, 1]
features_df['y'] = y_label
features_df
0 1 2 3 4 5 6 7 8 9 ... 2041 2042 2043 2044 2045 2046 2047 tsne-2d-one tsne-2d-two y
0 0.695721 1.710319 0.204196 0.881802 0.453413 1.775246 0.829325 1.248698 0.956496 1.579597 ... 1.410273 0.769142 0.248589 0.881902 0.497093 1.466140 0.226954 4.103096 7.574650 0
1 0.876267 1.528911 0.315072 1.045046 0.740249 1.676060 0.960913 1.214223 0.871376 1.377908 ... 1.239883 0.867477 0.355668 1.240655 0.716226 1.494818 0.189479 4.222771 5.532461 0
2 0.971387 1.932212 0.041615 0.248952 0.278344 1.253941 0.551203 1.153246 1.013609 1.438511 ... 1.431667 0.499859 0.131681 0.804263 0.177487 0.421547 0.319541 -1.041722 15.869433 0
3 0.736790 1.654709 0.264454 0.555555 0.446634 1.580580 0.716179 1.260359 0.979936 1.504789 ... 1.379350 0.634895 0.292921 0.847585 0.443400 1.011251 0.415138 -0.286212 8.826965 0
4 0.465451 1.772036 0.270101 0.790482 0.370361 1.520643 0.789837 1.279298 0.996089 1.471714 ... 1.470277 0.809922 0.322172 0.704098 0.356618 1.360407 0.382677 3.649280 9.103030 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
2266 0.890521 0.759911 0.631271 0.129028 0.459248 0.996799 0.512156 1.508023 0.849738 1.160838 ... 0.827952 0.310896 0.524080 0.498889 0.137738 0.139758 1.310194 -4.670966 -6.623757 2
2267 0.616984 0.935056 1.197203 0.453550 0.598763 1.247590 0.575706 1.599702 0.737817 1.201313 ... 0.918703 0.476393 0.868560 0.595230 0.388647 0.576357 1.742615 -7.470194 -4.477913 2
2268 0.608641 0.301664 1.581931 0.561933 0.781626 1.132254 0.732696 1.959225 0.653402 1.104992 ... 0.598717 0.453975 1.229169 0.566903 0.484697 0.628965 2.510765 -11.265617 -3.769606 2
2269 0.124758 0.192331 1.798410 0.217724 0.384453 1.715632 0.316639 3.120745 0.703620 1.560274 ... 0.705432 0.524404 1.541395 0.011621 0.053865 0.336642 2.875262 -10.277861 10.734368 2
2270 0.780986 0.396705 1.415692 0.498425 0.747759 0.955459 0.729080 1.606349 0.701012 1.017674 ... 0.596912 0.309453 1.056667 0.660946 0.393194 0.458485 2.197579 -10.792315 -5.156699 2

2271 rows × 2051 columns

t-SNE visualization

plt.figure(figsize=(16, 10))
sns.scatterplot(
    x='tsne-2d-one',
    y='tsne-2d-two',
    hue='y',
    palette=sns.color_palette('hls', 3),
    data=features_df,
    legend='full',
    alpha=0.3
)
<AxesSubplot:xlabel='tsne-2d-one', ylabel='tsne-2d-two'>

png

4. Finding similar clothes by computing distances between feature vectors

# distance function (Euclidean / L2 distance)
def cal_dist(v1, v2):
    return np.sqrt(np.sum((v1 - v2) ** 2))
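
cal_dist is the plain Euclidean (L2) distance; as a quick sanity check (a minimal sketch), it matches norm(v1 - v2), with norm already imported from numpy.linalg:

# both expressions print 5.0 for this pair
v1, v2 = np.array([1.0, 2.0]), np.array([4.0, 6.0])
print(cal_dist(v1, v2), norm(v1 - v2))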
# find the top-5 items with the smallest vector distance (within the same clothing type)
def near(idx):
    # look up the clothing-type code
    label = features_df.loc[features_df.index == idx, 'y'].values[0]
    # restrict the search to items of the same clothing type
    same_df = features_df[features_df['y'] == label]
    
    # query vector: the first 2048 columns are the CNN features
    v1 = features_df[features_df.index == idx].values.reshape(-1)[:2048]
    
    # collect (index, distance) pairs
    result = []
    for i in same_df.index:
        if i == idx:
            continue
        v2 = features_df[features_df.index == i].values.reshape(-1)[:2048]
        result.append((i, cal_dist(v1, v2)))
    
    # final top-5 index list (Top-0 is the query image itself)
    top_5 = [idx]
    top_5.extend(list(map(lambda x: x[0], sorted(result, key=lambda x: x[1])[:5])))
    
    # plot the query and its five nearest neighbours
    rows = 2
    cols = 3
    axes = []
    fig = plt.figure(figsize=(10, 10))
    
    for a, b in zip(range(rows * cols), top_5):
        axes.append(fig.add_subplot(rows, cols, a + 1))
        subplot_title = 'Top-' + str(a)
        axes[-1].set_title(subplot_title)
        plt.imshow(x_data[b])
    fig.tight_layout()
    plt.show()
near(558)

png

near(800)

png

near(2108)

png

5. Outfit recommendation

For tops, bottoms, and shoes separately, extract the 2048-dimensional features output by the layer just before the final layer of ResNet-152
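
extract_model itself was built earlier in the notebook; for reference, a minimal sketch of one way to obtain such a truncated extractor (an assumption: a pretrained ResNet-152 with the final FC head dropped, which is consistent with the AdaptiveAvgPool2d-terminated summary above):

from torchvision import models

# keep everything up to and including the global average pool; drop the FC head
resnet = models.resnet152(pretrained=True)
extract_model = nn.Sequential(*list(resnet.children())[:-1]).to(device)
extract_model.eval()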

x_top = x_data[:757]
x_bottom = x_data[757:757 * 2]
x_shoes = x_data[757 * 2:]
y_top = y_label[:757]
y_bottom = y_label[757:757 * 2]
y_shoes = y_label[757 * 2:]
top_dataset = codiDataset(data_transformer, x_top, y_top)
bottom_dataset = codiDataset(data_transformer, x_bottom, y_bottom)
shoes_dataset = codiDataset(data_transformer, x_shoes, y_shoes)
top_arr = predict_model(extract_model, top_dataset, device=device, sanity_check=False)
bottom_arr = predict_model(extract_model, bottom_dataset, device=device, sanity_check=False)
shoes_arr = predict_model(extract_model, shoes_dataset, device=device, sanity_check=False)
100%|██████████| 757/757 [00:38<00:00, 19.68it/s]
100%|██████████| 757/757 [00:38<00:00, 19.62it/s]
100%|██████████| 757/757 [00:38<00:00, 19.78it/s]
print(top_arr.shape, bottom_arr.shape, shoes_arr.shape)
(757, 2048) (757, 2048) (757, 2048)

Concatenate the top, bottom, and shoes features horizontally (np.c_ stacks along the column axis) to build a single outfit vector

codi_arr = np.c_[top_arr, bottom_arr, shoes_arr]
codi_arr.shape
(757, 6144)
codi_df = pd.DataFrame(codi_arr)
codi_df
0 1 2 3 4 5 6 7 8 9 ... 6134 6135 6136 6137 6138 6139 6140 6141 6142 6143
0 0.695721 1.710319 0.204196 0.881802 0.453413 1.775246 0.829325 1.248698 0.956496 1.579597 ... 0.190421 1.308452 0.258963 0.399076 0.315414 1.369288 0.589988 0.476239 0.479393 2.835139
1 0.876267 1.528911 0.315072 1.045046 0.740249 1.676060 0.960913 1.214223 0.871376 1.377908 ... 0.276514 1.435217 0.146900 0.954991 0.401580 1.165464 0.126094 0.195851 0.437154 2.310848
2 0.971387 1.932212 0.041615 0.248952 0.278344 1.253941 0.551203 1.153246 1.013609 1.438511 ... 1.119465 1.860958 1.197395 0.515104 1.430135 1.842285 1.584267 1.794690 2.662927 1.883419
3 0.736790 1.654709 0.264454 0.555555 0.446634 1.580580 0.716179 1.260359 0.979936 1.504789 ... 0.119357 1.686849 0.065535 0.801654 0.468268 1.322552 0.125541 0.057522 0.200236 2.640122
4 0.465451 1.772036 0.270101 0.790482 0.370361 1.520643 0.789837 1.279298 0.996089 1.471714 ... 0.779716 1.701782 0.732446 0.496466 1.017152 1.756627 0.933950 1.252143 2.046726 2.612151
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
752 0.539420 1.132082 0.559330 1.785141 0.866360 2.124033 1.267244 1.431463 0.798052 1.519512 ... 0.240333 0.893834 0.278614 0.827952 0.310896 0.524080 0.498889 0.137738 0.139758 1.310194
753 0.621453 1.338610 0.364830 1.027499 0.549602 1.911238 0.900323 1.362479 0.927677 1.547515 ... 0.317526 1.122675 0.274742 0.918703 0.476393 0.868560 0.595230 0.388647 0.576357 1.742615
754 0.631091 1.581180 0.415773 1.117232 0.664558 1.751241 0.992670 1.290885 0.875214 1.472359 ... 0.187152 1.412468 0.207008 0.598717 0.453975 1.229169 0.566903 0.484697 0.628965 2.510765
755 0.579979 1.517567 0.568194 1.777840 0.885476 2.144906 1.175267 1.296606 0.872383 1.470840 ... 0.029407 2.069379 0.000875 0.705432 0.524404 1.541395 0.011621 0.053865 0.336642 2.875262
756 0.876100 1.505179 0.205132 0.575300 0.391039 1.604383 0.700477 1.289552 0.893184 1.468811 ... 0.205205 1.224224 0.272118 0.596912 0.309453 1.056667 0.660946 0.393194 0.458485 2.197579

757 rows × 6144 columns

Clustering the outfit vectors

  • Comparing a query against every outfit would be slow, so the outfits are first grouped with K-Means clustering and recommendations are then made within the matching cluster
NUM_CLUSTERS = 5
km = KMeans(n_clusters=NUM_CLUSTERS)
km.fit(codi_df)
KMeans(n_clusters=5)
codi_df['cluster'] = km.labels_
codi_df
0 1 2 3 4 5 6 7 8 9 ... 6135 6136 6137 6138 6139 6140 6141 6142 6143 cluster
0 0.695721 1.710319 0.204196 0.881802 0.453413 1.775246 0.829325 1.248698 0.956496 1.579597 ... 1.308452 0.258963 0.399076 0.315414 1.369288 0.589988 0.476239 0.479393 2.835139 2
1 0.876267 1.528911 0.315072 1.045046 0.740249 1.676060 0.960913 1.214223 0.871376 1.377908 ... 1.435217 0.146900 0.954991 0.401580 1.165464 0.126094 0.195851 0.437154 2.310848 2
2 0.971387 1.932212 0.041615 0.248952 0.278344 1.253941 0.551203 1.153246 1.013609 1.438511 ... 1.860958 1.197395 0.515104 1.430135 1.842285 1.584267 1.794690 2.662927 1.883419 0
3 0.736790 1.654709 0.264454 0.555555 0.446634 1.580580 0.716179 1.260359 0.979936 1.504789 ... 1.686849 0.065535 0.801654 0.468268 1.322552 0.125541 0.057522 0.200236 2.640122 1
4 0.465451 1.772036 0.270101 0.790482 0.370361 1.520643 0.789837 1.279298 0.996089 1.471714 ... 1.701782 0.732446 0.496466 1.017152 1.756627 0.933950 1.252143 2.046726 2.612151 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
752 0.539420 1.132082 0.559330 1.785141 0.866360 2.124033 1.267244 1.431463 0.798052 1.519512 ... 0.893834 0.278614 0.827952 0.310896 0.524080 0.498889 0.137738 0.139758 1.310194 3
753 0.621453 1.338610 0.364830 1.027499 0.549602 1.911238 0.900323 1.362479 0.927677 1.547515 ... 1.122675 0.274742 0.918703 0.476393 0.868560 0.595230 0.388647 0.576357 1.742615 2
754 0.631091 1.581180 0.415773 1.117232 0.664558 1.751241 0.992670 1.290885 0.875214 1.472359 ... 1.412468 0.207008 0.598717 0.453975 1.229169 0.566903 0.484697 0.628965 2.510765 4
755 0.579979 1.517567 0.568194 1.777840 0.885476 2.144906 1.175267 1.296606 0.872383 1.470840 ... 2.069379 0.000875 0.705432 0.524404 1.541395 0.011621 0.053865 0.336642 2.875262 3
756 0.876100 1.505179 0.205132 0.575300 0.391039 1.604383 0.700477 1.289552 0.893184 1.468811 ... 1.224224 0.272118 0.596912 0.309453 1.056667 0.660946 0.393194 0.458485 2.197579 2

757 rows × 6145 columns

codi_df['cluster'].value_counts()
4    241
1    157
2    153
3    141
0     65
Name: cluster, dtype: int64
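
NUM_CLUSTERS = 5 is a fixed design choice here; one quick way to sanity-check it (a sketch, not part of the original pipeline) is the elbow heuristic over the K-Means inertia:

# within-cluster sum of squares for a range of k values
ks = range(2, 11)
inertias = [KMeans(n_clusters=k).fit(codi_df.drop(columns='cluster')).inertia_ for k in ks]
plt.plot(ks, inertias, marker='o')
plt.xlabel('k')
plt.ylabel('inertia')
plt.show()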

t-SNE dimensionality reduction

tsne_codi = TSNE(n_components=2, verbose=1, perplexity=40, n_iter=300)
tsne_results_codi = tsne_codi.fit_transform(codi_arr)
[t-SNE] Computing 121 nearest neighbors...
[t-SNE] Indexed 757 samples in 0.007s...
[t-SNE] Computed neighbors for 757 samples in 3.028s...
[t-SNE] Computed conditional probabilities for sample 757 / 757
[t-SNE] Mean sigma: 7.224551
[t-SNE] KL divergence after 250 iterations with early exaggeration: 66.773010
[t-SNE] KL divergence after 300 iterations: 1.223199
tsne_results_codi.shape
(757, 2)

Building the features DataFrame

codi_df['tsne-2d-one'] = tsne_results_codi[:, 0]
codi_df['tsne-2d-two'] = tsne_results_codi[:, 1]
codi_df
0 1 2 3 4 5 6 7 8 9 ... 6137 6138 6139 6140 6141 6142 6143 cluster tsne-2d-one tsne-2d-two
0 0.695721 1.710319 0.204196 0.881802 0.453413 1.775246 0.829325 1.248698 0.956496 1.579597 ... 0.399076 0.315414 1.369288 0.589988 0.476239 0.479393 2.835139 2 8.580887 -11.019665
1 0.876267 1.528911 0.315072 1.045046 0.740249 1.676060 0.960913 1.214223 0.871376 1.377908 ... 0.954991 0.401580 1.165464 0.126094 0.195851 0.437154 2.310848 2 5.357522 -12.869209
2 0.971387 1.932212 0.041615 0.248952 0.278344 1.253941 0.551203 1.153246 1.013609 1.438511 ... 0.515104 1.430135 1.842285 1.584267 1.794690 2.662927 1.883419 0 -15.190707 7.228960
3 0.736790 1.654709 0.264454 0.555555 0.446634 1.580580 0.716179 1.260359 0.979936 1.504789 ... 0.801654 0.468268 1.322552 0.125541 0.057522 0.200236 2.640122 1 -3.320311 -0.505118
4 0.465451 1.772036 0.270101 0.790482 0.370361 1.520643 0.789837 1.279298 0.996089 1.471714 ... 0.496466 1.017152 1.756627 0.933950 1.252143 2.046726 2.612151 0 -14.348054 8.096408
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
752 0.539420 1.132082 0.559330 1.785141 0.866360 2.124033 1.267244 1.431463 0.798052 1.519512 ... 0.827952 0.310896 0.524080 0.498889 0.137738 0.139758 1.310194 3 -2.625929 -12.819452
753 0.621453 1.338610 0.364830 1.027499 0.549602 1.911238 0.900323 1.362479 0.927677 1.547515 ... 0.918703 0.476393 0.868560 0.595230 0.388647 0.576357 1.742615 2 3.646783 -10.591810
754 0.631091 1.581180 0.415773 1.117232 0.664558 1.751241 0.992670 1.290885 0.875214 1.472359 ... 0.598717 0.453975 1.229169 0.566903 0.484697 0.628965 2.510765 4 0.752487 -2.758420
755 0.579979 1.517567 0.568194 1.777840 0.885476 2.144906 1.175267 1.296606 0.872383 1.470840 ... 0.705432 0.524404 1.541395 0.011621 0.053865 0.336642 2.875262 3 -8.947572 1.171672
756 0.876100 1.505179 0.205132 0.575300 0.391039 1.604383 0.700477 1.289552 0.893184 1.468811 ... 0.596912 0.309453 1.056667 0.660946 0.393194 0.458485 2.197579 2 7.300136 -10.798800

757 rows × 6147 columns

t-SNE visualization

plt.figure(figsize=(16, 10))
sns.scatterplot(
    x='tsne-2d-one',
    y='tsne-2d-two',
    hue='cluster',
    palette=sns.color_palette('hls', 5),
    data=codi_df,
    legend='full',
    alpha=0.3
)
<AxesSubplot:xlabel='tsne-2d-one', ylabel='tsne-2d-two'>

png

Recommending the outfit most similar to a given one, using cosine similarity

# similarity function (cosine similarity)
def cos_sim(A, B):
    return np.dot(A, B) / (norm(A) * norm(B))
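
A quick sanity check of cos_sim: identical directions score (approximately) 1, orthogonal directions score 0.

print(cos_sim(np.array([1.0, 2.0]), np.array([1.0, 2.0])))  # ~1.0
print(cos_sim(np.array([1.0, 0.0]), np.array([0.0, 1.0])))  # 0.0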
# outfit recommendation function
def rec_codi(idx):
    # look up the cluster label
    cls = codi_df.loc[idx, 'cluster']
    # restrict the search to outfits in the same cluster
    same_df = codi_df[codi_df['cluster'] == cls]
    
    # collect (index, similarity) pairs over the 6144 feature columns
    result = []
    for i in same_df.index:
        if i == idx:
            continue
        result.append((i, cos_sim(same_df.loc[idx][:6144], same_df.loc[i][:6144])))
    
    # recommend the outfit with the highest cosine similarity (sort descending)
    final = sorted(result, key=lambda x: x[1], reverse=True)[0][0]
    
    # plot original vs. recommended top/bottom/shoes side by side
    rows = 3
    cols = 2
    axes = []
    fig = plt.figure(figsize=(10, 10))
    
    for a, b in zip(range(1, 7), ['Top', 'Top', 'Bottom', 'Bottom', 'Shoes', 'Shoes']):
        axes.append(fig.add_subplot(rows, cols, a))
        if a % 2 == 1:
            title_str = 'Original'
            final_idx = idx
        else:
            title_str = 'Recommended'
            final_idx = final
        
        subplot_title = title_str + ' ' + b
        axes[-1].set_title(subplot_title)
        
        if a in [1, 2]:
            plt.imshow(x_top[final_idx])
        elif a in [3, 4]:
            plt.imshow(x_bottom[final_idx])
        else:
            plt.imshow(x_shoes[final_idx])
            
    fig.tight_layout()
    plt.show()
rec_codi(298)

png

rec_codi(555)

png

6. Outfit recommendation using hashtags

len(hashtags.values())
4494

Creating the Word2Vec model

wv_model = Word2Vec(sentences=list(hashtags.values()), vector_size=100, window=5, min_count=1, workers=8)
wv_model.wv.vectors.shape
(1462, 100)

Inspecting a few samples

wv_model.wv.most_similar('꾸안꾸')
[('일상', 0.9993165135383606),
 ('일상적', 0.9991195797920227),
 ('간단', 0.9990447163581848),
 ('로퍼', 0.9988987445831299),
 ('깔끔', 0.9988948702812195),
 ('일상룩', 0.9988282918930054),
 ('체크스커트', 0.998749852180481),
 ('편안함', 0.9987215995788574),
 ('에코백', 0.9986320734024048),
 ('간편하게', 0.998620867729187)]
wv_model.wv.most_similar('섹시')
[('블랙코디', 0.9990940690040588),
 ('黑色', 0.9988331198692322),
 ('에코백', 0.9987908601760864),
 ('Black', 0.998751699924469),
 ('미니스커트', 0.9987427592277527),
 ('체인숄더백', 0.9987362623214722),
 ('베레모', 0.9986872673034668),
 ('프린팅티셔츠', 0.9986580610275269),
 ('레이스업부츠', 0.998623788356781),
 ('슬리브리스탑', 0.9986011981964111)]

Eigenvalue formulation (CCA)

inv = np.linalg.inv  # matrix inverse
mm = np.matmul  # matrix multiplication
def fit_std_eigenvalue_method(a, b):
    """
    Fits CCA parameters using the standard eigenvalue problem
    
    param a: Observations with shape (n_samps, p_dim)
    param b: Observations with shape (n_samps, q_dim)
    return: Linear transformations Wa and Wb
    """
    Xa = copy.deepcopy(a)
    Xb = copy.deepcopy(b)
    N, p = Xa.shape
    N, q = Xb.shape
    r = min(p, q)
    
    Xa -= Xa.mean(axis=0)
    Xa /= Xa.std(axis=0)
    Xb -= Xb.mean(axis=0)
    Xb /= Xb.std(axis=0)
    
    p = Xa.shape[1]
    C = np.cov(Xa.T, Xb.T)
    Caa = C[:p, :p]
    Cbb = C[p:, p:]
    Cab = C[:p, p:]
    Cba = C[p:, :p]
    
    # Either branch results in r * r matrix where r = min(p, q)
    if q < p:
        M = mm(mm(inv(Cbb), Cba), mm(inv(Caa), Cab))
    else:
        M = mm(mm(inv(Caa), Cab), mm(inv(Cbb), Cba))
        
    # Solving the characteristic equation
    #
    #     det(M - rho^2 I) = 0
    # 
    # is equivalent to solving for rho^2, which are the eigenvalues of the matrix
    eigvals, eigvecs = np.linalg.eig(M)
    rhos = np.sqrt(eigvals)
    
    # Ensure we go through eigenvectors in descending order
    inds = (-rhos).argsort()
    rhos = rhos[inds]
    eigvals = eigvals[inds]
    
    # NumPy returns each eigenvector as a column in a matrix
    eigvecs = eigvecs.T[inds].T
    Wb = eigvecs
    Wa = np.zeros((p, r))
    for i, (rho, wb_i) in enumerate(zip(rhos, Wb.T)):
        wa_i = mm(mm(inv(Caa), Cab), wb_i) / rho
        Wa[:, i] = wa_i
    
    return Wa, Wb
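
As a quick shape check on synthetic data (illustration only; the array names below are throwaway), the function returns r = min(p, q) canonical directions per view. Like the real run further down, it may emit a ComplexWarning when nearly-complex eigenvectors are cast to real.

Xa = np.random.randn(200, 6)
Xb = np.random.randn(200, 4)
Wa, Wb = fit_std_eigenvalue_method(Xa, Xb)
print(Wa.shape, Wb.shape)  # (6, 4) (4, 4)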

Outfit vectors

codi_arr.shape
(757, 6144)

Hashtag vectors

hash_idx = []
for i, codi in enumerate(codi_list):
    if i in except_list:
        continue
    hash_idx.append(int(codi.split('_')[1]))
hash_idx
[1,
 2,
 3,
 5,
 6,
 8,
 9,
 10,
 13,
 15,
 20,
 23,
 25,
 26,
 27,
 28,
 29,
 30,
 31,
 34,
 36,
 37,
 38,
 41,
 42,
 43,
 45,
 46,
 48,
 49,
 51,
 54,
 58,
 63,
 67,
 69,
 70,
 73,
 86,
 92,
 95,
 98,
 99,
 101,
 102,
 109,
 110,
 111,
 114,
 116,
 119,
 127,
 143,
 144,
 146,
 165,
 186,
 214,
 228,
 231,
 235,
 239,
 256,
 257,
 261,
 266,
 271,
 283,
 289,
 291,
 294,
 302,
 304,
 305,
 310,
 318,
 321,
 323,
 329,
 332,
 338,
 344,
 345,
 346,
 350,
 355,
 356,
 378,
 379,
 380,
 389,
 391,
 396,
 402,
 418,
 431,
 434,
 437,
 449,
 456,
 513,
 515,
 530,
 535,
 536,
 537,
 540,
 543,
 544,
 545,
 546,
 549,
 550,
 551,
 552,
 554,
 555,
 556,
 564,
 565,
 566,
 567,
 569,
 571,
 574,
 577,
 579,
 580,
 583,
 586,
 587,
 588,
 589,
 591,
 592,
 593,
 595,
 596,
 597,
 599,
 602,
 603,
 604,
 606,
 608,
 612,
 619,
 627,
 629,
 631,
 632,
 642,
 644,
 647,
 648,
 650,
 658,
 662,
 665,
 669,
 671,
 686,
 687,
 693,
 704,
 705,
 718,
 720,
 725,
 764,
 767,
 778,
 795,
 805,
 814,
 816,
 817,
 837,
 842,
 843,
 844,
 846,
 847,
 848,
 849,
 850,
 853,
 855,
 856,
 868,
 869,
 874,
 886,
 911,
 912,
 913,
 915,
 918,
 932,
 937,
 938,
 940,
 945,
 947,
 954,
 955,
 962,
 973,
 976,
 987,
 992,
 995,
 1007,
 1009,
 1015,
 1020,
 1021,
 1024,
 1029,
 1033,
 1057,
 1058,
 1059,
 1060,
 1061,
 1064,
 1065,
 1067,
 1068,
 1069,
 1070,
 1072,
 1073,
 1074,
 1075,
 1076,
 1077,
 1079,
 1081,
 1087,
 1088,
 1095,
 1098,
 1114,
 1121,
 1124,
 1127,
 1146,
 1149,
 1152,
 1167,
 1209,
 1242,
 1332,
 1336,
 1338,
 1346,
 1351,
 1361,
 1390,
 1405,
 1412,
 1434,
 1460,
 1478,
 1508,
 1554,
 1569,
 1575,
 1587,
 1589,
 1590,
 1598,
 1606,
 1608,
 1616,
 1622,
 1623,
 1624,
 1625,
 1637,
 1639,
 1641,
 1643,
 1644,
 1680,
 1718,
 1722,
 1754,
 1762,
 1767,
 1777,
 1779,
 1781,
 1787,
 1791,
 1792,
 1800,
 1804,
 1809,
 1822,
 1838,
 1840,
 1844,
 1849,
 1850,
 1851,
 1852,
 1853,
 1854,
 1856,
 1857,
 1858,
 1859,
 1860,
 1861,
 1862,
 1866,
 1869,
 1873,
 1874,
 1876,
 1877,
 1879,
 1880,
 1903,
 1905,
 1909,
 1910,
 1911,
 1912,
 1913,
 1916,
 1927,
 1931,
 1942,
 1957,
 1972,
 1979,
 1982,
 1985,
 1991,
 2003,
 2113,
 2115,
 2117,
 2118,
 2119,
 2120,
 2122,
 2125,
 2126,
 2130,
 2132,
 2145,
 2148,
 2240,
 2244,
 2377,
 2378,
 2393,
 2413,
 2423,
 2424,
 2427,
 2432,
 2449,
 2463,
 2467,
 2470,
 2477,
 2489,
 2497,
 2499,
 2506,
 2518,
 2534,
 2542,
 2549,
 2551,
 2562,
 2564,
 2582,
 2584,
 2604,
 2611,
 2612,
 2613,
 2614,
 2615,
 2619,
 2620,
 2621,
 2622,
 2623,
 2624,
 2626,
 2631,
 2632,
 2633,
 2634,
 2636,
 2637,
 2638,
 2639,
 2640,
 2644,
 2645,
 2646,
 2647,
 2648,
 2649,
 2650,
 2651,
 2652,
 2654,
 2655,
 2658,
 2659,
 2661,
 2663,
 2665,
 2670,
 2671,
 2674,
 2675,
 2676,
 2681,
 2684,
 2686,
 2688,
 2695,
 2699,
 2701,
 2702,
 2703,
 2710,
 2711,
 2713,
 2718,
 2719,
 2738,
 2739,
 2743,
 2746,
 2748,
 2749,
 2754,
 2762,
 2769,
 2788,
 2789,
 2790,
 2792,
 2795,
 2808,
 2816,
 2821,
 2838,
 2850,
 2875,
 2876,
 2878,
 2882,
 2883,
 2886,
 2887,
 2888,
 2890,
 2891,
 2893,
 2895,
 2896,
 2899,
 2902,
 2903,
 2904,
 2908,
 2912,
 2913,
 2919,
 2922,
 2926,
 2927,
 2929,
 2930,
 2931,
 2932,
 2933,
 2934,
 2937,
 2938,
 2939,
 2941,
 2942,
 2944,
 2949,
 2950,
 2951,
 2953,
 2954,
 2955,
 2956,
 2957,
 2960,
 2961,
 2962,
 2966,
 2969,
 2971,
 2973,
 2974,
 2976,
 2981,
 2986,
 2987,
 3000,
 3026,
 3031,
 3040,
 3045,
 3050,
 3054,
 3057,
 3061,
 3062,
 3135,
 3141,
 3148,
 3149,
 3151,
 3152,
 3154,
 3155,
 3156,
 3157,
 3158,
 3161,
 3162,
 3164,
 3165,
 3166,
 3169,
 3176,
 3177,
 3178,
 3179,
 3180,
 3182,
 3188,
 3190,
 3191,
 3193,
 3195,
 3197,
 3198,
 3200,
 3201,
 3203,
 3206,
 3207,
 3208,
 3209,
 3210,
 3211,
 3214,
 3217,
 3218,
 3220,
 3221,
 3222,
 3224,
 3229,
 3230,
 3235,
 3236,
 3237,
 3239,
 3240,
 3242,
 3244,
 3245,
 3246,
 3248,
 3252,
 3253,
 3255,
 3256,
 3258,
 3260,
 3265,
 3266,
 3270,
 3273,
 3275,
 3276,
 3278,
 3279,
 3281,
 3284,
 3285,
 3286,
 3291,
 3292,
 3295,
 3299,
 3301,
 3306,
 3307,
 3319,
 3320,
 3321,
 3326,
 3330,
 3334,
 3335,
 3338,
 3339,
 3347,
 3348,
 3349,
 3351,
 3357,
 3361,
 3366,
 3371,
 3373,
 3376,
 3383,
 3387,
 3389,
 3401,
 3403,
 3406,
 3409,
 3413,
 3417,
 3418,
 3419,
 3449,
 3463,
 3470,
 3475,
 3477,
 3478,
 3483,
 3484,
 3494,
 3495,
 3505,
 3509,
 3512,
 3516,
 3538,
 3542,
 3571,
 3587,
 3603,
 3605,
 3608,
 3621,
 3631,
 3632,
 3636,
 3639,
 3643,
 3648,
 3668,
 3677,
 3679,
 3685,
 3688,
 3689,
 3693,
 3701,
 3707,
 3708,
 3716,
 3717,
 3723,
 3726,
 3734,
 3751,
 3760,
 3763,
 3765,
 3770,
 3771,
 3774,
 3775,
 3779,
 3787,
 3802,
 3809,
 3812,
 3813,
 3816,
 3818,
 3820,
 3824,
 3840,
 3842,
 3843,
 3847,
 3857,
 3876,
 3925,
 4029,
 4049,
 4052,
 4055,
 4059,
 4074,
 4080,
 4084,
 4090,
 4095,
 4107,
 4124,
 4133,
 4134,
 4144,
 4149,
 4207,
 4219,
 4227,
 4249,
 4256,
 4276,
 4280,
 4295,
 4304,
 4305,
 4308,
 4309,
 4310,
 4313,
 4322,
 4325,
 4334,
 4336,
 4339,
 4342,
 4347,
 4348,
 4351,
 4358,
 4377,
 4378,
 4380,
 4385,
 4390,
 4405,
 4417,
 4430,
 4454,
 4476]
hash_arr = np.zeros((len(hash_idx), 100), dtype=np.float32)
cnt = 0

for k, v in hashtags.items():
    if k not in hash_idx:
        continue
    else:
        # turn the (possibly multiple) hashtags of one outfit into a single vector
        temp = np.zeros((100,), dtype=np.float32)
        # if the outfit has no hashtags, fall back to tiny random values
        if len(v) == 0:
            for i in range(temp.shape[0]):
                temp[i] = np.abs(np.random.normal(0, 0.01, 1))
        else:
            # if there are several hashtag vectors, average them
            for item in v:
                temp += wv_model.wv.get_vector(item)
            temp /= len(v)
        
        # write into the overall hashtag-vector array
        hash_arr[cnt, :] = temp
        cnt += 1
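
Each outfit's hashtag vector is simply the mean of its tag vectors. For instance, since codi 1 is the first index kept in hash_idx, row 0 of hash_arr should reproduce that mean (a sketch, up to float32 rounding):

v = np.mean([wv_model.wv.get_vector(t) for t in hashtags[1]], axis=0)
print(np.allclose(v, hash_arr[0], atol=1e-5))  # True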

Checking the outfit and hashtag vectors

print(codi_arr.shape, hash_arr.shape)
(757, 6144) (757, 100)

Entries that are exactly 0 are replaced with tiny values so the later computation (standardization and matrix inversion) stays stable

# replace exact zeros with small positive noise
mask = codi_arr == 0
codi_arr[mask] = np.abs(np.random.normal(0, 0.01, mask.sum()))

Eigenvalue computation

W_codi, W_hash = fit_std_eigenvalue_method(codi_arr, hash_arr)
/home/2918895/.virtualenvs/venv/lib/python3.6/site-packages/ipykernel_launcher.py:52: ComplexWarning: Casting complex values to real discards the imaginary part
print(W_codi.shape, W_hash.shape)
(6144, 100) (100, 100)
codi_wcodi = np.dot(codi_arr, W_codi)
hash_whash = np.dot(hash_arr, W_hash)

hash_whash comes out complex-valued (np.linalg.eig can return complex eigenvectors for the non-symmetric M), so convert it to its real part

hash_whash = hash_whash.real
print(codi_wcodi.shape, hash_whash.shape)
(757, 100) (757, 100)
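
One hedged way to gauge how well the two projected views line up is the correlation of the first canonical pair (a diagnostic sketch, not part of the original notebook):

# correlation between the first canonical variates of the two views
r1 = np.corrcoef(codi_wcodi[:, 0], hash_whash[:, 0])[0, 1]
print(r1)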

Recommendation by comparing hashtag-outfit similarity

Re-indexing the hashtag dictionary

hash_dic = dict()
c = 0
for k, v in hashtags.items():
    if k in hash_idx:
        hash_dic[c] = v
        c += 1
hash_dic
{0: ['심플코디', '2021autunm', '코디', '무채색'],
 1: ['깔끔', '편안함', '간단', '캐주얼룩'],
 2: ['데일리룩', '간단', '간편', '가을코디', '깔끔'],
 3: ['20대룩', '심플'],
 4: ['누구나', '가을', '심플룩', '간편하게', '20대코디', '대학생', '자켓'],
 5: ['일상', '20대코디', '프레피룩'],
 6: ['깔끔', '모던시크', '간단'],
 7: ['가을코디', '베이직', '심플', '깔끔'],
 8: ['모던', '프레피룩', '블랙'],
 9: ['일상', '데일리', '일상적', '캐주얼'],
 10: ['간편', '20대코디', '가을데일리', '오오티디'],
 11: ['데일리룩', '심플코디', '예쁨', '자켓', '팬츠'],
 12: ['20대', '스커트', '데일리룩'],
 13: ['팬츠', '시크룩', '일상', '20대코디', '프레피룩'],
 14: ['20대코디', '프레피룩', '브라운코디', '아가일패턴'],
 15: ['20대코디', '가을', '프레피룩'],
 16: ['블랙', '블라우스', '프레피룩', '20대룩', '러블리'],
 17: ['예쁨', '가을', '20대', '프레피룩'],
 18: ['프레피룩', '가을', '브라운코디', '데일리'],
 19: ['니트', '20대룩', '간편'],
 20: ['오오티디', '가을', '플렛', '20대코디', '프레피룩'],
 21: ['20대룩', '가을', '프레피룩', '깔끔단정'],
 22: ['20대룩', '프레피룩', '오피스룩'],
 23: ['모던', '프레피룩', '러블리룩', '오오티디'],
 24: ['브라운코디', '프레피룩', '유니크룩'],
 25: ['시크', '모던시크', '프레피룩'],
 26: ['숄더백', '20대코디', '일상'],
 27: ['여름', '꾸안꾸', '심플룩', '캐주얼', '간편하게'],
 28: ['20대코디', '편안함', '꾸안꾸', '가디건'],
 29: ['20대코디', '베이직', '일상룩', '심플룩'],
 30: ['꾸안꾸', '일상룩', '심플', '20대코디'],
 31: ['블랙', '모노톤', '유니크룩', '깅엄체크'],
 32: ['일상적', '코디', '20대코디', '미리가을'],
 33: ['간편', '일상', '20대', '오오티디', '심플코디'],
 34: ['코디', '예쁨', '심플', '20대'],
 35: ['심플룩', '대학생', '코디', '아가일베스트', '오오티디'],
 36: ['사진', '간단', '예쁨', '오피스룩', '20대코디'],
 37: ['데이트', '깔끔단정', '이쁨', '20대코디'],
 38: ['예쁨', '샌들&슬리퍼', '깔끔', '심플룩', '데일리', '20대코디'],
 39: ['심플', '20대코디', '가디건&베스트'],
 40: ['패션', '20대코디', '여름코디', '네크리스'],
 41: ['간편', '여름룩', '심플'],
 42: ['간편하게', '심플룩', '모노톤'],
 43: ['오오티디', '간단', '20대코디'],
 44: ['오오티디', '슬리브리스', '시크룩', '블랙', '편안함'],
 45: ['심플코디', '누구나', '일상적', '패션'],
 46: ['깔끔', '20대코디', '모던'],
 47: ['시원', '스트라이프', '심플'],
 48: ['간편하게', '20대코디', '여름'],
 49: ['코디', '20대코디', '유니크', '여름'],
 50: ['데이트', '편하게', '20대코디'],
 51: ['오오티디', '20대', '패션', '심플룩'],
 52: ['모던', '20대코디', '심플코디'],
 53: ['일상룩', '심플코디', '일상적', '캐주얼룩'],
 54: ['데이트룩', '일상적', '20대코디'],
 55: ['심플룩', '여름코디', '베이직'],
 56: ['베이직', '코디', '20대룩', '간편하게'],
 57: ['데일리', '일상', '모던'],
 58: ['20대코디', '간편', '블랙&화이트'],
 59: ['20대코디', '네크리스', '캐주얼', '20대코디'],
 60: ['데이트', '대학생룩', '20대코디', '러블리룩'],
 61: ['간편', '캐주얼', '예쁨'],
 62: ['20대코디', '깔끔단정'],
 63: ['20대', '편안함', '20대코디'],
 64: ['20대', '데님', '심플룩'],
 65: ['牛仔褲', '腰帶', '球鞋/布鞋'],
 66: ['襪子', '戒指', '針織衫'],
 67: ['牛仔褲', '針織衫', '腰帶'],
 68: ['無袖', '裙子', '針織外套'],
 69: ['長褲', '無袖', '襯衫'],
 70: ['長袖上衣', '裙子', '針織外套'],
 71: ['夾克', '針織衫', '裙子'],
 72: ['針織外套', '牛仔褲', '腰帶'],
 73: ['長袖上衣', '長褲', '靴子'],
 74: ['無袖', '裙子', '針織外套'],
 75: ['針織外套', '托特包', '無袖'],
 76: ['裙子', '針織衫', '夾克'],
 77: ['photo', '裙子', '短袖上衣'],
 78: ['大衣', '針織衫', '裙子'],
 79: ['長褲', '短袖上衣', '針織外套'],
 80: ['項鍊', '長袖上衣', '短褲'],
 81: ['大衣', '無袖', '牛仔褲'],
 82: ['photo', '針織衫', '裙子'],
 83: ['牛仔褲', '針織衫', '球鞋/布鞋'],
 84: ['photo', '短袖上衣', '短褲'],
 85: ['襯衫', '無袖', '牛仔褲'],
 86: ['襯衫', '牛仔褲', '及膝洋裝'],
 87: ['戒指', '無袖', '針織外套'],
 88: ['涼鞋', 'photo', '針織外套'],
 89: ['photo', '針織衫', '牛仔褲'],
 90: ['背包', '打底褲', '短袖上衣'],
 91: ['短袖上衣', '短褲', '涼鞋'],
 92: ['靴子', '針織外套', '針織衫'],
 93: ['牛仔褲', '無袖', '針織外套'],
 94: ['長褲', '針織外套', '球鞋/布鞋'],
 95: ['photo', '襯衫', '無袖'],
 96: ['短袖上衣', '裙子', '耳環'],
 97: ['長袖上衣', '牛仔褲', '球鞋/布鞋'],
 98: ['無袖', '夾克', '短褲'],
 99: ['牛仔褲', '短袖上衣', '涼鞋'],
 100: ['裙子', '長袖上衣', '靴子'],
 101: ['長袖上衣', '長褲', '夾克外套'],
 102: ['대학생'],
 103: ['10~20대'],
 104: ['데님'],
 105: ['슬리브리스'],
 106: ['자켓', '슬리브리스', '스커트'],
 107: ['오오티디', '오오티디'],
 108: ['가을코디'],
 109: ['牛仔褲'],
 110: ['스커트'],
 111: ['10대~20대'],
 112: ['デニムパンツ'],
 113: ['반팔티'],
 114: ['가디건&베스트', '슬리브리스', '스커트'],
 115: ['양말'],
 116: ['배경'],
 117: ['無袖'],
 118: ['캐주얼'],
 119: ['대학생룩'],
 120: ['短褲'],
 121: ['여름코디'],
 122: ['대학생'],
 123: ['편안함'],
 124: ['토트백'],
 125: ['러블리', '러블리'],
 126: ['スカート'],
 127: ['롱원피스'],
 128: ['깔끔'],
 129: ['短裙'],
 130: ['간단'],
 131: ['심플코디'],
 132: ['스커트'],
 133: ['캐주얼'],
 134: ['슬리브리스'],
 135: ['스니커즈'],
 136: ['이어링'],
 137: ['귀여운'],
 138: ['꾸안꾸'],
 139: ['깔끔'],
 140: ['캔버스백&에코백'],
 141: ['블랙'],
 142: ['캐쥬얼룩'],
 143: ['일상룩'],
 144: ['데이트'],
 145: ['폰케이스'],
 146: ['パンツ'],
 147: ['봄코디'],
 148: ['일상룩'],
 149: ['심플코디'],
 150: ['러블리룩'],
 151: ['반바지'],
 152: ['10대~20대'],
 153: ['집업&점퍼'],
 154: ['短褲'],
 155: ['스커트'],
 156: ['플렛'],
 157: ['사랑스러운'],
 158: ['심플룩'],
 159: ['사랑스러운'],
 160: ['10~20대'],
 161: ['여성스러운'],
 162: ['시크'],
 163: ['모던시크'],
 164: ['팬츠'],
 165: ['팬츠'],
 166: ['심플베이직'],
 167: ['깔끔단정'],
 168: ['러블리룩'],
 169: ['간단'],
 170: ['꾸안꾸'],
 171: ['데일리룩'],
 172: ['데님', '니트', '스니커즈'],
 173: ['슬리브리스', '데님', '가디건&베스트'],
 174: ['반팔티', '데님', '자켓'],
 175: ['팬츠', '니트', '스니커즈'],
 176: ['슬리브리스', '팬츠', '블라우스'],
 177: ['반팔티', '스커트', '자켓'],
 178: ['블라우스', '팬츠', '로퍼'],
 179: ['긴팔티', '팬츠', '집업&점퍼'],
 180: ['블라우스', '데님', '로퍼'],
 181: ['로퍼', '블라우스', '데님'],
 182: ['토트백', '블라우스', '팬츠'],
 183: ['블라우스', '팬츠', '로퍼'],
 184: ['숄더백', '니트', '스커트'],
 185: ['반바지', '모자', '니트'],
 186: ['캔버스백&에코백', '긴팔티', '팬츠'],
 187: ['팬츠', '블라우스', '모자'],
 188: ['팬츠', '로퍼', '가디건&베스트'],
 189: ['니트', '스커트', '자켓'],
 190: ['반바지', '가디건&베스트', '숄더백'],
 191: ['스커트', '긴팔티', '부츠'],
 192: ['슬리브리스', '데님', '집업&점퍼'],
 193: ['슬리브리스', '반바지', '블라우스'],
 194: ['숄더백', '스커트', '긴팔티'],
 195: ['블라우스', '스커트', '자켓'],
 196: ['긴팔티', '스커트', '집업&점퍼'],
 197: ['니트', '데님', '로퍼'],
 198: ['니트', '숄더백', '반바지', '꾸안꾸'],
 199: ['슬리브리스', '데님', '가디건&베스트'],
 200: ['스커트', '니트', '숄더백'],
 201: ['데님', '니트', '로퍼'],
 202: ['데일리', '니트', '꾸안꾸', '간절기', 'simple', '스웨터', '청바지'],
 203: ['슬리브리스', '데님', '샌들&슬리퍼', '간절기니트'],
 204: ['팬츠', '블라우스', '로퍼'],
 205: ['office', '오피스룩'],
 206: ['office', '오피스룩'],
 207: ['가디건&베스트', '슬리브리스', '팬츠'],
 208: ['스커트', '반팔티', '숄더백'],
 209: ['팬츠', '블라우스', '숄더백'],
 210: ['반팔티', '가디건&베스트', '데님'],
 211: ['데님', '반팔티', '가디건&베스트'],
 212: ['슬리브리스', '팬츠', '집업&점퍼'],
 213: ['숄더백', '블라우스', '샌들&슬리퍼', '꾸안꾸'],
 214: ['스니커즈', '팬츠', '블라우스'],
 215: ['데님', '로퍼', '반팔티'],
 216: ['양말', '스커트', '가디건&베스트'],
 217: ['팬츠', '긴팔티', '스니커즈'],
 218: ['숄더백', '양말', '스커트', 'pleatedskirt', 'oliviarodrigo', '90s'],
 219: ['양말', '스커트', '반팔티', 'pleatedskirt'],
 220: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 221: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 222: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 223: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 224: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 225: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 226: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 227: ['2021',
  '가을',
  '프레피룩',
  '체크팬츠',
  '아가일패턴',
  '가디건',
  'Daily',
  'Ootd',
  '블랙&화이트'],
 228: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 229: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 230: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 231: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 232: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 233: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 234: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 235: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 236: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 237: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 238: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 239: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 240: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 241: ['2021', '룩', '여름', '발랄', 'Daily', 'OOTD'],
 242: ['2021', '룩', '여름', '발랄', 'Daily', 'OOTD'],
 243: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 244: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 245: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 246: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 247: ['2021', '봄', '야외활동', '룩', 'Daily', 'OOTD'],
 248: ['2021', '봄', '로맨틱', 'Daily', 'OOTD'],
 249: ['2021', '봄', '로맨틱', 'Daily', 'OOTD'],
 250: ['2021', '봄', '로맨틱', 'Daily', 'OOTD'],
 251: ['2021', '봄', '로맨틱', '그런지', 'Daily', 'OOTD'],
 252: ['2020', '하이틴', '데일리', 'Daily', 'OOTD'],
 253: ['오오티디',
  '데이트코디',
  '봄',
  '데일리코디',
  '2021여름',
  '여름',
  '2021summer',
  'Summercode',
  'Simplecode',
  'Highteens',
  'Casualcode',
  '夏',
  '夏コーデ',
  'summerlook'],
 254: ['오오티디',
  '데이트코디',
  '봄',
  '데일리코디',
  '2021여름',
  '여름',
  '2021summer',
  'Summercode',
  'Simplecode',
  'Highteens',
  'Casualcode',
  '夏',
  '夏コーデ',
  'summerlook'],
 255: ['오오티디',
  '데이트코디',
  '봄',
  '데일리코디',
  '2021여름',
  '여름',
  '2021summer',
  'Summercode',
  'Simplecode',
  'Highteens',
  'Casualcode',
  '夏',
  '夏コーデ',
  'summerlook'],
 256: ['오오티디',
  '데이트코디',
  '봄',
  '데일리코디',
  '2021여름',
  '여름',
  '2021summer',
  'Summercode',
  'Simplecode',
  'Highteens',
  'Casualcode',
  '夏',
  '夏コーデ',
  'summerlook'],
 257: ['오오티디',
  '데이트코디',
  '봄',
  '데일리코디',
  '2021여름',
  '여름',
  '2021summer',
  'Summercode',
  'Simplecode',
  'Highteens',
  'Casualcode',
  '夏',
  '夏コーデ',
  'summerlook'],
 258: ['오오티디',
  '데이트코디',
  '봄',
  '데일리코디',
  '2021여름',
  '여름',
  '2021summer',
  'Summercode',
  'Simplecode',
  'Highteens',
  'Casualcode',
  '夏',
  '夏コーデ',
  'summerlook'],
 259: ['오오티디',
  '데이트코디',
  '봄',
  '데일리코디',
  '2021여름',
  '여름',
  '2021summer',
  'Summercode',
  'Simplecode',
  'Highteens',
  'Casualcode',
  '夏',
  '夏コーデ',
  'summerlook'],
 260: ['오오티디',
  '데이트코디',
  '봄',
  '데일리코디',
  '2021여름',
  '여름',
  '2021summer',
  'Summercode',
  'Simplecode',
  'Highteens',
  'Casualcode',
  '夏',
  '夏コーデ',
  'summerlook'],
 261: ['오오티디',
  '데이트코디',
  '봄',
  '데일리코디',
  '2021여름',
  '여름',
  '2021summer',
  'Summercode',
  'Simplecode',
  'Highteens',
  'Casualcode',
  '夏',
  '夏コーデ',
  'summerlook'],
 262: ['오오티디',
  '데이트코디',
  '봄',
  '데일리코디',
  '2021여름',
  '여름',
  '2021summer',
  'Summercode',
  'Simplecode',
  'Highteens',
  'Casualcode',
  '夏',
  '夏コーデ',
  'summerlook'],
 263: ['데이리코디',
  '2021summer',
  'Summercode',
  '오오티디',
  '데이트코디',
  '여름',
  '2021여름',
  '여름룩',
  '여름코디',
  'Simplecode',
  '夏コーデ',
  '夏'],
 264: ['데이리코디',
  '2021summer',
  'Summercode',
  '오오티디',
  '데이트코디',
  '여름',
  '2021여름',
  '여름룩',
  '여름코디',
  'Simplecode',
  '夏コーデ',
  '夏'],
 265: ['데이리코디',
  '2021summer',
  'Summercode',
  '오오티디',
  '데이트코디',
  '여름',
  '2021여름',
  '여름룩',
  '여름코디',
  'Simplecode',
  '夏コーデ',
  '夏'],
 266: ['ootd',
  'Dailycode',
  '2021spring',
  "90'sfashion",
  'Springcode',
  'Simplecode',
  "90'slook",
  'summer',
  '여름코디',
  '여름룩',
  '여름',
  'Highteens',
  "90'sコーデ",
  '学生コーデ',
  '大学生',
  '学校コーデ',
  'casuallook',
  'カジュアル',
  '大人カジュアル',
  '오오티디',
  '봄',
  '데일리룩',
  '데일리코디',
  '데이트룩',
  '데이트코디',
  '캐주얼룩',
  '春コーデ',
  'シンプルコーデ'],
 267: ['ootd',
  'Dailycode',
  '2021spring',
  "90'sfashion",
  'Springcode',
  'Simplecode',
  "90'slook",
  'Highteens',
  "90'sコーデ",
  '学生コーデ',
  '大学生',
  '学校コーデ',
  'casuallook',
  'カジュアル',
  '大人カジュアル',
  '오오티디',
  '봄',
  '데일리룩',
  '데일리코디',
  '데이트룩',
  '데이트코디',
  '캐주얼룩',
  '春コーデ',
  'シンプルコーデ'],
 268: ['ootd',
  'Dailycode',
  '2021spring',
  "90'sfashion",
  'Springcode',
  'Simplecode',
  "90'slook",
  'Highteens',
  "90'sコーデ",
  '学生コーデ',
  '大学生',
  '学校コーデ',
  'casuallook',
  'カジュアル',
  '大人カジュアル',
  '오오티디',
  '봄',
  '데일리룩',
  '데일리코디',
  '데이트룩',
  '데이트코디',
  '캐주얼룩',
  '春コーデ',
  'シンプルコーデ'],
 269: [],
 270: [],
 271: [],
 272: [],
 273: [],
 274: [],
 275: [],
 276: [],
 277: [],
 278: [],
 279: [],
 280: [],
 281: [],
 282: [],
 283: [],
 284: [],
 285: [],
 286: [],
 287: [],
 288: [],
 289: [],
 290: [],
 291: [],
 292: [],
 293: [],
 294: [],
 295: [],
 296: [],
 297: [],
 298: [],
 299: [],
 300: [],
 301: [],
 302: [],
 303: [],
 304: ['가을코디',
  '가을데일리룩',
  '프레피룩',
  '스쿨룩',
  '니트코디',
  '화이트셔츠',
  '봄코디',
  '체크스커트',
  '베이지',
  '네이비',
  '블랙',
  '데이트룩'],
 305: ['가을코디',
  '가을데일리룩',
  '데이트룩',
  '프레피룩',
  '스쿨룩',
  '체크스커트',
  '니트조끼',
  '화이트셔츠',
  '흰운동화',
  '10대코디'],
 306: ['프레피룩',
  '스쿨룩',
  '하이틴룩',
  '가을코디',
  '가을데일리룩',
  '니트조끼',
  '체크스커트',
  '로퍼',
  '폰케이스',
  '봄코디'],
 307: ['가을데일리룩',
  '가을코디',
  '프레피룩',
  '봄코디',
  '니트조끼',
  '체크스커트',
  '로퍼',
  '베이지',
  '블루',
  '스쿨룩',
  '하이틴룩',
  '데이트룩',
  '10대코디'],
 308: ['가을데일리룩',
  '프레피룩',
  '니트코디',
  '가을코디',
  '봄코디',
  '체크스커트',
  '운동화',
  '그린',
  '브라운',
  '데이트룩',
  '스쿨룩',
  '하이틴룩',
  '10대코디'],
 309: ['가을데일리룩',
  '데이트룩',
  '프레피룩',
  '체크니트',
  '니트조끼',
  '플리츠스커트',
  '블랙',
  '그레이',
  '스쿨룩',
  '하이틴룩',
  '봄코디',
  '가을코디'],
 310: ['가을코디', '10대코디', '20대코디', '스쿨룩', '프레피룩', '데이트룩', '니트조끼', '체크스커트', '로퍼'],
 311: ['가을코디',
  '가디건코디',
  '아가일패턴',
  '니트',
  '베이지',
  '블랙',
  '화이트',
  '에코백',
  '10대코디',
  '20대코디',
  '데이트룩',
  '데일리룩',
  'ootd'],
 312: ['깔끔단정',
  '꾸안꾸',
  '데이트룩',
  '가디건코디',
  '가을코디',
  '봄코디',
  '데일리룩',
  '20대코디',
  '결혼식코디',
  '베이지',
  '브라운',
  '블랙',
  '러블리'],
 313: ['편안함',
  '봄',
  '20대코디',
  '블라우스',
  '가을코디',
  '데이트룩',
  '니트조끼',
  '화이트셔츠',
  '그린',
  '청치마',
  '운동화',
  '데일리룩',
  '10대코디',
  '하이틴룩'],
 314: ['가을코디',
  '20대코디',
  '데일리룩',
  '데이트룩',
  '유니크',
  '스트릿패션',
  '니트코디',
  '화이트셔츠',
  '아가일조끼',
  '링귀걸이',
  '블랙코디',
  '워커',
  '아이돌코디'],
 315: ['20대',
  '편안함',
  '일상',
  '20대코디',
  '10대코디',
  '하이틴룩',
  '가을코디',
  '스쿨룩',
  '니트코디',
  '화이트셔츠',
  '크롭셔츠',
  '브라우코디',
  '베이지',
  '블랙',
  '로퍼',
  '데이트코디',
  '가을데이트룩',
  '가을감성'],
 316: ['편안함',
  '봄',
  '일상',
  '20대코디',
  '10대코디',
  '하이틴룩',
  '스쿨룩',
  '니트코디',
  '레이어드룩',
  '화이트셔츠',
  '핑크',
  '체크스커트',
  '데이트룩',
  '아이돌코디',
  '러블리',
  '가을코디'],
 317: ['데일리룩',
  '여름코디',
  '가을코디',
  '산책룩',
  '대학생코디',
  '고등학생코디',
  '도서관룩',
  '캐주얼',
  '백팩',
  '점퍼',
  '반팔티',
  '블랙'],
 318: ['트윈룩',
  '20대트윈룩',
  '친구랑트윈룩',
  '초록색코디',
  '그린포인트',
  '데이트코디',
  '결혼식코디',
  '20대코디',
  '화이트',
  '여름코디',
  '친구랑커플룩'],
 319: ['트윈룩', '여름코디', '봄코디', '화이트블라우스', '핑크코디', '러블리', '블라우스코디'],
 320: ['아이돌룩',
  '아이돌코디',
  '여자아이돌',
  '트윈룩',
  '댄스룩',
  '댄스복',
  '댄스팀',
  '축제코디',
  '여름코디',
  '레드포인트',
  '레드코디',
  '화이트',
  '블랙',
  '캐주얼',
  '유니크'],
 321: ['아이돌룩',
  '아이돌코디',
  '여자아이돌',
  '댄스룩',
  '댄스복',
  '댄스팀',
  '유니크',
  '블랙앤옐로우',
  '블랙',
  '옐로우',
  '워커',
  '힐',
  '블랙미니원피스',
  '니트코디',
  '블랙코디',
  '축제'],
 322: ['아이돌룩',
  '아이돌코디',
  '블루코디',
  '화이트',
  '블루앤화이트',
  '포카리룩',
  '댄스룩',
  '댄스팀',
  '댄스복',
  '여자아이돌',
  '캐주얼',
  '여름코디',
  '축제코디'],
 323: ['아이돌룩',
  '아이돌코디',
  '여자아이돌',
  '댄스룩',
  '댄스복',
  '춤출때',
  '하늘색코디',
  '화이트',
  '러블리',
  '여름코디'],
 324: ['아이돌룩',
  '트윈룩',
  '댄스복',
  '댄스팀',
  '블랙코디',
  '레드코디',
  '블랙앤레드',
  '여자아이돌',
  '캐주얼',
  '유니크',
  '댄스룩'],
 325: ['오오티디',
  '10~20대',
  '캐주얼룩',
  '10대코디',
  '대학생룩',
  '반팔티셔츠',
  '연보라',
  '데님',
  '찢청',
  '백팩',
  '화이트',
  '피크닉',
  '여름코디',
  '편안한',
  '데일리룩',
  '도서관룩'],
 326: ['일상',
  '반팔티',
  '캐주얼룩',
  '테니스스커트',
  '스쿨룩',
  '하이틴룩',
  '여름코디',
  '옐로우',
  '포인트컬러',
  '스니커즈',
  '플라워귀걸이',
  '10대코디'],
 327: ['러블리룩',
  '데이트룩',
  '심플룩',
  '예쁨',
  '데일리룩',
  '봄코디',
  '여름코디',
  '가디건코디',
  '청치마',
  '슬리브리스',
  '화이트',
  '연보라',
  '블랙',
  '샌들',
  '10대코디',
  '20대코디',
  '캐주얼',
  '대학생코디'],
 328: ['캐주얼룩',
  '데일리룩',
  '에코백',
  '옐로우',
  '프린팅티셔츠',
  '연청바지',
  '청바지코디',
  '귀걸이',
  '부츠컷',
  '데님',
  '블랙',
  '여름코디',
  '10대코디',
  '20대코디',
  '여행룩',
  '피크닉',
  '편안함',
  '대학생코디'],
 329: ['캐주얼룩',
  '친구랑놀러갈때',
  '가디건코디',
  '연청',
  '청바지코디',
  '화이트',
  '그린',
  '블랙',
  '실버',
  '여름코디',
  '여름휴가',
  '바캉스',
  '피크닉',
  '물놀이',
  '여름데일리룩'],
 330: ['캐주얼룩',
  '친구랑놀러갈때',
  '사랑스러운',
  '토트백',
  '스니커즈',
  '크롭니트',
  '프릴니트',
  '아이보리',
  '베이지',
  '화이트',
  '반팔자켓',
  '여름코디',
  '피크닉',
  '여행룩',
  '청바지코디',
  '연청',
  '데일리룩',
  '데이트룩',
  '썸머룩'],
 331: ['캐주얼룩',
  '친구랑놀러갈때',
  '여름코디',
  '청치마',
  '연청',
  '반팔티',
  '프린팅티셔츠',
  '블랙',
  '데일리룩',
  '여름데일리룩',
  '대학생코디',
  '10대코디',
  '20대코디',
  '피크닉',
  '편안함'],
 332: ['20대룩',
  '블랙',
  '캐쥬얼',
  '스트릿패션',
  '유니크',
  '시크',
  '블랙코디',
  '워커',
  '타이다이',
  '크롭티',
  '미니스커트',
  '댄스룩',
  '섹시'],
 333: ['여름코디',
  '결혼식코디',
  '하객룩',
  '반팔자켓',
  '여름자켓',
  '그레이',
  '나시코디',
  '블랙',
  '화이트',
  '데일리룩',
  '데이트룩',
  '세미정장'],
 334: ['여름코디',
  '20대코디',
  '데이트룩',
  '결혼식코디',
  '하객룩',
  '클래식',
  '스카이블루',
  '하늘색코디',
  '여름코디'],
 335: ['봄코디',
  '여름코디',
  '초여름',
  '블라우스코디',
  '화이트블라우스',
  '청바지',
  '연청바지',
  '가디건코디',
  '차콜',
  '브이넥가디건',
  '샌들',
  '데일리룩',
  '데이트룩'],
 336: ['캐주얼',
  '봄코디',
  '반바지코디',
  '프린팅티셔츠',
  '스케이트보드룩',
  '스니커즈',
  '미니백',
  '블랙',
  '레드',
  '화이트',
  '연청반바지',
  '데일리룩',
  '여행룩',
  '운동룩'],
 337: ['데이트룩',
  '데일리룩',
  '봄코디',
  '가을코디',
  '블라우스',
  '블랙',
  '블랙&화이트',
  '자켓코디',
  '플리츠스커트',
  '그레이',
  '헤어밴드',
  '정장',
  '하객룩'],
 338: ['스커트',
  '일상',
  '가디건',
  '스카이블루',
  '체크스커트',
  '화이트',
  '데이트룩',
  '봄코디',
  '데일리룩',
  '여행',
  '특별한날',
  '꽃구경'],
 339: ['운동할때', '산책', '데일리룩', '트레이닝복', '레깅스', '슬리브리스탑', '후드집업', '봄'],
 340: ['학생룩',
  '데이트코디',
  '하이틴룩',
  '러블리룩',
  '일상',
  '데일리룩',
  '대학생룩',
  '청치마코디',
  '니트조끼',
  '스트라이프셔츠',
  '로퍼',
  '블랙',
  '10대코디',
  '20대코디',
  '여행룩',
  '데이트룩',
  '봄코디',
  '가을코디'],
 341: ['학생',
  '대학생',
  '편안함',
  '오오티디',
  '스니커즈',
  '일상',
  '캐주얼룩',
  '스포티룩',
  '캡모자',
  '반바지코디',
  '블랙&화이트',
  '봄코디',
  '여름코디',
  '데일리룩',
  '운동',
  '산책'],
 342: ['봄데이트룩',
  '데이트룩',
  '편하게',
  '데일리룩',
  '20대코디',
  '30대코디',
  '부츠컷팬츠',
  '데님',
  '니트뷔스티에',
  '레이어드룩',
  '화이트셔츠',
  '루즈핏셔츠',
  '트렌치코트',
  '롱코트',
  '로퍼',
  '베이지',
  '화이트',
  '블랙',
  '시계',
  '하트귀걸이',
  '캠퍼스룩',
  '대학생코디',
  '직장인패션'],
 343: ['심플', '데일리', '가을준비'],
 344: ['심플', '데일리'],
 345: ['모던', '심플', '데일리'],
 346: ['심플', '데일리'],
 347: ['심플', '데일리'],
 348: ['심플룩', '캐주얼룩'],
 349: ['캐주얼룩', '데일리'],
 350: ['오오티디', '심플', '데일리'],
 351: ['캐주얼룩', '심플룩'],
 352: ['시크', '데일리', '여름'],
 353: ['심플룩', '캐주얼룩'],
 354: ['간편', '데일리룩'],
 355: ['꾸안꾸', '캐쥬얼'],
 356: ['오오티디'],
 357: ['편안함', '데이트룩'],
 358: ['ootd',
  'Simplecode',
  'Dailycode',
  '韓国ファッション',
  'デイリーコーデ',
  '秋コーデ',
  '2021autumn',
  'シンプルベーシック',
  'カジュアルファッション',
  'カーディガン',
  'デニムスカート',
  'スニーカー',
  'ブルーコーデ'],
 359: ['ootd',
  'Simplecode',
  '2021autumn',
  'Dailycode',
  'シンプルベーシック',
  '秋コーデ',
  'モノトーンコーデ',
  'ストリートファッション',
  'デイリーコーデ',
  'ユニセックス',
  'ニットベスト',
  '半袖tシャツ',
  'ニット帽',
  'デニム',
  'スニーカー',
  '腕時計'],
 360: ['ootd',
  '2021summer',
  'Simplecode',
  'Dailycode',
  '韓国ファッション',
  'ストリートファッション',
  'デイリーコーデ',
  'シンプルベーシック',
  'モノトーンコーデ',
  'デニムパンツ',
  'サンダル',
  '夏コーデ',
  '大人コーデ'],
 361: ['ootd',
  'Simplecode',
  '2021summer',
  'Dailycode',
  'Monotonecode',
  '韓国ファッション',
  '夏コーデ',
  'ストリートファッション',
  'デイリーコーデ',
  'モノトーンコーデ',
  'シンプルベーシック',
  'カーディガン',
  'キャミソール',
  'サンダル'],
 362: ['ootd',
  '2021summer',
  'Simplecode',
  'Dailycode',
  '오오티디',
  '夏コーデ',
  'カジュアルコーデ',
  'シンプルベーシック',
  '韓国ファッション'],
 363: ['ootd',
  '2021summer',
  'Simplecode',
  'Dailycode',
  '오오티디',
  '韓国ファッション',
  'シンプルベーシック',
  'モノトーンコーデ',
  'ネイビー',
  'カジュアルコーデ',
  'ストリートコーデ',
  'ニットベスト',
  'スニーカー'],
 364: ['ootd',
  '2021summer',
  'Simplecode',
  '오오티디',
  '韓国ファッション',
  '大人カジュアル',
  'シンプルベーシック',
  'シャツ',
  'スラックス'],
 365: ['ootd',
  'Simplecode',
  '2021summer',
  'シンプルコーデ',
  'モノトーンコーデ',
  '大人コーデ',
  'カーディガン',
  'スラックス',
  '韓国ファッション'],
 366: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  'Dailycode',
  '春コーデ',
  '春夏コーデ',
  'デイリーファッション',
  '大人カジュアル',
  'シンプルベーシック',
  'ストリートファッション',
  'モノトーンコーデ',
  '韓国ファッション',
  'デニム',
  'ニットベスト',
  'サンダル'],
 367: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  'Dailycode',
  '春コーデ',
  '大人カジュアル',
  'シンプルベーシック',
  'お出かけコーデ',
  '韓国ファッション',
  'ニット',
  'デニム',
  'スニーカー'],
 368: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  'Dailycode',
  '春コーデ',
  '韓国ファッション',
  'デイリーファッション',
  'カジュアル',
  'シンプルベーシック',
  'チェック',
  'デニム',
  'スニーカー',
  '春夏コーデ'],
 369: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  'Dailycode',
  '春コーデ',
  'デイリーファッション',
  'モノトーンコーデ',
  'シンプルベーシック',
  '大人カジュアル',
  'スカイブルー',
  'グレー',
  'ニット',
  'スラックス',
  'スニーカー',
  '韓国ファッション'],
 370: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  'Dailycode',
  '春コーデ',
  'ストリート',
  'カジュアル',
  'シンプルコーデ',
  'モノトーンコーデ',
  '半袖tシャツ',
  'スニーカー',
  'スラックス',
  '韓国ファッション'],
 371: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  'Dailycode',
  '春コーデ',
  '韓国ファッション',
  'カジュアルコーデ',
  'シンプルコーデ',
  'デイリーファッション',
  'モノトーンコーデ',
  'ニット',
  'スラックス',
  'スニーカー'],
 372: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  '春コーデ',
  'デイリーファッション',
  'シンプルコーデ',
  'モノトーンコーデ',
  'スカイブルー',
  'スニーカー',
  'ストリート'],
 373: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  'ストリートファッション',
  'シンプルコーデ',
  'デイリーファッション',
  'モノトーンコーデ',
  '春コーデ',
  'ニットベスト',
  'スニーカー'],
 374: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  'Dailycode',
  '春コーデ',
  'デイリーファッション',
  'シンプルコーデ',
  '韓国ファッション',
  'パーカー',
  'スラックス',
  'スニーカー'],
 375: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  'Dailycode',
  '春コーデ',
  'シンプルコーデ',
  'デイリーファッション',
  'カジュアル',
  'お出かけコーデ',
  '大人コーデ',
  'モノトーンコーデ',
  'シャツ',
  'ジャケット',
  'スラックス',
  'ストリートファッション',
  'スニーカー',
  '韓国ファッション'],
 376: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  'モノトーンコーデ',
  '韓国コーデ',
  '韓国ファッション',
  '春コーデ',
  'ブーツコーデ',
  'テニススカート',
  'カーディガン',
  'グレー',
  'ブラック',
  'シンプルコーデ'],
 377: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  '春コーデ',
  'カジュアルファッション',
  'デイリーファッション',
  'モノトーンコーデ',
  'ニットコーデ',
  'デニムコーデ',
  'スニーカー',
  'シンプルコーデ',
  'グレー'],
 378: ['ootd',
  '2021spring',
  'Springcode',
  'Simplecode',
  'モノトーンコーデ',
  '大人コーデ',
  'シンプルコーデ',
  'デイリーファッション',
  'お出かけコーデ',
  'ニット',
  '春コーデ'],
 379: ['ootd',
  'Dailycode',
  'Springcode',
  'Simplecode',
  '2021spring',
  '春コーデ',
  'シャツコーデ',
  'デイリーファッション',
  'シンプルコーデ',
  'モノトーン',
  'ブルーシャツ',
  'スニーカー'],
 380: ['ootd',
  '오오티디',
  'モノトーンコーデ',
  'シンプルコーデ',
  '春コーデ',
  'ブーツコーデ',
  'デイリーファッション',
  'Dailycode',
  '2021spring',
  'Simplecode'],
 381: ['シンプルコーデ',
  '春コーデ',
  'パステルカラー',
  'デイリーファッション',
  'ootd',
  'Dailycode',
  'Springcode',
  'Simplecode',
  '2021春',
  'スニーカーコーデ',
  'ニットコーデ',
  'スカイブルー'],
 382: ['シンプルコーデ', 'モノトーンコーデ', 'デイリーコーデ', 'ootd', '오오티디', 'デイリーファッション'],
 383: ['シンプルコーデ', 'ストリート', '오오타디', 'ブラック', 'グレー', 'モノトーンコーデ', 'デイリーコーデ'],
 384: ['大学生コーデ', '데이리코디', 'おうちコーデ', 'モノトーンコーデ', 'シンプル'],
 385: ['긴팔티', '팬츠', '자켓'],
 386: ['긴팔티', '자켓', '팬츠'],
 387: ['긴팔티', '스커트', '가디건&베스트'],
 388: ['가디건&베스트', '니트', '팬츠'],
 389: ['긴팔티', '팬츠', '집업&점퍼'],
 390: ['반팔티', '가디건&베스트', '데님'],
 391: ['코트', '반팔티', '팬츠'],
 392: ['가디건&베스트', '반팔티', '데님'],
 393: ['반팔티', '데님', '자켓'],
 394: ['긴팔티', '팬츠', '가디건&베스트'],
 395: ['반팔티', '데님', '집업&점퍼'],
 396: ['반팔티', '가디건&베스트', '숄더백'],
 397: ['스커트', '긴팔티', '스니커즈'],
 398: ['가디건&베스트', '니트', '팬츠'],
 399: ['긴팔티', '스커트', '가디건&베스트'],
 400: ['링', '니트', '팬츠'],
 401: ['반팔티', '스커트', '가디건&베스트'],
 402: ['반팔티', '팬츠', '집업&점퍼'],
 403: ['링', '가디건&베스트', '데님'],
 404: ['링', '긴팔티', '자켓'],
 405: ['반팔티', '팬츠', '가디건&베스트'],
 406: ['니트', '데님', '자켓'],
 407: ['링', '가디건&베스트', '블라우스'],
 408: ['긴팔티', '데님', '자켓'],
 409: ['가디건&베스트', '반팔티', '스커트'],
 410: ['가디건&베스트', '반팔티', '스커트'],
 411: ['가디건&베스트', '슬리브리스', '데님'],
 412: ['링', '블라우스', '팬츠'],
 413: ['가디건&베스트', '반팔티', '데님'],
 414: ['반팔티', '가디건&베스트', '데님'],
 415: ['링', '반팔티', '팬츠'],
 416: ['사진', '반팔티', '가디건&베스트'],
 417: ['가디건&베스트', '니트', '데님'],
 418: ['슬리브리스', '반바지', '자켓'],
 419: ['스커트', '긴팔티', '양말'],
 420: ['데님', '반팔티', '가디건&베스트'],
 421: ['팬츠', '니트', '캔버스백&에코백'],
 422: ['가디건&베스트', '슬리브리스', '데님'],
 423: ['사진', '반팔티', '가디건&베스트'],
 424: ['사진', '반팔티', '데님'],
 425: ['사진', '자켓', '데님'],
 426: ['사진', '블라우스', '팬츠'],
 427: ['이어링', '블라우스', '데님'],
 428: ['기타악세서리', '슬리브리스', '팬츠'],
 429: ['기타악세서리', '반팔티', '스커트'],
 430: ['이어링', '블라우스', '데님'],
 431: ['팬츠', '반팔티', '캔버스백&에코백'],
 432: ['반팔티', '반바지', '캔버스백&에코백'],
 433: ['반팔티', '스커트', '스니커즈'],
 434: ['스커트', '블라우스', '기타악세서리'],
 435: ['반팔티', '반바지', '모자'],
 436: ['팬츠', '니트', '캔버스백&에코백'],
 437: ['스커트', '블라우스', '샌들&슬리퍼'],
 438: ['스커트', '가디건&베스트', '숄더백'],
 439: ['스커트', '반팔티', '니트'],
 440: ['가디건&베스트', '데님', '숄더백'],
 441: ['데님', '니트', '캔버스백&에코백'],
 442: ['데님', '블라우스', '샌들&슬리퍼'],
 443: ['팬츠', '긴팔티', '캔버스백&에코백'],
 444: ['팬츠', '반팔티', '스니커즈'],
 445: ['반팔티', '데님', '가디건&베스트'],
 446: ['블라우스', '팬츠', '스니커즈'],
 447: ['가디건&베스트', '블라우스', '스커트'],
 448: ['긴팔티', '팬츠', '블라우스'],
 449: ['긴팔티', '데님', '캔버스백&에코백'],
 450: ['반팔티', '스커트', '자켓'],
 451: ['블라우스', '스커트', '자켓'],
 452: ['반팔티', '팬츠', '자켓'],
 453: ['반팔티', '스커트', '가디건&베스트'],
 454: ['가디건&베스트', '반팔티', '데님'],
 455: ['블라우스', '자켓', '데님'],
 456: ['블라우스', '자켓'],
 457: ['반팔티', '데님', '자켓'],
 458: ['가디건&베스트', '반팔티', '팬츠'],
 459: ['자켓', '블라우스', '데님'],
 460: ['2021', '가을', '쿨룩', '데일리', '캐쥬얼', 'Daily', 'OOTD', '편안한', '심플'],
 461: ['2021', '가을', '쿨룩', '데일리', '캐쥬얼', 'Daily', 'OOTD', '편안한', '심플'],
 462: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 463: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 464: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 465: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 466: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 467: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 468: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 469: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 470: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 471: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 472: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 473: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 474: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 475: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 476: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 477: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 478: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 479: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 480: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 481: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 482: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 483: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 484: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 485: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 486: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 487: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 488: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 489: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 490: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 491: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 492: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 493: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 494: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 495: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 496: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 497: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 498: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 499: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 500: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 501: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 502: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 503: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 504: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 505: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 506: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 507: ['2021', '가을', '프레피룩', '체크팬츠', '아가일패턴', '가디건', 'Daily', 'OOTD'],
 508: ['2021', '룩', '여름', '발랄', 'Daily', 'OOTD'],
 509: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 510: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 511: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 512: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 513: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 514: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 515: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 516: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 517: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 518: ['2021', '룩', '여름', '여행', 'Daily', 'OOTD'],
 519: ['2021', '봄', '로맨틱', 'Daily', 'OOTD'],
 520: ['2021', '봄', '로맨틱', 'Daily', 'OOTD'],
 521: ['2021', '봄', '로맨틱', 'Daily', 'OOTD'],
 522: ['2021', '봄', '로맨틱', 'Daily', 'OOTD'],
 523: ['2021', '봄', '로맨틱', 'Daily', 'OOTD'],
 524: ['2021', '봄', '로맨틱', 'Daily', 'OOTD'],
 525: ['2021', '봄', '로맨틱', 'Daily', 'OOTD'],
 526: ['2020', '하이틴', '데일리', 'Daily', 'OOTD'],
 527: ['크롭반팔티셔츠', '플라워스커트', '크롭니트가디건', '메리제인플랫', '숄더백'],
 528: ['캡슬리브리스', '핀턱슬랙스', '니트가디건', '버클로퍼', '숄더백'],
 529: ['플라워슬리브리스', '브이넥티셔츠', '와이드데님팬츠', '스니커즈', '숄더백'],
 530: ['오프숄더티셔츠', '홀터슬리브리스', '플리츠스커트', '청키힐', '체인벨트', '시스루워머'],
 531: ['크롭셔츠', '플리츠스커트', '크롭니트베스트', '로퍼', '숄더백'],
 532: ['오버핏셔츠', '숏데님', '스탠다드자켓', '미들부츠', '체인숄더백'],
 533: ['하프목폴라', '체크스커트', '브이넥가디건', '앵클부츠', '숄더백', '베레모'],
 534: ['레이스슬리브리스', '플리츠스커트', '아가일가디건', '앵클부츠', '숄더백'],
 535: ['슬리브리스탑', '체크스커트', '라운드니트가디건', '어글리스니커즈', '숄더백'],
 536: ['카라니트', '체크스커트', '로퍼', '숄더백'],
 537: ['니트맨투맨', '플레어롱스커트', '숄더백', '로퍼'],
 538: ['터틀넥티셔츠', '체크스커트', '꽈배기조끼', '베레모', '로퍼', '숄더백'],
 539: ['프린팅티셔츠', '타이다이팬츠', '체크셔츠', '스니커즈', '숄더백', '집게핀'],
 540: ['꽈배기니트원피스', '체크롱스커트', '미들부츠', '숄더백', '집게핀'],
 541: ['카라니트', '체크스커트', '로퍼', '숄더백', '집게핀'],
 542: ['셔츠', '체크스커트', '로퍼', '숄더백', '니트조끼', '머리띠'],
 543: ['슬리브리스탑', '4부바지', '웨스턴부츠', '볼레로가디건', '숄더백'],
 544: ['브라렛', '크롭티셔츠', '시스루워머', '로퍼', '체인숄더백'],
 545: ['반팔가디건', '트임스커트', '숄더백', '루즈핏자켓', '로퍼뮬', '베레모'],
 546: ['체크스커트', '셔츠', '니트베스트', '에코백', '로퍼'],
 547: ['슬리브리스가디건', '와이드데님팬츠', '로퍼', '숄더백', '집게핀', '썬글라스'],
 548: ['크롭반팔티셔츠', '벨트스커트', '로퍼', '숄더백', '집게핀'],
 549: ['셔츠', '아가일니트베스트', '플리츠스커트', '숄더백', '로우힐펌프스', '숄더백', '집게핀'],
 550: ['카라니트', '체크스커트', '숄더백', '스니커즈', '머리띠'],
 551: ['하프셔츠', '체크플리츠스커트', '크롭자켓', '통굽로퍼', '토트백'],
 552: ['프린팅티셔츠', '플리츠스커트', '어글리스니커즈', '숄더백'],
 553: ['크롭셔츠', '코튼스커트', '트위드가디건', '미들힐펌프스', '체인숄더백', '체인숄더백'],
 554: ['셔츠', '아가일스커트', '투버튼자켓', '숄더백', '베레모', '메리제인플랫슈즈', '니삭스'],
 555: ['레이스슬리브리스', '브이넥크롭조끼', '체크스커트', '로퍼', '숄더백', '헤어밴드'],
 556: ['레터링티셔츠', '플리츠스커트', '아가일가디건', '로퍼', '숄더백'],
 557: ['슬리브리스', '반팔가디건', '체크플리츠스커트', '미들부츠', '숄더백'],
 558: ['크롭블라우스', '체크스커트', '싱글자켓', '스니커즈', '숄더백', '머리띠'],
 559: ['반팔셔츠', '체크플리츠스커트', '가디건', '로퍼', '숄더백'],
 560: ['핀턱슬랙스', '프린팅티셔츠', '스니커즈', '스트링백', '버킷햇'],
 561: ['크롭티셔츠', '핀턱하프팬츠', '웨스턴부츠', '체인숄더백', '베레모', '체인벨트'],
 562: ['카라니트', '체크플리츠스커트', '숄더백', '로퍼'],
 563: ['라운드니트', '데미지진', '스니커즈', '에코백'],
 564: ['레이스슬리브리스', '브이넥니트', '트임스커트', '로퍼', '숄더백'],
 565: ['아가일니트베스트', '트임스커트', '로퍼', '숄더백', '머리띠'],
 566: ['크롭카라티셔츠', '체크스커트', '브이넥가디건', '베레모', '로퍼', '포켓백'],
 567: ['뷔스티에', '데님하프팬츠', '가디건', '스니커즈', '숄더백'],
 568: ['슬리브리스탑', '호피스커트', '가디건', '로퍼', '에코', '집게핀'],
 569: ['레이스슬리브리스', '미니스커트', '레이스업부츠'],
 570: ['언발티셔츠', '카고스커트', '하프부츠', '숄더백', '썬글라스'],
 571: ['반팔셔츠', '체크스커트', '니트베스트', '로퍼', '숄더백', '머리띠'],
 572: ['펀칭슬리브리스', '부츠컷팬츠', '스니커즈', '에코백', '집게핀'],
 573: ['네트니트', '핀턱데님', '롱부츠', '에코백'],
 574: ['체크블라우스', '데님롱스커트', '스니커즈', '에코백', '버킷햇'],
 575: ['스트레이트핏', '크롭반팔티셔츠', '스니커즈', '에코백', '볼캡'],
 576: ['슬리브리스탑', '크롭니트가디건', '와이드데님팬츠', '어글리스니커즈', '체인숄더백'],
 577: ['린넨반팔가디건', '부츠컷데님팬츠', '스니커즈', '백팩'],
 578: ['니트', '부츠컷데님팬츠', '스니커즈', '크로스백'],
 579: ['슬리브리스탑', '핀턱데님', '크롭후드집업', '어글리스니커즈', '숄더백'],
 580: ['레터링반팔티셔츠', '플리츠스커트', '셔츠', '로퍼', '숄더백'],
 581: ['슬리브리스탑', '핀턱슬랙스', '스니커즈', '에코백', '슬림크롭티셔츠'],
 582: ['니트슬리브리스', '데님숏팬츠', '크롭후드집업', '레이스업부츠', '에코백'],
 583: ['프린팅크롭티셔츠', '카고조거팬츠', '스니커즈', '숄더백', '시스루워머'],
 584: ['카라크롭티셔츠', '니트조끼', '세미와이드데님팬츠', '로퍼', '숄더백', '집게핀'],
 585: ['슬리브리스탑', '체크블라우스', '부츠컷데님팬츠', '플랫폼스니커즈', '숄더백'],
 586: ['슬리브리스탑', '부츠컷팬츠', '스니커즈', '체인숄더백'],
 587: ['크롭티셔츠', '체크스커트', '니삭스', '로퍼', '숄더백'],
 588: ['슬리브리스탑', '체크스커트', '로퍼', '오버니삭스', '베레모', '숄더백'],
 589: ['레이스슬리브리스', '플라워티셔츠', '하프핀턱팬츠', '스니커즈', '에코백'],
 590: ['레이스블라우스', '레이스롱스커트', '메리제인플랫슈즈', '에코백', '리본머리끈'],
 591: ['셔링블라우스', '부츠컷팬츠', '스니커즈', '숄더백'],
 592: ['슬리브리스탑', '핀턱슬랙스', '스트랩샌들', '숄더백', '집게핀'],
 593: ['크롭반팔티셔츠', '데님하프팬츠', '레이스업부츠', '숄더백', '버킷햇'],
 594: ['카라크롭티셔츠', '미니스커트', '로퍼', '숄더백', '집게핀'],
 595: ['크롭카라티셔츠', '스트라이프스커트', '로퍼', '베스트', '숄더백'],
 596: ['슬리브리스탑', '부츠컷데님팬츠', '스니커즈', '숄더백'],
 597: ['레터링슬리브리스탑', '미니스커트', '플랫폼스니커즈', '숄더백', '스니커즈'],
 598: ['슬리브리스탑', '루즈니트', '스트라이프스커트', '스니커즈', '숄더백', '집게핀'],
 599: ['반팔크롭티셔츠', '숏팬츠', '웨스턴부츠', '숄더백', '집게핀'],
 600: ['아가일크롭가디건', '미니스커트', '숄더백', '리본핀', '로퍼'],
 601: ['슬리브리스탑', '체크스커트', '로퍼', '볼레로가디건', '숄더백'],
 602: ['오프숄더티셔츠', '브라렛', '부츠컷팬츠', '스트랩힐', '숄더백', '집게핀'],
 603: ['슬리브리스탑', '플리츠스커트', '볼레로가디건', '스니커즈', '숄더백', '베레모'],
 604: ['가디건블라우스', '체크플리츠스커트', '웨스턴부츠', '숄더백', '베레모'],
 605: ['면와이드팬츠', '슬리브리스탑', '로퍼', '숄더백', '썬글라스'],
 606: ['프린팅티셔츠', '청반바지', '웨스턴부츠', '숄더백'],
 607: ['나그랑티셔츠', '데님플리츠스커트', '어글리스니커즈', '숄더백'],
 608: ['프린팅반팔티셔츠', '스트라이프스커트', '레이스업부츠', '크로스백', '스크런치'],
 609: ['슬리브리스탑', '크로셰뷔스티에', '청반바지', '스니커즈', '숄더백', '집게핀'],
 610: ['크로셰뷔스티에', '데님하프팬츠', '스니커즈', '숄더백', '썬글라스'],
 611: ['니트슬리브리스', '데님숏팬츠', '시스루슬리브리스', '에코백'],
 612: ['카라티셔츠', '체크스커트', '스니커즈', '숄더백', '집게핀'],
 613: ['크롭슬리브리스', '프릴스커트', '크롭가디건', '스니커즈', '머리띠', '미니숄더백'],
 614: ['슬리브리스탑', '타이다이티셔츠', '와이드슬랙스', '스트랩샌들', '숄더백'],
 615: ['크롭반팔티셔츠', '코듀로이하프팬츠', '니트후드집업', '스니커즈', '에코백', '썬글라스'],
 616: ['슬리브리스탑', '하프팬츠', '크롭점퍼', '스니커즈', '에코백', '집게핀'],
 617: ['슬리브리스탑', '하프팬츠', '어글리스니커즈', '숄더백', '체크셔츠', '볼캡'],
 618: ['와이드데님팬츠', '크롭카라티셔츠', '스니커즈', '에코백', '집게핀'],
 619: ['베스트', '와이드팬츠', '글래디에이터샌들', '숄더백'],
 620: ['프린팅티셔츠', '와이드슬랙스', '플랫샌들', '에코백'],
 621: ['반팔티셔츠', '핀턱데님', '스니커즈', '숄더백', '볼캡'],
 622: ['타이다이티셔츠', '부츠컷팬츠', '스니커즈뮬', '숄더백'],
 623: ['반팔크롭티셔츠', '밴딩팬츠', '플랫폼슬리퍼', '에코백', '집게핀'],
 624: ['반팔티셔츠', '데님플리츠스커트', '시스루워머', '셔링부츠', '체인숄더백', '집게핀'],
 625: ['반팔크롭티셔츠', '체크스커트', '오픈토앵클부츠', '숄더백'],
 626: ['브라렛', '쉬폰슬리브리스', '스트랩힐', '미니백'],
 627: ['레이스슬리브리스', '면바지', '스트랩힐', '숄더백'],
 628: ['브라렛', '일자데님', '시스루반팔티', '스니커즈', '숄더백'],
 629: ['홀터슬리브리스', '청반바지', '크롭자켓', '토트백', '미디부츠'],
 630: ['와이드슬랙스', '슬리브리스탑', '크롭가디건', '스니커즈', '숄더백'],
 631: ['반팔티셔츠', '5부핀턱데님팬츠', '스니커즈', '버킷햇', '에코백'],
 632: ['カーディガン/ベスト', 'パンツ', 'ブーツ'],
 633: ['ニット', 'ショートパンツ', 'カーディガン/ベスト'],
 634: ['ニット', 'ショートパンツ', 'ベルト'],
 635: ['photo', '長袖Tシャツ', 'カーディガン/ベスト'],
 636: ['ブラウス', 'カーディガン/ベスト', 'スカート'],
 637: ['ジップアップ', 'ブラウス', 'スカート'],
 638: ['ブラウス', 'スカート', 'カーディガン/ベスト'],
 639: ['ニット', 'スカート', 'ブーツ'],
 640: [],
 641: ['ブラウス', 'ジャケット', 'デニムパンツ'],
 642: ['ニット', 'スカート', 'ベルト'],
 643: ['ニット', 'ジャケット', 'スカート'],
 644: ['ブラウス', 'カーディガン/ベスト', 'スカート'],
 645: ['ジャケット', 'ブラウス', 'スカート'],
 646: ['ノースリーブ', '長袖Tシャツ', 'デニムパンツ'],
 647: ['ブラウス', 'デニムパンツ', 'スニーカー'],
 648: ['ニット', 'デニムパンツ', 'ジャケット'],
 649: ['ニット', 'ショートパンツ', 'ブーツ'],
 650: ['半袖Tシャツ', '長袖Tシャツ', 'デニムパンツ'],
 651: ['長袖Tシャツ', 'パンツ', 'スニーカー'],
 652: ['靴下', 'ブラウス', 'カーディガン/ベスト'],
 653: ['ニット', 'スカート', 'ブーツ'],
 654: ['長袖Tシャツ', 'パンツ', 'カーディガン/ベスト'],
 655: ['ニット', 'デニムパンツ', 'スニーカー'],
 656: ['ブラウス', 'スカート', 'ジャケット'],
 657: ['photo', 'ブラウス', 'カーディガン/ベスト'],
 658: ['ショートパンツ', '長袖Tシャツ', 'サンダル'],
 659: ['カーディガン/ベスト', 'ノースリーブ', 'パンツ'],
 660: ['半袖Tシャツ', 'デニムパンツ', 'サンダル'],
 661: ['photo', 'ブラウス', 'ショートパンツ'],
 662: ['symbols&text', 'スカート', 'ノースリーブ'],
 663: ['半袖Tシャツ', 'ショートパンツ', 'カーディガン/ベスト'],
 664: ['ノースリーブ', 'スカート', 'ブーツ'],
 665: ['ブラウス', 'スカート', 'ブーツ'],
 666: ['デニムパンツ', 'カーディガン/ベスト', 'サンダル'],
 667: ['Trendy', 'Spring', '코디북', 'Simple', 'Backtoschool'],
 668: ['Daillylook', 'Simple'],
 669: ['College',
  'Sneakers',
  'Lovely',
  'Pink',
  'Highteen',
  'Autumnoutfit',
  'Lovely',
  'Sneakers',
  'Daily'],
 670: ['Dailyfashion', 'Chic', 'Cool', 'Dateoutfit', '2021'],
 671: ['Springmood', 'Datelook', 'Cool', 'Lookbook', 'Mystyle'],
 672: ['Springfashion',
  'Preppy',
  'Girlish',
  'Ootddaily',
  'Knitwears',
  'Lovely',
  'Daillylook'],
 673: ['Simplelook', 'Black&white', 'Casualstyle', 'Set', 'Sweet'],
 674: ['Spring', 'Girlish', 'Datelook', 'Shoulderbags', 'Casualstyle'],
 675: ['Lovely', 'Coolgirl', 'Coolandcomfortable', 'Mystyle'],
 676: ['Trendy', 'Girly', 'Outfit', 'Ootd', 'Casualstyle', 'Simple'],
 677: ['Casualstyle', 'Simple'],
 678: [],
 679: ['Streetstyle', '2021'],
 680: ['Dailylook',
  'Casualstyle',
  'Comfy',
  'Lovely',
  'Spring',
  'Skirt',
  'Girlish',
  'Girly'],
 681: ['Summerlook', 'Streetstyle', 'Cardigan/vest'],
 682: ['Comfy', 'Daillylook', 'Lookbook'],
 683: ['Cardigan/Vest', '민소매', '청바지'],
 684: ['Casual', 'Casualstyle', 'Simple', 'Dailyfashion'],
 685: ['Basic', 'Summerlook', 'Jeans', 'Simplelook', 'Spring'],
 686: ['Basic',
  'Girly',
  'Summeroutfit',
  'Outfit',
  'Springmood',
  'Coolgirl',
  'Sweet'],
 687: ['Cute', 'Jeans', 'Summeroutfit', 'White'],
 688: ['Sweet',
  'Spring',
  'Trendy',
  'Outfit',
  'Daily',
  'Simplelook',
  'Shoulderbags',
  'Black'],
 689: ['Outfit', 'Streetstyle', 'Sleeveless'],
 690: ['Spring/summer',
  'Modern',
  'Coolgirl',
  'Dailystyle',
  'Simplelook',
  'Blackcode'],
 691: ['Sweet', 'Coolgirl', 'Trendy', 'Summer'],
 692: ['Simplestyle',
  'Pastel',
  'Lookbook',
  'Color',
  '2021',
  'Spring',
  'Springlook',
  'Simple',
  'Casual',
  'Casuallook'],
 693: ['Dailystyle', 'Chic'],
 694: ['Simplestyle', 'Coolgirl', 'Jacket', 'Casuallook'],
 695: ['2021autumn',
  'Autumncode',
  'Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'autumnoutfit',
  'Autumn',
  'Autumnstyle',
  'Autumnlook',
  '秋天穿搭',
  '秋天',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 696: ['2021autumn',
  'Autumncode',
  'Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'autumnoutfit',
  'Autumn',
  'Autumnstyle',
  'Autumnlook',
  '秋天穿搭',
  '秋天',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 697: ['2021autumn',
  'Autumncode',
  'Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'autumnoutfit',
  'Autumn',
  'Autumnstyle',
  'Autumnlook',
  '秋天穿搭',
  '秋天',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 698: ['2021autumn',
  'Autumncode',
  'Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'autumnoutfit',
  'Autumn',
  'Autumnstyle',
  'Autumnlook',
  '秋天穿搭',
  '秋天',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 699: ['2021autumn',
  'Autumncode',
  'Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'autumnoutfit',
  'Autumn',
  'Autumnstyle',
  'Autumnlook',
  '秋天穿搭',
  '秋天',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 700: ['2021summer',
  'Summercode',
  'Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'Summeroutfit',
  'Summer',
  'Summerstyle',
  'Summerlook',
  '夏天穿搭',
  '夏天',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 701: ['2021summer',
  'Summercode',
  'Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'Summeroutfit',
  'Summer',
  'Summerstyle',
  'Summerlook',
  '夏天穿搭',
  '夏天',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 702: ['2021summer',
  'Summercode',
  'Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'Summeroutfit',
  'Summer',
  'Summerstyle',
  'Summerlook',
  '夏天穿搭',
  '夏天',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 703: ['2021summer',
  'Summercode',
  'Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'Summeroutfit',
  'Summer',
  'Summerstyle',
  'Summerlook',
  '夏天穿搭',
  '夏天',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 704: ['2021summer',
  'Summercode',
  'Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'Summeroutfit',
  'Summer',
  'Summerstyle',
  'Summerlook',
  '夏天穿搭',
  '夏天',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 705: ['2021summer',
  'Summercode',
  'Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'Summeroutfit',
  'Summer',
  'Summerstyle',
  'Summerlook',
  '夏天穿搭',
  '夏天',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 706: ['Simple',
  'Simplelook',
  'Simplestyle',
  'Daillylook',
  'Dailystyle',
  'Daily',
  'Date',
  'Datelook',
  'Dateoutfit',
  'Winteroutfit',
  'Winter',
  'Winterstyle',
  'Winterlook',
  '冬天穿搭',
  '冬天',
  '秋冬',
  '簡約',
  '日常',
  '每日穿搭',
  '每天穿搭',
  '約會',
  '約會穿搭',
  '데이트룩',
  '데일리룩',
  '베이직룩',
  '데이리',
  '베이직',
  '데이트'],
 707: ['Black&white',
  'Black',
  '화이트',
  'Khaki',
  '白色',
  '黑色',
  '黑白色系',
  '復古',
  'Vintage',
  'Butterfly',
  '蝴蝶',
  'Date',
  '約會',
  '約會穿搭',
  '블랙&화이트',
  '화이트룩',
  '화이트',
  '블랙',
  '블랙룩',
  '카키룩',
  '카키',
  '데이트룩',
  '데이트'],
 708: ['2021겨울', '2021autumn', '겨울데이리'],
 709: ['2021겨울', '2021autumn'],
 710: ['2021겨울', '겨울데이리', "90'sfashion"],
 711: ['2021summer', 'Summercode', '2021여름'],
 712: ['2021summer', 'Summercode', '2021여름', 'Bluecode'],
 713: ['2021summer', 'Summercode', '2021여름', 'Denimcode', 'Greencode'],
 714: ['2021summer', 'Summercode', '2021여름', 'Denimcode', 'Mintgreen'],
 715: ['2021summer', 'Summercode', '2021여름', 'Denimcode', '핑크'],
 716: ['2021summer', 'Summercode', '2021여름', 'Beigecode', '블랙', '오오타디'],
 717: ['2021summer', 'Summercode', '2021여름', 'Beigecode', '블랙'],
 718: ['2021summer', 'Summercode', '2021여름', 'Greencode', 'whitecode'],
 719: ['2021summer', 'Summercode', '2021여름', 'Denimcode', '핑크'],
 720: ['2021summer', 'Summercode', '2021여름', 'Denimcode'],
 721: ['2021summer', 'Summercode', '2021여름', '화이트', 'Bluecode'],
 722: ['2021summer', 'Summercode', '2021여름', 'Denimcode', 'Bluecode'],
 723: ['2021spring', '2021summer', 'Greencode', '화이트'],
 724: ['2021summer', '2021spring', 'Denimcode', 'Graycode'],
 725: ['2021summer', '2021spring', '그린', '핑크'],
 726: ['2021spring', '2021summer', 'Denimcode', 'Greencode'],
 727: ['2021spring', '2021summer', '블랙'],
 728: ['2021spring', '2021summer', 'Yellowcode', 'Beigecode'],
 729: ['2021spring', '2021summer', 'Browncode', 'Denimcode'],
 730: ['2021spring', '2021summer', 'Greencode', 'Beigecode'],
 731: ['2021spring', 'Springcode', '봄', 'Bluecode'],
 732: ['니트', '예쁨', '長褲'],
 733: ['패션', '베이직', 'パンツ'],
 734: ['일상적', '팬츠', '가을'],
 735: ['편안함', '일상적'],
 736: ['일상', '오오티디', '短褲'],
 737: ['꾸안꾸', '심플', '短裙'],
 738: ['여름', '20대', '캐쥬얼'],
 739: ['오오티디', '숄더백', '심플'],
 740: ['시크룩', '오오티디', '캐쥬얼룩'],
 741: ['Mc'],
 742: ['여름코디', '니트', '데님', '일상룩'],
 743: ['사랑스러운', '캠퍼스룩'],
 744: ['팬츠', '반바지', '간단', '데일리'],
 745: ['일상적', '대학생', '깔끔'],
 746: ['심플코디', '시크룩', '20대'],
 747: ['데일리룩', '하이틴룩', '러블리룩'],
 748: ['가디건', '심플베이직', '데일리'],
 749: ['심플코디', '캐주얼', '20대'],
 750: ['캐쥬얼룩', '일상룩'],
 751: ['모던', '깔끔단정', '러블리', '베이직'],
 752: ['캠퍼스룩', '캐주얼', '깔끔'],
 753: ['간편', '데이트룩', '심플코디'],
 754: ['일상', '데이트룩'],
 755: ['시크룩', '팬츠', '편안함'],
 756: ['편안함', '친구랑놀러갈때', '코디', '따뜻하게', '베이직']}

Recommending outfits similar to a hashtag
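
The hash_codi function below relies on a cosine-similarity helper (cos_sim) and on the tag and codi embedding matrices (hash_whash, codi_wcodi) built earlier in the notebook. For reference, here is a minimal sketch of such a helper, assuming plain NumPy vectors; this is an illustration, not necessarily the exact definition used in this project:

import numpy as np
from numpy.linalg import norm  # both already imported at the top of the notebook

def cos_sim(a, b):
    # Cosine similarity between two 1-D embedding vectors;
    # guard against zero vectors to avoid division by zero.
    denom = norm(a) * norm(b)
    return float(np.dot(a, b) / denom) if denom else 0.0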

def hash_codi(tag):
    # Collect every codi index whose hashtag list contains the given tag
    # (each of these indices is assumed to represent that hashtag).
    lst = [k for k, v in hash_dic.items() if tag in v]

    # Compute the cosine similarity between each matching hashtag vector and
    # every codi vector, keeping only the best score per codi so the same
    # outfit cannot appear more than once in the recommendations.
    best = {}
    for idx in lst:
        for i in range(codi_wcodi.shape[0]):
            sim = cos_sim(hash_whash[idx], codi_wcodi[i])
            if sim > best.get(i, -1.0):
                best[i] = sim

    # Keep the three codi indices with the highest similarity.
    final = [i for i, _ in sorted(best.items(), key=lambda x: x[1], reverse=True)[:3]]

    # If fewer than three candidates exist (e.g. the tag never appears), recommend nothing.
    if len(final) < 3:
        print('No outfits to recommend')
    else:
        # Plot the three recommended outfits as columns of a 3x3 grid,
        # with one row each for Top, Bottom and Shoes.
        axes = []
        fig = plt.figure(figsize=(10, 10))
        for i, j, k in zip([1, 4, 7, 2, 5, 8, 3, 6, 9],
                           ['Top', 'Bottom', 'Shoes'] * 3,
                           list(chain.from_iterable(repeat(number, 3) for number in final))):
            axes.append(fig.add_subplot(3, 3, i))
            axes[-1].set_title(j)

            if i in [1, 2, 3]:
                plt.imshow(x_top[k])
            elif i in [4, 5, 6]:
                plt.imshow(x_bottom[k])
            else:
                plt.imshow(x_shoes[k])

        fig.tight_layout()
        plt.show()
hash_codi('축제')

png
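
Note that hash_whash and codi_wcodi are not defined in this excerpt. One plausible construction, given the gensim import at the top of the notebook, is to train Word2Vec on the hashtag lists and average each codi's tag vectors; the sketch below is an assumption about that setup (variable names, the tags_to_vec helper, and the hyperparameters are illustrative), not the project's verified code:

# Sketch only: hyperparameters and the tags_to_vec helper are assumptions.
from gensim.models import Word2Vec  # gensim >= 4.0 API
import numpy as np

sentences = list(hash_dic.values())  # each codi's tag list acts as one "sentence"
w2v = Word2Vec(sentences, vector_size=100, window=5, min_count=1, sg=1)

def tags_to_vec(tags, dim=100):
    # Average the Word2Vec vectors of a codi's tags; zero vector if the list is empty.
    vecs = [w2v.wv[t] for t in tags if t in w2v.wv]
    return np.mean(vecs, axis=0) if vecs else np.zeros(dim)

codi_wcodi = np.array([tags_to_vec(tags) for tags in hash_dic.values()])
hash_whash = {k: tags_to_vec(v) for k, v in hash_dic.items()}  # keyed by codi id, as hash_codi expects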