From 645ac0b8894c8e569c6a49c422539de90920c03e Mon Sep 17 00:00:00 2001
From: dewisnu
Date: Sun, 17 Dec 2023 14:47:13 +0800
Subject: [PATCH] feat: BraitPrediction logic
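
Add a forward pass to BraitCnn and a BraitPrediction helper that loads
the pretrained weights from app/ml-model/BRAIT_PYTORCH.pth, converts
the input image to RGB, slices it into per-character segments, and
classifies each 28x28 segment as a lowercase letter. The segment count
assumes a Braille cell is roughly 0.78 times as wide as the image is
tall; for a hypothetical 600x100 px strip, round(600 / 100 / 0.78) =
round(7.69) = 8 cells of 75 px each. The '/' route now returns the
prediction for a bundled test image instead of "Hello, World!".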
---
 app/image/imageTest1.jpeg | Bin 0 -> 2676 bytes
 app/routes.py             | 49 ++++++++++++++++++++++++++++++++++++++++++---
 2 files changed, 46 insertions(+), 3 deletions(-)
 create mode 100644 app/image/imageTest1.jpeg

diff --git a/app/image/imageTest1.jpeg b/app/image/imageTest1.jpeg
new file mode 100644
index 0000000000000000000000000000000000000000..0c678d19f2d9a45c1dca4049f28edac70e4004b7
GIT binary patch
literal 2676
[base85 payload omitted; 2676-byte JPEG test image]

literal 0
HcmV?d00001

diff --git a/app/routes.py b/app/routes.py
index d292c33..fd5d487 100644
--- a/app/routes.py
+++ b/app/routes.py
@@ -6,13 +6,15 @@
 import cv2
 from PIL import Image
 import torch.nn.functional as F
+import os
 
+dirname = os.path.dirname(__file__)
 
 class BraitCnn(nn.Module):
     def __init__(self):
-        super(BraitCnn,self).__init__()
+        super(BraitCnn, self).__init__()
         self.brait1 = nn.Sequential(
-            nn.Conv2d(in_channels=3,out_channels=16,kernel_size=5,stride=1,padding=2),
+            nn.Conv2d(in_channels=3, out_channels=16, kernel_size=5, stride=1, padding=2),
             nn.MaxPool2d(kernel_size=2),
             nn.Dropout(p=0.01)
         )
@@ -26,9 +28,50 @@ def __init__(self):
             nn.Linear(100, 26)
         )
 
+    def forward(self, x):
+        y = F.relu(self.brait1(x))
+        y = F.relu(self.brait2(y))
+        # flatten the pooled feature maps (32 channels of 7x7) for the linear layers
+        y = y.view(-1, 32 * 7 * 7)
+        y = F.relu(self.brait3(y))
+
+        return y
+
+
+def BraitPrediction(img_path):
+    model = BraitCnn()
+    model_path = os.path.join(dirname, 'ml-model/BRAIT_PYTORCH.pth')
+    model.load_state_dict(torch.load(model_path, map_location=torch.device('cpu')))
+    model.eval()  # disable dropout during inference
+    # preprocess the input image: convert it to RGB
+    image = Image.open(img_path)
+    image = image.convert('RGB')
+
+    # segment the input image into Braille character cells
+    width, height = image.size  # image dimensions in pixels
+    jumlah_segment = round(width / height / 0.78)  # number of cells, assuming a width/height ratio of ~0.78 per cell
+    print(jumlah_segment)
+    segment = width / jumlah_segment
+    print(segment)
+
+    tamp = []
+    for i in range(0, jumlah_segment):
+        cropped = image.crop((i * segment, 0, (i + 1) * segment, height))
+        cropped = np.array(cropped)
+        cropped = cv2.resize(cropped, (28, 28))
+        cropped = cropped.astype(np.float32) / 255.0
+        cropped = torch.from_numpy(cropped[None, :, :, :])
+        cropped = cropped.permute(0, 3, 1, 2)
+        predicted_tensor = model(cropped)
+        _, predicted_letter = torch.max(predicted_tensor, 1)
+        tamp.append(chr(97 + predicted_letter.item()))
+
+    return tamp
 
 
 @app.route('/')
 @app.route('/index')
 def index():
-    return "Hello, World!"
+    filename = os.path.join(dirname, 'image/imageTest1.jpeg')
+
+    return BraitPrediction(filename)
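
Smoke-test sketch (not part of the patch): it assumes the usual Flask
package layout in which app/__init__.py creates the `app` instance that
app/routes.py decorates, and that the bundled test image and model
weights are present.

    from app import app

    # Flask's built-in test client exercises the route without a server.
    with app.test_client() as client:
        response = client.get('/')
        # BraitPrediction returns a list of letters; Flask >= 2.2
        # serializes a returned list as a JSON array.
        letters = response.get_json()
        print(''.join(letters))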