Commit 529337f9 authored by Meet Narendra

Loss and gram matrix

parent 4ff6ba78
...@@ -2,6 +2,7 @@ import torch
from logger import Logger
LOGGER = Logger().logger()
LOGGER.info("Started Feature Maps")
#Author: @meetdoshi
class FeatureMaps:
    def __init__(self,arch="vgg19"):
        '''
...
from distutils.log import Log
import logging
import os
#Author: @meetdoshi
class Logger:
    '''
    Singleton logger class
...
import numpy as np
import torch
from logger import Logger
LOGGER = Logger().logger()
...@@ -24,21 +25,36 @@ class Loss:
    def gram_matrix(F):
        '''
        Function to compute the gram matrix of a feature representation at a layer
        Author: @himalisaini
        '''
        shape_mat = F.shape
        num_channels = shape_mat[1]
        height = shape_mat[2]
        width = shape_mat[3]
        return torch.mm(F.view(num_channels,(height*width)),F.view(num_channels,(height*width)).t())
    @staticmethod
    def style_loss(F,A):
        '''
        Function to compute style loss between two feature representations at multiple layers
        @params
        Author: @soumyagupta
        '''
        num_channels = F.shape[1]
        h = F.shape[2]
        w = F.shape[3]
        style_gram_matrix = Loss.gram_matrix(F)
        target_gram_matrix = Loss.gram_matrix(A)
        loss_s = torch.sum((style_gram_matrix-target_gram_matrix)**2)
        constant = 1/(4.0*(num_channels**2)*((h*w)**2))
        return constant*loss_s
    @staticmethod
    def total_loss(alpha,beta,cont_fmap_real,cont_fmap_noise,style_fmap_real,style_fmap_noise):
        '''
        Function which computes total loss and returns it
        @params
        Author: @jiteshg
        '''
        loss_t = alpha*Loss.content_loss(cont_fmap_real,cont_fmap_noise) + beta*Loss.style_loss(style_fmap_real,style_fmap_noise)
        return loss_t
\ No newline at end of file
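
The style term added here follows the Gatys et al. formulation: for a layer with N channels and M = H*W spatial positions, style_loss is 1/(4*N^2*M^2) times the squared difference of the two gram matrices. Below is a minimal, hypothetical sketch of how these methods could be exercised with dummy VGG-sized feature maps; the module name loss and the availability of Loss.content_loss (defined earlier in the file, outside this hunk) are assumptions.

import torch
from loss import Loss  # assumption: this file is loss.py

# Dummy VGG-style feature maps: (batch=1, channels, height, width)
F_style = torch.rand(1, 64, 112, 112)   # style image features at one layer
F_noise = torch.rand(1, 64, 112, 112)   # generated (noise) image features

G = Loss.gram_matrix(F_style)           # flattens to (64, 112*112) and returns a (64, 64) matrix
assert G.shape == (64, 64)

s = Loss.style_loss(F_noise, F_style)   # scalar tensor, scaled by 1/(4*N^2*M^2)

# content_loss is not part of this diff; assuming it exists earlier in loss.py,
# total_loss weights the two terms with alpha and beta.
t = Loss.total_loss(alpha=1.0, beta=1e3,
                    cont_fmap_real=F_style, cont_fmap_noise=F_noise,
                    style_fmap_real=F_style, style_fmap_noise=F_noise)
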
from distutils.log import Log
from logger import Logger
from torchvision import transforms
from PIL import Image
import numpy as np
LOGGER = Logger().logger()
#Author: @meetdoshi
class Preprocessor:
    @staticmethod
    def load_image(path):
        '''
        Function to load image
        @params
        path: os.path
        '''
        img = Image.open(path)
        return img
    @staticmethod
    def subtract_mean(img):
        '''
...@@ -21,15 +32,22 @@ class Preprocessor:
        @params
        img: 3d numpy array
        '''
        loader = transforms.Compose([transforms.ToTensor(),transforms.Resize([224,224]),transforms.Normalize(mean=[0.485, 0.456, 0.406],std=[0.229, 0.224, 0.225],),])
        img = loader(img).unsqueeze(0)
        return img
    @staticmethod
    def process(path):
        '''
        Function to preprocess the image
        @params
        path: os.path
        '''
        img = Preprocessor.load_image(path)
        img = Preprocessor.reshape_img(img)
        img = Preprocessor.subtract_mean(img)
        return img
if __name__=="__main__":
    prec = Preprocessor()
...
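
A short, hypothetical usage sketch of the preprocessing path added in this file (assuming it is preprocess.py); reshape_img, which process calls, is defined outside this hunk, and the image path below is only a placeholder.

from preprocess import Preprocessor  # assumption: this file is preprocess.py

# Load an image, reshape it, and normalize it with the ImageNet mean/std,
# yielding a tensor suitable for a VGG-19 forward pass.
img_tensor = Preprocessor.process("examples/content.jpg")  # placeholder path
print(img_tensor.shape)  # expected: torch.Size([1, 3, 224, 224])
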
...@@ -15,10 +15,12 @@ class StyleTransfer:
        pass
    @staticmethod
    def pipeline():
        '''
        Pipeline for style transfer
        @params: None
        Author: @gaurangathavale
        '''
\ No newline at end of file
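
pipeline() is still a stub in this commit. Purely as an illustration of what it would presumably tie together (Gatys-style optimization of a noise image under Loss.total_loss), a hedged sketch follows; the module names and the FeatureMaps API (get_fmaps) are assumptions, not part of this commit.

import torch
from preprocess import Preprocessor   # assumption: module names mirror the class names
from feature_maps import FeatureMaps
from loss import Loss

def pipeline_sketch(content_path, style_path, steps=200, alpha=1.0, beta=1e3):
    content = Preprocessor.process(content_path)
    style = Preprocessor.process(style_path)
    noise = content.clone().requires_grad_(True)   # optimize a copy of the content image
    extractor = FeatureMaps("vgg19")               # hypothetical API; not shown in this diff
    optimizer = torch.optim.Adam([noise], lr=0.01)
    for _ in range(steps):
        optimizer.zero_grad()
        loss = Loss.total_loss(alpha, beta,
                               extractor.get_fmaps(content), extractor.get_fmaps(noise),
                               extractor.get_fmaps(style), extractor.get_fmaps(noise))
        loss.backward()
        optimizer.step()
    return noise.detach()
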