Commit 4d10fa51 authored by Mirza Cutuk

GROUPWORK_fixed leakyReLU+BCE+Linear:updateparams

parent a13d90cd
@@ -71,6 +71,9 @@ class Linear(Module):
         self.grad_b = torch.empty(self.b.size())
         self.x = None
+        self.velocity_w = None
+        self.velocity_b = None
     def forward(self, input):
         self.x = input
         return torch.add(torch.matmul(self.x, self.w.t()), self.b)
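Note on the hunk above: forward itself is unchanged; the commit only adds the velocity_w/velocity_b buffers. For reference, forward is the standard affine map x @ w.T + b, as this quick standalone check shows (the shapes are illustrative assumptions, not taken from the repository):

import torch

x = torch.randn(4, 3)   # batch of 4 samples, 3 input features (assumed)
w = torch.randn(2, 3)   # 2 output units, so w is (out, in)
b = torch.randn(2)

out = torch.add(torch.matmul(x, w.t()), b)   # same expression as forward
assert out.shape == (4, 2)
# agrees with PyTorch's built-in affine op
assert torch.allclose(out, torch.nn.functional.linear(x, w, b))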
@@ -83,11 +86,10 @@ class Linear(Module):
     def param(self):
         return [(self.w, self.grad_w), (self.b, self.grad_b)]
-    def update_params(self, lr, params_SGD=None):
-        # keep lr/self.x.size(0) for mini-batches
-        if params_SGD:
-            self.w = params_SGD[0][0]
-            self.b = params_SGD[0][1]
+    def update_params(self, lr=None, opt=False, w=None, b=None):
+        if opt == True:
+            self.w = w
+            self.b = b
         else:
             self.w = self.w - (lr/self.x.size(0))*self.grad_w
             self.b = self.b - (lr/self.x.size(0))*self.grad_b
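velocity_w and velocity_b are initialized but never read in the hunks shown: when opt is true, update_params now simply overwrites w and b with externally computed values. If the buffers are meant for SGD with momentum, a typical update would look like this sketch (the momentum rule, the beta coefficient, and the momentum_step helper are all assumptions, not this repository's code):

import torch

def momentum_step(layer, lr, beta=0.9):
    # Hypothetical use of the velocity buffers added above (classic
    # SGD + momentum); beta is an assumed momentum coefficient.
    if layer.velocity_w is None:
        layer.velocity_w = torch.zeros_like(layer.grad_w)
        layer.velocity_b = torch.zeros_like(layer.grad_b)
    scale = lr / layer.x.size(0)   # keep the per-mini-batch averaging
    layer.velocity_w = beta * layer.velocity_w - scale * layer.grad_w
    layer.velocity_b = beta * layer.velocity_b - scale * layer.grad_b
    layer.w = layer.w + layer.velocity_w
    layer.b = layer.b + layer.velocity_b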
@@ -98,7 +100,8 @@ class Linear(Module):
         self.grad_w.zero_() #check with empty if encounter problem
         self.grad_b.zero_()
-class Tanh(Module):
+#this is tanh
+class sigma(Module):
     def __init__(self) -> None:
         super().__init__()
         self.x = None
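This hunk appears to rename Tanh to sigma and cuts off after __init__, so the class body is not visible here. Going by the "#this is tanh" comment, a module in the same input-caching pattern would plausibly look like the following sketch (the body is an assumption, not the file's code):

import torch

class TanhSketch:
    # Follows the same pattern as the modules above: forward caches the
    # input, backward applies the local derivative to the upstream grad.
    def __init__(self):
        self.x = None

    def forward(self, input):
        self.x = input
        return torch.tanh(self.x)

    def backward(self, gradwrtoutput):
        # d/dx tanh(x) = 1 - tanh(x)^2, evaluated on the cached input
        return (1 - torch.tanh(self.x) ** 2) * gradwrtoutput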
@@ -138,14 +141,14 @@ class leakyReLU(Module):
         super().__init__()
         self.x = None
-    def forward(self, input, a=0.01):
+    def forward(self, input, a=0.1):
         self.x = input
-        self.x[self.x <= 0] = self.x[self.x <= 0] * a
+        self.x[self.x < 0] = self.x[self.x < 0] * a
         return self.x
-    def backward(self, a, gradwrtoutput):
-        self.x[self.x <= 0] = 0
-        self.x[self.x > 0] = a
+    def backward(self, gradwrtoutput, a=0.1):
+        self.x[self.x > 0] = 1
+        self.x[self.x < 0] = a
         return self.x * gradwrtoutput
     def param(self):
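The fix above gives positive inputs slope 1 and negative inputs slope a, correcting the previous backward, which zeroed non-positive entries and gave positive ones slope a. Both forward and backward still overwrite the cached self.x in place, though, so the caller's tensor gets modified, a second backward call would see corrupted values, and entries exactly equal to 0 keep the value 0 in backward and so receive zero gradient. A variant that leaves the cache intact (an editor's sketch, not this commit's code):

import torch

class LeakyReLUSketch:
    def __init__(self, a=0.1):
        self.a = a
        self.x = None

    def forward(self, input):
        self.x = input   # cache the raw input, unmodified
        # torch.where builds a new tensor instead of editing in place
        return torch.where(input > 0, input, self.a * input)

    def backward(self, gradwrtoutput):
        # slope 1 where x > 0, slope a elsewhere (including x == 0)
        slope = torch.where(self.x > 0,
                            torch.ones_like(self.x),
                            torch.full_like(self.x, self.a))
        return slope * gradwrtoutput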
@@ -190,7 +193,7 @@ class BCE(Module):
     def forward(self, y, t): # output, target
-        return -torch.sum(y*torch.log(t) + (1-y)*torch.log(1-t))
+        return -torch.sum(y*torch.clamp(torch.log(t), min=-1) + (1-y)*torch.clamp(torch.log(1-t), min=-1))
     def backward(self, y, t):
         return (y - t).mean()
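Two things worth flagging in this hunk: the signature comment says y is the output and t the target, yet the logs are taken of t; and clamping the log at min=-1 caps each term's loss contribution at 1 rather than merely guarding against log(0). A numerically safer formulation clamps the predicted probabilities instead, as in this sketch (eps and the function names are assumptions, not the commit's code):

import torch

def bce_forward(y, t, eps=1e-7):
    # y: predicted probabilities, t: targets in {0, 1}
    y = torch.clamp(y, min=eps, max=1 - eps)   # keep log() finite
    return -torch.sum(t * torch.log(y) + (1 - t) * torch.log(1 - y))

def bce_backward(y, t, eps=1e-7):
    # elementwise dL/dy of the summed loss above: (y - t) / (y * (1 - y))
    y = torch.clamp(y, min=eps, max=1 - eps)
    return (y - t) / (y * (1 - y))

As for the existing backward, (y - t) is the familiar BCE gradient with respect to pre-sigmoid logits when y is a sigmoid output, but calling .mean() collapses it to a scalar rather than returning a per-element gradient.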