forked from itayhubara/BinaryNet
-
Notifications
You must be signed in to change notification settings - Fork 0
/
SqrHingeEmbeddingCriterion.lua
40 lines (34 loc) · 1.24 KB
/
SqrHingeEmbeddingCriterion.lua
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
--[[
This function implements the squared hinge loss criterion.
]]
-- NOTE(review): despite the "Sqrt" prefix, this criterion computes the
-- SQUARED hinge embedding loss (see updateOutput: the margin term is
-- pow(2)'d, never square-rooted). The name is kept for backward
-- compatibility with existing callers.
local SqrtHingeEmbeddingCriterion, parent = torch.class('SqrtHingeEmbeddingCriterion', 'nn.Criterion')
--- Constructor.
-- @param margin number, the hinge margin; defaults to 1 when nil/absent
function SqrtHingeEmbeddingCriterion:__init(margin)
parent.__init(self)
self.margin = margin or 1
-- When true (the default), loss and gradients are averaged over the
-- number of input elements (see updateOutput / updateGradInput).
self.sizeAverage = true
end
--- Forward pass: squared hinge embedding loss.
-- loss = sum( max(0, margin - y .* input)^2 ), divided by the number of
-- elements when self.sizeAverage is true (or unset).
-- @param input tensor of predictions
-- @param y tensor of targets (presumably in {-1, +1} — standard for hinge
--   embedding criteria; TODO confirm against callers), or a plain scalar
-- @return self.output, the scalar loss
function SqrtHingeEmbeddingCriterion:updateOutput(input, y)
   self.buffer = self.buffer or input.new()
   -- Promote a scalar target to a 1-element tensor so the element-wise
   -- operations below work uniformly.
   if not torch.isTensor(y) then
      self.ty = self.ty or input.new():resize(1)
      self.ty[1] = y
      y = self.ty
   end
   -- buffer = max(0, margin - y .* input); kept around because
   -- updateGradInput reuses it for the gradient.
   self.buffer:resizeAs(input):copy(input)
   self.buffer:cmul(y):mul(-1):add(self.margin)
   self.buffer[torch.le(self.buffer, 0)] = 0
   -- Square into a persistent scratch tensor instead of clone():pow(2),
   -- avoiding a fresh tensor allocation on every forward call.
   self._sq = self._sq or input.new()
   self._sq:resizeAs(self.buffer):copy(self.buffer):pow(2)
   self.output = self._sq:sum()
   if (self.sizeAverage == nil or self.sizeAverage == true) then
      self.output = self.output / input:nElement()
   end
   return self.output
end
--- Backward pass: gradient of the squared hinge embedding loss w.r.t. input.
-- d/dx [max(0, margin - y*x)]^2 = -2 * y * max(0, margin - y*x), which
-- reuses self.buffer as computed by updateOutput (standard nn criterion
-- protocol: updateOutput must run before updateGradInput).
-- @param input tensor of predictions (same shape as in updateOutput)
-- @param y tensor of targets, or a plain scalar
-- @return self.gradInput
function SqrtHingeEmbeddingCriterion:updateGradInput(input, y)
   if not torch.isTensor(y) then
      -- Robustness fix: lazily create self.ty here as well, so a scalar
      -- target no longer crashes with "attempt to index a nil value" if
      -- updateGradInput is reached without a prior scalar-target forward.
      self.ty = self.ty or input.new():resize(1)
      self.ty[1] = y
      y = self.ty
   end
   -- gradInput = -2 * y .* max(0, margin - y .* input)
   self.gradInput:resizeAs(input):copy(y):mul(-2):cmul(self.buffer)
   -- Explicitly zero the inactive region (y .* input > margin). This is
   -- redundant with the clamp applied to self.buffer in updateOutput, but
   -- is kept as a safety net in case the buffer is stale.
   self.gradInput[torch.cmul(y, input):gt(self.margin)] = 0
   if (self.sizeAverage == nil or self.sizeAverage == true) then
      self.gradInput:mul(1 / input:nElement())
   end
   return self.gradInput
end