@@ -75,7 +75,7 @@ def forward(self, sample, mode='single'):
         In the 'head-batch' or 'tail-batch' mode, sample consists two part.
         The first part is usually the positive sample.
         And the second part is the entities in the negative samples.
-        Becuase negative samples and positive samples usually share two elements
+        Because negative samples and positive samples usually share two elements
         in their triple ((head, relation) or (relation, tail)).
         '''
@@ -267,7 +267,7 @@ def train_step(model, optimizer, train_iterator, args):
         negative_score = model((positive_sample, negative_sample), mode=mode)

         if args.negative_adversarial_sampling:
-            #In self-negative sampling, we do not apply back-propagation on the sampling weight
+            #In self-adversarial sampling, we do not apply back-propagation on the sampling weight
             negative_score = (F.softmax(negative_score * args.adversarial_temperature, dim = 1).detach()
                               * F.logsigmoid(-negative_score)).sum(dim = 1)
         else:
@@ -278,8 +278,8 @@ def train_step(model, optimizer, train_iterator, args):
         positive_score = F.logsigmoid(positive_score).squeeze(dim = 1)

         if args.uni_weight:
-            positive_sample_loss = positive_score.mean()
-            negative_sample_loss = negative_score.mean()
+            positive_sample_loss = - positive_score.mean()
+            negative_sample_loss = - negative_score.mean()
         else:
             positive_sample_loss = - (subsampling_weight * positive_score).sum()/subsampling_weight.sum()
             negative_sample_loss = - (subsampling_weight * negative_score).sum()/subsampling_weight.sum()
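To read the change in isolation: the `uni_weight` branch was returning the mean log-likelihood instead of its negative, so minimizing it pushed the model the wrong way. Below is a minimal standalone sketch of the self-adversarial loss this patch corrects; the function name, tensor shapes, and the final averaging of the two terms are illustrative assumptions, not the repository's exact API.

```python
import torch
import torch.nn.functional as F

def self_adversarial_loss(positive_score, negative_score, adversarial_temperature=1.0):
    # positive_score: (batch,) raw scores for the true triples
    # negative_score: (batch, num_negatives) raw scores for corrupted triples

    # Weight each negative sample by a softmax over its own score; detach()
    # stops gradients from flowing through these sampling weights, matching
    # the comment fixed in the second hunk.
    weights = F.softmax(negative_score * adversarial_temperature, dim=1).detach()
    negative_term = (weights * F.logsigmoid(-negative_score)).sum(dim=1)

    positive_term = F.logsigmoid(positive_score)

    # The patch adds the leading minus signs: the loss is the *negative*
    # mean log-likelihood, so both terms are negated before averaging.
    positive_sample_loss = -positive_term.mean()
    negative_sample_loss = -negative_term.mean()
    return (positive_sample_loss + negative_sample_loss) / 2
```

With the minus signs in place, the unweighted branch agrees in sign with the subsampling-weighted branch in the `else` clause, which already negates both terms.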