Skip to content

Commit be06825

Browse files
committed
Fix uniform negative sampling and some typos
1 parent 82c3810 commit be06825

File tree

1 file changed: 4 additions, 4 deletions

codes/model.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ def forward(self, sample, mode='single'):
75  75          In the 'head-batch' or 'tail-batch' mode, sample consists two part.
76  76          The first part is usually the positive sample.
77  77          And the second part is the entities in the negative samples.
78     -        Becuase negative samples and positive samples usually share two elements
    78  +        Because negative samples and positive samples usually share two elements
79  79          in their triple ((head, relation) or (relation, tail)).
80  80          '''

@@ -267,7 +267,7 @@ def train_step(model, optimizer, train_iterator, args):
267 267         negative_score = model((positive_sample, negative_sample), mode=mode)
268 268
269 269         if args.negative_adversarial_sampling:
270     -           #In self-negative sampling, we do not apply back-propagation on the sampling weight
    270 +           #In self-adversarial sampling, we do not apply back-propagation on the sampling weight
271 271             negative_score = (F.softmax(negative_score * args.adversarial_temperature, dim = 1).detach()
272 272                               * F.logsigmoid(-negative_score)).sum(dim = 1)
273 273         else:
@@ -278,8 +278,8 @@ def train_step(model, optimizer, train_iterator, args):
278 278         positive_score = F.logsigmoid(positive_score).squeeze(dim = 1)
279 279
280 280         if args.uni_weight:
281     -           positive_sample_loss = positive_score.mean()
282     -           negative_sample_loss = negative_score.mean()
    281 +           positive_sample_loss = - positive_score.mean()
    282 +           negative_sample_loss = - negative_score.mean()
283 283         else:
284 284             positive_sample_loss = - (subsampling_weight * positive_score).sum()/subsampling_weight.sum()
285 285             negative_sample_loss = - (subsampling_weight * negative_score).sum()/subsampling_weight.sum()

0 commit comments

Comments (0)