Commit

fix a mistake about negative sampling
e-yi committed Oct 21, 2019
1 parent 1141945 commit 56d7432
Showing 1 changed file with 8 additions and 2 deletions.
model.py (10 changes: 8 additions & 2 deletions)
@@ -12,7 +12,8 @@ def __init__(self, node_size, path_size, embed_dim, sigmoid_reg=False, r=True):
         # self.args = args
 
         def binary_reg(x: torch.Tensor):
-            return (x >= 0).float()
+            raise NotImplementedError()
+            # return (x >= 0).float()  # do not have gradients
 
         self.reg = torch.sigmoid if sigmoid_reg else binary_reg
 
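A quick note on the first hunk, since the inline comment is terse: (x >= 0).float() is piecewise constant, so autograd cannot propagate a useful gradient through it, which is why the hard-threshold regularizer now raises and torch.sigmoid remains the usable option. A minimal check of that claim in plain PyTorch (the tensor values below are arbitrary):

import torch

x = torch.tensor([-1.0, 0.5, 2.0], requires_grad=True)

# The comparison detaches the result from the autograd graph entirely.
hard = (x >= 0).float()
print(hard.requires_grad)  # False -> no gradient can flow back to x

# The sigmoid surrogate is smooth and differentiable everywhere.
soft = torch.sigmoid(x)
soft.sum().backward()
print(x.grad)              # sigmoid'(x) = s * (1 - s), non-zero for all inputs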
@@ -78,18 +79,23 @@ def __init__(self, sample, path_size, neg=5):
         :param sample: return value of HIN.sample(), (start_node, end_node, path_id)
         """
 
+        print('init training dataset...')
+
         l = len(sample)
 
         x = np.tile(sample, (neg + 1, 1))
         y = np.zeros(l * (1 + neg))
         y[:l] = 1
 
-        x[l:, 2] = np.random.randint(0, path_size - 1, (l * neg,))
+        # x[l:, 2] = np.random.randint(0, path_size - 1, (l * neg,))
+        x[l:, 1] = np.random.randint(0, path_size - 1, (l * neg,))
 
         self.x = torch.LongTensor(x)
         self.y = torch.FloatTensor(y)
         self.length = len(x)
+
+        print('finished')
 
     def __getitem__(self, index):
         return self.x[index], self.y[index]
 
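For the second hunk, which is the actual negative-sampling fix: the dataset tiles the positive (start_node, end_node, path_id) triples neg + 1 times, labels the first block as positive, and overwrites one column of the remaining rows with random integers; the commit moves that corruption from the path_id column (index 2) to the end_node column (index 1). Below is a self-contained sketch of the construction, with the function name and toy input invented for illustration and the sampling range path_size - 1 kept verbatim from the changed line:

import numpy as np
import torch


def build_negative_samples(sample, path_size, neg=5):
    """sample: (N, 3) int array of (start_node, end_node, path_id) triples."""
    sample = np.asarray(sample)
    l = len(sample)

    x = np.tile(sample, (neg + 1, 1))  # shape (N * (neg + 1), 3)
    y = np.zeros(l * (1 + neg))
    y[:l] = 1                          # the first (untiled) block is labelled positive

    # Corrupt end_node for the negative rows, as in the fixed line.
    x[l:, 1] = np.random.randint(0, path_size - 1, (l * neg,))

    return torch.LongTensor(x), torch.FloatTensor(y)


# Three positive triples with five negatives each -> 18 rows in total.
pos = np.array([[0, 1, 2], [3, 4, 0], [5, 6, 1]])
x, y = build_negative_samples(pos, path_size=3)
print(x.shape, y.shape)  # torch.Size([18, 3]) torch.Size([18])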
