@obengwilliam
Last active May 6, 2021 20:09

Revisions

  1. obengwilliam revised this gist May 6, 2021. 1 changed file with 13 additions and 13 deletions.

    26 changes: 13 additions & 13 deletions mbg.py

```diff
@@ -1,25 +1,25 @@
 self.init_plot(self.FEATURES)
 
 has_converged = False
 
-iter = 0
+iter = 1
 
 while not has_converged:
-    i = np.random.randint(0, self.DATAPOINTS - 1)
+    print(f"iter {iter}")
 
-    for k in range(self.FEATURES):
-        self.gradient[k] = self.x[i][k] * (self.conditional_prob(1, i) - self.y[i])
-
-    for k in range(self.FEATURES):
-
-        self.theta[k] = self.theta[k] - self.LEARNING_RATE * self.gradient[k]
-
-    has_converged = all(
-        abs(gradient) < self.CONVERGENCE_MARGIN for gradient in self.gradient
-    )
+    minibatch = random.sample(range(0, self.DATAPOINTS), self.MINIBATCH_SIZE)
+    for k in range(self.FEATURES):
+        total = 0
+        for i in minibatch:
+            total += self.x[i][k] * self.conditional_prob(1, i) - self.y[i]
+        gradient = 1.0 / self.DATAPOINTS * total
+        self.gradient[k] = gradient
+
+    for k in range(0, self.FEATURES):
+        self.theta[k] -= self.LEARNING_RATE * self.gradient[k]
 
     if iter < 10 or iter % 5:
         self.update_plot(np.sum(np.square(self.gradient)))
+    has_converged = all(abs(i) < self.CONVERGENCE_MARGIN for i in self.gradient)
 
     iter += 1
```
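A note on this revision: the rewritten accumulation drops the parentheses that the first version had around `(self.conditional_prob(1, i) - self.y[i])`, so `self.y[i]` is subtracted from the running total instead of from the predicted probability, and the sum is scaled by `1.0 / self.DATAPOINTS` rather than by the mini-batch size. Below is a minimal corrected sketch of the gradient step, written as a method-body fragment in the gist's own style and assuming the same attributes the gist uses (`self.x`, `self.y`, `self.gradient`, `self.DATAPOINTS`, `self.MINIBATCH_SIZE`, `self.FEATURES`, `self.conditional_prob`, plus `import random`):

```python
# Hypothetical corrected mini-batch gradient step (not part of the gist).
# Assumes self.conditional_prob(1, i) returns P(y = 1 | x_i; theta).
minibatch = random.sample(range(self.DATAPOINTS), self.MINIBATCH_SIZE)
for k in range(self.FEATURES):
    total = 0.0
    for i in minibatch:
        # Parenthesized so y_i is subtracted from the probability,
        # giving the logistic-regression gradient term x_ik * (p_i - y_i).
        total += self.x[i][k] * (self.conditional_prob(1, i) - self.y[i])
    # Average over the mini-batch that was actually sampled.
    self.gradient[k] = total / self.MINIBATCH_SIZE
```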


  2. obengwilliam created this gist May 6, 2021.

    25 changes: 25 additions & 0 deletions mbg.py

```python
self.init_plot(self.FEATURES)

has_converged = False

iter = 0

while not has_converged:
    i = np.random.randint(0, self.DATAPOINTS - 1)

    for k in range(self.FEATURES):
        self.gradient[k] = self.x[i][k] * (self.conditional_prob(1, i) - self.y[i])

    for k in range(self.FEATURES):
        self.theta[k] = self.theta[k] - self.LEARNING_RATE * self.gradient[k]

    has_converged = all(
        abs(gradient) < self.CONVERGENCE_MARGIN for gradient in self.gradient
    )

    if iter < 10 or iter % 5:
        self.update_plot(np.sum(np.square(self.gradient)))

    iter += 1
```
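Because both revisions are method-body fragments that reference attributes the gist never defines, here is a hypothetical, self-contained harness showing how the final mini-batch loop might sit inside a logistic-regression class. Every name and constant below (`MinibatchLogReg`, the learning rate, margin, batch size, the `fit` iteration cap) is an assumption for illustration, not something the gist specifies; the sketch also applies the parenthesization and batch-size fixes noted above, and drops the `iter < 10 or iter % 5` plotting condition, which is truthy on every iteration except multiples of 5 and may not be what was intended.

```python
import random
import numpy as np

class MinibatchLogReg:
    """Hypothetical harness around the gist's mini-batch loop."""

    LEARNING_RATE = 0.1        # assumed value; not given in the gist
    CONVERGENCE_MARGIN = 1e-4  # assumed value; not given in the gist
    MINIBATCH_SIZE = 32        # assumed value; not given in the gist

    def __init__(self, x, y):
        self.x = np.asarray(x, dtype=float)   # shape (DATAPOINTS, FEATURES)
        self.y = np.asarray(y, dtype=float)   # labels in {0, 1}
        self.DATAPOINTS, self.FEATURES = self.x.shape
        self.theta = np.zeros(self.FEATURES)
        self.gradient = np.zeros(self.FEATURES)

    def conditional_prob(self, label, i):
        # Logistic model: P(y = 1 | x_i; theta) = sigmoid(theta . x_i).
        p1 = 1.0 / (1.0 + np.exp(-self.theta @ self.x[i]))
        return p1 if label == 1 else 1.0 - p1

    def fit(self, max_iter=10_000):
        # max_iter guards against non-convergence; the gist loops unboundedly.
        for iteration in range(1, max_iter + 1):
            minibatch = random.sample(range(self.DATAPOINTS), self.MINIBATCH_SIZE)
            for k in range(self.FEATURES):
                total = sum(
                    self.x[i][k] * (self.conditional_prob(1, i) - self.y[i])
                    for i in minibatch
                )
                self.gradient[k] = total / self.MINIBATCH_SIZE
            for k in range(self.FEATURES):
                self.theta[k] -= self.LEARNING_RATE * self.gradient[k]
            if all(abs(g) < self.CONVERGENCE_MARGIN for g in self.gradient):
                break
        return self.theta

# Example usage with synthetic data:
# rng = np.random.default_rng(0)
# x = rng.normal(size=(500, 3))
# y = (x @ np.array([1.0, -2.0, 0.5]) > 0).astype(float)
# theta = MinibatchLogReg(x, y).fit()
```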