Kernel: Python 3 (system-wide)

Julia Set
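The cells below implement the classic escape-time rendering of a Julia set. Each pixel is mapped to a starting point $z_0$ in the complex plane and iterated under

$$z_{k+1} = z_k^2 + c, \qquad c = -0.8 + 0.2\cos(t)\,i,$$

counting iterations until $|z_k| \ge 20$ or 50 steps have passed; the iteration count determines the pixel's brightness. Varying the parameter $t$ moves $c$ along a curve and morphs the fractal.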

import random
import taichi as ti
import matplotlib.pyplot as plt

ti.init(arch=ti.cpu)  # fields below require an initialized Taichi runtime
ti.__version__
n = 320
pixels = ti.field(dtype=float, shape=(n * 2, n))

@ti.func
def complex_sqr(z):
    # (a + bi)^2 = (a^2 - b^2) + 2ab*i, with z = [a, b]
    return ti.Vector([z[0]**2 - z[1]**2, z[1] * z[0] * 2])

@ti.kernel
def paint(t: float):
    for i, j in pixels:  # Parallelized over all pixels
        c = ti.Vector([-0.8, ti.cos(t) * 0.2])
        z = ti.Vector([i / n - 1, j / n - 0.5]) * 2
        iterations = 0
        while z.norm() < 20 and iterations < 50:
            z = complex_sqr(z) + c
            iterations += 1
        pixels[i, j] = 1 - iterations * 0.02
paint(10000)
plt.matshow(pixels.to_numpy().T, cmap="gray")  # transpose: the field is indexed (x, y), matshow expects (row, col)
from ipywidgets import interact

@interact
def julia(i=(0, 10000)):
    paint(int(i))
    plt.matshow(pixels.to_numpy().T, cmap="gray")
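If ipywidgets is unavailable, the same exploration works without a slider. A minimal sketch (not part of the original notebook; the step size and filename pattern are arbitrary choices) that renders a few frames at increasing t and saves each to disk:

# Sketch: step t manually and save each frame instead of using a slider.
for frame, t in enumerate(range(0, 10000, 2500)):
    paint(float(t))
    plt.matshow(pixels.to_numpy().T, cmap="gray")
    plt.savefig(f"julia_{frame:02d}.png")  # hypothetical output names
    plt.close()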

Gradient/Residual
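The next cells use Taichi's reverse-mode autodiff to minimize a residual loss between two vectors. With

$$L = \tfrac{1}{2}\sum_{i=1}^{n}(x_i - y_i)^2, \qquad \frac{\partial L}{\partial x_i} = x_i - y_i,$$

each gradient-descent step $x_i \leftarrow x_i - 0.1\,(x_i - y_i)$ shrinks the residual by a factor of 0.9, so after 100 steps $x$ agrees with $y$ to about $0.9^{100} \approx 3 \times 10^{-5}$ of the initial gap.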

ti.init(arch=ti.cpu)

n = 8
x = ti.field(dtype=ti.f32, shape=n, needs_grad=True)   # optimization variable
y = ti.field(dtype=ti.f32, shape=n)                    # fixed target
L = ti.field(dtype=ti.f32, shape=(), needs_grad=True)  # scalar loss

@ti.kernel
def reduce():
    for i in range(n):
        # += on the 0-d loss field is atomic in parallelized Taichi loops
        L[None] += 0.5 * (x[i] - y[i])**2

# Initialize vectors
for i in range(n):
    x[i] = random.random()
    y[i] = random.random()

@ti.kernel
def gradient_descent():
    for i in x:
        x[i] -= x.grad[i] * 0.1
# Optimize with 100 gradient descent iterations
for k in range(100):
    with ti.Tape(loss=L):
        reduce()
    print('Loss =', L[None])
    gradient_descent()

for i in range(n):
    # Now you should approximately have x[i] == y[i]
    print(x[i], y[i])
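As an optional sanity check (not part of the original notebook), the gradient recorded by ti.Tape can be compared against the analytic gradient derived above, dL/dx_i = x_i - y_i:

# Run one more taped forward pass, then compare Taichi's autodiff
# gradient with the hand-derived one; the two columns should match.
with ti.Tape(loss=L):
    reduce()
for i in range(n):
    print('autodiff:', x.grad[i], 'analytic:', x[i] - y[i])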