Kernel: Python 3 (system-wide)

Julia Set
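This demo renders a Julia set with Taichi. Each pixel is mapped to a point z in the complex plane and iterated under z ← z² + c with c = (-0.8, 0.2·cos t); the pixel is shaded by how many iterations it takes |z| to escape a fixed radius (classic escape-time coloring, capped at 50 iterations here).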

import random

import taichi as ti
import matplotlib.pyplot as plt

ti.init(arch=ti.cpu)  # Taichi must be initialized before any fields are created
ti.__version__
(0, 8, 4)
n = 320
pixels = ti.field(dtype=float, shape=(n * 2, n))

@ti.func
def complex_sqr(z):
    return ti.Vector([z[0]**2 - z[1]**2, z[1] * z[0] * 2])

@ti.kernel
def paint(t: float):
    for i, j in pixels:  # Parallelized over all pixels
        c = ti.Vector([-0.8, ti.cos(t) * 0.2])
        z = ti.Vector([i / n - 1, j / n - 0.5]) * 2
        iterations = 0
        while z.norm() < 20 and iterations < 50:
            z = complex_sqr(z) + c
            iterations += 1
        pixels[i, j] = 1 - iterations * 0.02
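For reference, the escape-time loop that paint() runs for every pixel can be written for a single point in plain Python with the built-in complex type (a minimal sketch for illustration only; the Taichi kernel executes the same logic in parallel over the whole field):

def escape_time(z: complex, c: complex, max_iter: int = 50) -> int:
    # Iterate z <- z^2 + c until |z| escapes the radius or the budget runs out
    iterations = 0
    while abs(z) < 20 and iterations < max_iter:
        z = z * z + c
        iterations += 1
    return iterations

# The pixel shade computed by paint() is then 1 - 0.02 * escape_time(z0, c)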
paint(10000)
plt.matshow(pixels.to_numpy().T, cmap="gray")  # transpose so the wide field axis maps to the horizontal image axis
<matplotlib.image.AxesImage at 0x7f9bd17a93a0>
[Output: grayscale rendering of the Julia set]
from ipywidgets import interact

@interact
def julia(i=(0, 10000)):
    paint(int(i))
    plt.matshow(pixels.to_numpy().T, cmap="gray")
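Dragging the slider re-runs paint() with the chosen value of t, which moves c = (-0.8, 0.2·cos t) through the parameter plane and animates the fractal.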

Gradient/Residual
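This example uses Taichi's reverse-mode autodiff to minimize the squared residual between two vectors. The loss is L = Σᵢ ½(xᵢ − yᵢ)², so its gradient is ∂L/∂xᵢ = xᵢ − yᵢ, and each gradient-descent step performs xᵢ ← xᵢ − 0.1·(xᵢ − yᵢ). Taichi computes x.grad automatically from the reduce() kernel recorded by ti.Tape.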

ti.init(arch=ti.cpu)

n = 8
x = ti.field(dtype=ti.f32, shape=n, needs_grad=True)
y = ti.field(dtype=ti.f32, shape=n)
L = ti.field(dtype=ti.f32, shape=(), needs_grad=True)

@ti.kernel
def reduce():
    for i in range(n):
        L[None] += 0.5 * (x[i] - y[i])**2

# Initialize vectors
for i in range(n):
    x[i] = random.random()
    y[i] = random.random()

@ti.kernel
def gradient_descent():
    for i in x:
        x[i] -= x.grad[i] * 0.1
[Taichi] Starting on arch=x64
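As a sanity check, the same optimization can be written in plain NumPy using the analytic gradient derived above (a sketch for comparison only; the variable names here are illustrative and not part of the original notebook):

import numpy as np

x_np = np.random.rand(8)
y_np = np.random.rand(8)
for _ in range(100):
    grad = x_np - y_np   # dL/dx = x - y, which is what ti.Tape computes into x.grad
    x_np -= 0.1 * grad   # same step size as gradient_descent()
print(np.abs(x_np - y_np).max())  # tiny: x has converged to y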
# Optimize with 100 gradient descent iterations
for k in range(100):
    with ti.Tape(loss=L):
        reduce()
    print('Loss =', L[None])
    gradient_descent()

for i in range(n):
    # Now you should approximately have x[i] == y[i]
    print(x[i], y[i])
Loss = 1.1389164924621582
Loss = 0.9225223660469055
Loss = 0.7472430467605591
Loss = 0.6052669286727905
Loss = 0.49026617407798767
Loss = 0.3971155881881714
Loss = 0.32166361808776855
Loss = 0.26054754853248596
Loss = 0.21104347705841064
Loss = 0.1709452122449875
Loss = 0.13846561312675476
Loss = 0.11215715110301971
Loss = 0.09084729105234146
Loss = 0.07358631491661072
Loss = 0.059604912996292114
Loss = 0.04827997088432312
Loss = 0.03910677134990692
Loss = 0.031676482409238815
Loss = 0.02565794810652733
Loss = 0.020782940089702606
Loss = 0.016834180802106857
Loss = 0.013635682873427868
Loss = 0.01104490365833044
Loss = 0.008946369402110577
Loss = 0.007246557157486677
Loss = 0.005869708489626646
Loss = 0.0047544632107019424
Loss = 0.003851114772260189
Loss = 0.0031194020994007587
Loss = 0.002526714699342847
Loss = 0.0020466384012252092
Loss = 0.0016577770002186298
Loss = 0.0013428000966086984
Loss = 0.0010876673040911555
Loss = 0.000881010724697262
Loss = 0.0007136187632568181
Loss = 0.0005780311767011881
Loss = 0.0004682051367126405
Loss = 0.0003792463685385883
Loss = 0.0003071892133448273
Loss = 0.0002488234604243189
Loss = 0.0002015464415308088
Loss = 0.00016325234901160002
Loss = 0.00013223463611211628
Loss = 0.00010711004870245233
Loss = 8.675886056153104e-05
Loss = 7.027461106190458e-05
Loss = 5.692265403922647e-05
Loss = 4.610730684362352e-05
Loss = 3.7346715544117615e-05
Loss = 3.0250737836468033e-05
Loss = 2.4502987798769027e-05
Loss = 1.9847540897899307e-05
Loss = 1.607660306035541e-05
Loss = 1.302214150200598e-05
Loss = 1.0547945748839993e-05
Loss = 8.54378959047608e-06
Loss = 6.920437499502441e-06
Loss = 5.605516435025493e-06
Loss = 4.540357167570619e-06
Loss = 3.6777394143427955e-06
Loss = 2.9789539439661894e-06
Loss = 2.412929234196781e-06
Loss = 1.9545054783520754e-06
Loss = 1.583156290507759e-06
Loss = 1.282345010622521e-06
Loss = 1.0386710300736013e-06
Loss = 8.413223895331612e-07
Loss = 6.814877338001679e-07
Loss = 5.520000740943942e-07
Loss = 4.4713519287142844e-07
Loss = 3.6216951571077516e-07
Loss = 2.933398377535923e-07
Loss = 2.3759915279697452e-07
Loss = 1.9243580595684762e-07
Loss = 1.5587072255129897e-07
Loss = 1.2625341128114087e-07
Loss = 1.0226438718063946e-07
Loss = 8.283029728772817e-08
Loss = 6.708797428700564e-08
Loss = 5.434258198988573e-08
Loss = 4.4016012168413e-08
Loss = 3.565469341992866e-08
Loss = 2.8875057012101024e-08
Loss = 2.3388583514361017e-08
Loss = 1.8945989666008245e-08
Loss = 1.5343122328204117e-08
Loss = 1.2429103968258914e-08
Loss = 1.006759031696447e-08
Loss = 8.154136033056147e-09
Loss = 6.6053913450048185e-09
Loss = 5.351447285306676e-09
Loss = 4.335420911161236e-09
Loss = 3.5109328777593873e-09
Loss = 2.8445414912425804e-09
Loss = 2.304052504342735e-09
Loss = 1.8671542090231696e-09
Loss = 1.5122422203361907e-09
Loss = 1.2247408642807045e-09
Loss = 9.919919330414473e-10
0.05810321867465973 0.058101363480091095
0.9867505431175232 0.9867596626281738
0.803779125213623 0.8037930130958557
0.9251376986503601 0.9251614809036255
0.26498156785964966 0.26496800780296326
0.06710710376501083 0.06708946079015732
0.1637413501739502 0.1637296825647354
0.8130745887756348 0.8130860924720764
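Note the geometric convergence: each step multiplies every residual xᵢ − yᵢ by 0.9, so the loss shrinks by a factor of 0.9² = 0.81 per iteration, which matches the printed values (e.g. 0.9225 / 1.1389 ≈ 0.81).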