# coding: utf-8

import sys
from python_environment_check import check_packages
import torch
import torch.nn as nn

# # Machine Learning with PyTorch and Scikit-Learn
# # -- Code Examples

# ## Package version checks

# Add the parent folder to the path so that the check_packages.py script can be imported:


sys.path.insert(0, '..')


# Check recommended package versions:


d = {
    'torch': '1.8.0',
}
check_packages(d)


# # Chapter 15: Modeling Sequential Data Using Recurrent Neural Networks (Part 1/3)

# **Outline**
#
# - [Introducing sequential data](#Introducing-sequential-data)
#   - [Modeling sequential data -- order matters](#Modeling-sequential-data----order-matters)
#   - [Sequential data versus time series data](#Sequential-data-versus-time-series-data)
#   - [Representing sequences](#Representing-sequences)
#   - [The different categories of sequence modeling](#The-different-categories-of-sequence-modeling)
# - [RNNs for modeling sequences](#RNNs-for-modeling-sequences)
#   - [Understanding the dataflow in RNNs](#Understanding-the-dataflow-in-RNNs)
#   - [Computing activations in an RNN](#Computing-activations-in-an-RNN)
#   - [Hidden recurrence versus output recurrence](#Hidden-recurrence-versus-output-recurrence)
#   - [The challenges of learning long-range interactions](#The-challenges-of-learning-long-range-interactions)
#   - [Long short-term memory cells](#Long-short-term-memory-cells)


# # Introducing sequential data
#
# ## Modeling sequential data -- order matters
#
# ## Sequential data versus time series data
#
# ## Representing sequences
#


# ## The different categories of sequence modeling


# # RNNs for modeling sequences
#
# ## Understanding the dataflow in RNNs
#


# ## Computing activations in an RNN
#
# With a single hidden layer, the activation at time step t combines the
# current input with the previous hidden state:
#
#     h_t = tanh(x_t W_xh^T + b_xh + h_(t-1) W_hh^T + b_hh)
#
# which is exactly what the manual computation below verifies.


# ## Hidden recurrence versus output recurrence

# nn.RNN implements *hidden* recurrence: the previous hidden state h_(t-1) is
# fed back into the hidden-layer computation. (A sketch of output recurrence
# appears at the end of this script.)


torch.manual_seed(1)

rnn_layer = nn.RNN(input_size=5, hidden_size=2, num_layers=1, batch_first=True)

w_xh = rnn_layer.weight_ih_l0
w_hh = rnn_layer.weight_hh_l0
b_xh = rnn_layer.bias_ih_l0
b_hh = rnn_layer.bias_hh_l0

print('W_xh shape:', w_xh.shape)  # (hidden_size, input_size) -> (2, 5)
print('W_hh shape:', w_hh.shape)  # (hidden_size, hidden_size) -> (2, 2)
print('b_xh shape:', b_xh.shape)  # (hidden_size,) -> (2,)
print('b_hh shape:', b_hh.shape)  # (hidden_size,) -> (2,)


x_seq = torch.tensor([[1.0]*5, [2.0]*5, [3.0]*5]).float()

## output of the simple RNN:
output, hn = rnn_layer(torch.reshape(x_seq, (1, 3, 5)))

## manually computing the output:
out_man = []
for t in range(3):
    xt = torch.reshape(x_seq[t], (1, 5))
    print(f'Time step {t} =>')
    print('   Input           :', xt.numpy())

    # net input from the current input x_t
    ht = torch.matmul(xt, torch.transpose(w_xh, 0, 1)) + b_xh
    print('   Hidden          :', ht.detach().numpy())

    if t > 0:
        prev_h = out_man[t - 1]
    else:
        prev_h = torch.zeros(ht.shape)

    # add the recurrent contribution from h_(t-1), then apply tanh
    ot = ht + torch.matmul(prev_h, torch.transpose(w_hh, 0, 1)) + b_hh
    ot = torch.tanh(ot)
    out_man.append(ot)
    print('   Output (manual) :', ot.detach().numpy())
    print('   RNN output      :', output[:, t].detach().numpy())
    print()


# ## The challenges of learning long-range interactions
#
# (A small numeric illustration of vanishing and exploding gradients appears
# at the end of this script.)


#
# ## Long short-term memory cells
#
# (An LSTM counterpart to the manual RNN computation above appears at the end
# of this script.)


#
# ---
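
# The three cells below are illustrative sketches, not part of the book's
# original code examples; all weight names here (W_xh_sk, W_oh_sk, W_hy_sk)
# are made-up stand-ins.

# **Sketch 1 -- output recurrence.** In contrast to the hidden recurrence that
# nn.RNN implements above, here the recurrent edge feeds the previous *output*
# y_(t-1) back into the hidden-layer computation (biases omitted for brevity):


torch.manual_seed(1)

W_xh_sk = torch.randn(2, 5)  # input -> hidden
W_oh_sk = torch.randn(2, 2)  # previous output -> hidden (the recurrent edge)
W_hy_sk = torch.randn(2, 2)  # hidden -> output

y_prev = torch.zeros(1, 2)
for t in range(3):
    xt = torch.reshape(x_seq[t], (1, 5))
    # hidden state depends on x_t and the previous *output*, not h_(t-1)
    ht = torch.tanh(torch.matmul(xt, torch.transpose(W_xh_sk, 0, 1))
                    + torch.matmul(y_prev, torch.transpose(W_oh_sk, 0, 1)))
    y_prev = torch.matmul(ht, torch.transpose(W_hy_sk, 0, 1))
    print(f'Time step {t} => output:', y_prev.numpy())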
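
# **Sketch 2 -- vanishing and exploding gradients.** Backpropagating through T
# time steps multiplies the gradient by the recurrent weight (roughly) T times.
# With a scalar stand-in weight w, the factor w**T vanishes for |w| < 1 and
# explodes for |w| > 1:


for w in (0.5, 1.0, 2.0):
    print(f'w = {w}: gradient factor after 100 steps ~ {w**100:.3e}')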
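
# **Sketch 3 -- manually computing an LSTM step.** The same verification idea
# as for the simple RNN above, applied to nn.LSTM. PyTorch packs the four gate
# parameter blocks row-wise in the order input, forget, cell (g), and output
# gate, so splitting the net input into hidden_size-sized chunks recovers them:


torch.manual_seed(1)

lstm_layer = nn.LSTM(input_size=5, hidden_size=2, num_layers=1, batch_first=True)

w_ih = lstm_layer.weight_ih_l0       # shape: (4*hidden_size, input_size)
w_hh_lstm = lstm_layer.weight_hh_l0  # shape: (4*hidden_size, hidden_size)
b_ih = lstm_layer.bias_ih_l0
b_hh_lstm = lstm_layer.bias_hh_l0

## output of the LSTM layer:
output_lstm, (h_n, c_n) = lstm_layer(torch.reshape(x_seq, (1, 3, 5)))

## manually computing the output:
h_prev = torch.zeros(1, 2)
c_prev = torch.zeros(1, 2)
for t in range(3):
    xt = torch.reshape(x_seq[t], (1, 5))
    gates = (torch.matmul(xt, torch.transpose(w_ih, 0, 1)) + b_ih
             + torch.matmul(h_prev, torch.transpose(w_hh_lstm, 0, 1)) + b_hh_lstm)
    i_t, f_t, g_t, o_t = torch.split(gates, 2, dim=1)  # chunks of hidden_size=2
    i_t, f_t, o_t = torch.sigmoid(i_t), torch.sigmoid(f_t), torch.sigmoid(o_t)
    g_t = torch.tanh(g_t)
    c_prev = f_t * c_prev + i_t * g_t   # cell state update
    h_prev = o_t * torch.tanh(c_prev)   # hidden state / output
    print(f'Time step {t} =>')
    print('   LSTM output     :', output_lstm[:, t].detach().numpy())
    print('   Manual output   :', h_prev.detach().numpy())
    print()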