GitHub Repository: probml/pyprobml
Path: blob/master/internal/add_readme.ipynb
Kernel: Python [conda env:root]
import re
from glob import glob
import requests
import pandas as pd

Get chapter names

# Scrape the chapter names from the per-chapter supplement pages of pml-book.
chap_names = {}
for chap_no in range(1, 24):
    suppl = f"https://raw.githubusercontent.com/probml/pml-book/main/pml1/supplements/chap{chap_no}.md"
    text = requests.get(suppl).text
    names = re.findall(r"Chapter.+?[(](.+)[)]", text)
    chap_names[chap_no] = names[0]
    print(chap_no, names)
1 ['Introduction']
2 ['Probability: univariate models']
3 ['Probability: multivariate models']
4 ['Statistics']
5 ['Decision theory']
6 ['Information theory']
7 ['Linear algebra']
8 ['Optimization']
9 ['Linear discriminant analysis']
10 ['Logistic regression']
11 ['Linear regression']
12 ['Generalized linear models']
13 ['Neural networks for unstructured data']
14 ['Neural networks for images']
15 ['Neural networks for sequences']
16 ['Exemplar-based methods']
17 ['Kernel methods']
18 ['Trees']
19 ['Learning with fewer labeled examples']
20 ['Dimensionality reduction']
21 ['Clustering']
22 ['Recommender systems']
23 ['Graph embeddings']
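Note that names[0] will raise an IndexError if a supplement page is missing or its title line changes. A more defensive variant could look like the following minimal sketch; the fetch_chapter_name helper and the None fallback are assumptions for illustration, not part of the original notebook.

# Hedged sketch: surface HTTP errors and tolerate unparsable titles,
# instead of failing on names[0].
def fetch_chapter_name(chap_no):
    url = f"https://raw.githubusercontent.com/probml/pml-book/main/pml1/supplements/chap{chap_no}.md"
    resp = requests.get(url)
    resp.raise_for_status()  # fail loudly on 404s etc.
    matches = re.findall(r"Chapter.+?[(](.+)[)]", resp.text)
    return matches[0] if matches else None  # None marks an unparsed title

# chap_names = {n: fetch_chapter_name(n) for n in range(1, 24)}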
df = pd.DataFrame(chap_names.items(), columns=["chap_no", "chap_name"])
df
# Save the chapter-number-to-name mapping for reuse elsewhere in the repo.
df.to_csv("chapter_no_to_name_mapping.csv", index=False)
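As a quick sanity check (not in the original notebook), the CSV can be read back to confirm the mapping round-trips; this assumes the cell is run from the same working directory.

# Round-trip check on the saved mapping.
mapping = pd.read_csv("chapter_no_to_name_mapping.csv")
assert len(mapping) == 23
mapping.head()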

Create a README.md

# Build the README as a markdown table mapping chapter numbers to notebook folders.
content = '''
# "Probabilistic Machine Learning: An Introduction"

## Chapters
|Chapter|Name| Notebooks|
|-|-|-|
'''
for chap_no in range(1, 24):
    # chap_url is computed but unused; the table links are relative to the README.
    chap_url = f"https://github.com/probml/pyprobml/tree/master/notebooks/book1/{chap_no:02d}"
    content += f"| {chap_no} | {chap_names[chap_no]} | [{chap_no:02d}/]({chap_no:02d}/) |\n"
content
'\n# "Probabilistic Machine Learning: An Introduction"\n\n## Chapters\n|Chapter|Name| Notebooks|\n|-|-|-|\n| 1 | Introduction | [01/](01/) |\n| 2 | Probability: univariate models | [02/](02/) |\n| 3 | Probability: multivariate models | [03/](03/) |\n| 4 | Statistics | [04/](04/) |\n| 5 | Decision theory | [05/](05/) |\n| 6 | Information theory | [06/](06/) |\n| 7 | Linear algebra | [07/](07/) |\n| 8 | Optimization | [08/](08/) |\n| 9 | Linear discriminant analysis | [09/](09/) |\n| 10 | Logistic regression | [10/](10/) |\n| 11 | Linear regression | [11/](11/) |\n| 12 | Generalized linear models | [12/](12/) |\n| 13 | Neural networks for unstructured data | [13/](13/) |\n| 14 | Neural networks for images | [14/](14/) |\n| 15 | Neural networks for sequences | [15/](15/) |\n| 16 | Exemplar-based methods | [16/](16/) |\n| 17 | Kernel methods | [17/](17/) |\n| 18 | Trees | [18/](18/) |\n| 19 | Learning with fewer labeled examples | [19/](19/) |\n| 20 | Dimensionality reduction | [20/](20/) |\n| 21 | Clustering | [21/](21/) |\n| 22 | Recommender systems | [22/](22/) |\n| 23 | Graph embeddings | [23/](23/) |\n'
readme_file = "../notebooks/book1/README.md"
with open(readme_file, "w") as fp:
    fp.write(content)
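To confirm the file was written as intended, the first few lines of the generated README can be printed back; this quick check is not part of the original notebook and assumes the relative path above resolves from the internal/ folder.

# Read back the generated README and show the table header.
with open(readme_file) as fp:
    print("".join(fp.readlines()[:8]))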