GitHub Repository: GRAAL-Research/deepparse
Path: blob/main/examples/fine_tuning_with_csv_dataset.py
# pylint: skip-file
###################
"""
IMPORTANT:
THE EXAMPLE IN THIS FILE IS CURRENTLY NOT FUNCTIONAL
BECAUSE THE `download_from_public_repository` FUNCTION
NO LONGER EXISTS. WE HAD TO MAKE A QUICK RELEASE TO
REMEDIATE AN ISSUE IN OUR PREVIOUS STORAGE SOLUTION.
THIS WILL BE FIXED IN A FUTURE RELEASE.

IN THE MEANTIME, IF YOU NEED ANY CLARIFICATION
REGARDING THE PACKAGE, PLEASE FEEL FREE TO OPEN AN ISSUE.
"""
import os

import poutyne

from deepparse import download_from_public_repository
from deepparse.dataset_container import CSVDatasetContainer
from deepparse.parser import AddressParser
# First, let's download the train and test data, in CSV format, from the public repository.
saving_dir = "./data"
file_extension = "csv"
training_dataset_name = "sample_incomplete_data"
test_dataset_name = "test_sample_data"
download_from_public_repository(training_dataset_name, saving_dir, file_extension=file_extension)
download_from_public_repository(test_dataset_name, saving_dir, file_extension=file_extension)
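# Since `download_from_public_repository` is gone, here is a minimal stand-in sketch using
# only the standard library. The base URL below is a HYPOTHETICAL placeholder, not the
# package's actual storage location; point it at wherever the two CSV files are hosted.
#
# import urllib.request
#
# def download_csv(base_url, dataset_name, saving_dir, file_extension="csv"):
#     os.makedirs(saving_dir, exist_ok=True)
#     file_name = dataset_name + "." + file_extension
#     urllib.request.urlretrieve(f"{base_url}/{file_name}", os.path.join(saving_dir, file_name))
#
# download_csv("https://example.com/deepparse-data", training_dataset_name, saving_dir)
# download_csv("https://example.com/deepparse-data", test_dataset_name, saving_dir)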
# Now let's create a training and a test container.
training_container = CSVDatasetContainer(
    os.path.join(saving_dir, training_dataset_name + "." + file_extension),
    column_names=["Address", "Tags"],
    separator=",",
)
test_container = CSVDatasetContainer(
    os.path.join(saving_dir, test_dataset_name + "." + file_extension),
    column_names=["Address", "Tags"],
    separator=",",
)
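# For reference, each CSV row pairs a full address string with its token tags under the two
# columns declared in `column_names` above. An illustrative row (made-up values, not taken
# from the actual sample files) could look like:
#
#   Address,Tags
#   "350 rue des Lilas Ouest Quebec Quebec G1L 1B6","['StreetNumber', 'StreetName', ...]"
#
# Check the downloaded sample files for the exact tag serialization they use.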
# We will retrain the FastText version of our pretrained model.
address_parser = AddressParser(model_type="fasttext", device=0)
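# Note: device=0 targets the first GPU; AddressParser also accepts "cpu" if no GPU is
# available.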
# Now, let's retrain for 5 epochs using a batch size of 8 since the dataset is really small for this example.
# Let's start with the default learning rate of 0.01 and use a learning rate scheduler to lower the learning
# rate as training progresses.
lr_scheduler = poutyne.StepLR(step_size=1, gamma=0.1)  # reduce the LR by a factor of 10 each epoch
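# With step_size=1 and gamma=0.1, the learning rate per epoch is 0.01, 0.001, 1e-4, 1e-5,
# and finally 1e-6 over the 5 epochs.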
# The checkpoints (ckpt) are saved in the default "./checkpoints" directory, so if you wish to retrain
# another model (let's say BPEmb), you need to change the `logging_path` directory; otherwise, you will get
# an error when retraining since Poutyne will try to use the last checkpoint.
address_parser.retrain(
    training_container,
    train_ratio=0.8,
    epochs=5,
    batch_size=8,
    num_workers=2,
    callbacks=[lr_scheduler],
)
# Now, let's test our fine-tuned model using the best checkpoint (default parameter).
address_parser.test(test_container, batch_size=256)
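# As a quick sanity check (an illustrative addition, not part of the original example), the
# fine-tuned parser can be called directly on an address string:
parsed_address = address_parser("350 rue des Lilas Ouest Quebec city Quebec G1L 1B6")
print(parsed_address)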
# Now let's retrain the FastText version but with an attention mechanism.
address_parser = AddressParser(model_type="fasttext", device=0, attention_mechanism=True)
# Since the previous checkpoints were saved in the default "./checkpoints" directory, we need to use a new one.
# Otherwise, Poutyne will try to reload the previous checkpoints, and our model has changed.
address_parser.retrain(
    training_container,
    train_ratio=0.8,
    epochs=5,
    batch_size=8,
    num_workers=2,
    callbacks=[lr_scheduler],
    logging_path="checkpoints_attention",
)
# Now, let's test our fine-tuned model using the best checkpoint (default parameter).
address_parser.test(test_container, batch_size=256)
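# To reuse the retrained weights later without retraining, a new parser can be built from the
# saved checkpoint via `path_to_retrained_model`. A minimal sketch; the exact .ckpt file name
# is an ASSUMPTION, so check the `logging_path` directory for the file retrain() produced:
# address_parser = AddressParser(
#     model_type="fasttext",
#     device=0,
#     attention_mechanism=True,
#     path_to_retrained_model="checkpoints_attention/retrained_fasttext_attention_address_parser.ckpt",
# )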