GitHub Repository: GRAAL-Research/deepparse
Path: blob/main/examples/fine_tuning_uri.py
# pylint: skip-file
###################
"""
IMPORTANT:
THE EXAMPLE IN THIS FILE IS CURRENTLY NOT FUNCTIONAL
BECAUSE THE `download_from_public_repository` FUNCTION
NO LONGER EXISTS. WE HAD TO MAKE A QUICK RELEASE TO
REMEDIATE AN ISSUE IN OUR PREVIOUS STORAGE SOLUTION.
THIS WILL BE FIXED IN A FUTURE RELEASE.

IN THE MEANTIME, IF YOU NEED ANY CLARIFICATION
REGARDING THE PACKAGE, PLEASE FEEL FREE TO OPEN AN ISSUE.
"""
import os

import poutyne

from deepparse import download_from_public_repository
from deepparse.dataset_container import PickleDatasetContainer
from deepparse.parser import AddressParser

# First, let's download the train and test data from the public repository.
saving_dir = "./data"
file_extension = "p"
training_dataset_name = "sample_incomplete_data"
test_dataset_name = "test_sample_data"
download_from_public_repository(training_dataset_name, saving_dir, file_extension=file_extension)
download_from_public_repository(test_dataset_name, saving_dir, file_extension=file_extension)
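# Since `download_from_public_repository` no longer exists (see the notice at
# the top of this file), here is a minimal, hypothetical sketch of how you
# could fetch the two pickled datasets yourself with the standard library.
# The base URL below is a placeholder you would point at your own hosting; it
# is not a real deepparse endpoint.
#
# import urllib.request
#
# base_url = "https://<your_host>/deepparse-datasets"  # placeholder, not a real URL
# os.makedirs(saving_dir, exist_ok=True)
# for dataset_name in (training_dataset_name, test_dataset_name):
#     file_name = dataset_name + "." + file_extension
#     urllib.request.urlretrieve(f"{base_url}/{file_name}", os.path.join(saving_dir, file_name))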

# Now, let's create a training and a test container.
training_container = PickleDatasetContainer(os.path.join(saving_dir, training_dataset_name + "." + file_extension))
test_container = PickleDatasetContainer(os.path.join(saving_dir, test_dataset_name + "." + file_extension))
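# For reference, a PickleDatasetContainer expects the pickled file to hold a
# list of (address, tags) tuples, where the tags label each whitespace-split
# token of the address, e.g. (the address below is illustrative):
#     ("350 rue des Lilas Ouest Quebec Quebec G1L 1B6",
#      ["StreetNumber", "StreetName", "StreetName", "StreetName", "Orientation",
#       "Municipality", "Province", "PostalCode", "PostalCode"])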

# We will retrain the FastText version of our pretrained model.
path_to_your_uri = "s3://<path_to_your_bucket>/fasttext.ckpt"
address_parser = AddressParser(model_type="fasttext", device=0, path_to_retrained_model=path_to_your_uri)
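# Note that `path_to_retrained_model` is given as a URI (here, an S3 path)
# rather than a local file path, which is the point of this example.
# `device=0` selects the first CUDA device; pass device="cpu" if no GPU is
# available.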

# Now, let's retrain for 5 epochs using a batch size of 8, since the data is really small for this example.
# Let's start with the default learning rate of 0.01 and use a learning rate scheduler to lower the learning
# rate as we progress.
lr_scheduler = poutyne.StepLR(step_size=1, gamma=0.1)  # reduce the LR by a factor of 10 each epoch
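# poutyne.StepLR is Poutyne's callback wrapper around PyTorch's
# torch.optim.lr_scheduler.StepLR: with step_size=1 and gamma=0.1, the learning
# rate decays from 0.01 to 0.001 after the first epoch, to 0.0001 after the
# second, and so on.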

# The retrained model's best checkpoint (ckpt) will be saved in the S3 bucket <path_to_your_bucket>.
address_parser.retrain(
    training_container,
    logging_path="s3://<path_to_your_bucket>/",
    train_ratio=0.8,
    epochs=5,
    batch_size=8,
    num_workers=2,
    callbacks=[lr_scheduler],
)
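# A note on the arguments: train_ratio=0.8 keeps 80% of the training container
# for training and holds out the remaining 20% for validation, which is what
# the "best checkpoint" is selected on.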

# Now, let's test our fine-tuned model using the best checkpoint (default parameter).
address_parser.test(test_container, batch_size=256)
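# If you want to inspect the metrics, test() also returns its results, so you
# could instead write:
#     results = address_parser.test(test_container, batch_size=256)
#     print(results)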