Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
GRAAL-Research
GitHub Repository: GRAAL-Research/deepparse
Path: blob/main/models_evaluation/speed_test_evaluation.py
1232 views
1
import os.path
2
from statistics import mean
3
4
from memory_profiler import profile
5
6
from deepparse import download_from_public_repository
7
from deepparse.dataset_container import PickleDatasetContainer
8
from deepparse.parser import AddressParser
9
from models_evaluation.timer.timer import Timer
10
11
download_from_public_repository("speed_test_dataset", "./data", "p")
12
13
address_container = PickleDatasetContainer("./data/speed_test_dataset.p")
14
addresses, tags = zip(*address_container)
15
16
speed_test_directory = "results/speed_test_results"
17
os.makedirs(speed_test_directory, exist_ok=True)
18
19
20
@profile
def process_fn(batch_size_arg):
    """Parse the full address set once with the given batch size.

    Decorated with ``memory_profiler.profile`` so each call reports its
    memory usage. Relies on the module-level ``address_parser`` and
    ``addresses`` set up by the surrounding script.
    """
    parser_kwargs = {"batch_size": batch_size_arg}
    address_parser(addresses, **parser_kwargs)
23
24
25
if __name__ == '__main__':
    # Benchmark every (model, attention, device) configuration across a sweep
    # of batch sizes, writing per-address timings to one file per configuration.
    for model in ["fasttext", "bpemb", "fasttext-light"]:
        for attention_mechanism in [True, False]:
            for device in [0, "cpu"]:
                times = []
                results_file_path = os.path.join(
                    speed_test_directory,
                    f"speed_test_results_on_{device}_with_{model}_attention-{attention_mechanism}.txt",
                )
                for batch_size in [1, 2, 4, 8, 16, 32, 64, 128, 256, 512]:
                    address_parser = AddressParser(
                        model_type=model,
                        device=device,
                        attention_mechanism=attention_mechanism,
                    )
                    timer = Timer()
                    with timer:
                        # BUG FIX: the original called process_fn() with no
                        # argument, which raises TypeError because
                        # process_fn(batch_size_arg) has a required parameter.
                        process_fn(batch_size)
                    # Average wall-clock time per address for this batch size.
                    per_address_time = timer.elapsed_time / len(addresses)
                    if batch_size == 1:
                        # "w" here starts a fresh file for this configuration.
                        with open(results_file_path, "w") as file:
                            print(
                                "Temps moyen pour batch size avec ",
                                device,
                                "et batch size de ",
                                batch_size,
                                " : ",
                                per_address_time,
                                file=file,
                            )
                    else:
                        times.append(per_address_time)
                # BUG FIX: the original reopened the file with mode "w" on
                # every batch-size iteration, clobbering the batch_size == 1
                # line and every intermediate mean; only the last overwrite
                # survived. Append the aggregate once, after the sweep.
                with open(results_file_path, "a") as file:
                    print(
                        "temps moyen pour batch size avec batch size > 1:",
                        mean(times),
                        file=file,
                    )
63
64