GitHub Repository: S2-group/android-runner
Path: blob/master/AndroidRunner/Plugins/trepn/Trepn.py
import csv
import errno
import json
import os
import os.path as op
import time
from collections import OrderedDict

import lxml.etree as et
from lxml.etree import ElementTree

from AndroidRunner.Plugins.Profiler import Profiler
from functools import reduce
from AndroidRunner import util


class Trepn(Profiler):
    DEVICE_PATH = '/sdcard/trepn/'

    def dependencies(self):
        return ['com.quicinc.trepn']

    def __init__(self, config, paths):
        super(Trepn, self).__init__(config, paths)
        self.output_dir = ''
        self.paths = paths
        self.pref_dir = None
        self.remote_pref_dir = op.join(Trepn.DEVICE_PATH, 'saved_preferences/')
        self.data_points = []
        self.build_preferences(config)

    def override_preferences(self, params: OrderedDict, preferences_file: ElementTree) -> ElementTree:
        """Read the preferences XML file and override the preferences provided by the user."""
        # Parse the XML preferences only if the user overrides at least one preference
        if 'preferences' not in params:
            return preferences_file

        # Walk over all preferences in the XML file
        for xml_preference in preferences_file.getroot().iter():
            # Check whether this XML element is a preference
            xml_preference_name = xml_preference.get('name')
            if xml_preference_name is not None:
                # Check whether the user configuration file overrides this preference
                xml_preference_override_name = xml_preference_name.rsplit('.', 1)[-1]
                if xml_preference_override_name in params['preferences']:
                    # Replace the default value with the value provided by the user
                    preference_value = str(params['preferences'][xml_preference_override_name])
                    if xml_preference.tag == 'string':
                        xml_preference.text = preference_value
                    else:
                        xml_preference.set('value', preference_value)
        return preferences_file

    def build_preferences(self, params):
        """Build the XML files used to set up Trepn and the data points."""
        current_dir = op.dirname(op.realpath(__file__))
        # lxml is not the most secure parser; it is up to the user to provide valid configurations
        # https://docs.python.org/2/library/xml.html#xml-vulnerabilities
        self.pref_dir = op.join(self.paths['OUTPUT_DIR'], 'trepn.pref/')
        util.makedirs(self.pref_dir)

        preferences_file = et.parse(op.join(current_dir, 'preferences.xml'))
        preferences_file = self.override_preferences(params, preferences_file)
        preferences_file.write(op.join(self.pref_dir, 'com.quicinc.trepn_preferences.xml'),
                               encoding='utf-8', xml_declaration=True, standalone=True)
        datapoints_file = et.parse(op.join(current_dir, 'data_points.xml'))
        dp_root = datapoints_file.getroot()
        data_points_dict = util.load_json(op.join(current_dir, 'data_points.json'))
        for dp in params['data_points']:
            dp = str(data_points_dict[dp])
            self.data_points.append(dp)
            dp_root.append(et.Element('int', {'name': dp, 'value': dp}))
        datapoints_file.write(op.join(self.pref_dir, 'com.quicinc.preferences.saved_data_points.xml'),
                              encoding='utf-8', xml_declaration=True, standalone=True)

    def load(self, device):
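        """Push the generated preference files to the device, let Trepn load them and start the TrepnService."""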
        device.push(self.pref_dir, self.remote_pref_dir)
        # There is no way to know if the following succeeded
        device.launch_package('com.quicinc.trepn')
        time.sleep(5)  # launch_package returns instantly
        # Trepn needs to be started for this to work
        device.shell('am broadcast -a com.quicinc.trepn.load_preferences '
                     '-e com.quicinc.trepn.load_preferences_file "%s"'
                     % op.join(self.remote_pref_dir, 'trepn.pref'))
        time.sleep(1)  # am broadcast returns instantly
        device.force_stop('com.quicinc.trepn')
        time.sleep(2)  # am force-stop returns instantly
        device.shell('am startservice com.quicinc.trepn/.TrepnService')

    def start_profiling(self, device, **kwargs):
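        """Broadcast the intent that makes Trepn start collecting measurements."""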
        device.shell('am broadcast -a com.quicinc.trepn.start_profiling')

    def stop_profiling(self, device, **kwargs):
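        """Broadcast the intent that makes Trepn stop collecting measurements."""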
        device.shell('am broadcast -a com.quicinc.trepn.stop_profiling')

    def file_exists_and_not_empty(self, device, path, csv_filename):
        """Checks whether the file <csv_filename> exists on the device <device> in the folder <path>
        and that the file is not empty.

        Parameters
        ----------
        device : Device
            Device on which we want to check whether the file exists.
        path : string, bytes
            A string or bytes object representing a folder on the device.
        csv_filename : string
            The name of the file to check for.

        Returns
        -------
        bool
            Whether the file exists and is not empty on the device.
        """
        ls = device.shell(f"ls {path}")
        cat = device.shell(f"cat {os.path.join(path, csv_filename)}")

        return (csv_filename in ls) and bool(cat)

    def collect_results(self, device):
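        """Export the most recent Trepn database on the device to CSV, pull it to the host and filter it."""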
        # Gives the latest result
        db = device.shell(r'ls %s | grep "\.db$"' % Trepn.DEVICE_PATH).strip().splitlines()
        newest_db = db[-1] if db else None
        if newest_db:
            csv_filename = '%s_%s.csv' % (util.slugify(device.id), op.splitext(newest_db)[0])
            device.shell('am broadcast -a com.quicinc.trepn.export_to_csv '
                         '-e com.quicinc.trepn.export_db_input_file "%s" '
                         '-e com.quicinc.trepn.export_csv_output_file "%s"' % (newest_db, csv_filename))

            # adb returns instantly, while the command takes time, so we wait until Trepn has converted the
            # database to a csv file on the mobile device.
            util.wait_until(self.file_exists_and_not_empty, 5, 1, device, Trepn.DEVICE_PATH, csv_filename)

            device.pull(op.join(Trepn.DEVICE_PATH, csv_filename), self.output_dir)

            # adb returns instantly, while the command takes time, so we wait until the file has been transferred
            # from the device to the host.
            util.wait_until(os.path.exists, 5, 1, op.join(self.output_dir, csv_filename))

            # Delete the originals on the device
            device.shell('rm %s' % op.join(Trepn.DEVICE_PATH, newest_db))
            device.shell('rm %s' % op.join(Trepn.DEVICE_PATH, csv_filename))
            self.filter_results(op.join(self.output_dir, csv_filename))

    @staticmethod
    def read_csv(filename):
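        """Read a CSV file and return its contents as a list of rows."""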
        result = []
        with open(filename, mode='r') as csv_file:
            csv_reader = csv.reader(csv_file)
            for row in csv_reader:
                result.append(row)
        return result

    def filter_results(self, filename):
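        """Reduce the pulled CSV file to the Time and value columns of the configured data points, in place."""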
        file_content = self.read_csv(filename)[3:]
        split_line = file_content.index(['System Statistics:'])
        data = file_content[:split_line - 2]
        system_statistics = file_content[split_line + 2:]
        system_statistics_dict = {str(statistic[0]): statistic[1] for statistic in system_statistics
                                  if not statistic == []}
        wanted_statistics = [system_statistics_dict[data_point] for data_point in self.data_points]
        filtered_data = self.filter_data(wanted_statistics, data)
        self.write_list_to_file(filename, filtered_data)

    @staticmethod
    def write_list_to_file(filename, rows):
        with open(filename, 'w', newline='') as f:
            writer = csv.writer(f)
            writer.writerows(rows)

    def filter_data(self, wanted_statistics, data):
        wanted_columns = self.get_wanted_columns(wanted_statistics, data[0])
        filtered_data = self.filter_columns(wanted_columns, data)
        return filtered_data

    @staticmethod
    def filter_columns(wanted_columns, data):
        remaining_data = []
        for row in data:
            new_row = [row[column] for column in wanted_columns]
            remaining_data.append(new_row)
        return remaining_data

    @staticmethod
    def get_wanted_columns(statistics, header_row):
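        """Return the sorted column indices of the wanted statistics, together with their associated Time columns."""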
        wanted_columns = []
        last_time = None
        for statistic in statistics:
            last_time_added = False
            for i in range(len(header_row)):
                header_item = header_row[i].split('[')[0].strip()
                if header_item == 'Time':
                    last_time = i
                if header_item == statistic:
                    if not last_time_added:
                        wanted_columns.append(last_time)
                        last_time_added = True
                    wanted_columns.append(i)
        wanted_columns.sort()
        return wanted_columns

    def unload(self, device):
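        """Stop the TrepnService and remove the pushed preference files from the device."""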
        device.shell('am stopservice com.quicinc.trepn/.TrepnService')
        device.shell('rm -r %s' % op.join(self.remote_pref_dir, 'trepn.pref'))

    def set_output(self, output_dir):
        self.output_dir = output_dir

    def aggregate_subject(self):
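        """Write the averaged results of all runs of the current subject to Aggregated.csv in the output directory."""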
        filename = os.path.join(self.output_dir, 'Aggregated.csv')
        subject_rows = list()
        subject_rows.append(self.aggregate_trepn_subject(self.output_dir))
        util.write_to_file(filename, subject_rows)

    def aggregate_end(self, data_dir, output_file):
        rows = self.aggregate_final(data_dir)
        util.write_to_file(output_file, rows)

    def aggregate_trepn_subject(self, logs_dir):
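        """Average every measured column over all run files in logs_dir.

        Returns an OrderedDict that maps each column name to the mean of its per-run averages.
        """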
        def add_row(accum, new):
            row = {key: value + float(new[key]) for key, value in list(accum.items())
                   if key not in ['Component', 'count']}
            count = accum['count'] + 1
            return dict(row, **{'count': count})

        runs = []
        for run_file in [f for f in os.listdir(logs_dir) if os.path.isfile(os.path.join(logs_dir, f))]:
            with open(os.path.join(logs_dir, run_file), 'r') as run:
                run_dict = {}
                reader = csv.DictReader(run)
                column_readers = self.split_reader(reader)
                for k, v in list(column_readers.items()):
                    init = dict({k: 0}, **{'count': 0})
                    run_total = reduce(add_row, v, init)
                    if not run_total['count'] == 0:
                        run_dict[k] = run_total[k] / run_total['count']
                runs.append(run_dict)
        init = dict({fn: 0 for fn in list(runs[0].keys())}, **{'count': 0})
        runs_total = reduce(add_row, runs, init)
        return OrderedDict(
            sorted(list({k: v / len(runs) for k, v in list(runs_total.items()) if not k == 'count'}.items()),
                   key=lambda x: x[0]))

    @staticmethod
    def split_reader(reader):
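        """Group the reader's values per column, skipping the Time columns and empty cells."""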
        column_dicts = {fn: [] for fn in reader.fieldnames if not fn.split('[')[0].strip() == 'Time'}
        for row in reader:
            for k, v in list(row.items()):
                if not k.split('[')[0].strip() == 'Time' and not v == '':
                    column_dicts[k].append({k: v})
        return column_dicts

    def aggregate_final(self, data_dir):
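        """Collect the aggregated Trepn results of every device/subject (and browser, if present) into one table."""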
        rows = []
        for device in util.list_subdir(data_dir):
            row = OrderedDict({'device': device})
            device_dir = os.path.join(data_dir, device)
            for subject in util.list_subdir(device_dir):
                row.update({'subject': subject})
                subject_dir = os.path.join(device_dir, subject)
                if os.path.isdir(os.path.join(subject_dir, 'trepn')):
                    row.update(self.aggregate_trepn_final(os.path.join(subject_dir, 'trepn')))
                    rows.append(row.copy())
                else:
                    for browser in util.list_subdir(subject_dir):
                        row.update({'browser': browser})
                        browser_dir = os.path.join(subject_dir, browser)
                        if os.path.isdir(os.path.join(browser_dir, 'trepn')):
                            row.update(self.aggregate_trepn_final(os.path.join(browser_dir, 'trepn')))
                            rows.append(row.copy())
        return rows

    @staticmethod
    def aggregate_trepn_final(logs_dir):
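        """Read Aggregated.csv in logs_dir and return its values as an OrderedDict keyed by column name."""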
        for aggregated_file in [f for f in os.listdir(logs_dir) if os.path.isfile(os.path.join(logs_dir, f))]:
            if aggregated_file == "Aggregated.csv":
                with open(os.path.join(logs_dir, aggregated_file), 'r') as aggregated:
                    reader = csv.DictReader(aggregated)
                    row_dict = OrderedDict()
                    for row in reader:
                        for f in reader.fieldnames:
                            row_dict.update({f: row[f]})
                    return OrderedDict(row_dict)