import argparse
import datetime
import logging
import os
import pickle
import re
import subprocess
import sys
import urllib.parse
from google.auth.transport.requests import Request
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
HOME_DIR = os.path.expanduser('~')
SCRIPT_DIR = sys.path[0]
ROOT_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, '..'))
LOGGER = logging.getLogger('generate_stats')
SCOPES = ['https://www.googleapis.com/auth/spreadsheets']
BOT_NAMES = [
'mac-angle-amd',
'mac-angle-intel',
'win10-angle-x64-nvidia',
'win10-angle-x64-intel',
'win7-angle-x64-nvidia',
'win7-angle-x86-amd',
'Linux FYI dEQP Release (Intel HD 630)',
'Linux FYI dEQP Release (NVIDIA)',
'Android FYI dEQP Release (Nexus 5X)',
'Android FYI 32 dEQP Vk Release (Pixel 2)',
'Android FYI 64 dEQP Vk Release (Pixel 2)',
]
BOT_NAME_PREFIX = 'chromium/ci/'
BUILD_LINK_PREFIX = 'https://ci.chromium.org/p/chromium/builders/ci/'
REQUIRED_COLUMNS = ['build_link', 'time', 'date', 'revision', 'angle_revision', 'duplicate']
MAIN_RESULT_COLUMNS = ['Passed', 'Failed', 'Skipped', 'Not Supported', 'Exception', 'Crashed']
INFO_TAG = '*RESULT'
WORKAROUND_FORMATTING_ERROR_STRING = "Still waiting for the following processes to finish:"
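
# Returns a dict with info about the latest successful build for |bot_name|: the build name and
# link, the creation time, the current date, and the Chromium and ANGLE revisions.
# Uses: bb ls '<bot_name>' -n 1 -status success -p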
def get_latest_success_build_info(bot_name):
    bb = subprocess.Popen(['bb', 'ls', bot_name, '-n', '1', '-status', 'success', '-p'],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          universal_newlines=True)
LOGGER.debug("Ran [bb ls '" + bot_name + "' -n 1 -status success -p]")
out, err = bb.communicate()
if err:
raise ValueError("Unexpected error from bb ls: '" + err + "'")
if not out:
raise ValueError("Unexpected empty result from bb ls of bot '" + bot_name + "'")
if 'SUCCESS' not in out:
raise ValueError("Unexpected result from bb ls: '" + out + "'")
info = {}
for line in out.splitlines():
if 'build_name' not in info:
info['build_name'] = line.strip().split("'")[1]
            info['build_link'] = BUILD_LINK_PREFIX + urllib.parse.quote(
                info['build_name'].split(BOT_NAME_PREFIX)[1])
if 'Created' in line:
info['time'] = re.findall(r'[0-9]{1,2}:[0-9]{2}:[0-9]{2}', line.split(',', 1)[0])[0]
info['date'] = datetime.datetime.now().strftime('%m/%d/%y')
        if 'got_angle_revision' in line:
            info['angle_revision'] = ''.join(filter(str.isalnum, line.split(':')[1]))
        if '"revision"' in line:
            info['revision'] = ''.join(filter(str.isalnum, line.split(':')[1]))
if 'build_name' not in info:
raise ValueError("Could not find build_name from bot '" + bot_name + "'")
return info
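
# Returns the list of step names for |build_name| whose names start with 'angle_'.
# Uses: bb get '<build_name>' -steps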
def get_step_names(build_name):
    bb = subprocess.Popen(['bb', 'get', build_name, '-steps'],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          universal_newlines=True)
LOGGER.debug("Ran [bb get '" + build_name + "' -steps]")
out, err = bb.communicate()
if err:
raise ValueError("Unexpected error from bb get: '" + err + "'")
step_names = []
for line in out.splitlines():
if 'Step "angle_' not in line:
continue
step_names.append(line.split('"')[1])
return step_names
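
# Sanity-checks |step_info| for a single step: warns if it is empty or if the individual result
# counts do not sum to the reported 'Total'. Returns False only when the info is empty.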
def validate_step_info(step_info, build_name, step_name):
print_name = "'" + build_name + "': '" + step_name + "'"
if not step_info:
LOGGER.warning('Step info empty for ' + print_name + '\n')
return False
if 'Total' in step_info:
partial_sum_keys = MAIN_RESULT_COLUMNS
partial_sum_values = [int(step_info[key]) for key in partial_sum_keys if key in step_info]
computed_total = sum(partial_sum_values)
if step_info['Total'] != computed_total:
LOGGER.warning('Step info does not sum to total for ' + print_name + ' | Total: ' +
str(step_info['Total']) + ' - Computed total: ' + str(computed_total) +
'\n')
return True
def get_step_info(build_name, step_name):
    bb = subprocess.Popen(['bb', 'log', build_name, step_name],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          universal_newlines=True)
LOGGER.debug("Ran [bb log '" + build_name + "' '" + step_name + "']")
out, err = bb.communicate()
if err:
LOGGER.warning("Unexpected error from bb log '" + build_name + "' '" + step_name + "': '" +
err + "'")
return None
step_info = {}
append_errors = []
workaround_prev_line = ""
workaround_prev_line_count = 0
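    # Workaround for a formatting error in the logs: a '*RESULT' line is occasionally interrupted
    # mid-line by a 'Still waiting for the following processes to finish:' message. Save the
    # partial line, skip the interleaved line, and rebuild the original line before parsing it.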
for line in out.splitlines():
if workaround_prev_line_count > 0:
workaround_prev_line_count -= 1
continue
elif workaround_prev_line != "":
line = workaround_prev_line + line
workaround_prev_line = ""
workaround_prev_line_count = 0
LOGGER.debug("Formatting error workaround rebuilt line as: '" + line + "'\n")
if INFO_TAG not in line:
continue
if WORKAROUND_FORMATTING_ERROR_STRING in line:
workaround_prev_line = line.split(WORKAROUND_FORMATTING_ERROR_STRING)[0]
workaround_prev_line_count = 1
continue
        line_columns = line.split(INFO_TAG, 1)[1].split(':')
        if len(line_columns) != 3:
LOGGER.warning("Line improperly formatted: '" + line + "'\n")
continue
        key = line_columns[1].strip()
        # Integer values (e.g. pass counts) are summed across shards; everything else is treated
        # as a string and appended.
        try:
            int_val = int(line_columns[2])
            is_int = True
        except ValueError:
            is_int = False
        if is_int:
            if key not in step_info:
                step_info[key] = 0
            step_info[key] += int_val
else:
if key not in step_info:
step_info[key] = line_columns[2].strip()
else:
append_string = '\n' + line_columns[2].strip()
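                # Google Sheets caps a cell at 50,000 characters, so stop appending before the
                # limit is reached.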
if len(step_info[key]) + len(append_string) < 50000:
step_info[key] += append_string
else:
if key not in append_errors:
append_errors.append(key)
LOGGER.warning("Too many characters in column '" + key +
"'. Output capped.")
return step_info
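
# Returns a dict of info for |bot_name|: the latest successful build info plus a parsed stats
# dict per step. Steps that fail validation are dropped from 'step_names'.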
def get_bot_info(bot_name):
info = get_latest_success_build_info(bot_name)
info['step_names'] = get_step_names(info['build_name'])
broken_step_names = []
for step_name in info['step_names']:
LOGGER.info("Parsing step '" + step_name + "'...")
step_info = get_step_info(info['build_name'], step_name)
if validate_step_info(step_info, info['build_name'], step_name):
info[step_name] = step_info
else:
            broken_step_names.append(step_name)
for step_name in broken_step_names:
info['step_names'].remove(step_name)
return info
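
# Returns the spreadsheet metadata for |spreadsheet_id|.
# Uses: spreadsheets.get(spreadsheetId=<spreadsheet_id>)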
def get_spreadsheet(service, spreadsheet_id):
LOGGER.debug("Called [spreadsheets.get(spreadsheetId='" + spreadsheet_id + "')]")
request = service.get(spreadsheetId=spreadsheet_id)
spreadsheet = request.execute()
if not spreadsheet:
raise Exception("Did not open spreadsheet '" + spreadsheet_id + "'")
return spreadsheet
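
# Returns a sheet name derived from |bot_name| and |step_name| by stripping noise tokens and
# expanding API abbreviations, e.g. 'GLES 3.1 Vulkan Android 32 Pixel 2'.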
def format_sheet_name(bot_name, step_name):
    unnecessary_tokens = ['FYI', 'Release', 'Vk', 'dEQP', '(', ')']
    for token in unnecessary_tokens:
bot_name = bot_name.replace(token, '')
bot_name = ' '.join(bot_name.strip().split())
step_name = re.findall(r'angle\w*', step_name)[0]
new_step_name = ''
if '_egl_' in step_name:
step_name = step_name.replace('_egl_', '_')
new_step_name += ' EGL'
if '_gles2_' in step_name:
step_name = step_name.replace('_gles2_', '_')
new_step_name += ' GLES 2.0 '
if '_gles3_' in step_name:
step_name = step_name.replace('_gles3_', '_')
new_step_name += ' GLES 3.0 '
if '_gles31_' in step_name:
step_name = step_name.replace('_gles31_', '_')
new_step_name += ' GLES 3.1 '
if '_d3d9_' in step_name:
step_name = step_name.replace('_d3d9_', '_')
new_step_name += ' D3D9 '
    if '_d3d11_' in step_name:
step_name = step_name.replace('_d3d11_', '_')
new_step_name += ' D3D11 '
if '_gl_' in step_name:
step_name = step_name.replace('_gl_', '_')
new_step_name += ' Desktop OpenGL '
if '_gles_' in step_name:
step_name = step_name.replace('_gles_', '_')
new_step_name += ' OpenGLES '
if '_vulkan_' in step_name:
step_name = step_name.replace('_vulkan_', '_')
new_step_name += ' Vulkan '
step_name = step_name.replace('angle_', '_')
step_name = step_name.replace('_deqp_', '_')
step_name = step_name.replace('_tests', '_')
step_name = step_name.replace('_', ' ').strip()
new_step_name += ' ' + step_name
new_step_name = ' '.join(new_step_name.strip().split())
return new_step_name + ' ' + bot_name
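
# Returns the list of sheet names (one per bot/step pair) for everything in |info|.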
def get_sheet_names(info):
sheet_names = []
for bot_name in info:
for step_name in info[bot_name]['step_names']:
sheet_name = format_sheet_name(bot_name, step_name)
sheet_names.append(sheet_name)
return sheet_names
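
# Returns True if |spreadsheet| already contains a sheet titled |step_name|.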
def sheet_exists(spreadsheet, step_name):
for sheet in spreadsheet['sheets']:
if sheet['properties']['title'] == step_name:
return True
return False
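
# Returns the subset of |sheet_names| that do not yet exist in |spreadsheet|.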
def validate_sheets(spreadsheet, sheet_names):
create_sheets = []
for sheet_name in sheet_names:
if not sheet_exists(spreadsheet, sheet_name):
create_sheets.append(sheet_name)
return create_sheets
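
# Applies a list of structural update requests to the spreadsheet.
# Uses: spreadsheets.batchUpdate(spreadsheetId=<spreadsheet_id>, body=<updates>)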
def batch_update(service, spreadsheet_id, updates):
batch_update_request_body = {
'requests': updates,
}
LOGGER.debug("Called [spreadsheets.batchUpdate(spreadsheetId='" + spreadsheet_id + "', body=" +
str(batch_update_request_body) + ')]')
request = service.batchUpdate(spreadsheetId=spreadsheet_id, body=batch_update_request_body)
request.execute()
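
# Creates one new sheet per name in |sheet_names|.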
def create_sheets(service, spreadsheet_id, sheet_names):
updates = [{'addSheet': {'properties': {'title': sheet_name,}}} for sheet_name in sheet_names]
batch_update(service, spreadsheet_id, updates)
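
# Returns a dict mapping each sheet name to its header row (row 1), or to an empty list for
# sheets that have no values yet.
# Uses: spreadsheets.values().batchGet(spreadsheetId=<spreadsheet_id>, ranges=<header_ranges>)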
def get_headers(service, spreadsheet_id, sheet_names):
header_ranges = [sheet_name + '!A1:Z' for sheet_name in sheet_names]
LOGGER.debug("Called [spreadsheets.values().batchGet(spreadsheetId='" + spreadsheet_id +
', ranges=' + str(header_ranges) + "')]")
request = service.values().batchGet(spreadsheetId=spreadsheet_id, ranges=header_ranges)
response = request.execute()
headers = {}
for k, sheet_name in enumerate(sheet_names):
if 'values' in response['valueRanges'][k]:
headers[sheet_name] = response['valueRanges'][k]['values'][0]
else:
headers[sheet_name] = []
return headers
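
# Writes a batch of value ranges to the spreadsheet.
# Uses: spreadsheets.values().batchUpdate(spreadsheetId=<spreadsheet_id>, body=<data>)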
def batch_update_values(service, spreadsheet_id, data):
batch_update_values_request_body = {
'valueInputOption': 'USER_ENTERED',
'data': data,
}
LOGGER.debug("Called [spreadsheets.values().batchUpdate(spreadsheetId='" + spreadsheet_id +
"', body=" + str(batch_update_values_request_body) + ')]')
request = service.values().batchUpdate(
spreadsheetId=spreadsheet_id, body=batch_update_values_request_body)
request.execute()
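
# Returns the sheetId of the sheet titled |sheet_name|, or -1 if it does not exist.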
def get_sheet_id(spreadsheet, sheet_name):
for sheet in spreadsheet['sheets']:
if sheet['properties']['title'] == sheet_name:
return sheet['properties']['sheetId']
return -1
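
# Creates a basic filter on each sheet that has a 'duplicate' column: rows are sorted ascending
# by 'date', and rows whose 'duplicate' column is '1' are hidden.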
def update_filters(service, spreadsheet_id, headers, info, spreadsheet):
updates = []
for bot_name in info:
for step_name in info[bot_name]['step_names']:
sheet_name = format_sheet_name(bot_name, step_name)
duplicate_found = 'duplicate' in headers[sheet_name]
if duplicate_found:
sheet_id = get_sheet_id(spreadsheet, sheet_name)
if sheet_id > -1:
updates.append({
"setBasicFilter": {
"filter": {
"range": {
"sheetId": sheet_id,
"startColumnIndex": 0,
"endColumnIndex": len(headers[sheet_name])
},
"sortSpecs": [{
"dimensionIndex": headers[sheet_name].index('date'),
"sortOrder": "ASCENDING"
}],
"criteria": {
str(headers[sheet_name].index('duplicate')): {
"hiddenValues":
["1"]
}
}
}
}
})
if updates:
LOGGER.info('Updating sheet filters...')
batch_update(service, spreadsheet_id, updates)
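
# Ensures each sheet's header row contains the required columns plus one column per stat key,
# appending any that are missing and writing the updated headers back.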
def update_headers(service, spreadsheet_id, headers, info):
data = []
sheet_names = []
for bot_name in info:
for step_name in info[bot_name]['step_names']:
            if step_name not in info[bot_name]:
                LOGGER.error("Missing info for step name: '" + step_name + "'")
                continue
sheet_name = format_sheet_name(bot_name, step_name)
headers_stale = False
for req in REQUIRED_COLUMNS:
if req not in headers[sheet_name]:
headers_stale = True
headers[sheet_name].append(req)
for key in info[bot_name][step_name].keys():
if key not in headers[sheet_name]:
headers_stale = True
headers[sheet_name].append(key)
if headers_stale:
sheet_names.append(sheet_name)
header_range = sheet_name + '!A1:Z'
data.append({
'range': header_range,
'majorDimension': 'ROWS',
'values': [headers[sheet_name]]
})
if data:
LOGGER.info('Updating sheet headers...')
batch_update_values(service, spreadsheet_id, data)
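
# Appends a single row of |values| to |sheet_name|.
# Uses: spreadsheets.values().append(spreadsheetId=<spreadsheet_id>, ...)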
def append_values(service, spreadsheet_id, sheet_name, values):
header_range = sheet_name + '!A1:Z'
insert_data_option = 'INSERT_ROWS'
value_input_option = 'USER_ENTERED'
append_values_request_body = {
'range': header_range,
'majorDimension': 'ROWS',
'values': [values],
}
LOGGER.debug("Called [spreadsheets.values().append(spreadsheetId='" + spreadsheet_id +
"', body=" + str(append_values_request_body) + ", range='" + header_range +
"', insertDataOption='" + insert_data_option + "', valueInputOption='" +
value_input_option + "')]")
request = service.values().append(
spreadsheetId=spreadsheet_id,
body=append_values_request_body,
range=header_range,
insertDataOption=insert_data_option,
valueInputOption=value_input_option)
request.execute()
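
# Recursively builds a spreadsheet formula that evaluates to 1 when, for every column in
# |filter_columns| that is present in |headers|, the cell in the current row equals the cell
# directly above it, and to 0 otherwise. Used to flag duplicate result rows.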
def generate_duplicate_formula(headers, filter_columns):
if len(filter_columns) == 0:
return '1'
for i in range(len(headers)):
if headers[i] == filter_columns[0]:
col = str(i + 1)
formula = "IF(INDIRECT(ADDRESS(ROW(), " + col + "))=INDIRECT(ADDRESS(ROW() - 1, " + \
col + "))," + generate_duplicate_formula(headers, filter_columns[1:]) + ",0)"
return formula
return generate_duplicate_formula(headers, filter_columns[1:])
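
# Wraps generate_duplicate_formula with the main result columns, returning a ready-to-insert
# formula string, or an empty string if none of those columns are present.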
def generate_duplicate_formula_helper(headers):
filter_columns = MAIN_RESULT_COLUMNS
formula = generate_duplicate_formula(headers, filter_columns)
    if formula == "1":
        return ""
    return "=" + formula
def update_values(service, spreadsheet_id, headers, info):
data = []
for bot_name in info:
for step_name in info[bot_name]['step_names']:
sheet_name = format_sheet_name(bot_name, step_name)
values = []
for key in headers[sheet_name]:
if key in info[bot_name] and key in REQUIRED_COLUMNS:
values.append(info[bot_name][key])
elif key in info[bot_name][step_name]:
values.append(info[bot_name][step_name][key])
elif key == "duplicate" and key in REQUIRED_COLUMNS:
values.append(generate_duplicate_formula_helper(headers[sheet_name]))
else:
values.append('')
LOGGER.info("Appending new rows to sheet '" + sheet_name + "'...")
try:
append_values(service, spreadsheet_id, sheet_name, values)
except Exception as error:
LOGGER.warning('%s\n' % str(error))
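
# Top-level spreadsheet update: opens the spreadsheet, creates any missing sheets, then updates
# headers, filters, and values.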
def update_spreadsheet(service, spreadsheet_id, info):
LOGGER.info('Opening spreadsheet...')
spreadsheet = get_spreadsheet(service, spreadsheet_id)
LOGGER.info('Parsing sheet names...')
sheet_names = get_sheet_names(info)
new_sheets = validate_sheets(spreadsheet, sheet_names)
if new_sheets:
LOGGER.info('Creating new sheets...')
create_sheets(service, spreadsheet_id, new_sheets)
LOGGER.info('Parsing sheet headers...')
headers = get_headers(service, spreadsheet_id, sheet_names)
update_headers(service, spreadsheet_id, headers, info)
update_filters(service, spreadsheet_id, headers, info, spreadsheet)
update_values(service, spreadsheet_id, headers, info)
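
# Returns an authenticated Google Sheets API service. Client secrets are read from
# <auth_path>/credentials.json, and the OAuth token is cached in <auth_path>/token.pickle and
# refreshed when expired.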
def get_sheets_service(auth_path):
credentials_path = auth_path + '/credentials.json'
token_path = auth_path + '/token.pickle'
creds = None
if not os.path.exists(auth_path):
LOGGER.info("Creating auth dir '" + auth_path + "'")
os.makedirs(auth_path)
if not os.path.exists(credentials_path):
raise Exception('Missing credentials.json.\n'
'Go to: https://developers.google.com/sheets/api/quickstart/python\n'
"Under Step 1, click 'ENABLE THE GOOGLE SHEETS API'\n"
"Click 'DOWNLOAD CLIENT CONFIGURATION'\n"
'Save to your auth_path (' + auth_path + ') as credentials.json')
if os.path.exists(token_path):
with open(token_path, 'rb') as token:
creds = pickle.load(token)
LOGGER.info('Loaded credentials from ' + token_path)
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
LOGGER.info('Refreshing credentials...')
creds.refresh(Request())
else:
LOGGER.info('Could not find credentials. Requesting new credentials.')
flow = InstalledAppFlow.from_client_secrets_file(credentials_path, SCOPES)
creds = flow.run_local_server()
with open(token_path, 'wb') as token:
pickle.dump(creds, token)
service = build('sheets', 'v4', credentials=creds)
sheets = service.spreadsheets()
return sheets
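
# Parses the command-line arguments (auth path, spreadsheet ID, and log verbosity).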
def parse_args():
parser = argparse.ArgumentParser(os.path.basename(sys.argv[0]))
parser.add_argument(
'--auth_path',
default=HOME_DIR + '/.auth',
nargs='?',
help='path to directory containing authorization data '
'(credentials.json and token.pickle). '
'[default=<home>/.auth]')
parser.add_argument(
'--spreadsheet',
default='1uttk1z8lJ4ZsUY7wMdFauMzUxb048nh5l52zdrAznek',
nargs='?',
help='ID of the spreadsheet to write stats to. '
"[default='1uttk1z8lJ4ZsUY7wMdFauMzUxb048nh5l52zdrAznek']")
parser.add_argument(
'--verbosity',
default='INFO',
nargs='?',
help='Verbosity of output. Valid options are '
'[DEBUG, INFO, WARNING, ERROR]. '
'[default=INFO]')
return parser.parse_args()
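
# Configures LOGGER with a stderr handler at the requested |verbosity| (INFO by default).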
def initialize_logging(verbosity):
handler = logging.StreamHandler()
formatter = logging.Formatter(fmt='%(levelname)s: %(message)s')
handler.setFormatter(formatter)
LOGGER.addHandler(handler)
if 'DEBUG' in verbosity:
LOGGER.setLevel(level=logging.DEBUG)
elif 'INFO' in verbosity:
LOGGER.setLevel(level=logging.INFO)
elif 'WARNING' in verbosity:
LOGGER.setLevel(level=logging.WARNING)
elif 'ERROR' in verbosity:
LOGGER.setLevel(level=logging.ERROR)
else:
LOGGER.setLevel(level=logging.INFO)
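
# Collects stats from each bot in BOT_NAMES and writes them to the spreadsheet.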
def main():
os.chdir(ROOT_DIR)
args = parse_args()
verbosity = args.verbosity.strip().upper()
initialize_logging(verbosity)
auth_path = args.auth_path.replace('\\', '/')
try:
service = get_sheets_service(auth_path)
except Exception as error:
LOGGER.error('%s\n' % str(error))
        sys.exit(1)
info = {}
LOGGER.info('Building info struct...')
for bot_name in BOT_NAMES:
LOGGER.info("Parsing bot '" + bot_name + "'...")
try:
info[bot_name] = get_bot_info(BOT_NAME_PREFIX + bot_name)
except Exception as error:
LOGGER.error('%s\n' % str(error))
LOGGER.info('Updating sheets...')
try:
update_spreadsheet(service, args.spreadsheet, info)
except Exception as error:
LOGGER.error('%s\n' % str(error))
        sys.exit(1)
LOGGER.info('Info was successfully parsed to sheet: https://docs.google.com/spreadsheets/d/' +
args.spreadsheet)
if __name__ == '__main__':
sys.exit(main())