testing infrastructure, PR #2195: Add aggregated docker-ci email report

Daniel Mizyrycki 2013-10-16 13:26:39 -07:00
parent 6e48b73955
commit a2ffa637ce
3 changed files with 303 additions and 0 deletions

Dockerfile

@@ -0,0 +1,28 @@
# VERSION: 0.22
# DOCKER-VERSION 0.6.3
# AUTHOR: Daniel Mizyrycki <daniel@dotcloud.com>
# DESCRIPTION: Generate docker-ci daily report
# COMMENTS: The build process is initiated by deployment.py
#    Report configuration is passed through ./credentials.json at
# deployment time.
# TO_BUILD: docker build -t report .
# TO_DEPLOY: docker run report
from ubuntu:12.04
maintainer Daniel Mizyrycki <daniel@dotcloud.com>
env PYTHONPATH /report
# Add report dependencies
run echo 'deb http://archive.ubuntu.com/ubuntu precise main universe' > \
/etc/apt/sources.list
run apt-get update; apt-get install -y python2.7 python-pip ssh rsync
# Set San Francisco timezone
run echo "America/Los_Angeles" >/etc/timezone
run dpkg-reconfigure --frontend noninteractive tzdata
# Add report code and set default container command
add . /report
cmd "/report/report.py"

deployment.py

@@ -0,0 +1,130 @@
#!/usr/bin/env python
'''Deploy docker-ci report container on Digital Ocean.
Usage:
    export CONFIG_JSON='
        { "DROPLET_NAME": "Digital_Ocean_droplet_name",
          "DO_CLIENT_ID": "Digital_Ocean_client_id",
          "DO_API_KEY": "Digital_Ocean_api_key",
          "DOCKER_KEY_ID": "Digital_Ocean_ssh_key_id",
          "DOCKER_CI_KEY_PATH": "docker-ci_private_key_path",
          "DOCKER_CI_PUB": "$(cat docker-ci_ssh_public_key.pub)",
          "DOCKER_CI_ADDRESS": "user@docker-ci_fqdn_server",
          "SMTP_USER": "SMTP_server_user",
          "SMTP_PWD": "SMTP_server_password",
          "EMAIL_SENDER": "Buildbot_mailing_sender",
          "EMAIL_RCP": "Buildbot_mailing_recipient" }'
    python deployment.py
'''
import re, json, requests, base64
from fabric import api
from fabric.api import cd, run, put, sudo
from os import environ as env
from time import sleep
from datetime import datetime
# Populate environment variables
CONFIG = json.loads(env['CONFIG_JSON'])
for key in CONFIG:
    env[key] = CONFIG[key]
# Load DOCKER_CI_KEY
env['DOCKER_CI_KEY'] = open(env['DOCKER_CI_KEY_PATH']).read()
DROPLET_NAME = env.get('DROPLET_NAME','report')
TIMEOUT = 120 # Seconds before timeout droplet creation
IMAGE_ID = 894856 # Docker on Ubuntu 13.04
REGION_ID = 4 # New York 2
SIZE_ID = 66 # memory 512MB
DO_IMAGE_USER = 'root' # Image user on Digital Ocean
API_URL = 'https://api.digitalocean.com/'
class digital_ocean():

    def __init__(self, key, client):
        '''Set default API parameters'''
        self.key = key
        self.client = client
        self.api_url = API_URL

    def api(self, cmd_path, api_arg={}):
        '''Make api call'''
        api_arg.update({'api_key':self.key, 'client_id':self.client})
        resp = requests.get(self.api_url + cmd_path, params=api_arg).text
        resp = json.loads(resp)
        if resp['status'] != 'OK':
            raise Exception(resp['error_message'])
        return resp

    def droplet_data(self, name):
        '''Get droplet data'''
        data = self.api('droplets')
        data = [droplet for droplet in data['droplets']
                if droplet['name'] == name]
        return data[0] if data else {}


def json_fmt(data):
    '''Format json output'''
    return json.dumps(data, sort_keys=True, indent=2)
do = digital_ocean(env['DO_API_KEY'], env['DO_CLIENT_ID'])
# Get DROPLET_NAME data
data = do.droplet_data(DROPLET_NAME)
# Stop processing if DROPLET_NAME exists on Digital Ocean
if data:
    print ('Droplet: {} already deployed. Not processing further.'
        .format(DROPLET_NAME))
    exit(1)
# Create droplet
do.api('droplets/new', {'name':DROPLET_NAME, 'region_id':REGION_ID,
    'image_id':IMAGE_ID, 'size_id':SIZE_ID,
    'ssh_key_ids':[env['DOCKER_KEY_ID']]})
# Wait for droplet to be created.
start_time = datetime.now()
while (data.get('status','') != 'active' and (
        datetime.now()-start_time).seconds < TIMEOUT):
    data = do.droplet_data(DROPLET_NAME)
    print data['status']
    sleep(3)
# Wait for the machine to boot
sleep(15)
# Get droplet IP
ip = str(data['ip_address'])
print 'droplet: {} ip: {}'.format(DROPLET_NAME, ip)
api.env.host_string = ip
api.env.user = DO_IMAGE_USER
api.env.key_filename = env['DOCKER_CI_KEY_PATH']
# Correct timezone
sudo('echo "America/Los_Angeles" >/etc/timezone')
sudo('dpkg-reconfigure --frontend noninteractive tzdata')
# Build the base64-encoded CONFIG_JSON credentials for the report container
CONFIG_JSON = base64.b64encode(
    '{{"DOCKER_CI_PUB": "{DOCKER_CI_PUB}",'
    ' "DOCKER_CI_KEY": "{DOCKER_CI_KEY}",'
    ' "DOCKER_CI_ADDRESS": "{DOCKER_CI_ADDRESS}",'
    ' "SMTP_USER": "{SMTP_USER}",'
    ' "SMTP_PWD": "{SMTP_PWD}",'
    ' "EMAIL_SENDER": "{EMAIL_SENDER}",'
    ' "EMAIL_RCP": "{EMAIL_RCP}"}}'.format(**env))
run('mkdir -p /data/report')
put('./', '/data/report')
with cd('/data/report'):
    run('chmod 700 report.py')
    run('echo "{}" > credentials.json'.format(CONFIG_JSON))
    run('docker build -t report .')
    run('rm credentials.json')
    run("echo -e '30 09 * * * /usr/bin/docker run report\n' |"
        " /usr/bin/crontab -")

report.py

@@ -0,0 +1,145 @@
#!/usr/bin/python
'''CONFIG_JSON is a base64-encoded JSON string stored in an environment
variable. It is used to clone the docker-ci database, generate the docker-ci
report and submit it by email.
CONFIG_JSON data comes from the file /report/credentials.json inserted in this
container by deployment.py:
    { "DOCKER_CI_PUB": "$(cat docker-ci_ssh_public_key.pub)",
      "DOCKER_CI_KEY": "$(cat docker-ci_ssh_private_key.key)",
      "DOCKER_CI_ADDRESS": "user@docker-ci_fqdn_server",
      "SMTP_USER": "SMTP_server_user",
      "SMTP_PWD": "SMTP_server_password",
      "EMAIL_SENDER": "Buildbot_mailing_sender",
      "EMAIL_RCP": "Buildbot_mailing_recipient" } '''
import os, re, json, sqlite3, datetime, base64
import smtplib
from datetime import timedelta
from subprocess import call
from os import environ as env
TODAY = datetime.date.today()
# Load credentials to the environment
env['CONFIG_JSON'] = base64.b64decode(open('/report/credentials.json').read())
# Strip the SSH private key from the JSON, as its newlines need separate handling
CONFIG = json.loads(re.sub(r'("DOCKER_CI_KEY".+?"(.+?)",)', '',
    env['CONFIG_JSON'], flags=re.DOTALL))
# Populate environment variables
for key in CONFIG:
    env[key] = CONFIG[key]
# Extract the SSH private key on its own
env['DOCKER_CI_KEY'] = re.sub('^.+"DOCKER_CI_KEY".+?"(.+?)".+', '\\1',
    env['CONFIG_JSON'], flags=re.DOTALL)
# Prevent ssh (used by rsync) from prompting for host key validation on first connection to docker-ci
os.makedirs('/root/.ssh')
open('/root/.ssh/id_rsa','w').write(env['DOCKER_CI_KEY'])
os.chmod('/root/.ssh/id_rsa',0600)
open('/root/.ssh/config','w').write('StrictHostKeyChecking no\n')
# Sync buildbot database from docker-ci
call('rsync {}:/data/buildbot/master/state.sqlite .'.format(
    env['DOCKER_CI_ADDRESS']), shell=True)
class SQL:

    def __init__(self, database_name):
        sql = sqlite3.connect(database_name)
        # Use column names as keys for fetchall rows
        sql.row_factory = sqlite3.Row
        sql = sql.cursor()
        self.sql = sql

    def query(self, query_statement):
        return self.sql.execute(query_statement).fetchall()
sql = SQL("state.sqlite")
class Report():

    def __init__(self, period='', date=''):
        self.data = []
        self.period = 'date' if not period else period
        self.date = str(TODAY) if not date else date
        self.compute()

    def compute(self):
        '''Compute report'''
        if self.period == 'week':
            self.week_report(self.date)
        else:
            self.date_report(self.date)
    def date_report(self, date):
        '''Create a date test report'''
        builds = []
        # Get a queryset with all builds from date
        rows = sql.query('SELECT * FROM builds JOIN buildrequests'
            ' WHERE builds.brid=buildrequests.id and'
            ' date(start_time, "unixepoch", "localtime") = "{0}"'
            ' GROUP BY number'.format(date))
        build_names = sorted(set([row['buildername'] for row in rows]))
        # Create a report build line for a given build
        for build_name in build_names:
            tried = len([row['buildername']
                for row in rows if row['buildername'] == build_name])
            fail_tests = [row['buildername'] for row in rows if (
                row['buildername'] == build_name and row['results'] != 0)]
            fail = len(fail_tests)
            fail_details = ''
            fail_pct = int(100.0 * fail / tried) if tried != 0 else 100
            builds.append({'name': build_name, 'tried': tried, 'fail': fail,
                'fail_pct': fail_pct, 'fail_details': fail_details})
        if builds:
            self.data.append({'date': date, 'builds': builds})
    def week_report(self, date):
        '''Add the week's daily test reports to report.data'''
        date = datetime.datetime.strptime(date, '%Y-%m-%d').date()
        last_monday = date - datetime.timedelta(days=date.weekday())
        week_dates = [last_monday + timedelta(days=x) for x in range(7, -1, -1)]
        for date in week_dates:
            self.date_report(str(date))
    def render_text(self):
        '''Return rendered report in text format'''
        retval = ''
        fail_tests = {}
        for builds in self.data:
            retval += 'Test date: {0}\n'.format(builds['date'])
            table = ''
            for build in builds['builds']:
                table += ('Build {name:15} Tried: {tried:4} '
                    ' Failures: {fail:4} ({fail_pct}%)\n'.format(**build))
                if build['name'] in fail_tests:
                    fail_tests[build['name']] += build['fail_details']
                else:
                    fail_tests[build['name']] = build['fail_details']
            retval += '{0}\n'.format(table)
        retval += '\n Builds failing'
        for fail_name in fail_tests:
            retval += '\n' + fail_name + '\n'
            for (fail_id, fail_url, rn_tests, nr_errors, log_errors,
                    tracelog_errors) in fail_tests[fail_name]:
                retval += fail_url + '\n'
        retval += '\n\n'
        return retval
# Send email
smtp_from = env['EMAIL_SENDER']
subject = '[docker-ci] Daily report for {}'.format(str(TODAY))
msg = "From: {}\r\nTo: {}\r\nSubject: {}\r\n\r\n".format(
smtp_from, env['EMAIL_RCP'], subject)
msg = msg + Report('week').render_text()
server = smtplib.SMTP_SSL('smtp.mailgun.org')
server.login(env['SMTP_USER'], env['SMTP_PWD'])
server.sendmail(smtp_from, env['EMAIL_RCP'], msg)
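
A hypothetical usage note, not part of the diff: since the module-level code above has already synced state.sqlite and initialized sql, the Report class can also be driven by hand in the same interpreter session, for example to regenerate a single day's report. The date below is only an example value.

    daily = Report(period='date', date='2013-10-16')   # one day's report
    print daily.render_text()

    weekly = Report('week')                            # what the emailed report contains
    print weekly.render_text()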