As far as I know this code is not used or maintained

Docker-DCO-1.1-Signed-off-by: Michael Crosby <michael@crosbymichael.com> (github: crosbymichael)
This commit is contained in:
Michael Crosby 2014-03-24 12:39:56 +00:00
parent 5294bf7e67
commit f41135bc11
27 changed files with 0 additions and 1148 deletions

View file

@@ -1,29 +0,0 @@
# DOCKER-VERSION: 0.7.6
# AUTHOR: Daniel Mizyrycki <daniel@dotcloud.com>
# DESCRIPTION: docker-ci continuous integration service
# TO_BUILD: docker build -t docker-ci/docker-ci .
# TO_RUN: docker run --rm -i -t -p 8000:80 -p 2222:22 -v /run:/var/socket \
#         -v /data/docker-ci:/data/docker-ci docker-ci/docker-ci

from ubuntu:12.04
maintainer Daniel Mizyrycki <daniel@dotcloud.com>

ENV DEBIAN_FRONTEND noninteractive

RUN echo 'deb http://archive.ubuntu.com/ubuntu precise main universe' > \
    /etc/apt/sources.list; apt-get update
RUN apt-get install -y --no-install-recommends python2.7 python-dev \
    libevent-dev git supervisor ssh rsync less vim sudo gcc wget nginx
RUN cd /tmp; wget http://python-distribute.org/distribute_setup.py
RUN cd /tmp; python distribute_setup.py; easy_install pip; rm distribute_setup.py

RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 36A1D7869245C8950F966E92D8576A8BA88D21E9
RUN echo 'deb http://get.docker.io/ubuntu docker main' > \
    /etc/apt/sources.list.d/docker.list; apt-get update
RUN apt-get install -y lxc-docker-0.8.0
RUN pip install SQLAlchemy==0.7.10 buildbot buildbot-slave pyopenssl boto

RUN ln -s /var/socket/docker.sock /run/docker.sock

ADD . /docker-ci
RUN /docker-ci/setup.sh

ENTRYPOINT ["supervisord", "-n"]

View file

@@ -1 +0,0 @@
Daniel Mizyrycki <daniel@dotcloud.com> (@mzdaniel)

View file

@@ -1,65 +0,0 @@
=========
docker-ci
=========

This directory contains the docker-ci continuous integration system.
As expected, it is fully dockerized and deployed using
docker-container-runner.

docker-ci is based on Buildbot, a continuous integration system designed
to automate the build/test cycle. By automatically rebuilding and testing
the tree each time something has changed, build problems are pinpointed
quickly, before other developers are inconvenienced by the failure.

We run buildbot at Rackspace to verify that docker and docker-registry
pass their tests, and to check code coverage details.

The docker-ci instance is at https://docker-ci.docker.io/waterfall

Inside the docker-ci container we have the following directory structure:

/docker-ci                                          source code of docker-ci
/data/backup/docker-ci/                             daily backup (replicated over S3)
/data/docker-ci/coverage/{docker,docker-registry}/  mapped to host volumes
/data/buildbot/{master,slave}/                      main docker-ci buildbot config and database
/var/socket/{docker.sock}                           host volume access to docker socket

Production deployment
=====================

::

  # Clone the docker-ci repository
  git clone https://github.com/dotcloud/docker
  cd docker/hack/infrastructure/docker-ci

  export DOCKER_PROD=[PRODUCTION_SERVER_IP]

  # Create the data host volume (only once)
  docker -H $DOCKER_PROD run -v /home:/data ubuntu:12.04 \
    mkdir -p /data/docker-ci/coverage/docker
  docker -H $DOCKER_PROD run -v /home:/data ubuntu:12.04 \
    mkdir -p /data/docker-ci/coverage/docker-registry
  docker -H $DOCKER_PROD run -v /home:/data ubuntu:12.04 \
    chown -R 1000.1000 /data/docker-ci

  # dcr deployment. Define credentials and the special dcr environment
  # variables (retrieved at /hack/infrastructure/docker-ci/dcr/prod/docker-ci.yml)
  export WEB_USER=[DOCKER-CI-WEBSITE-USERNAME]
  export WEB_IRC_PWD=[DOCKER-CI-WEBSITE-PASSWORD]
  export BUILDBOT_PWD=[BUILDSLAVE_PASSWORD]
  export AWS_ACCESS_KEY=[DOCKER_RELEASE_S3_ACCESS]
  export AWS_SECRET_KEY=[DOCKER_RELEASE_S3_SECRET]
  export GPG_PASSPHRASE=[DOCKER_RELEASE_PASSPHRASE]
  export BACKUP_AWS_ID=[S3_BUCKET_CREDENTIAL_ACCESS]
  export BACKUP_AWS_SECRET=[S3_BUCKET_CREDENTIAL_SECRET]
  export SMTP_USER=[MAILGUN_SMTP_USERNAME]
  export SMTP_PWD=[MAILGUN_SMTP_PASSWORD]
  export EMAIL_RCP=[EMAIL_FOR_BUILD_ERRORS]

  # Build the docker-ci and testbuilder docker images
  docker -H $DOCKER_PROD build -t docker-ci/docker-ci .
  (cd testbuilder; docker -H $DOCKER_PROD build --rm -t docker-ci/testbuilder .)

  # Run the docker-ci container (assuming no previous container is running)
  (cd dcr/prod; dcr docker-ci.yml start)
  (cd dcr/prod; dcr docker-ci.yml register docker-ci.docker.io)

View file

@@ -1 +0,0 @@
0.5.6

View file

@@ -1,176 +0,0 @@
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
#!/usr/bin/env python
"""
github_buildbot.py is based on git_buildbot.py
github_buildbot.py will determine the repository information from the JSON
HTTP POST it receives from github.com and build the appropriate repository.
If your github repository is private, you must add an ssh key to the github
repository for the user who initiated the build on the buildslave.
"""
import re
import datetime
from twisted.python import log
import calendar
try:
    import json
    assert json
except ImportError:
    import simplejson as json

# python is silly about how it handles timezones
class fixedOffset(datetime.tzinfo):
    """
    fixed offset timezone
    """
    def __init__(self, minutes, hours, offsetSign = 1):
        self.minutes = int(minutes) * offsetSign
        self.hours = int(hours) * offsetSign
        self.offset = datetime.timedelta(minutes = self.minutes,
                                         hours = self.hours)

    def utcoffset(self, dt):
        return self.offset

    def dst(self, dt):
        return datetime.timedelta(0)
def convertTime(myTestTimestamp):
    #"1970-01-01T00:00:00+00:00"
    # Normalize myTestTimestamp
    if myTestTimestamp[-1] == 'Z':
        myTestTimestamp = myTestTimestamp[:-1] + '-00:00'
    matcher = re.compile(r'(\d\d\d\d)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)([-+])(\d\d):(\d\d)')
    result = matcher.match(myTestTimestamp)
    (year, month, day, hour, minute, second, offsetsign, houroffset, minoffset) = \
        result.groups()
    if offsetsign == '+':
        offsetsign = 1
    else:
        offsetsign = -1

    offsetTimezone = fixedOffset( minoffset, houroffset, offsetsign )
    myDatetime = datetime.datetime( int(year),
                                    int(month),
                                    int(day),
                                    int(hour),
                                    int(minute),
                                    int(second),
                                    0,
                                    offsetTimezone)
    return calendar.timegm( myDatetime.utctimetuple() )
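
# Example (hypothetical values): convertTime('2014-03-24T12:39:56-07:00')
# builds a datetime at UTC-7 and returns the matching POSIX timestamp,
# i.e. the epoch seconds for 2014-03-24 19:39:56 UTC.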
def getChanges(request, options = None):
    """
    Responds only to POST events and starts the build process

    :arguments:
        request
            the http request object
    """
    payload = json.loads(request.args['payload'][0])
    import urllib, datetime
    fname = str(datetime.datetime.now()).replace(' ', '_').replace(':', '-')[:19]
    # Github event debug
    # open('github_{0}.json'.format(fname), 'w').write(json.dumps(json.loads(urllib.unquote(request.args['payload'][0])), sort_keys = True, indent = 2))
    if 'pull_request' in payload:
        user = payload['pull_request']['user']['login']
        repo = payload['pull_request']['head']['repo']['name']
        repo_url = payload['pull_request']['head']['repo']['html_url']
    else:
        user = payload['repository']['owner']['name']
        repo = payload['repository']['name']
        repo_url = payload['repository']['url']
    project = request.args.get('project', None)
    if project:
        project = project[0]
    elif project is None:
        project = ''
    # This field is unused:
    #private = payload['repository']['private']
    changes = process_change(payload, user, repo, repo_url, project)
    log.msg("Received %s changes from github" % len(changes))
    return (changes, 'git')
def process_change(payload, user, repo, repo_url, project):
    """
    Consumes the JSON as a python object and actually starts the build.

    :arguments:
        payload
            Python Object that represents the JSON sent by GitHub Service
            Hook.
    """
    changes = []

    newrev = payload['after'] if 'after' in payload else payload['pull_request']['head']['sha']
    refname = payload['ref'] if 'ref' in payload else payload['pull_request']['head']['ref']

    # We only care about regular heads, i.e. branches
    match = re.match(r"^(refs\/heads\/|)([^/]+)$", refname)
    if not match:
        log.msg("Ignoring refname `%s': Not a branch" % refname)
        return []

    branch = match.groups()[1]
    if re.match(r"^0*$", newrev):
        log.msg("Branch `%s' deleted, ignoring" % branch)
        return []
    else:
        if 'pull_request' in payload:
            if payload['action'] == 'closed':
                log.msg("PR#{} closed, ignoring".format(payload['number']))
                return []
            changes = [{
                'category'   : 'github_pullrequest',
                'who'        : '{0} - PR#{1}'.format(user, payload['number']),
                'files'      : [],
                'comments'   : payload['pull_request']['title'],
                'revision'   : newrev,
                'when'       : convertTime(payload['pull_request']['updated_at']),
                'branch'     : branch,
                'revlink'    : '{0}/commit/{1}'.format(repo_url, newrev),
                'repository' : repo_url,
                'project'    : project }]
            return changes
        for commit in payload['commits']:
            files = []
            if 'added' in commit:
                files.extend(commit['added'])
            if 'modified' in commit:
                files.extend(commit['modified'])
            if 'removed' in commit:
                files.extend(commit['removed'])
            when = convertTime(commit['timestamp'])
            log.msg("New revision: %s" % commit['id'][:8])
            chdict = dict(
                who        = commit['author']['name']
                             + " <" + commit['author']['email'] + ">",
                files      = files,
                comments   = commit['message'],
                revision   = commit['id'],
                when       = when,
                branch     = branch,
                revlink    = commit['url'],
                repository = repo_url,
                project    = project)
            changes.append(chdict)
        return changes

View file

@@ -1,161 +0,0 @@
import os, re

from buildbot.buildslave import BuildSlave
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.timed import Nightly
from buildbot.changes import filter
from buildbot.config import BuilderConfig
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Property
from buildbot.steps.shell import ShellCommand
from buildbot.status import html, words
from buildbot.status.web import authz, auth
from buildbot.status.mail import MailNotifier


def ENV(x):
    '''Promote an environment variable for global use returning its value'''
    retval = os.environ.get(x, '')
    globals()[x] = retval
    return retval
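
# e.g. ENV('BUILDBOT_PWD') defines a module-level global BUILDBOT_PWD holding
# the environment value ('' when the variable is unset); the credential
# lookups further down rely on this.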

class TestCommand(ShellCommand):
    '''Extend ShellCommand with optional summary logs'''
    def __init__(self, *args, **kwargs):
        super(TestCommand, self).__init__(*args, **kwargs)

    def createSummary(self, log):
        exit_status = re.sub(r'.+\n\+ exit (\d+).+',
            r'\1', log.getText()[-100:], flags=re.DOTALL)
        if exit_status != '0':
            return
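        # The test scripts end with `set -x; exit $exit_status`, so the log
        # tail parsed above normally contains a literal `+ exit N` line
        # (see gocoverage.sh below).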
        # Infer coverage path from log
        if '+ COVERAGE_PATH' in log.getText():
            path = re.sub(r'.+\+ COVERAGE_PATH=((.+?)-\d+).+',
                r'\2/\1', log.getText(), flags=re.DOTALL)
            url = '{}coverage/{}/index.html'.format(c['buildbotURL'], path)
            self.addURL('coverage', url)
        elif 'COVERAGE_FILE' in log.getText():
            path = re.sub(r'.+\+ COVERAGE_FILE=((.+?)-\d+).+',
                r'\2/\1', log.getText(), flags=re.DOTALL)
            url = '{}coverage/{}/index.html'.format(c['buildbotURL'], path)
            self.addURL('coverage', url)

PORT_WEB = 8000      # Buildbot webserver port
PORT_GITHUB = 8011   # Buildbot github hook port
PORT_MASTER = 9989   # Port where the buildbot master listens for buildworkers

BUILDBOT_URL = '//localhost:{}/'.format(PORT_WEB)
DOCKER_REPO = 'https://github.com/docker-test/docker'
DOCKER_TEST_ARGV = 'HEAD {}'.format(DOCKER_REPO)
REGISTRY_REPO = 'https://github.com/docker-test/docker-registry'
REGISTRY_TEST_ARGV = 'HEAD {}'.format(REGISTRY_REPO)

if ENV('DEPLOYMENT') == 'staging':
    BUILDBOT_URL = "//docker-ci-stage.docker.io/"

if ENV('DEPLOYMENT') == 'production':
    BUILDBOT_URL = '//docker-ci.docker.io/'
    DOCKER_REPO = 'https://github.com/dotcloud/docker'
    DOCKER_TEST_ARGV = ''
    REGISTRY_REPO = 'https://github.com/dotcloud/docker-registry'
    REGISTRY_TEST_ARGV = ''

# Credentials set by setup.sh from deployment.py
ENV('WEB_USER')
ENV('WEB_IRC_PWD')
ENV('BUILDBOT_PWD')
ENV('SMTP_USER')
ENV('SMTP_PWD')
ENV('EMAIL_RCP')
ENV('IRC_CHANNEL')

c = BuildmasterConfig = {}

c['title'] = "docker-ci"
c['titleURL'] = "waterfall"
c['buildbotURL'] = BUILDBOT_URL
c['db'] = {'db_url': "sqlite:///state.sqlite"}
c['slaves'] = [BuildSlave('buildworker', BUILDBOT_PWD)]
c['slavePortnum'] = PORT_MASTER

# Schedulers
c['schedulers'] = [ForceScheduler(name='trigger', builderNames=[
    'docker', 'docker-registry', 'nightlyrelease', 'backup'])]
c['schedulers'] += [SingleBranchScheduler(name="docker", treeStableTimer=None,
    change_filter=filter.ChangeFilter(branch='master',
    repository=DOCKER_REPO), builderNames=['docker'])]
c['schedulers'] += [SingleBranchScheduler(name="registry", treeStableTimer=None,
    change_filter=filter.ChangeFilter(branch='master',
    repository=REGISTRY_REPO), builderNames=['docker-registry'])]
c['schedulers'] += [SingleBranchScheduler(name='docker-pr', treeStableTimer=None,
    change_filter=filter.ChangeFilter(category='github_pullrequest',
    project='docker'), builderNames=['docker-pr'])]
c['schedulers'] += [SingleBranchScheduler(name='docker-registry-pr', treeStableTimer=None,
    change_filter=filter.ChangeFilter(category='github_pullrequest',
    project='docker-registry'), builderNames=['docker-registry-pr'])]
c['schedulers'] += [Nightly(name='daily', branch=None, builderNames=[
    'nightlyrelease', 'backup'], hour=7, minute=00)]

# Builders

# Backup
factory = BuildFactory()
factory.addStep(TestCommand(description='backup', logEnviron=False,
    usePTY=True, command='/docker-ci/tool/backup.py'))
c['builders'] = [BuilderConfig(name='backup', slavenames=['buildworker'],
    factory=factory)]

# Docker test
factory = BuildFactory()
factory.addStep(TestCommand(description='docker', logEnviron=False,
    usePTY=True, command='/docker-ci/dockertest/docker {}'.format(DOCKER_TEST_ARGV)))
c['builders'] += [BuilderConfig(name='docker', slavenames=['buildworker'],
    factory=factory)]

# Docker pull request test
factory = BuildFactory()
factory.addStep(TestCommand(description='docker-pr', logEnviron=False,
    usePTY=True, command=['/docker-ci/dockertest/docker',
    Property('revision'), Property('repository'), Property('branch')]))
c['builders'] += [BuilderConfig(name='docker-pr', slavenames=['buildworker'],
    factory=factory)]

# docker-registry test
factory = BuildFactory()
factory.addStep(TestCommand(description='docker-registry', logEnviron=False,
    usePTY=True, command='/docker-ci/dockertest/docker-registry {}'.format(REGISTRY_TEST_ARGV)))
c['builders'] += [BuilderConfig(name='docker-registry', slavenames=['buildworker'],
    factory=factory)]

# Docker registry pull request test
factory = BuildFactory()
factory.addStep(TestCommand(description='docker-registry-pr', logEnviron=False,
    usePTY=True, command=['/docker-ci/dockertest/docker-registry',
    Property('revision'), Property('repository'), Property('branch')]))
c['builders'] += [BuilderConfig(name='docker-registry-pr', slavenames=['buildworker'],
    factory=factory)]

# Docker nightly release
factory = BuildFactory()
factory.addStep(ShellCommand(description='NightlyRelease', logEnviron=False,
    usePTY=True, command=['/docker-ci/dockertest/nightlyrelease']))
c['builders'] += [BuilderConfig(name='nightlyrelease', slavenames=['buildworker'],
    factory=factory)]

# Status
authz_cfg = authz.Authz(auth=auth.BasicAuth([(WEB_USER, WEB_IRC_PWD)]),
    forceBuild='auth')
c['status'] = [html.WebStatus(http_port=PORT_WEB, authz=authz_cfg)]
c['status'].append(html.WebStatus(http_port=PORT_GITHUB, allowForce=True,
    change_hook_dialects={ 'github': True }))
c['status'].append(MailNotifier(fromaddr='docker-test@docker.io',
    sendToInterestedUsers=False, extraRecipients=[EMAIL_RCP],
    mode='failing', relayhost='smtp.mailgun.org', smtpPort=587, useTls=True,
    smtpUser=SMTP_USER, smtpPassword=SMTP_PWD))
c['status'].append(words.IRC("irc.freenode.net", "dockerqabot",
    channels=[IRC_CHANNEL], password=WEB_IRC_PWD, allowForce=True,
    notify_events={'exception': 1, 'successToFailure': 1, 'failureToSuccess': 1}))

View file

@@ -1,22 +0,0 @@
docker-ci:
  image: "docker-ci/docker-ci"
  release_name: "docker-ci-0.5.6"
  ports: ["80","2222:22","8011:8011"]
  register: "80"
  volumes: ["/run:/var/socket","/home/docker-ci:/data/docker-ci"]
  command: []
  env:
    - "DEPLOYMENT=production"
    - "IRC_CHANNEL=docker-testing"
    - "BACKUP_BUCKET=backup-ci"
    - "$WEB_USER"
    - "$WEB_IRC_PWD"
    - "$BUILDBOT_PWD"
    - "$AWS_ACCESS_KEY"
    - "$AWS_SECRET_KEY"
    - "$GPG_PASSPHRASE"
    - "$BACKUP_AWS_ID"
    - "$BACKUP_AWS_SECRET"
    - "$SMTP_USER"
    - "$SMTP_PWD"
    - "$EMAIL_RCP"

View file

@@ -1,5 +0,0 @@
default:
  hipaches: ['192.168.100.67:6379']
  daemons: ['192.168.100.67:4243']
  use_ssh: False

View file

@@ -1,22 +0,0 @@
docker-ci:
  image: "docker-ci/docker-ci"
  release_name: "docker-ci-stage"
  ports: ["80","2222:22","8011:8011"]
  register: "80"
  volumes: ["/run:/var/socket","/home/docker-ci:/data/docker-ci"]
  command: []
  env:
    - "DEPLOYMENT=staging"
    - "IRC_CHANNEL=docker-testing-staging"
    - "BACKUP_BUCKET=ci-backup-stage"
    - "$BACKUP_AWS_ID"
    - "$BACKUP_AWS_SECRET"
    - "$WEB_USER"
    - "$WEB_IRC_PWD"
    - "$BUILDBOT_PWD"
    - "$AWS_ACCESS_KEY"
    - "$AWS_SECRET_KEY"
    - "$GPG_PASSPHRASE"
    - "$SMTP_USER"
    - "$SMTP_PWD"
    - "$EMAIL_RCP"

View file

@@ -1,5 +0,0 @@
default:
  hipaches: ['192.168.100.65:6379']
  daemons: ['192.168.100.65:4243']
  use_ssh: False

View file

@@ -1,52 +0,0 @@
#!/bin/bash

export PATH='/go/bin':$PATH
export DOCKER_PATH='/go/src/github.com/dotcloud/docker'

# Signal coverage report name, parsed by docker-ci
set -x
COVERAGE_PATH=$(date +"docker-%Y%m%d%H%M%S")
set +x

REPORTS="/data/$COVERAGE_PATH"
INDEX="$REPORTS/index.html"

# Test docker
cd $DOCKER_PATH
./hack/make.sh test; exit_status=$?

PROFILE_PATH="$(ls -d $DOCKER_PATH/bundles/* | sed -n '$ p')/test/coverprofiles"

if [ "$exit_status" -eq "0" ]; then
    # Download coverage dependencies
    go get github.com/axw/gocov/gocov
    go get -u github.com/matm/gocov-html

    # Create coverage report
    mkdir -p $REPORTS
    cd $PROFILE_PATH
    cat > $INDEX << "EOF"
<!DOCTYPE html><head><meta charset="utf-8">
<script type="text/javascript" src="//tablesorter.com/jquery-latest.js"></script>
<script type="text/javascript" src="//tablesorter.com/__jquery.tablesorter.min.js"></script>
<script type="text/javascript">$(document).ready(function() {
$("table").tablesorter({ sortForce: [[1,0]] }); });</script>
<style>table,th,td{border:1px solid black;}</style>
<title>Docker Coverage Report</title>
</head><body>
<h1><strong>Docker Coverage Report</strong></h1>
<table class="tablesorter">
<thead><tr><th>package</th><th>pct</th></tr></thead><tbody>
EOF
    for profile in *; do
        gocov convert $profile | gocov-html > $REPORTS/$profile.html
        echo "<tr><td><a href=\"${profile}.html\">$profile</a></td><td>" >> $INDEX
        go tool cover -func=$profile | sed -En '$ s/.+\t(.+)/\1/p' >> $INDEX
        echo "</td></tr>" >> $INDEX
    done
    echo "</tbody></table></body></html>" >> $INDEX
fi

# Signal test and coverage result, parsed by docker-ci
set -x
exit $exit_status

View file

@@ -1 +0,0 @@
project

View file

@@ -1,13 +0,0 @@
#!/usr/bin/env bash

if [ "$DEPLOYMENT" == "production" ]; then
    AWS_S3_BUCKET='test.docker.io'
else
    AWS_S3_BUCKET='get-staging.docker.io'
fi

docker run --rm --privileged -v /run:/var/socket \
    -e AWS_S3_BUCKET=$AWS_S3_BUCKET -e AWS_ACCESS_KEY=$AWS_ACCESS_KEY \
    -e AWS_SECRET_KEY=$AWS_SECRET_KEY -e GPG_PASSPHRASE=$GPG_PASSPHRASE \
    -e DOCKER_RELEASE=1 -e DEPLOYMENT=$DEPLOYMENT docker-ci/testbuilder docker

View file

@@ -1,8 +0,0 @@
#!/usr/bin/env bash
set -x

PROJECT_NAME=$(basename $0)

docker run --rm -u sysadmin -e DEPLOYMENT=$DEPLOYMENT -v /run:/var/socket \
    -v /home/docker-ci/coverage/$PROJECT_NAME:/data docker-ci/testbuilder $PROJECT_NAME $1 $2 $3
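
# (This script is apparently invoked through project-specific names -- e.g.
# symlinks named `docker` and `docker-registry` pointing at this generic
# `project` file, as the symlink entry above suggests -- so $(basename $0)
# selects which project's tests to run.)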

View file

@@ -1,61 +0,0 @@
#!/usr/bin/python

import os
username, password = os.environ['DOCKER_CREDS'].split(':')

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException, NoAlertPresentException
import unittest, time, re

class Docker(unittest.TestCase):
    def setUp(self):
        self.driver = webdriver.PhantomJS()
        self.driver.implicitly_wait(30)
        self.base_url = "http://www.docker.io/"
        self.verificationErrors = []
        self.accept_next_alert = True

    def test_docker(self):
        driver = self.driver
        print "Login into {0} as login user {1} ...".format(self.base_url, username)
        driver.get(self.base_url + "/")
        driver.find_element_by_link_text("INDEX").click()
        driver.find_element_by_link_text("login").click()
        driver.find_element_by_id("id_username").send_keys(username)
        driver.find_element_by_id("id_password").send_keys(password)
        print "Checking login user ..."
        driver.find_element_by_css_selector("input[type=\"submit\"]").click()
        try: self.assertEqual("test", driver.find_element_by_css_selector("h3").text)
        except AssertionError as e: self.verificationErrors.append(str(e))
        print "Login user {0} found".format(username)

    def is_element_present(self, how, what):
        try: self.driver.find_element(by=how, value=what)
        except NoSuchElementException, e: return False
        return True

    def is_alert_present(self):
        try: self.driver.switch_to_alert()
        except NoAlertPresentException, e: return False
        return True

    def close_alert_and_get_its_text(self):
        try:
            alert = self.driver.switch_to_alert()
            alert_text = alert.text
            if self.accept_next_alert:
                alert.accept()
            else:
                alert.dismiss()
            return alert_text
        finally: self.accept_next_alert = True

    def tearDown(self):
        self.driver.quit()
        self.assertEqual([], self.verificationErrors)

if __name__ == "__main__":
    unittest.main()

View file

@@ -1,27 +0,0 @@
#!/bin/sh
set -x
# Cleanup
rm -rf docker-registry
# Setup the environment
export SETTINGS_FLAVOR=test
export DOCKER_REGISTRY_CONFIG=config_test.yml
export PYTHONPATH=$(pwd)/docker-registry/test
# Get latest docker registry
git clone -q https://github.com/dotcloud/docker-registry.git
cd docker-registry
sed -Ei "s#(boto_bucket: ).+#\1_env:S3_BUCKET#" config_test.yml
# Get dependencies
pip install -q -r requirements.txt
pip install -q -r test-requirements.txt
pip install -q tox
# Run registry tests
tox || exit 1
python -m unittest discover -p s3.py -s test || exit 1
python -m unittest discover -p workflow.py -s test

View file

@@ -1,12 +0,0 @@
server {
    listen 80;
    root /data/docker-ci;

    location / {
        proxy_pass http://localhost:8000/;
    }

    location /coverage {
        root /data/docker-ci;
    }
}

View file

@@ -1,28 +0,0 @@
# VERSION: 0.22
# DOCKER-VERSION 0.6.3
# AUTHOR: Daniel Mizyrycki <daniel@dotcloud.com>
# DESCRIPTION: Generate docker-ci daily report
# COMMENTS: The build process is initiated by deployment.py. Report
# configuration is passed through ./credentials.json at deployment time.
# TO_BUILD: docker build -t report .
# TO_DEPLOY: docker run report

from ubuntu:12.04
maintainer Daniel Mizyrycki <daniel@dotcloud.com>

env PYTHONPATH /report

# Add report dependencies
run echo 'deb http://archive.ubuntu.com/ubuntu precise main universe' > \
    /etc/apt/sources.list
run apt-get update; apt-get install -y python2.7 python-pip ssh rsync

# Set San Francisco timezone
run echo "America/Los_Angeles" > /etc/timezone
run dpkg-reconfigure --frontend noninteractive tzdata

# Add report code and set default container command
add . /report
cmd "/report/report.py"

View file

@@ -1,130 +0,0 @@
#!/usr/bin/env python

'''Deploy docker-ci report container on Digital Ocean.
Usage:
    export CONFIG_JSON='
        { "DROPLET_NAME":        "Digital_Ocean_droplet_name",
          "DO_CLIENT_ID":        "Digital_Ocean_client_id",
          "DO_API_KEY":          "Digital_Ocean_api_key",
          "DOCKER_KEY_ID":       "Digital_Ocean_ssh_key_id",
          "DOCKER_CI_KEY_PATH":  "docker-ci_private_key_path",
          "DOCKER_CI_PUB":       "$(cat docker-ci_ssh_public_key.pub)",
          "DOCKER_CI_ADDRESS":   "user@docker-ci_fqdn_server",
          "SMTP_USER":           "SMTP_server_user",
          "SMTP_PWD":            "SMTP_server_password",
          "EMAIL_SENDER":        "Buildbot_mailing_sender",
          "EMAIL_RCP":           "Buildbot_mailing_recipient" }'
    python deployment.py
'''
import re, json, requests, base64
from fabric import api
from fabric.api import cd, run, put, sudo
from os import environ as env
from time import sleep
from datetime import datetime

# Populate environment variables
CONFIG = json.loads(env['CONFIG_JSON'])
for key in CONFIG:
    env[key] = CONFIG[key]

# Load DOCKER_CI_KEY
env['DOCKER_CI_KEY'] = open(env['DOCKER_CI_KEY_PATH']).read()

DROPLET_NAME = env.get('DROPLET_NAME', 'report')
TIMEOUT = 120            # Seconds before droplet creation times out
IMAGE_ID = 1004145       # Docker on Ubuntu 13.04
REGION_ID = 4            # New York 2
SIZE_ID = 66             # memory 512MB
DO_IMAGE_USER = 'root'   # Image user on Digital Ocean
API_URL = 'https://api.digitalocean.com/'

class digital_ocean():

    def __init__(self, key, client):
        '''Set default API parameters'''
        self.key = key
        self.client = client
        self.api_url = API_URL

    def api(self, cmd_path, api_arg={}):
        '''Make api call'''
        api_arg.update({'api_key': self.key, 'client_id': self.client})
        resp = requests.get(self.api_url + cmd_path, params=api_arg).text
        resp = json.loads(resp)
        if resp['status'] != 'OK':
            raise Exception(resp['error_message'])
        return resp

    def droplet_data(self, name):
        '''Get droplet data'''
        data = self.api('droplets')
        data = [droplet for droplet in data['droplets']
                if droplet['name'] == name]
        return data[0] if data else {}
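
# e.g. do.api('droplets/new', {'name': 'report', ...}) issues a GET request to
# https://api.digitalocean.com/droplets/new with api_key and client_id added
# as query parameters (the style of the old v1 Digital Ocean API).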

def json_fmt(data):
    '''Format json output'''
    return json.dumps(data, sort_keys = True, indent = 2)


do = digital_ocean(env['DO_API_KEY'], env['DO_CLIENT_ID'])

# Get DROPLET_NAME data
data = do.droplet_data(DROPLET_NAME)

# Stop processing if DROPLET_NAME already exists on Digital Ocean
if data:
    print ('Droplet: {} already deployed. No further processing.'
        .format(DROPLET_NAME))
    exit(1)

# Create droplet
do.api('droplets/new', {'name': DROPLET_NAME, 'region_id': REGION_ID,
    'image_id': IMAGE_ID, 'size_id': SIZE_ID,
    'ssh_key_ids': [env['DOCKER_KEY_ID']]})

# Wait for droplet to be created.
start_time = datetime.now()
while (data.get('status', '') != 'active' and (
  datetime.now() - start_time).seconds < TIMEOUT):
    data = do.droplet_data(DROPLET_NAME)
    print data['status']
    sleep(3)

# Wait for the machine to boot
sleep(15)

# Get droplet IP
ip = str(data['ip_address'])
print 'droplet: {}    ip: {}'.format(DROPLET_NAME, ip)

api.env.host_string = ip
api.env.user = DO_IMAGE_USER
api.env.key_filename = env['DOCKER_CI_KEY_PATH']

# Correct timezone
sudo('echo "America/Los_Angeles" >/etc/timezone')
sudo('dpkg-reconfigure --frontend noninteractive tzdata')

# Load JSON_CONFIG environment for Dockerfile
CONFIG_JSON = base64.b64encode(
    '{{"DOCKER_CI_PUB":     "{DOCKER_CI_PUB}",'
    '  "DOCKER_CI_KEY":     "{DOCKER_CI_KEY}",'
    '  "DOCKER_CI_ADDRESS": "{DOCKER_CI_ADDRESS}",'
    '  "SMTP_USER":         "{SMTP_USER}",'
    '  "SMTP_PWD":          "{SMTP_PWD}",'
    '  "EMAIL_SENDER":      "{EMAIL_SENDER}",'
    '  "EMAIL_RCP":         "{EMAIL_RCP}"}}'.format(**env))

run('mkdir -p /data/report')
put('./', '/data/report')
with cd('/data/report'):
    run('chmod 700 report.py')
    run('echo "{}" > credentials.json'.format(CONFIG_JSON))
    run('docker build -t report .')
    run('rm credentials.json')
    run("echo -e '30 09 * * * /usr/bin/docker run report\n' |"
        " /usr/bin/crontab -")

View file

@@ -1,145 +0,0 @@
#!/usr/bin/python

'''CONFIG_JSON is a base64-encoded JSON string passed in through the
environment. It is used to clone the docker-ci database, generate the
docker-ci report, and submit it by email. CONFIG_JSON data comes from the file
/report/credentials.json inserted in this container by deployment.py:

{ "DOCKER_CI_PUB":       "$(cat docker-ci_ssh_public_key.pub)",
  "DOCKER_CI_KEY":       "$(cat docker-ci_ssh_private_key.key)",
  "DOCKER_CI_ADDRESS":   "user@docker-ci_fqdn_server",
  "SMTP_USER":           "SMTP_server_user",
  "SMTP_PWD":            "SMTP_server_password",
  "EMAIL_SENDER":        "Buildbot_mailing_sender",
  "EMAIL_RCP":           "Buildbot_mailing_recipient" } '''
import os, re, json, sqlite3, datetime, base64
import smtplib
from datetime import timedelta
from subprocess import call
from os import environ as env

TODAY = datetime.date.today()

# Load credentials to the environment
env['CONFIG_JSON'] = base64.b64decode(open('/report/credentials.json').read())

# Remove the SSH private key as it needs more processing
CONFIG = json.loads(re.sub(r'("DOCKER_CI_KEY".+?"(.+?)",)', '',
    env['CONFIG_JSON'], flags=re.DOTALL))

# Populate environment variables
for key in CONFIG:
    env[key] = CONFIG[key]

# Load SSH private key
env['DOCKER_CI_KEY'] = re.sub('^.+"DOCKER_CI_KEY".+?"(.+?)".+', '\\1',
    env['CONFIG_JSON'], flags=re.DOTALL)

# Prevent rsync from validating the host on first connection to docker-ci
os.makedirs('/root/.ssh')
open('/root/.ssh/id_rsa', 'w').write(env['DOCKER_CI_KEY'])
os.chmod('/root/.ssh/id_rsa', 0600)
open('/root/.ssh/config', 'w').write('StrictHostKeyChecking no\n')


# Sync the buildbot database from docker-ci
call('rsync {}:/data/buildbot/master/state.sqlite .'.format(
    env['DOCKER_CI_ADDRESS']), shell=True)


class SQL:
    def __init__(self, database_name):
        sql = sqlite3.connect(database_name)
        # Use column names as keys for fetchall rows
        sql.row_factory = sqlite3.Row
        sql = sql.cursor()
        self.sql = sql

    def query(self, query_statement):
        return self.sql.execute(query_statement).fetchall()

sql = SQL("state.sqlite")

class Report():

    def __init__(self, period='', date=''):
        self.data = []
        self.period = 'date' if not period else period
        self.date = str(TODAY) if not date else date
        self.compute()

    def compute(self):
        '''Compute report'''
        if self.period == 'week':
            self.week_report(self.date)
        else:
            self.date_report(self.date)

    def date_report(self, date):
        '''Create a date test report'''
        builds = []
        # Get a queryset with all builds from date
        rows = sql.query('SELECT * FROM builds JOIN buildrequests'
            ' WHERE builds.brid=buildrequests.id and'
            ' date(start_time, "unixepoch", "localtime") = "{0}"'
            ' GROUP BY number'.format(date))
        build_names = sorted(set([row['buildername'] for row in rows]))
        # Create a report build line for a given build
        for build_name in build_names:
            tried = len([row['buildername']
                for row in rows if row['buildername'] == build_name])
            fail_tests = [row['buildername'] for row in rows if (
                row['buildername'] == build_name and row['results'] != 0)]
            fail = len(fail_tests)
            fail_details = ''
            fail_pct = int(100.0 * fail / tried) if tried != 0 else 100
            builds.append({'name': build_name, 'tried': tried, 'fail': fail,
                'fail_pct': fail_pct, 'fail_details': fail_details})
        if builds:
            self.data.append({'date': date, 'builds': builds})

    def week_report(self, date):
        '''Add the week's date test reports to report.data'''
        date = datetime.datetime.strptime(date, '%Y-%m-%d').date()
        last_monday = date - datetime.timedelta(days=date.weekday())
        week_dates = [last_monday + timedelta(days=x) for x in range(7, -1, -1)]
        for date in week_dates:
            self.date_report(str(date))

    def render_text(self):
        '''Return rendered report in text format'''
        retval = ''
        fail_tests = {}
        for builds in self.data:
            retval += 'Test date: {0}\n'.format(builds['date'])
            table = ''
            for build in builds['builds']:
                table += ('Build {name:15}   Tried: {tried:4}  '
                    ' Failures: {fail:4} ({fail_pct}%)\n'.format(**build))
                if build['name'] in fail_tests:
                    fail_tests[build['name']] += build['fail_details']
                else:
                    fail_tests[build['name']] = build['fail_details']
            retval += '{0}\n'.format(table)
        retval += '\n Builds failing'
        for fail_name in fail_tests:
            retval += '\n' + fail_name + '\n'
            for (fail_id, fail_url, rn_tests, nr_errors, log_errors,
                    tracelog_errors) in fail_tests[fail_name]:
                retval += fail_url + '\n'
        retval += '\n\n'
        return retval


# Send email
smtp_from = env['EMAIL_SENDER']
subject = '[docker-ci] Daily report for {}'.format(str(TODAY))
msg = "From: {}\r\nTo: {}\r\nSubject: {}\r\n\r\n".format(
    smtp_from, env['EMAIL_RCP'], subject)
msg = msg + Report('week').render_text()

server = smtplib.SMTP_SSL('smtp.mailgun.org')
server.login(env['SMTP_USER'], env['SMTP_PWD'])
server.sendmail(smtp_from, env['EMAIL_RCP'], msg)

View file

@@ -1,54 +0,0 @@
#!/usr/bin/env bash
# Set timezone
echo "GMT" >/etc/timezone
dpkg-reconfigure --frontend noninteractive tzdata
# Set ssh superuser
mkdir -p /data/buildbot /var/run/sshd /run
useradd -m -d /home/sysadmin -s /bin/bash -G sudo,docker -p '*' sysadmin
sed -Ei 's/(\%sudo.*) ALL/\1 NOPASSWD:ALL/' /etc/sudoers
cd /home/sysadmin
mkdir .ssh
chmod 700 .ssh
cat > .ssh/authorized_keys << 'EOF'
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC7ALVhwQ68q1SjrKaAduOuOEAcWmb8kDZf5qA7T1fM8AP07EDC7nSKRJ8PXUBGTOQfxm89coJDuSJsTAZ+1PvglXhA0Mq6+knc6ZrZY+SuZlDIDAk4TOdVPoDZnmR1YW2McxHkhcGIOKeC8MMig5NeEjtgQwXzauUSPqeh8HMlLZRMooFYyyluIpn7NaCLzyWjwAQz2s3KyI7VE7hl+ncCrW86v+dciEdwqtzNoUMFb3iDpPxaiCl3rv+SB7co/5eUDTs1FZvUcYMXKQuf8R+2ZKzXOpwr0Zs8sKQXvXavCeWykwGgXLBjVkvrDcHuDD6UXCW63UKgmRECpLZaMBVIIRWLEEgTS5OSQTcxpMVe5zUW6sDvXHTcdPwWrcn1dE9F/0vLC0HJ4ADKelLX5zyTpmXGbuZuntIf1JO67D/K/P++uV1rmVIH+zgtOf23w5rX2zKb4BSTqP0sv61pmWV7MEVoEz6yXswcTjS92tb775v7XLU9vKAkt042ORFdE4/++hejhL/Lj52IRgjt1CJZHZsR9JywJZrz3kYuf8eU2J2FYh0Cpz5gmf0f+12Rt4HztnZxGPP4KuMa66e4+hpx1jynjMZ7D5QUnNYEmuvJByopn8HSluuY/kS5MMyZCZtJLEPGX4+yECX0Di/S0vCRl2NyqfCBqS+yXXT5SA1nFw== docker-test@docker.io
EOF
chmod 600 .ssh/authorized_keys
chown -R sysadmin .ssh
# Fix docker group id for use of host dockerd by sysadmin
sed -Ei 's/(docker:x:)[^:]+/\1999/' /etc/group
# Create buildbot configuration
cd /data/buildbot; buildbot create-master master
cp -a /data/buildbot/master/master.cfg.sample \
/data/buildbot/master/master.cfg
cd /data/buildbot; \
buildslave create-slave slave localhost:9989 buildworker pass
cp /docker-ci/buildbot/master.cfg /data/buildbot/master
# Patch github webstatus to capture pull requests
cp /docker-ci/buildbot/github.py /usr/local/lib/python2.7/dist-packages/buildbot/status/web/hooks
chown -R sysadmin.sysadmin /data
# Create nginx configuration
rm /etc/nginx/sites-enabled/default
cp /docker-ci/nginx/nginx.conf /etc/nginx/conf.d/buildbot.conf
/bin/echo -e '\ndaemon off;\n' >> /etc/nginx/nginx.conf
# Set supervisord buildbot, nginx and sshd processes
/bin/echo -e "\
[program:buildmaster]\n\
command=twistd --nodaemon --no_save -y buildbot.tac\n\
directory=/data/buildbot/master\n\
user=sysadmin\n\n\
[program:buildworker]\n\
command=twistd --nodaemon --no_save -y buildbot.tac\n\
directory=/data/buildbot/slave\n\
user=sysadmin\n" > \
/etc/supervisor/conf.d/buildbot.conf
/bin/echo -e "[program:nginx]\ncommand=/usr/sbin/nginx\n" > \
/etc/supervisor/conf.d/nginx.conf
/bin/echo -e "[program:sshd]\ncommand=/usr/sbin/sshd -D\n" > \
/etc/supervisor/conf.d/sshd.conf

View file

@@ -1,12 +0,0 @@
# TO_BUILD: docker build --no-cache -t docker-ci/testbuilder .
# TO_RUN: docker run --rm -u sysadmin \
# -v /run:/var/socket docker-ci/testbuilder docker-registry
#
FROM docker-ci/docker-ci
ENV HOME /home/sysadmin
RUN mkdir /testbuilder
ADD . /testbuilder
ENTRYPOINT ["/testbuilder/testbuilder.sh"]

View file

@@ -1,12 +0,0 @@
#!/usr/bin/env bash
set -x
set -e
PROJECT_PATH=$1
# Build the docker project
cd /data/$PROJECT_PATH
sg docker -c "docker build -q -t registry ."
cd test; sg docker -c "docker build -q -t docker-registry-test ."
# Run the tests
sg docker -c "docker run --rm -v /home/docker-ci/coverage/docker-registry:/data docker-registry-test"

View file

@@ -1,18 +0,0 @@
#!/usr/bin/env bash
set -x
set -e

PROJECT_PATH=$1

# Build the docker project
cd /data/$PROJECT_PATH
sg docker -c "docker build -q -t docker ."

if [ "$DOCKER_RELEASE" == "1" ]; then
    # Do nightly release
    echo sg docker -c "docker run --rm --privileged -v /run:/var/socket -e AWS_S3_BUCKET=$AWS_S3_BUCKET -e AWS_ACCESS_KEY= -e AWS_SECRET_KEY= -e GPG_PASSPHRASE= docker hack/release.sh"
    set +x
    sg docker -c "docker run --rm --privileged -v /run:/var/socket -e AWS_S3_BUCKET=$AWS_S3_BUCKET -e AWS_ACCESS_KEY=$AWS_ACCESS_KEY -e AWS_SECRET_KEY=$AWS_SECRET_KEY -e GPG_PASSPHRASE=$GPG_PASSPHRASE docker hack/release.sh"
else
    # Run the tests
    sg docker -c "docker run --rm --privileged -v /home/docker-ci/coverage/docker:/data docker ./hack/infrastructure/docker-ci/docker-coverage/gocoverage.sh"
fi
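
# (Note: the release branch first echoes the docker run command with the
# credential variables blanked, then disables tracing with `set +x` before
# running it for real, so the secrets never appear in the build log.)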

View file

@@ -1,40 +0,0 @@
#!/usr/bin/env bash

# Download, build, and run a docker project's tests
# Environment variables: DEPLOYMENT

cat $0

set -e
set -x

PROJECT=$1
COMMIT=${2-HEAD}
REPO=${3-https://github.com/dotcloud/$PROJECT}
BRANCH=${4-master}
REPO_PROJ="https://github.com/docker-test/$PROJECT"
if [ "$DEPLOYMENT" == "production" ]; then
    REPO_PROJ="https://github.com/dotcloud/$PROJECT"
fi
set +x

# Generate a random string of $1 characters
function random {
    cat /dev/urandom | tr -cd 'a-f0-9' | head -c $1
}

PROJECT_PATH="$PROJECT-tmp-$(random 12)"

# Set docker-test git user
set -x
git config --global user.email "docker-test@docker.io"
git config --global user.name "docker-test"

# Fetch project
git clone -q $REPO_PROJ -b master /data/$PROJECT_PATH
cd /data/$PROJECT_PATH
echo "Git commit: $(git rev-parse HEAD)"
git fetch -q $REPO $BRANCH
git merge --no-edit $COMMIT

# Build the project dockertest
/testbuilder/$PROJECT.sh $PROJECT_PATH
rm -rf /data/$PROJECT_PATH
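
# Example invocation (hypothetical values):
#   /testbuilder/testbuilder.sh docker <sha> https://github.com/<user>/docker <branch>
# clones docker-test/docker (or dotcloud/docker in production), fetches
# <branch> from the contributor repo, merges <sha>, then runs
# /testbuilder/docker.sh on the merged tree.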

View file

@@ -1,47 +0,0 @@
#!/usr/bin/env python

import os, sys, json
from datetime import datetime
from filecmp import cmp
from subprocess import check_call
from boto.s3.key import Key
from boto.s3.connection import S3Connection

def ENV(x):
    '''Promote an environment variable for global use returning its value'''
    retval = os.environ.get(x, '')
    globals()[x] = retval
    return retval

ROOT_PATH = '/data/backup/docker-ci'
TODAY = str(datetime.today())[:10]
BACKUP_FILE = '{}/docker-ci_{}.tgz'.format(ROOT_PATH, TODAY)
BACKUP_LINK = '{}/docker-ci.tgz'.format(ROOT_PATH)
ENV('BACKUP_BUCKET')
ENV('BACKUP_AWS_ID')
ENV('BACKUP_AWS_SECRET')

'''Create full master buildbot backup, avoiding duplicates'''

# Ensure backup path exists
if not os.path.exists(ROOT_PATH):
    os.makedirs(ROOT_PATH)

# Make actual backups
check_call('/bin/tar czf {} -C /data --exclude=backup --exclude=buildbot/slave'
    ' . 1>/dev/null 2>&1'.format(BACKUP_FILE), shell=True)

# Remove the previous dump if it is the same as the latest
if (os.path.exists(BACKUP_LINK) and cmp(BACKUP_FILE, BACKUP_LINK) and
        os.path._resolve_link(BACKUP_LINK) != BACKUP_FILE):
    os.unlink(os.path._resolve_link(BACKUP_LINK))

# Recreate the backup link pointing to the latest backup
try:
    os.unlink(BACKUP_LINK)
except:
    pass
os.symlink(BACKUP_FILE, BACKUP_LINK)

# Make backup on S3
bucket = S3Connection(BACKUP_AWS_ID, BACKUP_AWS_SECRET).get_bucket(BACKUP_BUCKET)
k = Key(bucket)
k.key = BACKUP_FILE
k.set_contents_from_filename(BACKUP_FILE)
bucket.copy_key(os.path.basename(BACKUP_LINK), BACKUP_BUCKET, BACKUP_FILE[1:])
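
# (The final copy_key aliases the dated backup to the stable name
# docker-ci.tgz inside the same bucket; BACKUP_FILE[1:] presumably drops the
# leading '/' to form the source key name boto stored.)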