upload android base code part3

August 2018-08-08 16:48:17 +08:00
parent 71b83c22f1
commit b9e30e05b1
15122 changed files with 2089659 additions and 0 deletions


@ -0,0 +1,89 @@
bionicbb
========
The bionic buildbot consists of two services: a Gmail polling service and a web
service that interacts with Gerrit.

Dependencies
------------
* Python 2.7
* [Advanced Python Scheduler](https://apscheduler.readthedocs.org/en/latest/)
* [Flask](http://flask.pocoo.org/)
* [Google API Client Library](https://developers.google.com/api-client-library/python/start/installation)
* [jenkinsapi](https://pypi.python.org/pypi/jenkinsapi)
* [Requests](http://docs.python-requests.org/en/latest/)

Setup
-----
Create a `config.py` in the same directory as the sources. The structure of the
configuration file is as follows:
```python
client_secret_file = 'CLIENT_SECRET_FILE.json'
build_listener_url = 'BUILD_LISTENER_URL'
jenkins_url = 'JENKINS_URL'
jenkins_credentials = {
    'username': 'JENKINS_USERNAME',
    'password': 'JENKINS_PASSWORD',
}
```
The client secret file comes from the Gmail API page of the [Google Developers
Console](https://console.developers.google.com/). The Jenkins credentials are
for a Jenkins account that has the appropriate permissions to launch the jobs
the buildbot will use.
You will also need to add the HTTP password for the buildbot's Gerrit account to
`~/.netrc`. The HTTP password can be obtained from the [Gerrit HTTP password
settings](https://android-review.googlesource.com/#/settings/http-password).
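The `~/.netrc` entry pairs the buildbot's Gerrit account name with that HTTP
password. A minimal sketch, where the account name and password values are
placeholders:

```
machine android-review.googlesource.com
login BUILDBOT_GERRIT_USERNAME
password BUILDBOT_GERRIT_HTTP_PASSWORD
```
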
To launch the services:
```bash
$ python build_listener.py >build.log 2>&1 &
$ python gmail_listener.py >mail.log 2>&1 &
```
When it has no stored OAuth credentials, the mail listener will direct your
browser to an authentication page for the Gmail API.

gmail\_listener.py
------------------
Bionicbb polls a Gmail account to find changes that need to be built. The Gmail
account needs to have a Gerrit account set up with project watches on anything
it finds interesting. This is a rather ugly hack, but it seems to be the
simplest option available.

Gerrit does offer a streaming notification service that would be _far_ better,
but it is only available over an SSH connection to Gerrit, and the AOSP Gerrit
does not support this connection.
Another option would be polling Gerrit itself, but we'd have to process each
change every time to see if it should be built, whereas project watches let us
treat these as semi-push notifications (we still have to poll Gmail).

One drawback of this approach is that setting up project watches for a large
number of projects is a hassle, but since bionicbb is only interested in a
small subset of projects, this is a non-issue.
If the buildbot has applied Verified-1 to a patchset, the user may add their own
Verified+1 to the change and the buildbot will remove its rejection the next
time the service polls (by default, every five minutes).

The service also listens for the following commands in review comments (an
example follows the list):
* `bionicbb:clean`: Something is very broken and the buildbot's output
directory needs to be nuked.
* `bionicbb:retry`: Something went wrong and the buildbot should retry the
build.
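
For example, posting a review comment containing the following line asks the
buildbot to rebuild the change; each command must appear on its own line with
the `bionicbb:` prefix:

```
bionicbb:retry
```
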
build\_listener.py
------------------
The build listener service responds to HTTP POST events sent from Jenkins and
updates CLs accordingly. The only other API endpoint is `/drop-rejection`, which
removes a Verified-1 from a previously rejected patchset. The actual invocation
of this endpoint is handled by the gmail listener.
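
As a sketch of how that endpoint is driven, the gmail listener POSTs a small
JSON body naming the change and patch set; an equivalent request could be made
by hand (the change ID, patch set number, and listener host below are
placeholders):

```bash
$ curl -X POST \
    -H 'Content-Type: application/json;charset=UTF-8' \
    -d '{"changeid": "I0123456789abcdef", "patchset": "2"}' \
    http://BUILD_LISTENER_URL/drop-rejection
```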


@ -0,0 +1,134 @@
#!/usr/bin/env python2
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import logging
import os

from apscheduler.schedulers.background import BackgroundScheduler
from flask import Flask, request
import requests

import gerrit
import tasks

app = Flask(__name__)


def gerrit_url(endpoint):
    gerrit_base_url = 'https://android-review.googlesource.com'
    return gerrit_base_url + endpoint


@app.route('/', methods=['POST'])
def handle_build_message():
    result = json.loads(request.data)

    name = result['name']
    number = result['build']['number']
    status = result['build']['status']
    go_url = 'http://go/bionicbb/' + result['build']['url']
    full_url = result['build']['full_url']
    params = result['build']['parameters']
    change_id = params['CHANGE_ID']
    ref = params['REF']
    patch_set = ref.split('/')[-1]

    logging.debug('%s #%s %s: %s', name, number, status, full_url)

    # bionic-lint is always broken, so we don't want to reject changes for
    # those failures until we clean things up.
    if name == 'bionic-presubmit':
        message_lines = ['{} #{} checkbuild {}: {}'.format(
            name, number, status, go_url)]
        if status == 'FAILURE':
            message_lines += ['If you believe this Verified-1 was in error, '
                              '+1 the change and bionicbb will remove the -1 '
                              'shortly.']

        request_data = {
            'message': '\n'.join(message_lines)
        }

        label = 'Verified'
        if status == 'FAILURE':
            request_data['labels'] = {label: -1}
        elif status == 'SUCCESS':
            request_data['labels'] = {label: +1}

        url = gerrit_url('/a/changes/{}/revisions/{}/review'.format(change_id,
                                                                    patch_set))
        headers = {'Content-Type': 'application/json;charset=UTF-8'}
        logging.debug('POST %s: %s', url, request_data)
        requests.post(url, headers=headers, json=request_data)
    elif name == 'clean-bionic-presubmit':
        request_data = {'message': 'out/ directory removed'}
        url = gerrit_url('/a/changes/{}/revisions/{}/review'.format(change_id,
                                                                    patch_set))
        headers = {'Content-Type': 'application/json;charset=UTF-8'}
        logging.debug('POST %s: %s', url, request_data)
        requests.post(url, headers=headers, json=request_data)
    elif name == 'bionic-lint':
        logging.warning('Result for bionic-lint ignored')
    else:
        logging.error('Unknown project: %s', name)
    return ''


@app.route('/drop-rejection', methods=['POST'])
def drop_rejection():
    revision_info = json.loads(request.data)
    change_id = revision_info['changeid']
    patch_set = revision_info['patchset']

    bb_email = 'bionicbb@android.com'
    labels = gerrit.get_labels(change_id, patch_set)
    if bb_email in labels['Verified']:
        bb_review = labels['Verified'][bb_email]
    else:
        bb_review = 0

    if bb_review >= 0:
        logging.info('No rejection to drop: %s %s', change_id, patch_set)
        return ''

    logging.info('Dropping rejection: %s %s', change_id, patch_set)

    request_data = {'labels': {'Verified': 0}}
    url = gerrit_url('/a/changes/{}/revisions/{}/review'.format(change_id,
                                                                patch_set))
    headers = {'Content-Type': 'application/json;charset=UTF-8'}
    logging.debug('POST %s: %s', url, request_data)
    requests.post(url, headers=headers, json=request_data)
    return ''


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger()
    fh = logging.FileHandler('bionicbb.log')
    fh.setLevel(logging.INFO)
    logger.addHandler(fh)

    # Prevent the job from being rescheduled by the reloader.
    if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
        scheduler = BackgroundScheduler()
        scheduler.start()
        scheduler.add_job(tasks.get_and_process_jobs, 'interval', minutes=5)

    app.run(host='0.0.0.0', debug=True)


@ -0,0 +1,80 @@
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import requests


class GerritError(RuntimeError):
    def __init__(self, code, url):
        self.code = code
        self.url = url
        super(GerritError, self).__init__('Error {}: {}'.format(code, url))


def get_commit(change_id, revision):
    return json.loads(
        call('/changes/{}/revisions/{}/commit'.format(change_id, revision)))


def get_files_for_revision(change_id, revision):
    return json.loads(
        call('/changes/{}/revisions/{}/files'.format(
            change_id, revision))).keys()


def call(endpoint, method='GET'):
    if method != 'GET':
        raise NotImplementedError('Currently only HTTP GET is supported.')
    gerrit_url = 'https://android-review.googlesource.com'
    url = gerrit_url + endpoint
    response = requests.get(url)
    if response.status_code != 200:
        raise GerritError(response.status_code, url)
    # Strip the ")]}'" XSSI protection prefix (and its newline) that Gerrit
    # prepends to every JSON response.
    return response.text[5:]


def ref_for_change(change_id):
    endpoint = '/changes/{}/detail?o=CURRENT_REVISION'.format(change_id)
    change = json.loads(call(endpoint))
    commit = change['current_revision']
    return change['revisions'][commit]['fetch']['http']['ref']


def get_labels(change_id, patch_set):
    """Returns labels attached to a revision.

    Returned data is in the following format:

    {
        'Code-Review': {
            <email>: <value>,
            ...
        },
        'Verified': {
            <email>: <value>,
            ...
        }
    }
    """
    details = json.loads(call('/changes/{}/revisions/{}/review'.format(
        change_id, patch_set)))
    labels = {'Code-Review': {}, 'Verified': {}}
    for review in details['labels']['Code-Review']['all']:
        if 'value' in review and 'email' in review:
            labels['Code-Review'][review['email']] = int(review['value'])
    for review in details['labels']['Verified']['all']:
        if 'value' in review and 'email' in review:
            labels['Verified'][review['email']] = int(review['value'])
    return labels


@ -0,0 +1,71 @@
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import base64
import httplib2

import config


def get_body(msg):
    if 'attachmentId' in msg['payload']['body']:
        raise NotImplementedError('Handling of messages contained in '
                                  'attachments not yet implemented.')
    b64_body = msg['payload']['body']['data']
    return base64.urlsafe_b64decode(b64_body.encode('ASCII'))


def build_service():
    from apiclient.discovery import build
    from oauth2client.client import flow_from_clientsecrets
    from oauth2client.file import Storage
    from oauth2client.tools import run

    OAUTH_SCOPE = 'https://www.googleapis.com/auth/gmail.modify'
    STORAGE = Storage('oauth.storage')

    # Start the OAuth flow to retrieve credentials.
    flow = flow_from_clientsecrets(config.client_secret_file,
                                   scope=OAUTH_SCOPE)
    http = httplib2.Http()

    # Try to retrieve credentials from storage or run the flow to generate
    # them.
    credentials = STORAGE.get()
    if credentials is None or credentials.invalid:
        credentials = run(flow, STORAGE, http=http)

    http = credentials.authorize(http)
    return build('gmail', 'v1', http=http)


def get_gerrit_label(labels):
    for label in labels:
        if label['name'] == 'gerrit':
            return label['id']
    return None


def get_all_messages(service, label):
    msgs = []
    response = service.users().messages().list(
        userId='me', labelIds=label).execute()
    if 'messages' in response:
        msgs.extend(response['messages'])
    while 'nextPageToken' in response:
        page_token = response['nextPageToken']
        response = service.users().messages().list(
            userId='me', pageToken=page_token).execute()
        msgs.extend(response['messages'])
    return msgs


@ -0,0 +1,205 @@
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import

import json
import logging
import os.path
import re

import requests
import jenkinsapi

import gerrit
import config


def is_untrusted_committer(change_id, patch_set):
    # TODO(danalbert): Needs to be based on the account that made the comment.
    commit = gerrit.get_commit(change_id, patch_set)
    committer = commit['committer']['email']
    return not committer.endswith('@google.com')


def contains_cleanspec(change_id, patch_set):
    files = gerrit.get_files_for_revision(change_id, patch_set)
    return 'CleanSpec.mk' in [os.path.basename(f) for f in files]


def contains_bionicbb(change_id, patch_set):
    files = gerrit.get_files_for_revision(change_id, patch_set)
    return any('tools/bionicbb' in f for f in files)


def should_skip_build(info):
    if info['MessageType'] not in ('newchange', 'newpatchset', 'comment'):
        raise ValueError('should_skip_build() is only valid for new '
                         'changes, patch sets, and comments.')

    change_id = info['Change-Id']
    patch_set = info['PatchSet']

    checks = [
        is_untrusted_committer,
        contains_cleanspec,
        contains_bionicbb,
    ]
    for check in checks:
        if check(change_id, patch_set):
            return True
    return False


def clean_project(dry_run):
    username = config.jenkins_credentials['username']
    password = config.jenkins_credentials['password']
    jenkins_url = config.jenkins_url
    jenkins = jenkinsapi.api.Jenkins(jenkins_url, username, password)

    build = 'clean-bionic-presubmit'
    if build in jenkins:
        if not dry_run:
            _ = jenkins[build].invoke()
            # https://issues.jenkins-ci.org/browse/JENKINS-27256
            # url = job.get_build().baseurl
            url = 'URL UNAVAILABLE'
        else:
            url = 'DRY_RUN_URL'
        logging.info('Cleaning: %s %s', build, url)
    else:
        logging.error('Failed to clean: could not find project %s', build)
    return True


def build_project(gerrit_info, dry_run, lunch_target=None):
    project_to_jenkins_map = {
        'platform/bionic': 'bionic-presubmit',
        'platform/build': 'bionic-presubmit',
        'platform/external/jemalloc': 'bionic-presubmit',
        'platform/external/libcxx': 'bionic-presubmit',
        'platform/external/libcxxabi': 'bionic-presubmit',
        'platform/external/compiler-rt': 'bionic-presubmit',
    }

    username = config.jenkins_credentials['username']
    password = config.jenkins_credentials['password']
    jenkins_url = config.jenkins_url
    jenkins = jenkinsapi.api.Jenkins(jenkins_url, username, password)

    project = gerrit_info['Project']
    change_id = gerrit_info['Change-Id']
    if project in project_to_jenkins_map:
        build = project_to_jenkins_map[project]
    else:
        build = 'bionic-presubmit'

    if build in jenkins:
        project_path = '/'.join(project.split('/')[1:])
        if not project_path:
            raise RuntimeError('bogus project: {}'.format(project))
        if project_path.startswith('platform/'):
            raise RuntimeError('Bad project mapping: {} => {}'.format(
                project, project_path))
        ref = gerrit.ref_for_change(change_id)
        params = {
            'REF': ref,
            'CHANGE_ID': change_id,
            'PROJECT': project_path
        }
        if lunch_target is not None:
            params['LUNCH_TARGET'] = lunch_target
        if not dry_run:
            _ = jenkins[build].invoke(build_params=params)
            # https://issues.jenkins-ci.org/browse/JENKINS-27256
            # url = job.get_build().baseurl
            url = 'URL UNAVAILABLE'
        else:
            url = 'DRY_RUN_URL'
        logging.info('Building: %s => %s %s %s', project, build, url,
                     change_id)
    else:
        logging.error('Unknown build: %s => %s %s', project, build, change_id)
    return True


def handle_change(gerrit_info, _, dry_run):
    if should_skip_build(gerrit_info):
        return True
    return build_project(gerrit_info, dry_run)


def drop_rejection(gerrit_info, dry_run):
    request_data = {
        'changeid': gerrit_info['Change-Id'],
        'patchset': gerrit_info['PatchSet']
    }
    url = '{}/{}'.format(config.build_listener_url, 'drop-rejection')
    headers = {'Content-Type': 'application/json;charset=UTF-8'}
    if not dry_run:
        try:
            requests.post(url, headers=headers, data=json.dumps(request_data))
        except requests.exceptions.ConnectionError as ex:
            logging.error('Failed to drop rejection: %s', ex)
            return False
    logging.info('Dropped rejection: %s', gerrit_info['Change-Id'])
    return True


def handle_comment(gerrit_info, body, dry_run):
    if 'Verified+1' in body:
        drop_rejection(gerrit_info, dry_run)

    if should_skip_build(gerrit_info):
        return True

    command_map = {
        'clean': lambda: clean_project(dry_run),
        'retry': lambda: build_project(gerrit_info, dry_run),

        'arm': lambda: build_project(gerrit_info, dry_run,
                                     lunch_target='aosp_arm-eng'),
        'aarch64': lambda: build_project(gerrit_info, dry_run,
                                         lunch_target='aosp_arm64-eng'),
        'mips': lambda: build_project(gerrit_info, dry_run,
                                      lunch_target='aosp_mips-eng'),
        'mips64': lambda: build_project(gerrit_info, dry_run,
                                        lunch_target='aosp_mips64-eng'),
        'x86': lambda: build_project(gerrit_info, dry_run,
                                     lunch_target='aosp_x86-eng'),
        'x86_64': lambda: build_project(gerrit_info, dry_run,
                                        lunch_target='aosp_x86_64-eng'),
    }

    def handle_unknown_command():
        pass  # TODO(danalbert): should complain to the commenter.

    commands = [match.group(1).strip() for match in
                re.finditer(r'^bionicbb:\s*(.+)$', body, flags=re.MULTILINE)]

    for command in commands:
        if command in command_map:
            command_map[command]()
        else:
            handle_unknown_command()

    return True


def skip_handler(gerrit_info, _, __):
    logging.info('Skipping %s: %s', gerrit_info['MessageType'],
                 gerrit_info['Change-Id'])
    return True


@ -0,0 +1,108 @@
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import httplib
import httplib2
import logging
import re
import socket

import apiclient.errors

import gerrit
import gmail
import presubmit


def get_gerrit_info(body):
    info = {}
    gerrit_pattern = r'^Gerrit-(\S+): (.+)$'
    for match in re.finditer(gerrit_pattern, body, flags=re.MULTILINE):
        info[match.group(1)] = match.group(2).strip()
    return info


def process_message(msg, dry_run):
    try:
        body = gmail.get_body(msg)
        gerrit_info = get_gerrit_info(body)
        if not gerrit_info:
            # The Gmail API returns the message as a dict, so log its snippet
            # for context and bail out rather than crash on a missing key.
            logging.fatal('No Gerrit info found: %s', msg['snippet'])
            return False
        msg_type = gerrit_info['MessageType']
        handlers = {
            'comment': presubmit.handle_comment,
            'newchange': presubmit.handle_change,
            'newpatchset': presubmit.handle_change,

            'abandon': presubmit.skip_handler,
            'merge-failed': presubmit.skip_handler,
            'merged': presubmit.skip_handler,
            'restore': presubmit.skip_handler,
            'revert': presubmit.skip_handler,
        }

        if msg_type in handlers:
            return handlers[msg_type](gerrit_info, body, dry_run)
        else:
            logging.warning('MessageType %s unhandled.', msg_type)
            return False
    except NotImplementedError as ex:
        logging.error("%s", ex)
        return False
    except gerrit.GerritError as ex:
        change_id = gerrit_info['Change-Id']
        logging.error('Gerrit error (%d): %s %s', ex.code, change_id, ex.url)
        return ex.code == 404


def get_and_process_jobs():
    dry_run = False

    gmail_service = gmail.build_service()
    msg_service = gmail_service.users().messages()

    # We run in a loop because some of the exceptions thrown here mean we just
    # need to retry. For errors where we should back off (typically any gmail
    # API exceptions), process_changes catches the error and returns normally.
    while True:
        try:
            process_changes(gmail_service, msg_service, dry_run)
            return
        except httplib.BadStatusLine:
            pass
        except httplib2.ServerNotFoundError:
            pass
        except socket.error:
            pass


def process_changes(gmail_service, msg_service, dry_run):
    try:
        labels = gmail_service.users().labels().list(userId='me').execute()
        if not labels['labels']:
            logging.error('Could not retrieve Gmail labels')
            return
        label_id = gmail.get_gerrit_label(labels['labels'])
        if not label_id:
            logging.error('Could not find gerrit label')
            return

        for msg in gmail.get_all_messages(gmail_service, label_id):
            msg = msg_service.get(userId='me', id=msg['id']).execute()
            if process_message(msg, dry_run) and not dry_run:
                msg_service.trash(userId='me', id=msg['id']).execute()
    except apiclient.errors.HttpError as ex:
        logging.error('API Client HTTP error: %s', ex)


@ -0,0 +1,94 @@
import mock
import unittest

import presubmit


class TestShouldSkipBuild(unittest.TestCase):
    @mock.patch('presubmit.contains_bionicbb')
    @mock.patch('presubmit.contains_cleanspec')
    @mock.patch('gerrit.get_commit')
    def test_accepts_googlers(self, mock_commit, *other_checks):
        mock_commit.return_value = {
            'committer': {'email': 'googler@google.com'}
        }

        for other_check in other_checks:
            other_check.return_value = False

        for message_type in ('newchange', 'newpatchset', 'comment'):
            self.assertFalse(presubmit.should_skip_build({
                'MessageType': message_type,
                'Change-Id': '',
                'PatchSet': '',
            }))

    @mock.patch('presubmit.contains_bionicbb')
    @mock.patch('presubmit.contains_cleanspec')
    @mock.patch('gerrit.get_commit')
    def test_rejects_googlish_domains(self, mock_commit, *other_checks):
        mock_commit.return_value = {
            'committer': {'email': 'fakegoogler@google.com.fake.com'}
        }

        for other_check in other_checks:
            other_check.return_value = False

        for message_type in ('newchange', 'newpatchset', 'comment'):
            self.assertTrue(presubmit.should_skip_build({
                'MessageType': message_type,
                'Change-Id': '',
                'PatchSet': '',
            }))

    @mock.patch('presubmit.contains_bionicbb')
    @mock.patch('presubmit.contains_cleanspec')
    @mock.patch('gerrit.get_commit')
    def test_rejects_non_googlers(self, mock_commit, *other_checks):
        mock_commit.return_value = {
            'committer': {'email': 'johndoe@example.com'}
        }

        for other_check in other_checks:
            other_check.return_value = False

        for message_type in ('newchange', 'newpatchset', 'comment'):
            self.assertTrue(presubmit.should_skip_build({
                'MessageType': message_type,
                'Change-Id': '',
                'PatchSet': '',
            }))

    @mock.patch('presubmit.contains_bionicbb')
    @mock.patch('presubmit.is_untrusted_committer')
    @mock.patch('gerrit.get_files_for_revision')
    def test_skips_cleanspecs(self, mock_files, *other_checks):
        mock_files.return_value = ['foo/CleanSpec.mk']
        for other_check in other_checks:
            other_check.return_value = False

        for message_type in ('newchange', 'newpatchset', 'comment'):
            self.assertTrue(presubmit.should_skip_build({
                'MessageType': message_type,
                'Change-Id': '',
                'PatchSet': '',
            }))

    @mock.patch('presubmit.contains_cleanspec')
    @mock.patch('presubmit.is_untrusted_committer')
    @mock.patch('gerrit.get_files_for_revision')
    def test_skips_bionicbb(self, mock_files, *other_checks):
        mock_files.return_value = ['tools/bionicbb/common.sh']
        for other_check in other_checks:
            other_check.return_value = False

        for message_type in ('newchange', 'newpatchset', 'comment'):
            self.assertTrue(presubmit.should_skip_build({
                'MessageType': message_type,
                'Change-Id': '',
                'PatchSet': '',
            }))


if __name__ == '__main__':
    unittest.main()