Update from https://crrev.com/316786
List of manually-modified files:
gpu/command_buffer/service/in_process_command_buffer.cc
examples/sample_app/BUILD.gn
examples/sample_app/spinning_cube.cc
mojo/android/javatests/src/org/chromium/mojo/MojoTestCase.java
mojo/cc/context_provider_mojo.cc
mojo/cc/context_provider_mojo.h
mojo/common/trace_controller_impl.cc
mojo/gles2/command_buffer_client_impl.cc
mojo/gles2/command_buffer_client_impl.h
services/gles2/gpu_impl.cc
shell/android/apk/src/org/chromium/mojo/shell/MojoShellApplication.java
sky/engine/core/dom/Node.cpp
sky/shell/apk/src/org/domokit/sky/shell/SkyShellApplication.java
ui/events/latency_info.cc
ui/gfx/transform.cc
ui/gfx/transform.h
ui/gfx/transform_util.cc
ui/gfx/transform_util.h
Review URL: https://codereview.chromium.org/935333002
diff --git a/build/all.gyp b/build/all.gyp
index 8556aa7..93cac8c 100644
--- a/build/all.gyp
+++ b/build/all.gyp
@@ -59,7 +59,6 @@
'../third_party/mojo/mojo_edk.gyp:mojo_system_impl',
'../third_party/mojo/mojo_edk_tests.gyp:mojo_public_bindings_unittests',
'../third_party/mojo/mojo_edk_tests.gyp:mojo_public_environment_unittests',
- '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_system_perftests',
'../third_party/mojo/mojo_edk_tests.gyp:mojo_public_system_unittests',
'../third_party/mojo/mojo_edk_tests.gyp:mojo_public_utility_unittests',
'../third_party/mojo/mojo_edk_tests.gyp:mojo_system_unittests',
@@ -230,6 +229,7 @@
['use_aura==1', {
'dependencies': [
'../ui/aura/aura.gyp:*',
+ '../ui/aura_extra/aura_extra.gyp:*',
],
}],
['use_ash==1', {
@@ -1191,13 +1191,6 @@
'../url/url.gyp:url_unittests',
],
},
- {
- 'target_name': 'webkit_builder_win',
- 'type': 'none',
- 'dependencies': [
- 'blink_tests',
- ],
- },
], # targets
'conditions': [
['branding=="Chrome"', {
@@ -1272,6 +1265,7 @@
'../skia/skia_tests.gyp:skia_unittests',
'../ui/app_list/app_list.gyp:*',
'../ui/aura/aura.gyp:*',
+ '../ui/aura_extra/aura_extra.gyp:*',
'../ui/base/ui_base_tests.gyp:ui_base_unittests',
'../ui/compositor/compositor.gyp:*',
'../ui/display/display.gyp:display_unittests',
@@ -1328,21 +1322,6 @@
],
},
], # targets
- }, {
- 'conditions': [
- ['OS=="linux"', {
- # TODO(thakis): Remove this once the linux gtk bot no longer references
- # it (probably after the first aura release on linux), see r249162
- 'targets': [
- {
- 'target_name': 'aura_builder',
- 'type': 'none',
- 'dependencies': [
- '../chrome/chrome.gyp:chrome',
- ],
- },
- ], # targets
- }]], # OS=="linux"
}], # "use_aura==1"
['test_isolation_mode != "noop"', {
'targets': [
diff --git a/build/android/buildbot/bb_device_status_check.py b/build/android/buildbot/bb_device_status_check.py
index 69c17f8..6de2723 100755
--- a/build/android/buildbot/bb_device_status_check.py
+++ b/build/android/buildbot/bb_device_status_check.py
@@ -336,6 +336,13 @@
types, builds, batteries, reports, errors, fail_step_lst, json_data = (
zip(*[DeviceInfo(dev, options) for dev in devices]))
+ # Write device info to file for buildbot info display.
+ with open('/home/chrome-bot/.adb_device_info', 'w') as f:
+ for device in json_data:
+ f.write('%s %s %s %.1fC %s%%\n' % (device['serial'], device['type'],
+ device['build'], float(device['battery']['temperature']) / 10,
+ device['battery']['level']))
+
err_msg = CheckForMissingDevices(options, devices) or []
unique_types = list(set(types))
diff --git a/build/android/buildbot/bb_device_steps.py b/build/android/buildbot/bb_device_steps.py
index e1858ce..a53bb56 100755
--- a/build/android/buildbot/bb_device_steps.py
+++ b/build/android/buildbot/bb_device_steps.py
@@ -43,36 +43,40 @@
# annotation: Annotation of the tests to include.
# exclude_annotation: The annotation of the tests to exclude.
I_TEST = collections.namedtuple('InstrumentationTest', [
- 'name', 'apk', 'apk_package', 'test_apk', 'test_data', 'host_driven_root',
- 'annotation', 'exclude_annotation', 'extra_flags'])
+ 'name', 'apk', 'apk_package', 'test_apk', 'test_data', 'isolate_file_path',
+ 'host_driven_root', 'annotation', 'exclude_annotation', 'extra_flags'])
def SrcPath(*path):
return os.path.join(CHROME_SRC_DIR, *path)
-def I(name, apk, apk_package, test_apk, test_data, host_driven_root=None,
- annotation=None, exclude_annotation=None, extra_flags=None):
- return I_TEST(name, apk, apk_package, test_apk, test_data, host_driven_root,
- annotation, exclude_annotation, extra_flags)
+def I(name, apk, apk_package, test_apk, test_data, isolate_file_path=None,
+ host_driven_root=None, annotation=None, exclude_annotation=None,
+ extra_flags=None):
+ return I_TEST(name, apk, apk_package, test_apk, test_data, isolate_file_path,
+ host_driven_root, annotation, exclude_annotation, extra_flags)
INSTRUMENTATION_TESTS = dict((suite.name, suite) for suite in [
I('ContentShell',
'ContentShell.apk',
'org.chromium.content_shell_apk',
'ContentShellTest',
- 'content:content/test/data/android/device_files'),
+ 'content:content/test/data/android/device_files',
+ isolate_file_path='content/content_shell_test_apk.isolate'),
I('ChromeShell',
'ChromeShell.apk',
'org.chromium.chrome.shell',
'ChromeShellTest',
'chrome:chrome/test/data/android/device_files',
- constants.CHROME_SHELL_HOST_DRIVEN_DIR),
+ isolate_file_path='chrome/chrome_shell_test_apk.isolate',
+ host_driven_root=constants.CHROME_SHELL_HOST_DRIVEN_DIR),
I('AndroidWebView',
'AndroidWebView.apk',
'org.chromium.android_webview.shell',
'AndroidWebViewTest',
- 'webview:android_webview/test/data/device_files'),
+ 'webview:android_webview/test/data/device_files',
+ isolate_file_path='android_webview/android_webview_test_apk.isolate'),
I('ChromeSyncShell',
'ChromeSyncShell.apk',
'org.chromium.chrome.browser.sync',
@@ -265,6 +269,8 @@
options.flakiness_server)
if options.coverage_bucket:
args.append('--coverage-dir=%s' % options.coverage_dir)
+ if test.isolate_file_path:
+ args.append('--isolate-file-path=%s' % test.isolate_file_path)
if test.host_driven_root:
args.append('--host-driven-root=%s' % test.host_driven_root)
if test.annotation:
@@ -340,13 +346,13 @@
unexpected_passes, unexpected_failures, unexpected_flakes = (
_ParseLayoutTestResults(full_results))
if unexpected_failures:
- _PrintDashboardLink('failed', unexpected_failures,
+ _PrintDashboardLink('failed', unexpected_failures.keys(),
max_tests=25)
elif unexpected_passes:
- _PrintDashboardLink('unexpected passes', unexpected_passes,
+ _PrintDashboardLink('unexpected passes', unexpected_passes.keys(),
max_tests=10)
if unexpected_flakes:
- _PrintDashboardLink('unexpected flakes', unexpected_flakes,
+ _PrintDashboardLink('unexpected flakes', unexpected_flakes.keys(),
max_tests=10)
if exit_code == 0 and (unexpected_passes or unexpected_flakes):
diff --git a/build/android/developer_recommended_flags.gypi b/build/android/developer_recommended_flags.gypi
index 3a3db0a..79c201d 100644
--- a/build/android/developer_recommended_flags.gypi
+++ b/build/android/developer_recommended_flags.gypi
@@ -38,6 +38,7 @@
# This comes with some caveats:
# Only works with a single device connected (it will print a warning if
# zero or multiple devices are attached).
+ # Device must be flashed with a user-debug unsigned Android build.
# Some actions are always run (i.e. ninja will never say "no work to do").
'gyp_managed_install%': 1,
diff --git a/build/android/gyp/javac.py b/build/android/gyp/javac.py
index 15fe73d..3738062 100755
--- a/build/android/gyp/javac.py
+++ b/build/android/gyp/javac.py
@@ -106,7 +106,8 @@
_MAX_MANIFEST_LINE_LEN = 72
-def CreateManifest(manifest_path, classpath, main_class=None):
+def CreateManifest(manifest_path, classpath, main_class=None,
+ manifest_entries=None):
"""Creates a manifest file with the given parameters.
This generates a manifest file that compiles with the spec found at
@@ -117,11 +118,16 @@
classpath: The JAR files that should be listed on the manifest file's
classpath.
main_class: If present, the class containing the main() function.
+ manifest_entries: If present, a list of (key, value) pairs to add to
+ the manifest.
"""
output = ['Manifest-Version: 1.0']
if main_class:
output.append('Main-Class: %s' % main_class)
+ if manifest_entries:
+ for k, v in manifest_entries:
+ output.append('%s: %s' % (k, v))
if classpath:
sanitized_paths = []
for path in classpath:
@@ -183,6 +189,10 @@
parser.add_option(
'--main-class',
help='The class containing the main method.')
+ parser.add_option(
+ '--manifest-entry',
+ action='append',
+ help='Key:value pairs to add to the .jar manifest.')
parser.add_option('--stamp', help='Path to touch on success.')
@@ -232,10 +242,13 @@
java_files)
if options.jar_path:
- if options.main_class:
+ if options.main_class or options.manifest_entry:
+ if options.manifest_entry:
+ entries = map(lambda e: e.split(":"), options.manifest_entry)
+ else:
+ entries = []
manifest_file = os.path.join(temp_dir, 'manifest')
- CreateManifest(manifest_file, classpath,
- options.main_class)
+ CreateManifest(manifest_file, classpath, options.main_class, entries)
else:
manifest_file = None
jar.JarDirectory(classes_dir,
diff --git a/build/android/pylib/android_commands.py b/build/android/pylib/android_commands.py
index d6f9d32..1ed1877 100644
--- a/build/android/pylib/android_commands.py
+++ b/build/android/pylib/android_commands.py
@@ -694,8 +694,8 @@
assert os.path.exists(run_pie_dist_path), 'Please build run_pie'
# The PIE loader must be pushed manually (i.e. no PushIfNeeded) because
# PushIfNeeded requires md5sum and md5sum requires the wrapper as well.
- command = 'push %s %s' % (run_pie_dist_path, PIE_WRAPPER_PATH)
- assert _HasAdbPushSucceeded(self._adb.SendCommand(command))
+ adb_command = 'push %s %s' % (run_pie_dist_path, PIE_WRAPPER_PATH)
+ assert _HasAdbPushSucceeded(self._adb.SendCommand(adb_command))
self._pie_wrapper = PIE_WRAPPER_PATH
if self._pie_wrapper:
diff --git a/build/android/pylib/device/OWNERS b/build/android/pylib/device/OWNERS
index 65ce0dc..c35d7ac 100644
--- a/build/android/pylib/device/OWNERS
+++ b/build/android/pylib/device/OWNERS
@@ -1,3 +1,2 @@
-set noparent
jbudorick@chromium.org
perezju@chromium.org
diff --git a/build/android/pylib/device/adb_wrapper.py b/build/android/pylib/device/adb_wrapper.py
index 7d11671..f29f5c7 100644
--- a/build/android/pylib/device/adb_wrapper.py
+++ b/build/android/pylib/device/adb_wrapper.py
@@ -253,6 +253,20 @@
command, output, status=status, device_serial=self._device_serial)
return output
+ def IterShell(self, command, timeout):
+ """Runs a shell command and returns an iterator over its output lines.
+
+ Args:
+ command: A string with the shell command to run.
+ timeout: Timeout in seconds.
+
+ Yields:
+ The output of the command line by line.
+ """
+ args = ['shell', command]
+ return cmd_helper.IterCmdOutputLines(
+ self._BuildAdbCmd(args, self._device_serial), timeout=timeout)
+
def Ls(self, path, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
"""List the contents of a directory on the device.
@@ -286,26 +300,42 @@
device_serial=self._device_serial)
def Logcat(self, clear=False, dump=False, filter_spec=None,
- logcat_format=None, timeout=None):
- """Get an iterator over the logcat output.
+ logcat_format=None, timeout=None, retries=_DEFAULT_RETRIES):
+ """Get an iterable over the logcat output.
Args:
- filter_spec: (optional) Spec to filter the logcat.
- timeout: (optional) Timeout per try in seconds.
+ clear: If true, clear the logcat.
+ dump: If true, dump the current logcat contents.
+ filter_spec: If set, spec to filter the logcat.
+ logcat_format: If set, the format in which the logcat should be output.
+ Options include "brief", "process", "tag", "thread", "raw", "time",
+ "threadtime", and "long"
+ timeout: (optional) If set, timeout per try in seconds. If clear or dump
+ is set, defaults to _DEFAULT_TIMEOUT.
+ retries: (optional) If clear or dump is set, the number of retries to
+ attempt. Otherwise, does nothing.
Yields:
logcat output line by line.
"""
cmd = ['logcat']
+ use_iter = True
if clear:
cmd.append('-c')
+ use_iter = False
if dump:
cmd.append('-d')
+ use_iter = False
if logcat_format:
cmd.extend(['-v', logcat_format])
if filter_spec is not None:
cmd.append(filter_spec)
- return self._IterRunDeviceAdbCmd(cmd, timeout)
+
+ if use_iter:
+ return self._IterRunDeviceAdbCmd(cmd, timeout)
+ else:
+ timeout = timeout if timeout is not None else _DEFAULT_TIMEOUT
+ return self._RunDeviceAdbCmd(cmd, timeout, retries)
def Forward(self, local, remote, timeout=_DEFAULT_TIMEOUT,
retries=_DEFAULT_RETRIES):
diff --git a/build/android/pylib/device/device_utils.py b/build/android/pylib/device/device_utils.py
index a1b4117..eba5e02 100644
--- a/build/android/pylib/device/device_utils.py
+++ b/build/android/pylib/device/device_utils.py
@@ -412,8 +412,8 @@
@decorators.WithTimeoutAndRetriesFromInstance()
def RunShellCommand(self, cmd, check_return=False, cwd=None, env=None,
- as_root=False, single_line=False,
- timeout=None, retries=None):
+ as_root=False, single_line=False, timeout=None,
+ retries=None):
"""Run an ADB shell command.
The command to run |cmd| should be a sequence of program arguments or else
@@ -847,7 +847,7 @@
self.RunShellCommand(
['unzip', zip_on_device],
as_root=True,
- env={'PATH': '$PATH:%s' % install_commands.BIN_DIR},
+ env={'PATH': '%s:$PATH' % install_commands.BIN_DIR},
check_return=True)
finally:
if zip_proc.is_alive():
@@ -1297,23 +1297,6 @@
return host_path
@decorators.WithTimeoutAndRetriesFromInstance()
- def GetIOStats(self, timeout=None, retries=None):
- """Gets cumulative disk IO stats since boot for all processes.
-
- Args:
- timeout: timeout in seconds
- retries: number of retries
-
- Returns:
- A dict containing |num_reads|, |num_writes|, |read_ms|, and |write_ms|.
-
- Raises:
- CommandTimeoutError on timeout.
- DeviceUnreachableError on missing device.
- """
- return self.old_interface.GetIoStats()
-
- @decorators.WithTimeoutAndRetriesFromInstance()
def GetMemoryUsageForPid(self, pid, timeout=None, retries=None):
"""Gets the memory usage for the given PID.
@@ -1349,6 +1332,41 @@
"""Returns the device serial."""
return self.adb.GetDeviceSerial()
+ @decorators.WithTimeoutAndRetriesFromInstance()
+ def GetDevicePieWrapper(self, timeout=None, retries=None):
+ """Gets the absolute path to the run_pie wrapper on the device.
+
+ We have to build our device executables to be PIE, but they need to be able
+ to run on versions of android that don't support PIE (i.e. ICS and below).
+ To do so, we push a wrapper to the device that lets older android versions
+ run PIE executables. This method pushes that wrapper to the device if
+ necessary and returns the path to it.
+
+ This is exposed publicly to allow clients to write scripts using run_pie
+ (e.g. md5sum.CalculateDeviceMd5Sum).
+
+ Args:
+ timeout: timeout in seconds
+ retries: number of retries
+
+ Returns:
+ The path to the PIE wrapper on the device, or an empty string if the
+ device does not require the wrapper.
+ """
+ if 'run_pie' not in self._cache:
+ pie = ''
+ if (self.build_version_sdk <
+ constants.ANDROID_SDK_VERSION_CODES.JELLY_BEAN):
+ host_pie_path = os.path.join(constants.GetOutDirectory(), 'run_pie')
+ if not os.path.exists(host_pie_path):
+ raise device_errors.CommandFailedError('Please build run_pie')
+ pie = '%s/run_pie' % constants.TEST_EXECUTABLE_DIR
+ self.adb.Push(host_pie_path, pie)
+
+ self._cache['run_pie'] = pie
+
+ return self._cache['run_pie']
+
@classmethod
def parallel(cls, devices=None, async=False):
"""Creates a Parallelizer to operate over the provided list of devices.
diff --git a/build/android/pylib/device/device_utils_test.py b/build/android/pylib/device/device_utils_test.py
index 6071fd5..8a25f25 100755
--- a/build/android/pylib/device/device_utils_test.py
+++ b/build/android/pylib/device/device_utils_test.py
@@ -656,6 +656,24 @@
self.device.RunShellCommand(cmd, check_return=False))
+class DeviceUtilsGetDevicePieWrapper(DeviceUtilsNewImplTest):
+
+ def testGetDevicePieWrapper_jb(self):
+ with self.assertCall(
+ self.call.device.build_version_sdk(),
+ constants.ANDROID_SDK_VERSION_CODES.JELLY_BEAN):
+ self.assertEqual('', self.device.GetDevicePieWrapper())
+
+ def testGetDevicePieWrapper_ics(self):
+ with self.assertCalls(
+ (self.call.device.build_version_sdk(),
+ constants.ANDROID_SDK_VERSION_CODES.ICE_CREAM_SANDWICH),
+ (mock.call.pylib.constants.GetOutDirectory(), '/foo/bar'),
+ (mock.call.os.path.exists(mock.ANY), True),
+ (self.call.adb.Push(mock.ANY, mock.ANY), '')):
+ self.assertNotEqual('', self.device.GetDevicePieWrapper())
+
+
@mock.patch('time.sleep', mock.Mock())
class DeviceUtilsKillAllTest(DeviceUtilsNewImplTest):
@@ -1045,7 +1063,7 @@
self.call.device.RunShellCommand(
['unzip', '/test/device/external_dir/tmp.zip'],
as_root=True,
- env={'PATH': '$PATH:/data/local/tmp/bin'},
+ env={'PATH': '/data/local/tmp/bin:$PATH'},
check_return=True),
(self.call.device.IsOnline(), True),
self.call.device.RunShellCommand(
@@ -1383,22 +1401,6 @@
self.device.TakeScreenshot('/test/host/screenshot.png')
-class DeviceUtilsGetIOStatsTest(DeviceUtilsOldImplTest):
-
- def testGetIOStats(self):
- with self.assertCalls(
- "adb -s 0123456789abcdef shell 'cat \"/proc/diskstats\" 2>/dev/null'",
- '179 0 mmcblk0 1 2 3 4 5 6 7 8 9 10 11\r\n'):
- self.assertEqual(
- {
- 'num_reads': 1,
- 'num_writes': 5,
- 'read_ms': 4,
- 'write_ms': 8,
- },
- self.device.GetIOStats())
-
-
class DeviceUtilsGetMemoryUsageForPidTest(DeviceUtilsOldImplTest):
def setUp(self):
diff --git a/build/android/pylib/gtest/setup.py b/build/android/pylib/gtest/setup.py
index 72c4b05..1b882ca 100644
--- a/build/android/pylib/gtest/setup.py
+++ b/build/android/pylib/gtest/setup.py
@@ -38,6 +38,7 @@
'media_unittests': 'media/media_unittests.isolate',
'net_unittests': 'net/net_unittests.isolate',
'sql_unittests': 'sql/sql_unittests.isolate',
+ 'sync_unit_tests': 'sync/sync_unit_tests.isolate',
'ui_base_unittests': 'ui/base/ui_base_tests.isolate',
'unit_tests': 'chrome/unit_tests.isolate',
'webkit_unit_tests':
diff --git a/build/android/pylib/instrumentation/instrumentation_parser.py b/build/android/pylib/instrumentation/instrumentation_parser.py
new file mode 100644
index 0000000..1859f14
--- /dev/null
+++ b/build/android/pylib/instrumentation/instrumentation_parser.py
@@ -0,0 +1,96 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import re
+
+# http://developer.android.com/reference/android/test/InstrumentationTestRunner.html
+STATUS_CODE_START = 1
+STATUS_CODE_OK = 0
+STATUS_CODE_ERROR = -1
+STATUS_CODE_FAILURE = -2
+
+# http://developer.android.com/reference/android/app/Activity.html
+RESULT_CODE_OK = -1
+RESULT_CODE_CANCELED = 0
+
+_INSTR_LINE_RE = re.compile('^\s*INSTRUMENTATION_([A-Z_]+): (.*)$')
+
+
+class InstrumentationParser(object):
+
+ def __init__(self, stream):
+ """An incremental parser for the output of Android instrumentation tests.
+
+ Example:
+
+ stream = adb.IterShell('am instrument -r ...')
+ parser = InstrumentationParser(stream)
+
+ for code, bundle in parser.IterStatus():
+ # do something with each instrumentation status
+ print 'status:', code, bundle
+
+ # do something with the final instrumentation result
+ code, bundle = parser.GetResult()
+ print 'result:', code, bundle
+
+ Args:
+ stream: a sequence of lines as produced by the raw output of an
+ instrumentation test (e.g. by |am instrument -r| or |uiautomator|).
+ """
+ self._stream = stream
+ self._code = None
+ self._bundle = None
+
+ def IterStatus(self):
+ """Iterate over statuses as they are produced by the instrumentation test.
+
+ Yields:
+ A tuple (code, bundle) for each instrumentation status found in the
+ output.
+ """
+ def join_bundle_values(bundle):
+ for key in bundle:
+ bundle[key] = '\n'.join(bundle[key])
+ return bundle
+
+ bundle = {'STATUS': {}, 'RESULT': {}}
+ header = None
+ key = None
+ for line in self._stream:
+ m = _INSTR_LINE_RE.match(line)
+ if m:
+ header, value = m.groups()
+ key = None
+ if header in ['STATUS', 'RESULT'] and '=' in value:
+ key, value = value.split('=', 1)
+ bundle[header][key] = [value]
+ elif header == 'STATUS_CODE':
+ yield int(value), join_bundle_values(bundle['STATUS'])
+ bundle['STATUS'] = {}
+ elif header == 'CODE':
+ self._code = int(value)
+ else:
+ logging.warning('Unknown INSTRUMENTATION_%s line: %s', header, value)
+ elif key is not None:
+ bundle[header][key].append(line)
+
+ self._bundle = join_bundle_values(bundle['RESULT'])
+
+ def GetResult(self):
+ """Return the final instrumentation result.
+
+ Returns:
+ A pair (code, bundle) with the final instrumentation result. The |code|
+ may be None if no instrumentation result was found in the output.
+
+ Raises:
+ AssertionError if attempting to get the instrumentation result before
+ exhausting |IterStatus| first.
+ """
+ assert self._bundle is not None, (
+ 'The IterStatus generator must be exhausted before reading the final'
+ ' instrumentation result.')
+ return self._code, self._bundle
diff --git a/build/android/pylib/instrumentation/instrumentation_parser_test.py b/build/android/pylib/instrumentation/instrumentation_parser_test.py
new file mode 100755
index 0000000..092d10f
--- /dev/null
+++ b/build/android/pylib/instrumentation/instrumentation_parser_test.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Unit tests for instrumentation.InstrumentationParser."""
+
+import unittest
+
+from pylib.instrumentation import instrumentation_parser
+
+
+class InstrumentationParserTest(unittest.TestCase):
+
+ def testInstrumentationParser_nothing(self):
+ parser = instrumentation_parser.InstrumentationParser([''])
+ statuses = list(parser.IterStatus())
+ code, bundle = parser.GetResult()
+ self.assertEqual(None, code)
+ self.assertEqual({}, bundle)
+ self.assertEqual([], statuses)
+
+ def testInstrumentationParser_noMatchingStarts(self):
+ raw_output = [
+ '',
+ 'this.is.a.test.package.TestClass:.',
+ 'Test result for =.',
+ 'Time: 1.234',
+ '',
+ 'OK (1 test)',
+ ]
+
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+ code, bundle = parser.GetResult()
+ self.assertEqual(None, code)
+ self.assertEqual({}, bundle)
+ self.assertEqual([], statuses)
+
+ def testInstrumentationParser_resultAndCode(self):
+ raw_output = [
+ 'INSTRUMENTATION_RESULT: shortMsg=foo bar',
+ 'INSTRUMENTATION_RESULT: longMsg=a foo',
+ 'walked into',
+ 'a bar',
+ 'INSTRUMENTATION_CODE: -1',
+ ]
+
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+ code, bundle = parser.GetResult()
+ self.assertEqual(-1, code)
+ self.assertEqual(
+ {'shortMsg': 'foo bar', 'longMsg': 'a foo\nwalked into\na bar'}, bundle)
+ self.assertEqual([], statuses)
+
+ def testInstrumentationParser_oneStatus(self):
+ raw_output = [
+ 'INSTRUMENTATION_STATUS: foo=1',
+ 'INSTRUMENTATION_STATUS: bar=hello',
+ 'INSTRUMENTATION_STATUS: world=false',
+ 'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass',
+ 'INSTRUMENTATION_STATUS: test=testMethod',
+ 'INSTRUMENTATION_STATUS_CODE: 0',
+ ]
+
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+
+ expected = [
+ (0, {
+ 'foo': '1',
+ 'bar': 'hello',
+ 'world': 'false',
+ 'class': 'this.is.a.test.package.TestClass',
+ 'test': 'testMethod',
+ })
+ ]
+ self.assertEqual(expected, statuses)
+
+ def testInstrumentationParser_multiStatus(self):
+ raw_output = [
+ 'INSTRUMENTATION_STATUS: class=foo',
+ 'INSTRUMENTATION_STATUS: test=bar',
+ 'INSTRUMENTATION_STATUS_CODE: 1',
+ 'INSTRUMENTATION_STATUS: test_skipped=true',
+ 'INSTRUMENTATION_STATUS_CODE: 0',
+ 'INSTRUMENTATION_STATUS: class=hello',
+ 'INSTRUMENTATION_STATUS: test=world',
+ 'INSTRUMENTATION_STATUS: stack=',
+ 'foo/bar.py (27)',
+ 'hello/world.py (42)',
+ 'test/file.py (1)',
+ 'INSTRUMENTATION_STATUS_CODE: -1',
+ ]
+
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+
+ expected = [
+ (1, {'class': 'foo', 'test': 'bar',}),
+ (0, {'test_skipped': 'true'}),
+ (-1, {
+ 'class': 'hello',
+ 'test': 'world',
+ 'stack': '\nfoo/bar.py (27)\nhello/world.py (42)\ntest/file.py (1)',
+ }),
+ ]
+ self.assertEqual(expected, statuses)
+
+ def testInstrumentationParser_statusResultAndCode(self):
+ raw_output = [
+ 'INSTRUMENTATION_STATUS: class=foo',
+ 'INSTRUMENTATION_STATUS: test=bar',
+ 'INSTRUMENTATION_STATUS_CODE: 1',
+ 'INSTRUMENTATION_RESULT: result=hello',
+ 'world',
+ '',
+ '',
+ 'INSTRUMENTATION_CODE: 0',
+ ]
+
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+ code, bundle = parser.GetResult()
+
+ self.assertEqual(0, code)
+ self.assertEqual({'result': 'hello\nworld\n\n'}, bundle)
+ self.assertEqual([(1, {'class': 'foo', 'test': 'bar'})], statuses)
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/build/android/pylib/instrumentation/instrumentation_test_instance.py b/build/android/pylib/instrumentation/instrumentation_test_instance.py
index 0c4f566..45e6ee4 100644
--- a/build/android/pylib/instrumentation/instrumentation_test_instance.py
+++ b/build/android/pylib/instrumentation/instrumentation_test_instance.py
@@ -14,6 +14,7 @@
from pylib.base import base_test_result
from pylib.base import test_instance
from pylib.instrumentation import test_result
+from pylib.instrumentation import instrumentation_parser
from pylib.utils import apk_helper
from pylib.utils import md5sum
from pylib.utils import proguard
@@ -47,48 +48,10 @@
- the bundle dump as a dict mapping string keys to a list of
strings, one for each line.
"""
- INSTR_STATUS = 'INSTRUMENTATION_STATUS: '
- INSTR_STATUS_CODE = 'INSTRUMENTATION_STATUS_CODE: '
- INSTR_RESULT = 'INSTRUMENTATION_RESULT: '
- INSTR_CODE = 'INSTRUMENTATION_CODE: '
-
- last = None
- instr_code = None
- instr_result = []
- instr_statuses = []
- bundle = {}
- for line in raw_output:
- if line.startswith(INSTR_STATUS):
- instr_var = line[len(INSTR_STATUS):]
- if '=' in instr_var:
- k, v = instr_var.split('=', 1)
- bundle[k] = [v]
- last = INSTR_STATUS
- last_key = k
- else:
- logging.debug('Unknown "%s" line: %s' % (INSTR_STATUS, line))
-
- elif line.startswith(INSTR_STATUS_CODE):
- instr_status = line[len(INSTR_STATUS_CODE):]
- instr_statuses.append((int(instr_status), bundle))
- bundle = {}
- last = INSTR_STATUS_CODE
-
- elif line.startswith(INSTR_RESULT):
- instr_result.append(line[len(INSTR_RESULT):])
- last = INSTR_RESULT
-
- elif line.startswith(INSTR_CODE):
- instr_code = int(line[len(INSTR_CODE):])
- last = INSTR_CODE
-
- elif last == INSTR_STATUS:
- bundle[last_key].append(line)
-
- elif last == INSTR_RESULT:
- instr_result.append(line)
-
- return (instr_code, instr_result, instr_statuses)
+ parser = instrumentation_parser.InstrumentationParser(raw_output)
+ statuses = list(parser.IterStatus())
+ code, bundle = parser.GetResult()
+ return (code, bundle, statuses)
def GenerateTestResult(test_name, instr_statuses, start_ms, duration_ms):
@@ -106,22 +69,15 @@
Returns:
An InstrumentationTestResult object.
"""
- INSTR_STATUS_CODE_START = 1
- INSTR_STATUS_CODE_OK = 0
- INSTR_STATUS_CODE_ERROR = -1
- INSTR_STATUS_CODE_FAIL = -2
-
log = ''
result_type = base_test_result.ResultType.UNKNOWN
for status_code, bundle in instr_statuses:
- if status_code == INSTR_STATUS_CODE_START:
+ if status_code == instrumentation_parser.STATUS_CODE_START:
pass
- elif status_code == INSTR_STATUS_CODE_OK:
- bundle_test = '%s#%s' % (
- ''.join(bundle.get('class', [''])),
- ''.join(bundle.get('test', [''])))
- skipped = ''.join(bundle.get('test_skipped', ['']))
+ elif status_code == instrumentation_parser.STATUS_CODE_OK:
+ bundle_test = '%s#%s' % (bundle.get('class', ''), bundle.get('test', ''))
+ skipped = bundle.get('test_skipped', '')
if (test_name == bundle_test and
result_type == base_test_result.ResultType.UNKNOWN):
@@ -130,13 +86,13 @@
result_type = base_test_result.ResultType.SKIP
logging.info('Skipped ' + test_name)
else:
- if status_code not in (INSTR_STATUS_CODE_ERROR,
- INSTR_STATUS_CODE_FAIL):
+ if status_code not in (instrumentation_parser.STATUS_CODE_ERROR,
+ instrumentation_parser.STATUS_CODE_FAILURE):
logging.error('Unrecognized status code %d. Handling as an error.',
status_code)
result_type = base_test_result.ResultType.FAIL
if 'stack' in bundle:
- log = '\n'.join(bundle['stack'])
+ log = bundle['stack']
return test_result.InstrumentationTestResult(
test_name, result_type, start_ms, duration_ms, log=log)
@@ -466,24 +422,22 @@
@staticmethod
def GenerateMultiTestResult(errors, statuses):
- INSTR_STATUS_CODE_START = 1
results = []
skip_counter = 1
for status_code, bundle in statuses:
- if status_code != INSTR_STATUS_CODE_START:
+ if status_code != instrumentation_parser.STATUS_CODE_START:
# TODO(rnephew): Make skipped tests still output test name. This is only
# there to give skipped tests a unique name so they are counted
if 'test_skipped' in bundle:
test_name = str(skip_counter)
skip_counter += 1
else:
- test_name = '%s#%s' % (
- ''.join(bundle.get('class', [''])),
- ''.join(bundle.get('test', [''])))
+ test_name = '%s#%s' % (bundle.get('class', ''),
+ bundle.get('test', ''))
results.append(
GenerateTestResult(test_name, [(status_code, bundle)], 0, 0))
- for error in errors:
+ for error in errors.itervalues():
if _NATIVE_CRASH_RE.search(error):
results.append(
base_test_result.BaseTestResult(
diff --git a/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
index 3bf3939..693f175 100755
--- a/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
+++ b/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
@@ -27,115 +27,6 @@
options = mock.Mock()
options.tool = ''
- def testParseAmInstrumentRawOutput_nothing(self):
- code, result, statuses = (
- instrumentation_test_instance.ParseAmInstrumentRawOutput(['']))
- self.assertEqual(None, code)
- self.assertEqual([], result)
- self.assertEqual([], statuses)
-
- def testParseAmInstrumentRawOutput_noMatchingStarts(self):
- raw_output = [
- '',
- 'this.is.a.test.package.TestClass:.',
- 'Test result for =.',
- 'Time: 1.234',
- '',
- 'OK (1 test)',
- ]
-
- code, result, statuses = (
- instrumentation_test_instance.ParseAmInstrumentRawOutput(raw_output))
- self.assertEqual(None, code)
- self.assertEqual([], result)
- self.assertEqual([], statuses)
-
- def testParseAmInstrumentRawOutput_resultAndCode(self):
- raw_output = [
- 'INSTRUMENTATION_RESULT: foo',
- 'bar',
- 'INSTRUMENTATION_CODE: -1',
- ]
-
- code, result, _ = (
- instrumentation_test_instance.ParseAmInstrumentRawOutput(raw_output))
- self.assertEqual(-1, code)
- self.assertEqual(['foo', 'bar'], result)
-
- def testParseAmInstrumentRawOutput_oneStatus(self):
- raw_output = [
- 'INSTRUMENTATION_STATUS: foo=1',
- 'INSTRUMENTATION_STATUS: bar=hello',
- 'INSTRUMENTATION_STATUS: world=false',
- 'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass',
- 'INSTRUMENTATION_STATUS: test=testMethod',
- 'INSTRUMENTATION_STATUS_CODE: 0',
- ]
-
- _, _, statuses = (
- instrumentation_test_instance.ParseAmInstrumentRawOutput(raw_output))
-
- expected = [
- (0, {
- 'foo': ['1'],
- 'bar': ['hello'],
- 'world': ['false'],
- 'class': ['this.is.a.test.package.TestClass'],
- 'test': ['testMethod'],
- })
- ]
- self.assertEqual(expected, statuses)
-
- def testParseAmInstrumentRawOutput_multiStatus(self):
- raw_output = [
- 'INSTRUMENTATION_STATUS: class=foo',
- 'INSTRUMENTATION_STATUS: test=bar',
- 'INSTRUMENTATION_STATUS_CODE: 1',
- 'INSTRUMENTATION_STATUS: test_skipped=true',
- 'INSTRUMENTATION_STATUS_CODE: 0',
- 'INSTRUMENTATION_STATUS: class=hello',
- 'INSTRUMENTATION_STATUS: test=world',
- 'INSTRUMENTATION_STATUS: stack=',
- 'foo/bar.py (27)',
- 'hello/world.py (42)',
- 'test/file.py (1)',
- 'INSTRUMENTATION_STATUS_CODE: -1',
- ]
-
- _, _, statuses = (
- instrumentation_test_instance.ParseAmInstrumentRawOutput(raw_output))
-
- expected = [
- (1, {'class': ['foo'], 'test': ['bar'],}),
- (0, {'test_skipped': ['true']}),
- (-1, {
- 'class': ['hello'],
- 'test': ['world'],
- 'stack': ['', 'foo/bar.py (27)', 'hello/world.py (42)',
- 'test/file.py (1)'],
- }),
- ]
- self.assertEqual(expected, statuses)
-
- def testParseAmInstrumentRawOutput_statusResultAndCode(self):
- raw_output = [
- 'INSTRUMENTATION_STATUS: class=foo',
- 'INSTRUMENTATION_STATUS: test=bar',
- 'INSTRUMENTATION_STATUS_CODE: 1',
- 'INSTRUMENTATION_RESULT: hello',
- 'world',
- '',
- '',
- 'INSTRUMENTATION_CODE: 0',
- ]
-
- code, result, statuses = (
- instrumentation_test_instance.ParseAmInstrumentRawOutput(raw_output))
-
- self.assertEqual(0, code)
- self.assertEqual(['hello', 'world', '', ''], result)
- self.assertEqual([(1, {'class': ['foo'], 'test': ['bar']})], statuses)
-
def testGenerateTestResult_noStatus(self):
result = instrumentation_test_instance.GenerateTestResult(
'test.package.TestClass#testMethod', [], 0, 1000)
@@ -147,12 +38,12 @@
def testGenerateTestResult_testPassed(self):
statuses = [
(1, {
- 'class': ['test.package.TestClass'],
- 'test': ['testMethod'],
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
}),
(0, {
- 'class': ['test.package.TestClass'],
- 'test': ['testMethod'],
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
}),
]
result = instrumentation_test_instance.GenerateTestResult(
@@ -162,15 +53,15 @@
def testGenerateTestResult_testSkipped_first(self):
statuses = [
(0, {
- 'test_skipped': ['true'],
+ 'test_skipped': 'true',
}),
(1, {
- 'class': ['test.package.TestClass'],
- 'test': ['testMethod'],
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
}),
(0, {
- 'class': ['test.package.TestClass'],
- 'test': ['testMethod'],
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
}),
]
result = instrumentation_test_instance.GenerateTestResult(
@@ -180,15 +71,15 @@
def testGenerateTestResult_testSkipped_last(self):
statuses = [
(1, {
- 'class': ['test.package.TestClass'],
- 'test': ['testMethod'],
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
}),
(0, {
- 'class': ['test.package.TestClass'],
- 'test': ['testMethod'],
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
}),
(0, {
- 'test_skipped': ['true'],
+ 'test_skipped': 'true',
}),
]
result = instrumentation_test_instance.GenerateTestResult(
@@ -198,15 +89,15 @@
def testGenerateTestResult_testSkipped_false(self):
statuses = [
(0, {
- 'test_skipped': ['false'],
+ 'test_skipped': 'false',
}),
(1, {
- 'class': ['test.package.TestClass'],
- 'test': ['testMethod'],
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
}),
(0, {
- 'class': ['test.package.TestClass'],
- 'test': ['testMethod'],
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
}),
]
result = instrumentation_test_instance.GenerateTestResult(
@@ -216,12 +107,12 @@
def testGenerateTestResult_testFailed(self):
statuses = [
(1, {
- 'class': ['test.package.TestClass'],
- 'test': ['testMethod'],
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
}),
(-2, {
- 'class': ['test.package.TestClass'],
- 'test': ['testMethod'],
+ 'class': 'test.package.TestClass',
+ 'test': 'testMethod',
}),
]
result = instrumentation_test_instance.GenerateTestResult(
diff --git a/build/android/pylib/instrumentation/setup.py b/build/android/pylib/instrumentation/setup.py
index 8dacc3d..bdde80d 100644
--- a/build/android/pylib/instrumentation/setup.py
+++ b/build/android/pylib/instrumentation/setup.py
@@ -89,15 +89,15 @@
if test_options.test_data:
device_utils.DeviceUtils.parallel(devices).pMap(
_PushDataDeps, test_options)
- else:
+
+ if test_options.isolate_file_path:
base_setup.GenerateDepsDirUsingIsolate(test_options.test_apk,
test_options.isolate_file_path,
ISOLATE_FILE_PATHS,
DEPS_EXCLUSION_LIST)
def push_data_deps_to_device_dir(device):
- device_dir = os.path.join(device.GetExternalStoragePath(),
- DEVICE_DATA_DIR)
- base_setup.PushDataDeps(device, device_dir, test_options)
+ base_setup.PushDataDeps(device, device.GetExternalStoragePath(),
+ test_options)
device_utils.DeviceUtils.parallel(devices).pMap(
push_data_deps_to_device_dir)
diff --git a/build/android/pylib/instrumentation/test_runner.py b/build/android/pylib/instrumentation/test_runner.py
index fb9557e..5f095a5 100644
--- a/build/android/pylib/instrumentation/test_runner.py
+++ b/build/android/pylib/instrumentation/test_runner.py
@@ -318,7 +318,7 @@
extras['class'] = test
return self.device.StartInstrumentation(
'%s/%s' % (self.test_pkg.GetPackageName(), self.options.test_runner),
- raw=True, extras=extras, timeout=timeout, retries=0)
+ raw=True, extras=extras, timeout=timeout, retries=3)
def _GenerateTestResult(self, test, instr_statuses, start_ms, duration_ms):
return instrumentation_test_instance.GenerateTestResult(
diff --git a/build/android/pylib/local/device/local_device_test_run.py b/build/android/pylib/local/device/local_device_test_run.py
index 1f1686b..8c322cb 100644
--- a/build/android/pylib/local/device/local_device_test_run.py
+++ b/build/android/pylib/local/device/local_device_test_run.py
@@ -61,7 +61,7 @@
tests = [t for t in tests if self._GetTestName(t) not in results_names]
tries += 1
- all_unknown_test_names = set(self._GetTestName(t) for f in tests)
+ all_unknown_test_names = set(self._GetTestName(t) for t in tests)
all_failed_test_names = set(all_fail_results.iterkeys())
unknown_tests = all_unknown_test_names.difference(all_failed_test_names)
diff --git a/build/android/pylib/perf/test_runner.py b/build/android/pylib/perf/test_runner.py
index b7fadd2..d9227a5 100644
--- a/build/android/pylib/perf/test_runner.py
+++ b/build/android/pylib/perf/test_runner.py
@@ -69,9 +69,10 @@
def OutputJsonList(json_input, json_output):
with file(json_input, 'r') as i:
all_steps = json.load(i)
- step_names = all_steps['steps'].keys()
+ step_values = [{'test': k, 'device_affinity': v['device_affinity']}
+ for k, v in all_steps['steps'].iteritems()]
with file(json_output, 'w') as o:
- o.write(json.dumps(step_names))
+ o.write(json.dumps(step_values))
return 0
diff --git a/build/android/pylib/remote/device/remote_device_environment.py b/build/android/pylib/remote/device/remote_device_environment.py
index cc39112..2561180 100644
--- a/build/android/pylib/remote/device/remote_device_environment.py
+++ b/build/android/pylib/remote/device/remote_device_environment.py
@@ -15,12 +15,15 @@
from pylib.base import environment
from pylib.remote.device import appurify_sanitized
from pylib.remote.device import remote_device_helper
+from pylib.utils import timeout_retry
+from pylib.utils import reraiser_thread
class RemoteDeviceEnvironment(environment.Environment):
"""An environment for running on remote devices."""
_ENV_KEY = 'env'
_DEVICE_KEY = 'device'
+ _DEFAULT_RETRIES = 0
def __init__(self, args, error_func):
"""Constructor.
@@ -74,6 +77,7 @@
self._remote_device_minimum_os = device_json.get(
'remote_device_minimum_os', None)
self._remote_device_os = device_json.get('remote_device_os', None)
+ self._remote_device_timeout = device_json.get('remote_device_timeout', None)
self._results_path = device_json.get('results_path', None)
self._runner_package = device_json.get('runner_package', None)
self._runner_type = device_json.get('runner_type', None)
@@ -110,6 +114,9 @@
'remote_device_minimum_os')
self._remote_device_os = command_line_override(
self._remote_device_os, args.remote_device_os, 'remote_device_os')
+ self._remote_device_timeout = command_line_override(
+ self._remote_device_timeout, args.remote_device_timeout,
+ 'remote_device_timeout')
self._results_path = command_line_override(
self._results_path, args.results_path, 'results_path')
self._runner_package = command_line_override(
@@ -158,6 +165,7 @@
logging.info('Remote device OS: %s', self._remote_device_os)
logging.info('Remote device OEM: %s', self._device_oem)
logging.info('Remote device type: %s', self._device_type)
+ logging.info('Remote device timeout: %s', self._remote_device_timeout)
logging.info('Results Path: %s', self._results_path)
logging.info('Runner package: %s', self._runner_package)
logging.info('Runner type: %s', self._runner_type)
@@ -177,7 +185,7 @@
os.environ['APPURIFY_API_PORT'] = self._api_port
self._GetAccessToken()
if self._trigger:
- self._device = self._SelectDevice()
+ self._SelectDevice()
def TearDown(self):
"""Teardown the test environment."""
@@ -228,14 +236,20 @@
'Unable to revoke access token.')
def _SelectDevice(self):
- """Select which device to use."""
+ if self._remote_device_timeout:
+ try:
+ timeout_retry.Run(self._FindDeviceWithTimeout,
+ self._remote_device_timeout, self._DEFAULT_RETRIES)
+ except reraiser_thread.TimeoutError:
+ self._NoDeviceFound()
+ else:
+ if not self._FindDevice():
+ self._NoDeviceFound()
+
+ def _FindDevice(self):
+ """Find which device to use."""
logging.info('Finding device to run tests on.')
- with appurify_sanitized.SanitizeLogging(self._verbose_count,
- logging.WARNING):
- dev_list_res = appurify_sanitized.api.devices_list(self._access_token)
- remote_device_helper.TestHttpResponse(dev_list_res,
- 'Unable to generate access token.')
- device_list = dev_list_res.json()['response']
+ device_list = self._GetDeviceList()
random.shuffle(device_list)
for device in device_list:
if device['os_name'] != self._device_type:
@@ -251,12 +265,16 @@
and distutils.version.LooseVersion(device['os_version'])
< distutils.version.LooseVersion(self._remote_device_minimum_os)):
continue
- if ((self._remote_device and self._remote_device_os)
- or device['available_devices_count']):
+ if device['has_available_device']:
logging.info('Found device: %s %s',
device['name'], device['os_version'])
- return device
- self._NoDeviceFound(device_list)
+ self._device = device
+ return True
+ return False
+
+ def _FindDeviceWithTimeout(self):
+ """Find which device to use with timeout."""
+ timeout_retry.WaitFor(self._FindDevice, wait_period=1)
def _PrintAvailableDevices(self, device_list):
def compare_devices(a,b):
@@ -267,12 +285,23 @@
return 0
logging.critical('Available %s Devices:', self._device_type)
+ logging.critical(' %s %s %s', 'OS'.ljust(7),
+ 'Device Name'.ljust(20), '# Available')
devices = (d for d in device_list if d['os_name'] == self._device_type)
for d in sorted(devices, compare_devices):
- logging.critical(' %s %s', d['os_version'].ljust(7), d['name'])
+ logging.critical(' %s %s %s', d['os_version'].ljust(7),
+ d['name'].ljust(20), d['available_devices_count'])
- def _NoDeviceFound(self, device_list):
- self._PrintAvailableDevices(device_list)
+ def _GetDeviceList(self):
+ with appurify_sanitized.SanitizeLogging(self._verbose_count,
+ logging.WARNING):
+ dev_list_res = appurify_sanitized.api.devices_list(self._access_token)
+ remote_device_helper.TestHttpResponse(dev_list_res,
+ 'Unable to generate access token.')
+ return dev_list_res.json()['response']
+
+ def _NoDeviceFound(self):
+ self._PrintAvailableDevices(self._GetDeviceList())
raise remote_device_helper.RemoteDeviceError('No device found.')
@property
diff --git a/build/android/pylib/remote/device/remote_device_test_run.py b/build/android/pylib/remote/device/remote_device_test_run.py
index 91701b0..43a7399 100644
--- a/build/android/pylib/remote/device/remote_device_test_run.py
+++ b/build/android/pylib/remote/device/remote_device_test_run.py
@@ -170,12 +170,12 @@
"""
if results_path:
logging.info('Downloading results to %s.' % results_path)
- if not os.path.exists(os.path.basename(results_path)):
- os.makedirs(os.path.basename(results_path))
- with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
- logging.WARNING):
- appurify_sanitized.utils.wget(self._results['results']['url'],
- results_path)
+ if not os.path.exists(os.path.dirname(results_path)):
+ os.makedirs(os.path.dirname(results_path))
+ with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+ logging.WARNING):
+ appurify_sanitized.utils.wget(self._results['results']['url'],
+ results_path)
def _GetTestStatus(self, test_run_id):
"""Checks the state of the test, and sets self._results
diff --git a/build/android/pylib/utils/md5sum.py b/build/android/pylib/utils/md5sum.py
index 4d0d703..da3cd15 100644
--- a/build/android/pylib/utils/md5sum.py
+++ b/build/android/pylib/utils/md5sum.py
@@ -19,7 +19,7 @@
MD5SUM_DEVICE_SCRIPT_FORMAT = (
'test -f {path} -o -d {path} '
- '&& LD_LIBRARY_PATH={md5sum_lib} {md5sum_bin} {path}')
+ '&& LD_LIBRARY_PATH={md5sum_lib} {device_pie_wrapper} {md5sum_bin} {path}')
def CalculateHostMd5Sums(paths):
@@ -56,12 +56,15 @@
MD5SUM_DEVICE_LIB_PATH)
out = []
+
with tempfile.NamedTemporaryFile() as md5sum_script_file:
with device_temp_file.DeviceTempFile(
device.adb) as md5sum_device_script_file:
+ device_pie_wrapper = device.GetDevicePieWrapper()
md5sum_script = (
MD5SUM_DEVICE_SCRIPT_FORMAT.format(
path=p, md5sum_lib=MD5SUM_DEVICE_LIB_PATH,
+ device_pie_wrapper=device_pie_wrapper,
md5sum_bin=MD5SUM_DEVICE_BIN_PATH)
for p in paths)
md5sum_script_file.write('; '.join(md5sum_script))
@@ -69,5 +72,5 @@
device.adb.Push(md5sum_script_file.name, md5sum_device_script_file.name)
out = device.RunShellCommand(['sh', md5sum_device_script_file.name])
- return [HashAndPath(*l.split(None, 1)) for l in out]
+ return [HashAndPath(*l.split(None, 1)) for l in out if l]
diff --git a/build/android/pylib/utils/mock_calls.py b/build/android/pylib/utils/mock_calls.py
index fab9f2b..3052b0d 100644
--- a/build/android/pylib/utils/mock_calls.py
+++ b/build/android/pylib/utils/mock_calls.py
@@ -110,7 +110,12 @@
if call.name.startswith('self.'):
target = self.call_target(call.parent)
_, attribute = call.name.rsplit('.', 1)
- return mock.patch.object(target, attribute, **kwargs)
+ if (hasattr(type(target), attribute)
+ and isinstance(getattr(type(target), attribute), property)):
+ return mock.patch.object(
+ type(target), attribute, new_callable=mock.PropertyMock, **kwargs)
+ else:
+ return mock.patch.object(target, attribute, **kwargs)
else:
return mock.patch(call.name, **kwargs)
diff --git a/build/android/pylib/utils/mock_calls_test.py b/build/android/pylib/utils/mock_calls_test.py
index 1b474af..4dbafd4 100755
--- a/build/android/pylib/utils/mock_calls_test.py
+++ b/build/android/pylib/utils/mock_calls_test.py
@@ -38,6 +38,11 @@
def Reboot(self):
logging.debug('(device %s) rebooted!', self)
+ @property
+ def build_version_sdk(self):
+ logging.debug('(device %s) getting build_version_sdk', self)
+ return constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP
+
class TestCaseWithAssertCallsTest(mock_calls.TestCase):
def setUp(self):
@@ -91,6 +96,17 @@
with self.assertRaises(ValueError):
self.adb.Shell('echo hello')
+ def testPatchCall_property(self):
+ self.assertEquals(constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP,
+ self.adb.build_version_sdk)
+ with self.patch_call(
+ self.call.adb.build_version_sdk,
+ return_value=constants.ANDROID_SDK_VERSION_CODES.KITKAT):
+ self.assertEquals(constants.ANDROID_SDK_VERSION_CODES.KITKAT,
+ self.adb.build_version_sdk)
+ self.assertEquals(constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP,
+ self.adb.build_version_sdk)
+
def testAssertCalls_succeeds_simple(self):
self.assertEquals(42, self.get_answer())
with self.assertCall(self.call.get_answer(), 123):
diff --git a/build/android/setup.gyp b/build/android/setup.gyp
index b3c3422..7dce19d 100644
--- a/build/android/setup.gyp
+++ b/build/android/setup.gyp
@@ -16,7 +16,7 @@
{
'destination': '<(SHARED_LIB_DIR)/',
'files': [
- '<(android_libcpp_libs_dir)/libc++_shared.so',
+ '<(android_stlport_libs_dir)/libstlport_shared.so',
],
},
],
diff --git a/build/android/test_runner.py b/build/android/test_runner.py
index cc7bbee..f79e13e 100755
--- a/build/android/test_runner.py
+++ b/build/android/test_runner.py
@@ -149,6 +149,8 @@
group.add_argument('--remote-device-file',
help=('File with JSON to select remote device. '
'Overrides all other flags.'))
+ group.add_argument('--remote-device-timeout', type=int,
+ help='Timeout (in seconds) for finding a remote device')
device_os_group = group.add_mutually_exclusive_group()
device_os_group.add_argument('--remote-device-minimum-os',
@@ -529,7 +531,7 @@
help='APK to run tests on.')
group.add_argument(
'--minutes', default=5, type=int,
- help='Number of minutes to run uirobot test [default: %default].')
+ help='Number of minutes to run uirobot test [default: %(default)s].')
AddCommonOptions(parser)
AddDeviceOptions(parser)
diff --git a/build/common.gypi b/build/common.gypi
index 153a500..3a071c8 100644
--- a/build/common.gypi
+++ b/build/common.gypi
@@ -391,9 +391,12 @@
# See https://sites.google.com/a/chromium.org/dev/developers/testing/addresssanitizer
'asan%': 0,
'asan_blacklist%': '<(PRODUCT_DIR)/../../tools/memory/asan/blacklist.txt',
- # Enable coverage gathering instrumentation in ASan. This flag also
- # controls coverage granularity (1 for function-level coverage, 2 for
- # block-level coverage).
+ # Enable coverage gathering instrumentation in sanitizer tools. This flag
+ # also controls coverage granularity (1 for function-level coverage, 2
+ # for block-level coverage).
+ 'sanitizer_coverage%': 0,
+ # Deprecated, only works if |sanitizer_coverage| isn't set.
+ # TODO(glider): remove this flag.
'asan_coverage%': 0,
# Enable intra-object-overflow detection in ASan (experimental).
'asan_field_padding%': 0,
@@ -1119,6 +1122,7 @@
'asan%': '<(asan)',
'asan_blacklist%': '<(asan_blacklist)',
'asan_coverage%': '<(asan_coverage)',
+ 'sanitizer_coverage%': '<(sanitizer_coverage)',
'asan_field_padding%': '<(asan_field_padding)',
'use_sanitizer_options%': '<(use_sanitizer_options)',
'syzyasan%': '<(syzyasan)',
@@ -1668,7 +1672,7 @@
'android_ndk_root%': '<(android_ndk_root)',
'android_sdk_root%': '<(android_sdk_root)',
'android_sdk_version%': '<(android_sdk_version)',
- 'android_libcpp_root': '<(android_ndk_root)/sources/cxx-stl/llvm-libc++',
+ 'android_stlport_root': '<(android_ndk_root)/sources/cxx-stl/stlport',
'host_os%': '<(host_os)',
'android_sdk%': '<(android_sdk_root)/platforms/android-<(android_sdk_version)',
@@ -1745,10 +1749,9 @@
'android_sdk%': '<(android_sdk)',
'android_sdk_jar%': '<(android_sdk)/android.jar',
- 'android_libcpp_root': '<(android_libcpp_root)',
- 'android_libcpp_include': '<(android_libcpp_root)/libcxx/include',
- 'android_libcpp_libs_dir': '<(android_libcpp_root)/libs/<(android_app_abi)',
-
+ 'android_stlport_root': '<(android_stlport_root)',
+ 'android_stlport_include': '<(android_stlport_root)/stlport',
+ 'android_stlport_libs_dir': '<(android_stlport_root)/libs/<(android_app_abi)',
'host_os%': '<(host_os)',
# Location of the "objcopy" binary, used by both gyp and scripts.
@@ -2066,10 +2069,10 @@
},
'grit_defines': [
'-t', 'ios',
- # iOS uses a whitelist to filter resources.
- '-w', '<(DEPTH)/build/ios/grit_whitelist.txt',
'--no-output-all-resource-defines',
],
+ # iOS uses a whitelist to filter resources.
+ 'grit_whitelist%': '<(DEPTH)/build/ios/grit_whitelist.txt',
# Enable host builds when generating with ninja-ios.
'conditions': [
@@ -2077,12 +2080,10 @@
'host_os%': "mac",
}],
- # TODO(sdefresne): Remove the target_subarch check once Apple has
- # upstreamed the support for "arm64". http://crbug.com/341453
# TODO(eugenebut): Remove enable_coverage check once
# libclang_rt.profile_ios.a is bundled with Chromium's clang.
# http://crbug.com/450379
- ['target_subarch!="arm32" or enable_coverage or "<(GENERATOR)"=="xcode"', {
+ ['enable_coverage or "<(GENERATOR)"=="xcode"', {
'clang_xcode%': 1,
}],
],
@@ -2688,74 +2689,47 @@
'GCC_GENERATE_DEBUGGING_SYMBOLS': 'NO',
},
'conditions': [
- ['clang==1 and asan==0 and msan==0 and tsan==0 and ubsan_vptr==0', {
- # Clang creates chubby debug information, which makes linking very
- # slow. For now, don't create debug information with clang. See
- # http://crbug.com/70000
- 'conditions': [
- ['OS=="linux"', {
- 'variables': {
- 'debug_extra_cflags': '-g0',
- },
- }],
- # Android builds symbols on release by default, disable them.
- ['OS=="android"', {
- 'variables': {
- 'debug_extra_cflags': '-g0',
- 'release_extra_cflags': '-g0',
- },
- }],
- ],
- }, { # else clang!=1
- 'conditions': [
- ['OS=="win" and fastbuild==2', {
- # Completely disable debug information.
- 'msvs_settings': {
- 'VCLinkerTool': {
- 'GenerateDebugInformation': 'false',
- },
- 'VCCLCompilerTool': {
- 'DebugInformationFormat': '0',
- },
- },
- }],
- ['OS=="win" and fastbuild==1', {
- 'msvs_settings': {
- 'VCLinkerTool': {
- # This tells the linker to generate .pdbs, so that
- # we can get meaningful stack traces.
- 'GenerateDebugInformation': 'true',
- },
- 'VCCLCompilerTool': {
- # No debug info to be generated by compiler.
- 'DebugInformationFormat': '0',
- },
- },
- }],
- ['OS=="linux" and fastbuild==2', {
- 'variables': {
- 'debug_extra_cflags': '-g0',
- },
- }],
- ['OS=="linux" and fastbuild==1', {
- 'variables': {
- 'debug_extra_cflags': '-g1',
- },
- }],
- ['OS=="android" and fastbuild==2', {
- 'variables': {
- 'debug_extra_cflags': '-g0',
- 'release_extra_cflags': '-g0',
- },
- }],
- ['OS=="android" and fastbuild==1', {
- 'variables': {
- 'debug_extra_cflags': '-g1',
- 'release_extra_cflags': '-g1',
- },
- }],
- ],
- }], # clang!=1
+ ['OS=="win" and fastbuild==2', {
+ # Completely disable debug information.
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'false',
+ },
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '0',
+ },
+ },
+ }],
+ ['OS=="win" and fastbuild==1', {
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ # This tells the linker to generate .pdbs, so that
+ # we can get meaningful stack traces.
+ 'GenerateDebugInformation': 'true',
+ },
+ 'VCCLCompilerTool': {
+ # No debug info to be generated by compiler.
+ 'DebugInformationFormat': '0',
+ },
+ },
+ }],
+ ['(OS=="android" or OS=="linux") and fastbuild==2', {
+ 'variables': { 'debug_extra_cflags': '-g0', },
+ }],
+ ['(OS=="android" or OS=="linux") and fastbuild==1', {
+ # TODO(thakis): Change this to -g1 once http://crbug.com/456947 is
+ # fixed.
+ 'variables': { 'debug_extra_cflags': '-g0', },
+ }],
+ # Android builds symbols on release by default, disable them.
+ ['OS=="android" and fastbuild==2', {
+ 'variables': { 'release_extra_cflags': '-g0', },
+ }],
+ ['OS=="android" and fastbuild==1', {
+ # TODO(thakis): Change this to -g1 once http://crbug.com/456947 is
+ # fixed.
+ 'variables': { 'release_extra_cflags': '-g0', },
+ }],
],
}], # fastbuild!=0
['dont_embed_build_metadata==1', {
@@ -3544,7 +3518,7 @@
},
}],
# TODO(thakis): Enable this everywhere. http://crbug.com/371125
- ['(OS=="linux" or OS=="android") and asan==0 and msan==0 and tsan==0 and ubsan==0 and ubsan_vptr==0 and use_ozone!=1', {
+ ['(OS=="linux" or OS=="android") and asan==0 and msan==0 and tsan==0 and ubsan==0 and ubsan_vptr==0', {
'target_defaults': {
'ldflags': [
'-Wl,-z,defs',
@@ -4047,6 +4021,13 @@
'-fstack-protector', # stack protector is always enabled on arm64.
],
}],
+ # TODO: Remove webview test once webview fully compiles from
+ # Chromium. crbug.com/440793
+ ['OS=="android" and android_webview_build==0', {
+ 'ldflags': [
+ '-fuse-ld=gold',
+ ],
+ }],
],
}],
],
@@ -4272,12 +4253,27 @@
}],
],
}],
- ['asan_coverage!=0', {
+ ['asan_coverage!=0 and sanitizer_coverage==0', {
'target_conditions': [
['_toolset=="target"', {
'cflags': [
'-fsanitize-coverage=<(asan_coverage)',
],
+ 'defines': [
+ 'SANITIZER_COVERAGE',
+ ],
+ }],
+ ],
+ }],
+ ['sanitizer_coverage!=0', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': [
+ '-fsanitize-coverage=<(sanitizer_coverage)',
+ ],
+ 'defines': [
+ 'SANITIZER_COVERAGE',
+ ],
}],
],
}],
@@ -4406,8 +4402,10 @@
# https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
# Only apply this to the target linker, since the host
# linker might not be gold, but isn't used much anyway.
- '-Wl,--threads',
- '-Wl,--thread-count=4',
+ # TODO(raymes): Disable threading because gold is frequently
+ # crashing on the bots: crbug.com/161942.
+ # '-Wl,--threads',
+ # '-Wl,--thread-count=4',
],
}],
],
@@ -4499,9 +4497,9 @@
# Figure this out early since it needs symbols from libgcc.a, so it
# has to be before that in the set of libraries.
['component=="shared_library"', {
- 'android_libcpp_library': 'c++_shared',
+ 'android_stlport_library': 'stlport_shared',
}, {
- 'android_libcpp_library': 'c++_static',
+ 'android_stlport_library': 'stlport_static',
}],
],
@@ -4585,6 +4583,8 @@
'defines': [
'ANDROID',
'__GNU_SOURCE=1', # Necessary for clone()
+ 'USE_STLPORT=1',
+ '_STLP_USE_PTR_SPECIALIZATIONS=1',
'CHROME_BUILD_ID="<(chrome_build_id)"',
],
'ldflags!': [
@@ -4658,13 +4658,12 @@
'-nostdlib',
],
'libraries': [
- '-l<(android_libcpp_library)',
- '-latomic',
+ '-l<(android_stlport_library)',
# Manually link the libgcc.a that the cross compiler uses.
'<!(<(android_toolchain)/*-gcc -print-libgcc-file-name)',
- '-lm',
'-lc',
'-ldl',
+ '-lm',
],
}],
['android_webview_build==1', {
@@ -4716,20 +4715,20 @@
'-Wl,--icf=safe',
],
}],
+ # NOTE: The stlport header include paths below are specified in
+ # cflags rather than include_dirs because they need to come
+ # after include_dirs. Think of them like system headers, but
+ # don't use '-isystem' because the arm-linux-androideabi-4.4.3
+ # toolchain (circa Gingerbread) will exhibit strange errors.
+ # The include ordering here is important; change with caution.
['android_webview_build==0', {
'cflags': [
- '-isystem<(android_libcpp_include)',
- '-isystem<(android_ndk_root)/sources/cxx-stl/llvm-libc++abi/libcxxabi/include',
- '-isystem<(android_ndk_root)/sources/android/support/include',
+ '-isystem<(android_stlport_include)',
],
'ldflags': [
- '-L<(android_libcpp_libs_dir)',
+ '-L<(android_stlport_libs_dir)',
],
}, { # else: android_webview_build!=0
- 'defines': [
- 'USE_STLPORT=1',
- '_STLP_USE_PTR_SPECIALIZATIONS=1',
- ],
'aosp_build_settings': {
# Specify that we want to statically link stlport from the
# NDK. This will provide all the include and library paths
@@ -4933,12 +4932,27 @@
],
},
}],
- ['asan_coverage!=0', {
+ ['asan_coverage!=0 and sanitizer_coverage==0', {
'target_conditions': [
['_toolset=="target"', {
'cflags': [
'-fsanitize-coverage=<(asan_coverage)',
],
+ 'defines': [
+ 'SANITIZER_COVERAGE',
+ ],
+ }],
+ ],
+ }],
+ ['sanitizer_coverage!=0', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': [
+ '-fsanitize-coverage=<(sanitizer_coverage)',
+ ],
+ 'defines': [
+ 'SANITIZER_COVERAGE',
+ ],
}],
],
}],
@@ -5498,15 +5512,12 @@
}],
],
'conditions': [
+ # Building with Clang on Windows is a work in progress and very
+ # experimental. See crbug.com/82385.
['clang==1', {
- # Building with Clang on Windows is a work in progress and very
- # experimental. See crbug.com/82385.
'VCCLCompilerTool': {
- 'WarnAsError': 'false',
- 'RuntimeTypeInfo': 'false',
'AdditionalOptions': [
'-fmsc-version=1800',
- '/fallback',
# Many files use intrinsics without including this header.
# TODO(hans): Fix those files, or move this to sub-GYPs.
@@ -5549,6 +5560,14 @@
],
},
}],
+ ['clang==1 and target_arch=="ia32"', {
+ 'VCCLCompilerTool': {
+ 'WarnAsError': 'false',
+ 'AdditionalOptions': [
+ '/fallback',
+ ],
+ },
+ }],
],
},
},
diff --git a/build/config/BUILDCONFIG.gn b/build/config/BUILDCONFIG.gn
index 33449ef..d806a17 100644
--- a/build/config/BUILDCONFIG.gn
+++ b/build/config/BUILDCONFIG.gn
@@ -80,7 +80,7 @@
# TODO(brettw) remove this flag (and therefore enable linking all targets) on
# Windows when we have sufficient bot capacity. In the meantime, you can
# enable linking for local compiles.
- link_chrome_on_windows = false
+ link_chrome_on_windows = true
}
# =============================================================================
diff --git a/build/config/android/config.gni b/build/config/android/config.gni
index c6202d4..0105a64 100644
--- a/build/config/android/config.gni
+++ b/build/config/android/config.gni
@@ -20,6 +20,11 @@
default_android_sdk_build_tools_version = "21.0.1"
}
+ if (!defined(google_play_services_library)) {
+ google_play_services_library =
+ "//third_party/android_tools:google_play_services_default_java"
+ }
+
declare_args() {
android_sdk_root = default_android_sdk_root
android_sdk_version = default_android_sdk_version
@@ -93,6 +98,13 @@
"platforms/android-${_android_api_level}/arch-arm"
mips_android_sysroot_subdir =
"platforms/android-${_android_api_level}/arch-mips"
+ _android64_api_level = 21
+ x86_64_android_sysroot_subdir =
+ "platforms/android-${_android64_api_level}/arch-x86_64"
+ arm64_android_sysroot_subdir =
+ "platforms/android-${_android64_api_level}/arch-arm64"
+ mips64_android_sysroot_subdir =
+ "platforms/android-${_android64_api_level}/arch-mips64"
# Toolchain root directory for each build. The actual binaries are inside
# a "bin" directory inside of these.
@@ -100,6 +112,9 @@
x86_android_toolchain_root = "$android_ndk_root/toolchains/x86-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
arm_android_toolchain_root = "$android_ndk_root/toolchains/arm-linux-androideabi-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
mips_android_toolchain_root = "$android_ndk_root/toolchains/mipsel-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+ x86_64_android_toolchain_root = "$android_ndk_root/toolchains/x86_64-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+ arm64_android_toolchain_root = "$android_ndk_root/toolchains/aarch64-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+ mips64_android_toolchain_root = "$android_ndk_root/toolchains/mips64el-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
# Location of libgcc. This is only needed for the current GN toolchain, so we
# only need to define the current one, rather than one for every platform
@@ -119,6 +134,21 @@
_binary_prefix = "mipsel-linux-android"
android_toolchain_root = "$mips_android_toolchain_root"
android_libgcc_file = "$android_toolchain_root/lib/gcc/mipsel-linux-android/${_android_toolchain_version}/libgcc.a"
+ } else if (cpu_arch == "x64") {
+ android_prebuilt_arch = "android-x86_64"
+ _binary_prefix = "x86_64-linux-android"
+ android_toolchain_root = "$x86_64_android_toolchain_root"
+ android_libgcc_file = "$android_toolchain_root/lib/gcc/x86_64-linux-android/${_android_toolchain_version}/libgcc.a"
+ } else if (cpu_arch == "arm64") {
+ android_prebuilt_arch = "android-arm64"
+ _binary_prefix = "aarch64-linux-android"
+ android_toolchain_root = "$arm64_android_toolchain_root"
+ android_libgcc_file = "$android_toolchain_root/lib/gcc/aarch64-linux-android/${_android_toolchain_version}/libgcc.a"
+ } else if (cpu_arch == "mips64el") {
+ android_prebuilt_arch = "android-mips64"
+ _binary_prefix = "mips64el-linux-android"
+ android_toolchain_root = "$mips64_android_toolchain_root"
+ android_libgcc_file = "$android_toolchain_root/lib/gcc/mips64el-linux-android/${_android_toolchain_version}/libgcc.a"
} else {
assert(false, "Need android libgcc support for your target arch.")
}
@@ -129,6 +159,14 @@
android_gdbserver =
"$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver"
+ # stlport stuff --------------------------------------------------------------
+
+ if (component_mode == "shared_library") {
+ android_stlport_library = "stlport_shared"
+ } else {
+ android_stlport_library = "stlport_static"
+ }
+
# ABI ------------------------------------------------------------------------
if (cpu_arch == "x86") {
@@ -142,6 +180,12 @@
}
} else if (cpu_arch == "mipsel") {
android_app_abi = "mips"
+ } else if (cpu_arch == "x64") {
+ android_app_abi = "x86_64"
+ } else if (cpu_arch == "arm64") {
+ android_app_abi = "arm64-v8a"
+ } else if (cpu_arch == "mips64el") {
+ android_app_abi = "mips64"
} else {
assert(false, "Unknown Android ABI: " + cpu_arch)
}
diff --git a/build/config/android/internal_rules.gni b/build/config/android/internal_rules.gni
index 125aa2c..c31d40e 100644
--- a/build/config/android/internal_rules.gni
+++ b/build/config/android/internal_rules.gni
@@ -665,6 +665,10 @@
if (defined(invoker.chromium_code)) {
_chromium_code = invoker.chromium_code
}
+ _manifest_entries = []
+ if (defined(invoker.manifest_entries)) {
+ _manifest_entries = invoker.manifest_entries
+ }
_srcjar_deps = []
if (defined(invoker.srcjar_deps)) {
@@ -718,6 +722,9 @@
"--java-srcjars=@FileArg($_rebased_build_config:javac:srcjars)",
"--jar-excluded-classes=$_jar_excluded_patterns",
]
+ foreach(e, _manifest_entries) {
+ args += [ "--manifest-entry=" + e ]
+ }
if (_chromium_code) {
args += [ "--chromium-code=1" ]
}
@@ -861,6 +868,9 @@
if (defined(invoker.dist_jar_path)) {
dist_jar_path = invoker.dist_jar_path
}
+ if (defined(invoker.manifest_entries)) {
+ manifest_entries = invoker.manifest_entries
+ }
}
if (defined(invoker.main_class)) {
diff --git a/build/config/android/rules.gni b/build/config/android/rules.gni
index 021a7a0..33dfa37 100644
--- a/build/config/android/rules.gni
+++ b/build/config/android/rules.gni
@@ -345,6 +345,11 @@
assert(defined(invoker.outputs))
action("${target_name}__generate_enum") {
+ # The sources aren't compiled so don't check their dependencies.
+ # TODO(brettw) uncomment after GN binary rolled past 314974 (which added
+ # support for this value on actions).
+ #check_includes = false
+
sources = invoker.sources
script = "//build/android/gyp/java_cpp_enum.py"
gen_dir = "${target_gen_dir}/${target_name}/enums"
@@ -998,6 +1003,9 @@
if (defined(invoker.dex_path)) {
dex_path = invoker.dex_path
}
+ if (defined(invoker.manifest_entries)) {
+ manifest_entries = invoker.manifest_entries
+ }
supports_android = true
requires_android = true
diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn
index b50a64c..164b97d 100644
--- a/build/config/compiler/BUILD.gn
+++ b/build/config/compiler/BUILD.gn
@@ -380,8 +380,10 @@
# https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
# Only apply this to the target linker, since the host
# linker might not be gold, but isn't used much anyway.
- "-Wl,--threads",
- "-Wl,--thread-count=4",
+ # TODO(raymes): Disable threading because gold is frequently
+ # crashing on the bots: crbug.com/161942.
+ #"-Wl,--threads",
+ #"-Wl,--thread-count=4",
]
}
@@ -529,7 +531,7 @@
]
}
- # Android standard library setup.
+ # Stlport setup. Android uses a different (smaller) version of the STL.
if (is_android) {
if (is_clang) {
# Work around incompatibilities between bionic and clang headers.
@@ -539,44 +541,55 @@
]
}
- defines += [ "__GNU_SOURCE=1" ] # Necessary for clone().
+ defines += [
+ "USE_STLPORT=1",
+ "_STLP_USE_PTR_SPECIALIZATIONS=1",
+ "__GNU_SOURCE=1", # Necessary for clone().
+ ]
ldflags += [
"-Wl,--warn-shared-textrel",
"-nostdlib",
]
- android_libcpp_root = "$android_ndk_root/sources/cxx-stl/llvm-libc++"
+ # NOTE: The stlport header include paths below are specified in cflags
+ # rather than include_dirs because they need to come after include_dirs.
+ # Think of them like system headers, but don't use '-isystem' because the
+ # arm-linux-androideabi-4.4.3 toolchain (circa Gingerbread) will exhibit
+ # strange errors. The include ordering here is important; change with
+ # caution.
+ android_stlport_root = "$android_ndk_root/sources/cxx-stl/stlport"
- cflags += [
- "-isystem" +
- rebase_path("$android_libcpp_root/libcxx/include", root_build_dir),
- "-isystem" + rebase_path(
- "$android_ndk_root/sources/cxx-stl/llvm-libc++abi/libcxxabi/include",
- root_build_dir),
- "-isystem" +
- rebase_path("$android_ndk_root/sources/android/support/include",
- root_build_dir),
- ]
-
- lib_dirs += [ "$android_libcpp_root/libs/$android_app_abi" ]
+ cflags += [ "-isystem" +
+ rebase_path("$android_stlport_root/stlport", root_build_dir) ]
+ lib_dirs += [ "$android_stlport_root/libs/$android_app_abi" ]
if (component_mode == "shared_library") {
- android_libcpp_library = "c++_shared"
+ libs += [ "stlport_shared" ]
} else {
- android_libcpp_library = "c++_static"
+ libs += [ "stlport_static" ]
+ }
+
+ if (cpu_arch == "mipsel") {
+ libs += [
+ # ld linker is used for mips Android, and ld does not accept library
+ # absolute path prefixed by "-l"; Since libgcc does not exist in mips
+ # sysroot the proper library will be linked.
+ # TODO(gordanac): Remove once gold linker is used for mips Android.
+ "gcc",
+ ]
+ } else {
+ libs += [
+ # Manually link the libgcc.a that the cross compiler uses. This is
+ # absolute because the linker will look inside the sysroot if it's not.
+ rebase_path(android_libgcc_file),
+ ]
}
libs += [
- "$android_libcpp_library",
- "atomic",
-
- # Manually link the libgcc.a that the cross compiler uses. This is
- # absolute because the linker will look inside the sysroot if it's not.
- rebase_path(android_libgcc_file),
- "m",
"c",
"dl",
+ "m",
]
}
}
diff --git a/build/config/sysroot.gni b/build/config/sysroot.gni
index 77032f7..a9b250c 100644
--- a/build/config/sysroot.gni
+++ b/build/config/sysroot.gni
@@ -22,6 +22,12 @@
sysroot = rebase_path("$android_ndk_root/$arm_android_sysroot_subdir")
} else if (cpu_arch == "mipsel") {
sysroot = rebase_path("$android_ndk_root/$mips_android_sysroot_subdir")
+ } else if (cpu_arch == "x64") {
+ sysroot = rebase_path("$android_ndk_root/$x86_64_android_sysroot_subdir")
+ } else if (cpu_arch == "arm64") {
+ sysroot = rebase_path("$android_ndk_root/$arm64_android_sysroot_subdir")
+ } else if (cpu_arch == "mips64el") {
+ sysroot = rebase_path("$android_ndk_root/$mips64_android_sysroot_subdir")
} else {
sysroot = ""
}
diff --git a/build/download_sdk_extras.py b/build/download_sdk_extras.py
index d38ee86..45e7199 100755
--- a/build/download_sdk_extras.py
+++ b/build/download_sdk_extras.py
@@ -7,7 +7,8 @@
The script expects arguments that specify zips file in the google storage
bucket named: <dir in SDK extras>_<package name>_<version>.zip. The file will
-be extracted in the android_tools/sdk/extras directory.
+be extracted in the android_tools/sdk/extras directory on the test bots. This
+script will not do anything for developers.
"""
import json
@@ -17,11 +18,16 @@
import sys
import zipfile
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'android'))
-from pylib import constants
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+sys.path.insert(0, os.path.join(SCRIPT_DIR, 'android'))
+sys.path.insert(1, os.path.join(CHROME_SRC, 'tools'))
-GSUTIL_PATH = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
- os.pardir, os.pardir, os.pardir, os.pardir, 'depot_tools', 'gsutil.py')
+from pylib import constants
+import find_depot_tools
+
+DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
+GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
SDK_EXTRAS_BUCKET = 'gs://chrome-sdk-extras'
SDK_EXTRAS_PATH = os.path.join(constants.ANDROID_SDK_ROOT, 'extras')
SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
@@ -38,7 +44,7 @@
def main():
- if not os.path.exists(GSUTIL_PATH) or not os.path.exists(SDK_EXTRAS_PATH):
+ if not os.environ.get('CHROME_HEADLESS'):
# This is not a buildbot checkout.
return 0
# Update the android_sdk_extras.json file to update downloaded packages.
@@ -48,8 +54,8 @@
local_zip = '%s/%s' % (SDK_EXTRAS_PATH, package['zip'])
if not os.path.exists(local_zip):
package_zip = '%s/%s' % (SDK_EXTRAS_BUCKET, package['zip'])
- subprocess.check_call([GSUTIL_PATH, '--force-version', '4.7', 'cp',
- package_zip, local_zip])
+ subprocess.check_call(['python', GSUTIL_PATH, '--force-version', '4.7',
+ 'cp', package_zip, local_zip])
# Always clean dir and extract zip to ensure correct contents.
clean_and_extract(package['dir_name'], package['package'], package['zip'])
diff --git a/build/filename_rules.gypi b/build/filename_rules.gypi
index 1bef75f..bc657d8 100644
--- a/build/filename_rules.gypi
+++ b/build/filename_rules.gypi
@@ -37,7 +37,7 @@
}],
['OS!="android" or _toolset=="host" or >(nacl_untrusted_build)==1', {
'sources/': [
- ['exclude', '_android(_unittest)?\\.cc$'],
+ ['exclude', '_android(_unittest)?\\.(h|cc)$'],
['exclude', '(^|/)android/'],
],
}],
diff --git a/build/get_landmines.py b/build/get_landmines.py
index ac3d7d6..7a918c8 100755
--- a/build/get_landmines.py
+++ b/build/get_landmines.py
@@ -29,6 +29,7 @@
print 'Need to clobber winja goma due to backend cwd cache fix.'
if platform() == 'android':
print 'Clobber: to handle new way of suppressing findbugs failures.'
+ print 'Clobber to fix gyp not rename package name (crbug.com/457038)'
if platform() == 'win' and builder() == 'ninja':
print 'Compile on cc_unittests fails due to symbols removed in r185063.'
if platform() == 'linux' and builder() == 'ninja':
diff --git a/build/grit_action.gypi b/build/grit_action.gypi
index 15ead28..462fb4c 100644
--- a/build/grit_action.gypi
+++ b/build/grit_action.gypi
@@ -21,6 +21,7 @@
# instead of build/common.gypi .
'grit_additional_defines%': [],
'grit_rc_header_format%': [],
+ 'grit_whitelist%': '',
'conditions': [
# These scripts can skip writing generated files if they are identical
@@ -36,6 +37,17 @@
}],
],
},
+ 'conditions': [
+ ['"<(grit_whitelist)"==""', {
+ 'variables': {
+ 'grit_whitelist_flag': [],
+ }
+ }, {
+ 'variables': {
+ 'grit_whitelist_flag': ['-w', '<(grit_whitelist)'],
+ }
+ }]
+ ],
'inputs': [
'<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
'--inputs <(grit_grd_file) -f "<(grit_resource_ids)")',
@@ -51,6 +63,7 @@
'-o', '<(grit_out_dir)',
'--write-only-new=<(write_only_new)',
'<@(grit_defines)',
+ '<@(grit_whitelist_flag)',
'<@(grit_additional_defines)',
'<@(grit_rc_header_format)'],
'message': 'Generating resources from <(grit_grd_file)',
diff --git a/build/install-android-sdks.sh b/build/install-android-sdks.sh
new file mode 100755
index 0000000..5c4edaf
--- /dev/null
+++ b/build/install-android-sdks.sh
@@ -0,0 +1,25 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install SDKs needed to build chromium on android.
+# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions
+
+echo 'checking for sdk packages install'
+# Use absolute path to call 'android' so script can be run from any directory.
+cwd=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+# Get the SDK extras packages to install from the DEPS file 'sdkextras' hook.
+packages="$(python ${cwd}/get_sdk_extras_packages.py)"
+for package in "${packages}"; do
+ pkg_id=$(${cwd}/../third_party/android_tools/sdk/tools/android list sdk | \
+ grep -i "$package," | \
+ awk '/^[ ]*[0-9]*- / {gsub("-",""); print $1}')
+ if [[ -n ${pkg_id} ]]; then
+ ${cwd}/../third_party/android_tools/sdk/tools/android update sdk --no-ui \
+ --filter ${pkg_id}
+ fi
+done
+
+echo "install-android-sdks.sh complete."
diff --git a/build/install-build-deps-android.sh b/build/install-build-deps-android.sh
index 5d95ed5..cf87381 100755
--- a/build/install-build-deps-android.sh
+++ b/build/install-build-deps-android.sh
@@ -4,8 +4,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-# Script to install everything needed to build chromium on android that
-# requires sudo privileges.
+# Script to install everything needed to build chromium on android, including
+# items requiring sudo privileges.
# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions
# This script installs the sun-java6 packages (bin, jre and jdk). Sun requires
@@ -92,21 +92,9 @@
fi
fi
+# Install SDK packages for android
if test "$skip_inst_sdk_packages" != 1; then
- echo 'checking for sdk packages install'
- # Use absolute path to call 'android' so script can be run from any directory.
- cwd=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
- # Get the SDK extras packages to install from the DEPS file 'sdkextras' hook.
- packages="$(python ${cwd}/get_sdk_extras_packages.py)"
- for package in "${packages}"; do
- pkg_id=$(${cwd}/../third_party/android_tools/sdk/tools/android list sdk | \
- grep -i "$package," | \
- awk '/^[ ]*[0-9]*- / {gsub("-",""); print $1}')
- if [[ -n ${pkg_id} ]]; then
- ${cwd}/../third_party/android_tools/sdk/tools/android update sdk --no-ui \
- --filter ${pkg_id}
- fi
- done
+ "$(dirname "${BASH_SOURCE[0]}")/install-android-sdks.sh"
fi
echo "install-build-deps-android.sh complete."
diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh
index cded452..43b5545 100755
--- a/build/install-build-deps.sh
+++ b/build/install-build-deps.sh
@@ -155,7 +155,7 @@
libfontconfig1:i386 libgconf-2-4:i386 libglib2.0-0:i386 libgpm2:i386
libgtk2.0-0:i386 libncurses5:i386 lib32ncurses5-dev
libnss3:i386 libpango1.0-0:i386
- libssl0.9.8:i386 libtinfo-dev libtinfo-dev:i386 libtool
+ libssl1.0.0:i386 libtinfo-dev libtinfo-dev:i386 libtool
libxcomposite1:i386 libxcursor1:i386 libxdamage1:i386 libxi6:i386
libxrandr2:i386 libxss1:i386 libxtst6:i386 texinfo xvfb
${naclports_list}"
@@ -352,7 +352,7 @@
sudo dpkg --add-architecture i386
fi
fi
-sudo apt-get update
+#sudo apt-get update
# We initially run "apt-get" with the --reinstall option and parse its output.
# This way, we can find all the packages that need to be newly installed
@@ -416,14 +416,32 @@
echo "Skipping installation of Chrome OS fonts."
fi
+# $1 - target name
+# $2 - link name
+create_library_symlink() {
+ target=$1
+ linkname=$2
+ if [ -L $linkname ]; then
+ if [ "$(basename $(readlink $linkname))" != "$(basename $target)" ]; then
+ sudo rm $linkname
+ fi
+ fi
+ if [ ! -r $linkname ]; then
+ echo "Creating link: $linkname"
+ sudo ln -fs $target $linkname
+ fi
+}
+
if test "$do_inst_nacl" = "1"; then
echo "Installing symbolic links for NaCl."
- if [ ! -r /usr/lib/i386-linux-gnu/libcrypto.so ]; then
- sudo ln -fs libcrypto.so.0.9.8 /usr/lib/i386-linux-gnu/libcrypto.so
- fi
- if [ ! -r /usr/lib/i386-linux-gnu/libssl.so ]; then
- sudo ln -fs libssl.so.0.9.8 /usr/lib/i386-linux-gnu/libssl.so
- fi
+ # naclports needs to cross build python for i386, but libssl1.0.0:i386
+ # only contains libcrypto.so.1.0.0 and not the symlink needed for
+ # linking (libcrypto.so).
+ create_library_symlink /lib/i386-linux-gnu/libcrypto.so.1.0.0 \
+ /usr/lib/i386-linux-gnu/libcrypto.so
+
+ create_library_symlink /lib/i386-linux-gnu/libssl.so.1.0.0 \
+ /usr/lib/i386-linux-gnu/libssl.so
else
echo "Skipping symbolic links for NaCl."
fi
diff --git a/build/ios/grit_whitelist.txt b/build/ios/grit_whitelist.txt
index 7fc4e1d..c1edfdf 100644
--- a/build/ios/grit_whitelist.txt
+++ b/build/ios/grit_whitelist.txt
@@ -12,6 +12,7 @@
IDR_CRASHES_JS
IDR_CREDITS_HTML
IDR_CREDITS_JS
+IDR_DATA_REDUCTION_PROXY_INTERSTITIAL_HTML
IDR_DEFAULT_FAVICON
IDR_DEFAULT_FAVICON_32
IDR_DEFAULT_FAVICON_64
@@ -176,6 +177,7 @@
IDS_BOOKMARK_BAR_REDO_EDIT
IDS_BOOKMARK_BAR_REDO_MOVE
IDS_BOOKMARK_BAR_REDO_REORDER
+IDS_BOOKMARK_BAR_SUPERVISED_FOLDER_DEFAULT_NAME
IDS_BOOKMARK_BAR_UNDO
IDS_BOOKMARK_BAR_UNDO_ADD
IDS_BOOKMARK_BAR_UNDO_DELETE
@@ -270,6 +272,12 @@
IDS_CRASHES_TITLE
IDS_CRASHES_UPLOAD_MESSAGE
IDS_CREDIT_CARD_NUMBER_PREVIEW_FORMAT
+IDS_DATA_REDUCTION_PROXY_BACK_BUTTON
+IDS_DATA_REDUCTION_PROXY_CANNOT_PROXY_HEADING
+IDS_DATA_REDUCTION_PROXY_CANNOT_PROXY_PRIMARY_PARAGRAPH
+IDS_DATA_REDUCTION_PROXY_CANNOT_PROXY_SECONDARY_PARAGRAPH
+IDS_DATA_REDUCTION_PROXY_CONTINUE_BUTTON
+IDS_DATA_REDUCTION_PROXY_TITLE
IDS_DEFAULT_AVATAR_NAME_10
IDS_DEFAULT_AVATAR_NAME_11
IDS_DEFAULT_AVATAR_NAME_12
diff --git a/build/java_apk.gypi b/build/java_apk.gypi
index 4243554..8d55307 100644
--- a/build/java_apk.gypi
+++ b/build/java_apk.gypi
@@ -24,9 +24,8 @@
# Optional/automatic variables:
# additional_input_paths - These paths will be included in the 'inputs' list to
# ensure that this target is rebuilt when one of these paths changes.
-# additional_res_dirs - Additional directories containing Android resources.
-# additional_res_packages - Package names of the R.java files corresponding to
-# each directory in additional_res_dirs.
+# additional_res_packages - Package names of R.java files generated in addition
+# to the default package name defined in AndroidManifest.xml.
# additional_src_dirs - Additional directories with .java files to be compiled
# and included in the output of this target.
# additional_bundled_libs - Additional libraries what will be stripped and
diff --git a/build/sanitizers/OWNERS b/build/sanitizers/OWNERS
index 10a3e3b..0be2be8 100644
--- a/build/sanitizers/OWNERS
+++ b/build/sanitizers/OWNERS
@@ -1,2 +1,4 @@
glider@chromium.org
+earthdok@chromium.org
per-file tsan_suppressions.cc=*
+per-file lsan_suppressions.cc=*
diff --git a/build/sanitizers/lsan_suppressions.cc b/build/sanitizers/lsan_suppressions.cc
new file mode 100644
index 0000000..a076ba0
--- /dev/null
+++ b/build/sanitizers/lsan_suppressions.cc
@@ -0,0 +1,100 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for LeakSanitizer.
+// You can also pass additional suppressions via LSAN_OPTIONS:
+// LSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/leaksanitizer for more info.
+
+#if defined(LEAK_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kLSanDefaultSuppressions which contains LSan suppressions delimited by
+// newlines. See http://dev.chromium.org/developers/testing/leaksanitizer
+// for the instructions on writing suppressions.
+char kLSanDefaultSuppressions[] =
+// Intentional leak used as sanity test for Valgrind/memcheck.
+"leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
+
+// ================ Leaks in third-party code ================
+
+// False positives in libfontconfig. http://crbug.com/39050
+"leak:libfontconfig\n"
+
+// Leaks in Nvidia's libGL.
+"leak:libGL.so\n"
+
+// A small leak in V8. http://crbug.com/46571#c9
+"leak:blink::V8GCController::collectGarbage\n"
+
+// TODO(earthdok): revisit NSS suppressions after the switch to BoringSSL
+// NSS leaks in CertDatabaseNSSTest tests. http://crbug.com/51988
+"leak:net::NSSCertDatabase::ImportFromPKCS12\n"
+"leak:net::NSSCertDatabase::ListCerts\n"
+"leak:net::NSSCertDatabase::DeleteCertAndKey\n"
+"leak:crypto::ScopedTestNSSDB::ScopedTestNSSDB\n"
+// Another leak due to not shutting down NSS properly. http://crbug.com/124445
+"leak:error_get_my_stack\n"
+// The NSS suppressions above will not fire when the fast stack unwinder is
+// used, because it can't unwind through NSS libraries. Apply blanket
+// suppressions for now.
+"leak:libnssutil3\n"
+"leak:libnspr4\n"
+"leak:libnss3\n"
+"leak:libplds4\n"
+"leak:libnssckbi\n"
+
+// XRandR has several one time leaks.
+"leak:libxrandr\n"
+
+// xrandr leak. http://crbug.com/119677
+"leak:XRRFindDisplay\n"
+
+// Suppressions for objects which can be owned by the V8 heap. This is a
+// temporary workaround until LeakSanitizer supports the V8 heap.
+// Those should only fire in (browser)tests. If you see one of them in Chrome,
+// then it's a real leak.
+// http://crbug.com/328552
+"leak:WTF::StringImpl::createUninitialized\n"
+"leak:WTF::StringImpl::create8BitIfPossible\n"
+"leak:blink::MouseEvent::create\n"
+"leak:blink::WindowProxy::initializeIfNeeded\n"
+"leak:blink::*::*GetterCallback\n"
+"leak:blink::CSSComputedStyleDeclaration::create\n"
+"leak:blink::V8PerIsolateData::ensureDomInJSContext\n"
+"leak:gin/object_template_builder.h\n"
+"leak:gin::internal::Dispatcher\n"
+"leak:blink::LocalDOMWindow::getComputedStyle\n"
+
+// http://crbug.com/356785
+"leak:content::RenderViewImplTest_DecideNavigationPolicyForWebUI_Test::TestBody\n"
+
+// ================ Leaks in Chromium code ================
+// PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
+// Instead, commits that introduce memory leaks should be reverted. Suppressing
+// the leak is acceptable in some cases when reverting is impossible, i.e. when
+// enabling leak detection for the first time for a test target with
+// pre-existing leaks.
+
+// Small test-only leak in ppapi_unittests. http://crbug.com/258113
+"leak:ppapi::proxy::PPP_Instance_Private_ProxyTest_PPPInstancePrivate_Test\n"
+
+// http://crbug.com/322671
+"leak:content::SpeechRecognitionBrowserTest::SetUpOnMainThread\n"
+
+// http://crbug.com/355641
+"leak:TrayAccessibilityTest\n"
+
+// http://crbug.com/354644
+"leak:CertificateViewerUITest::ShowModalCertificateViewer\n"
+
+// http://crbug.com/356306
+"leak:content::SetProcessTitleFromCommandLine\n"
+
+// PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+// End of suppressions.
+; // Please keep this semicolon.
+
+#endif // LEAK_SANITIZER
diff --git a/build/sanitizers/sanitizer_options.cc b/build/sanitizers/sanitizer_options.cc
index af78bf8..6668c1e 100644
--- a/build/sanitizers/sanitizer_options.cc
+++ b/build/sanitizers/sanitizer_options.cc
@@ -12,6 +12,8 @@
#include <string.h>
#endif // ADDRESS_SANITIZER && OS_MACOSX
+#if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) || \
+ defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER)
// Functions returning default options are declared weak in the tools' runtime
// libraries. To make the linker pick the strong replacements for those
// functions from this module, we explicitly force its inclusion by passing
@@ -19,6 +21,19 @@
extern "C"
void _sanitizer_options_link_helper() { }
+// The callbacks we define here will be called from the sanitizer runtime, but
+// aren't referenced from the Chrome executable. We must ensure that those
+// callbacks are not sanitizer-instrumented, and that they aren't stripped by
+// the linker.
+#define SANITIZER_HOOK_ATTRIBUTE \
+ extern "C" \
+ __attribute__((no_sanitize_address)) \
+ __attribute__((no_sanitize_memory)) \
+ __attribute__((no_sanitize_thread)) \
+ __attribute__((visibility("default"))) \
+ __attribute__((used))
+#endif
+
#if defined(ADDRESS_SANITIZER)
// Default options for AddressSanitizer in various configurations:
// strict_memcmp=1 - disable the strict memcmp() checking
@@ -78,13 +93,7 @@
#endif // OS_LINUX
#if defined(OS_LINUX) || defined(OS_MACOSX)
-extern "C"
-__attribute__((no_sanitize_address))
-__attribute__((visibility("default")))
-// The function isn't referenced from the executable itself. Make sure it isn't
-// stripped by the linker.
-__attribute__((used))
-const char *__asan_default_options() {
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_options() {
#if defined(OS_MACOSX)
char*** argvp = _NSGetArgv();
int* argcp = _NSGetArgc();
@@ -120,26 +129,34 @@
"report_thread_leaks=0 print_suppressions=1 history_size=7 "
"strip_path_prefix=Release/../../ ";
-extern "C"
-__attribute__((no_sanitize_thread))
-__attribute__((visibility("default")))
-// The function isn't referenced from the executable itself. Make sure it isn't
-// stripped by the linker.
-__attribute__((used))
-const char *__tsan_default_options() {
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_options() {
return kTsanDefaultOptions;
}
extern "C" char kTSanDefaultSuppressions[];
-extern "C"
-__attribute__((no_sanitize_thread))
-__attribute__((visibility("default")))
-// The function isn't referenced from the executable itself. Make sure it isn't
-// stripped by the linker.
-__attribute__((used))
-const char *__tsan_default_suppressions() {
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() {
return kTSanDefaultSuppressions;
}
#endif // THREAD_SANITIZER && OS_LINUX
+
+#if defined(LEAK_SANITIZER)
+// Default options for LeakSanitizer:
+// print_suppressions=1 - print the list of matched suppressions.
+// strip_path_prefix=Release/../../ - prefixes up to and including this
+// substring will be stripped from source file paths in symbolized reports.
+const char kLsanDefaultOptions[] =
+ "print_suppressions=1 strip_path_prefix=Release/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_options() {
+ return kLsanDefaultOptions;
+}
+
+extern "C" char kLSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() {
+ return kLSanDefaultSuppressions;
+}
+
+#endif // LEAK_SANITIZER
diff --git a/build/sanitizers/sanitizers.gyp b/build/sanitizers/sanitizers.gyp
index 53cc298..4126d22 100644
--- a/build/sanitizers/sanitizers.gyp
+++ b/build/sanitizers/sanitizers.gyp
@@ -36,6 +36,11 @@
'tsan_suppressions.cc',
],
}],
+ ['lsan==1', {
+ 'sources': [
+ 'lsan_suppressions.cc',
+ ],
+ }],
],
'cflags/': [
['exclude', '-fsanitize='],
diff --git a/build/sanitizers/tsan_suppressions.cc b/build/sanitizers/tsan_suppressions.cc
index a15cba1..5704f2d 100644
--- a/build/sanitizers/tsan_suppressions.cc
+++ b/build/sanitizers/tsan_suppressions.cc
@@ -316,15 +316,15 @@
// https://crbug.com/448203
"race:blink::RemoteFrame::detach\n"
+// https://crbug.com/454652
+"race:net::NetworkChangeNotifier::SetTestNotificationsOnly\n"
+
// https://crbug.com/455638
"deadlock:dbus::Bus::ShutdownAndBlock\n"
// https://crbug.com/455665
"race:mojo::common::*::tick_clock\n"
-// https://crbug.com/456095
-"race:blink::Scheduler\n"
-
// End of suppressions.
; // Please keep this semicolon.
diff --git a/build/secondary/third_party/android_tools/BUILD.gn b/build/secondary/third_party/android_tools/BUILD.gn
index fc1ecbf..92a088a 100644
--- a/build/secondary/third_party/android_tools/BUILD.gn
+++ b/build/secondary/third_party/android_tools/BUILD.gn
@@ -64,3 +64,16 @@
]
jar_path = "$android_sdk_root/extras/android/support/v7/mediarouter/libs/android-support-v7-mediarouter.jar"
}
+
+android_resources("google_play_services_default_resources") {
+ v14_verify_only = true
+ resource_dirs = [ "$android_sdk_root/extras/google/google_play_services/libproject/google-play-services_lib/res" ]
+ custom_package = "com.google.android.gms"
+}
+android_java_prebuilt("google_play_services_default_java") {
+ deps = [
+ ":android_support_v13_java",
+ ":google_play_services_default_resources",
+ ]
+ jar_path = "$android_sdk_root/extras/google/google_play_services/libproject/google-play-services_lib/libs/google-play-services.jar"
+}
diff --git a/build/toolchain/android/BUILD.gn b/build/toolchain/android/BUILD.gn
index d21ed27..e3d950a 100644
--- a/build/toolchain/android/BUILD.gn
+++ b/build/toolchain/android/BUILD.gn
@@ -103,3 +103,27 @@
tool_prefix = "$mips_android_toolchain_root/bin/mipsel-linux-android-"
toolchain_cpu_arch = "mipsel"
}
+
+android_gcc_toolchain("x64") {
+ android_ndk_sysroot = "$android_ndk_root/$x86_64_android_sysroot_subdir"
+ android_ndk_lib_dir = "usr/lib64"
+
+ tool_prefix = "$x86_64_android_toolchain_root/bin/x86_64-linux-android-"
+ toolchain_cpu_arch = "x86_64"
+}
+
+android_gcc_toolchain("arm64") {
+ android_ndk_sysroot = "$android_ndk_root/$arm64_android_sysroot_subdir"
+ android_ndk_lib_dir = "usr/lib"
+
+ tool_prefix = "$arm64_android_toolchain_root/bin/aarch64-linux-android-"
+ toolchain_cpu_arch = "aarch64"
+}
+
+android_gcc_toolchain("mips64el") {
+ android_ndk_sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
+ android_ndk_lib_dir = "usr/lib64"
+
+ tool_prefix = "$mips64_android_toolchain_root/bin/mips64el-linux-android-"
+ toolchain_cpu_arch = "mips64el"
+}
diff --git a/build/toolchain/get_concurrent_links.py b/build/toolchain/get_concurrent_links.py
index 629d67d..6a40101 100644
--- a/build/toolchain/get_concurrent_links.py
+++ b/build/toolchain/get_concurrent_links.py
@@ -7,6 +7,7 @@
import os
import re
+import subprocess
import sys
def GetDefaultConcurrentLinks():
@@ -31,8 +32,7 @@
("sullAvailExtendedVirtual", ctypes.c_ulonglong),
]
- stat = MEMORYSTATUSEX()
- stat.dwLength = ctypes.sizeof(stat)
+ stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX))
ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30))) # total / 4GB
@@ -55,7 +55,7 @@
# A static library debug build of Chromium's unit_tests takes ~2.7GB, so
# 4GB per ld process allows for some more bloat.
return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB
- except:
+ except Exception:
return 1
else:
# TODO(scottmg): Implement this for other platforms.
diff --git a/build/toolchain/mac/BUILD.gn b/build/toolchain/mac/BUILD.gn
index 26ad865..0560714 100644
--- a/build/toolchain/mac/BUILD.gn
+++ b/build/toolchain/mac/BUILD.gn
@@ -14,6 +14,12 @@
import("//build/toolchain/clang.gni")
import("//build/toolchain/goma.gni")
+if (use_goma) {
+ goma_prefix = "$goma_dir/gomacc "
+} else {
+ goma_prefix = ""
+}
+
if (is_clang) {
cc = rebase_path("//third_party/llvm-build/Release+Asserts/bin/clang",
root_build_dir)
@@ -23,6 +29,8 @@
cc = "gcc"
cxx = "g++"
}
+cc = goma_prefix + cc
+cxx = goma_prefix + cxx
ld = cxx
# This will copy the gyp-mac-tool to the build directory. We pass in the source
diff --git a/build/toolchain/win/setup_toolchain.py b/build/toolchain/win/setup_toolchain.py
index 569f4da..cc89638 100644
--- a/build/toolchain/win/setup_toolchain.py
+++ b/build/toolchain/win/setup_toolchain.py
@@ -1,6 +1,14 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+#
+# Copies the given "win tool" (which the toolchain uses to wrap compiler
+# invocations) and the environment blocks for the 32-bit and 64-bit builds on
+# Windows to the build directory.
+#
+# The arguments are the visual studio install location and the location of the
+# win tool. The script assumes that the root build directory is the current dir
+# and the files will be written to the current directory.
import errno
import os
@@ -8,16 +16,6 @@
import subprocess
import sys
-"""
-Copies the given "win tool" (which the toolchain uses to wrap compiler
-invocations) and the environment blocks for the 32-bit and 64-bit builds on
-Windows to the build directory.
-
-The arguments are the visual studio install location and the location of the
-win tool. The script assumes that the root build directory is the current dir
-and the files will be written to the current directory.
-"""
-
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
@@ -104,7 +102,6 @@
'<visual studio path> <win tool path> <win sdk path> '
'<runtime dirs> <cpu_arch>')
sys.exit(2)
- vs_path = sys.argv[1]
tool_source = sys.argv[2]
win_sdk_path = sys.argv[3]
runtime_dirs = sys.argv[4]
diff --git a/build/vs_toolchain.py b/build/vs_toolchain.py
index 5373f03..6f49e7c 100644
--- a/build/vs_toolchain.py
+++ b/build/vs_toolchain.py
@@ -37,7 +37,6 @@
toolchain = toolchain_data['path']
version = toolchain_data['version']
- version_is_pro = version[-1] != 'e'
win8sdk = toolchain_data['win8sdk']
wdk = toolchain_data['wdk']
# TODO(scottmg): The order unfortunately matters in these. They should be
@@ -168,7 +167,6 @@
if sys.platform in ('win32', 'cygwin') and depot_tools_win_toolchain:
import find_depot_tools
depot_tools_path = find_depot_tools.add_depot_tools_to_path()
- json_data_file = os.path.join(script_dir, 'win_toolchain.json')
get_toolchain_args = [
sys.executable,
os.path.join(depot_tools_path,