Clone of chromium aad1ce808763f59c7a3753e08f1500a104ecc6fd refs/remotes/origin/HEAD
diff --git a/build/OWNERS b/build/OWNERS
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/build/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/build/README.chromium b/build/README.chromium
new file mode 100644
index 0000000..012df35
--- /dev/null
+++ b/build/README.chromium
@@ -0,0 +1,15 @@
+List of property sheets to be included by projects:
+  common.vsprops
+    No longer used; a no-op kept for compatibility with existing projects.
+
+  debug.vsprops
+    Enables debug settings. Must be included directly in the Debug configuration. Includes internal\essential.vsprops.
+
+  external_code.vsprops
+    Contains settings that simplify the use of external (non-Google) code by relaxing the warning levels. Should be included after debug.vsprops or release.vsprops so that it overrides their settings.
+
+  output_dll_copy.rules
+    Run to enable automatic copying of DLLs when they appear as input files in a vcproj project.
+
+  release.vsprops
+    Enables release settings. Must be included directly in the Release configuration. Includes internal\essential.vsprops. Also includes "internal\release_impl$(CHROME_BUILD_TYPE).vsprops", so the behavior depends on the CHROME_BUILD_TYPE environment variable.
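+
+Example (a sketch for illustration, not a file in the tree): a project's
+Debug configuration references the sheets through the vcproj
+InheritedPropertySheets attribute, with paths relative to the project, e.g.:
+
+  <Configuration
+    Name="Debug|Win32"
+    InheritedPropertySheets="..\build\debug.vsprops">
+  </Configuration>
+
+For external code, append external_code.vsprops after debug.vsprops in the
+same semicolon-separated list so its relaxed warnings take effect last.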
diff --git a/build/all.gyp b/build/all.gyp
new file mode 100644
index 0000000..e9eaa5b
--- /dev/null
+++ b/build/all.gyp
@@ -0,0 +1,1339 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # A hook that can be overridden in other repositories to add additional
+    # compilation targets to 'All'. Only used on Android.
+    'android_app_targets%': [],
+  },
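+  # For illustration only (not part of this file upstream): a downstream
+  # repository can add targets by defining the variable, without the '%'
+  # default marker, in a .gypi processed ahead of this file, e.g. (the
+  # target path here is hypothetical):
+  #   'variables': {
+  #     'android_app_targets': ['../myapp/myapp.gyp:myapp_apk'],
+  #   },
+  # The '%' suffix above means the empty list is only a default.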
+  'targets': [
+    {
+      'target_name': 'All',
+      'type': 'none',
+      'xcode_create_dependents_test_runner': 1,
+      'dependencies': [
+        'some.gyp:*',
+        '../base/base.gyp:*',
+        '../components/components.gyp:*',
+        '../components/components_tests.gyp:*',
+        '../content/content.gyp:*',
+        '../crypto/crypto.gyp:*',
+        '../net/net.gyp:*',
+        '../sdch/sdch.gyp:*',
+        '../sql/sql.gyp:*',
+        '../testing/gmock.gyp:*',
+        '../testing/gtest.gyp:*',
+        '../third_party/icu/icu.gyp:*',
+        '../third_party/libxml/libxml.gyp:*',
+        '../third_party/sqlite/sqlite.gyp:*',
+        '../third_party/zlib/zlib.gyp:*',
+        '../ui/accessibility/accessibility.gyp:*',
+        '../ui/base/ui_base.gyp:*',
+        '../ui/display/display.gyp:display_unittests',
+        '../ui/snapshot/snapshot.gyp:*',
+        '../url/url.gyp:*',
+      ],
+      'conditions': [
+        ['OS=="ios"', {
+          'dependencies': [
+            '../ios/ios.gyp:*',
+            # NOTE: This list of targets is present because
+            # mojo_base.gyp:mojo_base cannot be built on iOS, as
+            # javascript-related targets cause v8 to be built.
+            '../mojo/mojo_base.gyp:mojo_common_lib',
+            '../mojo/mojo_base.gyp:mojo_common_unittests',
+            '../mojo/mojo_base.gyp:mojo_cpp_bindings',
+            '../mojo/mojo_base.gyp:mojo_public_bindings_unittests',
+            '../mojo/mojo_base.gyp:mojo_public_environment_unittests',
+            '../mojo/mojo_base.gyp:mojo_public_system_perftests',
+            '../mojo/mojo_base.gyp:mojo_public_system_unittests',
+            '../mojo/mojo_base.gyp:mojo_public_test_utils',
+            '../mojo/mojo_base.gyp:mojo_public_utility_unittests',
+            '../mojo/mojo_base.gyp:mojo_system',
+            '../mojo/mojo_base.gyp:mojo_system_impl',
+            '../mojo/mojo_base.gyp:mojo_system_unittests',
+            '../google_apis/google_apis.gyp:google_apis_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_unittests',
+            '../ui/ios/ui_ios_tests.gyp:ui_ios_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+          ],
+        }],
+        ['OS=="android"', {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:content_shell_apk',
+            '../mojo/mojo.gyp:mojo_shell_apk',
+            '../mojo/mojo_base.gyp:mojo_test_apk',
+            '<@(android_app_targets)',
+            'android_builder_tests',
+            '../android_webview/android_webview.gyp:android_webview_apk',
+            '../android_webview/android_webview.gyp:system_webview_apk',
+            '../android_webview/android_webview_telemetry_shell.gyp:android_webview_telemetry_shell_apk',
+            '../chrome/chrome.gyp:chrome_shell_apk',
+            '../chrome/chrome.gyp:chrome_sync_shell_apk',
+            '../remoting/remoting.gyp:remoting_apk',
+            '../tools/telemetry/telemetry.gyp:*#host',
+            # TODO(nyquist): This should instead be a target for sync when all of
+            # the sync-related code for Android has been upstreamed.
+            # See http://crbug.com/159203
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_javalib',
+          ],
+          'conditions': [
+            ['target_arch == "arm" or target_arch == "arm64"', {
+              'dependencies': [
+                # The relocation packer only works on ARM or ARM64.
+                '../tools/relocation_packer/relocation_packer.gyp:relocation_packer_unittests#host',
+              ],
+            }],
+          ],
+        }, {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:*',
+            # TODO: This should build on Android and the target should move to the list above.
+            '../sync/sync.gyp:*',
+          ],
+        }],
+        ['OS!="ios" and OS!="android"', {
+          'dependencies': [
+            '../third_party/re2/re2.gyp:re2',
+            '../chrome/chrome.gyp:*',
+            '../chrome/tools/profile_reset/jtl_compiler.gyp:*',
+            '../cc/blink/cc_blink_tests.gyp:*',
+            '../cc/cc_tests.gyp:*',
+            '../device/bluetooth/bluetooth.gyp:*',
+            '../device/device_tests.gyp:*',
+            '../device/usb/usb.gyp:*',
+            '../gin/gin.gyp:*',
+            '../gpu/gpu.gyp:*',
+            '../gpu/tools/tools.gyp:*',
+            '../ipc/ipc.gyp:*',
+            '../ipc/mojo/ipc_mojo.gyp:*',
+            '../jingle/jingle.gyp:*',
+            '../media/cast/cast.gyp:*',
+            '../media/media.gyp:*',
+            '../mojo/mojo.gyp:*',
+            '../mojo/mojo_base.gyp:*',
+            '../ppapi/ppapi.gyp:*',
+            '../ppapi/ppapi_internal.gyp:*',
+            '../ppapi/tools/ppapi_tools.gyp:*',
+            '../printing/printing.gyp:*',
+            '../skia/skia.gyp:*',
+            '../sync/tools/sync_tools.gyp:*',
+            '../third_party/WebKit/public/all.gyp:*',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:*',
+            '../third_party/codesighs/codesighs.gyp:*',
+            '../third_party/ffmpeg/ffmpeg.gyp:*',
+            '../third_party/iccjpeg/iccjpeg.gyp:*',
+            '../third_party/libpng/libpng.gyp:*',
+            '../third_party/libusb/libusb.gyp:*',
+            '../third_party/libwebp/libwebp.gyp:*',
+            '../third_party/libxslt/libxslt.gyp:*',
+            '../third_party/lzma_sdk/lzma_sdk.gyp:*',
+            '../third_party/mesa/mesa.gyp:*',
+            '../third_party/modp_b64/modp_b64.gyp:*',
+            '../third_party/npapi/npapi.gyp:*',
+            '../third_party/ots/ots.gyp:*',
+            '../third_party/pdfium/samples/samples.gyp:*',
+            '../third_party/qcms/qcms.gyp:*',
+            '../tools/gn/gn.gyp:*',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../v8/tools/gyp/v8.gyp:*',
+            '<(libjpeg_gyp_path):*',
+          ],
+        }],
+        ['OS=="mac" or OS=="ios" or OS=="win"', {
+          'dependencies': [
+            '../third_party/nss/nss.gyp:*',
+          ],
+        }],
+        ['OS=="win" or OS=="ios" or OS=="linux"', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:*',
+          ],
+        }],
+        ['OS=="mac"', {
+          'dependencies': [
+            '../sandbox/sandbox.gyp:*',
+            '../third_party/ocmock/ocmock.gyp:*',
+          ],
+        }],
+        ['OS=="linux"', {
+          'dependencies': [
+            '../courgette/courgette.gyp:*',
+            '../dbus/dbus.gyp:*',
+            '../sandbox/sandbox.gyp:*',
+          ],
+          'conditions': [
+            ['branding=="Chrome"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_packages_<(channel)',
+              ],
+            }],
+            ['enable_ipc_fuzzer==1', {
+              'dependencies': [
+                '../tools/ipc_fuzzer/ipc_fuzzer.gyp:*',
+              ],
+            }],
+          ],
+        }],
+        ['chromecast==1', {
+          'dependencies': [
+            '../chromecast/chromecast.gyp:*',
+          ],
+        }],
+        ['use_x11==1', {
+          'dependencies': [
+            '../tools/xdisplaycheck/xdisplaycheck.gyp:*',
+          ],
+        }],
+        ['OS=="win"', {
+          'conditions': [
+            ['win_use_allocator_shim==1', {
+              'dependencies': [
+                '../base/allocator/allocator.gyp:*',
+              ],
+            }],
+          ],
+          'dependencies': [
+            '../chrome_elf/chrome_elf.gyp:*',
+            '../cloud_print/cloud_print.gyp:*',
+            '../courgette/courgette.gyp:*',
+            '../rlz/rlz.gyp:*',
+            '../sandbox/sandbox.gyp:*',
+            '<(angle_path)/src/angle.gyp:*',
+            '../third_party/bspatch/bspatch.gyp:*',
+            '../tools/win/static_initializers/static_initializers.gyp:*',
+          ],
+        }, {
+          'dependencies': [
+            '../third_party/libevent/libevent.gyp:*',
+          ],
+        }],
+        ['toolkit_views==1', {
+          'dependencies': [
+            '../ui/views/controls/webview/webview.gyp:*',
+            '../ui/views/views.gyp:*',
+          ],
+        }],
+        ['use_aura==1', {
+          'dependencies': [
+            '../ui/aura/aura.gyp:*',
+          ],
+        }],
+        ['use_ash==1', {
+          'dependencies': [
+            '../ash/ash.gyp:*',
+          ],
+        }],
+        ['remoting==1', {
+          'dependencies': [
+            '../remoting/remoting.gyp:*',
+          ],
+        }],
+        ['use_openssl==0', {
+          'dependencies': [
+            '../net/third_party/nss/ssl.gyp:*',
+          ],
+        }],
+        ['use_openssl==1', {
+          'dependencies': [
+            '../third_party/boringssl/boringssl.gyp:*',
+            '../third_party/boringssl/boringssl_tests.gyp:*',
+          ],
+        }],
+        ['enable_app_list==1', {
+          'dependencies': [
+            '../ui/app_list/app_list.gyp:*',
+          ],
+        }],
+        ['OS!="android" and OS!="ios"', {
+          'dependencies': [
+            '../google_apis/gcm/gcm.gyp:*',
+          ],
+        }],
+        ['chromeos==1 or OS=="linux" or OS=="win"', {
+          'dependencies': [
+            '../extensions/shell/app_shell.gyp:*',
+          ],
+        }],
+        ['chromeos==1', {
+          'dependencies': [
+            '../athena/main/athena_main.gyp:*',
+          ],
+        }],
+      ],
+    }, # target_name: All
+    {
+      'target_name': 'All_syzygy',
+      'type': 'none',
+      'conditions': [
+        ['OS=="win" and fastbuild==0 and target_arch=="ia32" and '
+            '(syzyasan==1 or syzygy_optimize==1)', {
+          'dependencies': [
+            '../chrome/installer/mini_installer_syzygy.gyp:*',
+          ],
+        }],
+      ],
+    }, # target_name: All_syzygy
+    {
+      # Note: Android uses android_builder_tests below.
+      # TODO: Consider merging that with this target.
+      'target_name': 'chromium_builder_tests',
+      'type': 'none',
+      'dependencies': [
+        '../base/base.gyp:base_unittests',
+        '../components/components_tests.gyp:components_unittests',
+        '../crypto/crypto.gyp:crypto_unittests',
+        '../net/net.gyp:net_unittests',
+        '../sql/sql.gyp:sql_unittests',
+        '../sync/sync.gyp:sync_unit_tests',
+        '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+        '../ui/base/ui_base_tests.gyp:ui_unittests',
+        '../ui/display/display.gyp:display_unittests',
+        '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+        '../url/url.gyp:url_unittests',
+      ],
+      'conditions': [
+        ['OS!="ios" and OS!="android"', {
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chromedriver_tests',
+            '../chrome/chrome.gyp:chromedriver_unittests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_shell',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../extensions/extensions.gyp:extensions_unittests',
+            '../gin/gin.gyp:gin_unittests',
+            '../google_apis/google_apis.gyp:google_apis_unittests',
+            '../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/cast/cast.gyp:cast_unittests',
+            '../media/media.gyp:media_unittests',
+            '../mojo/mojo.gyp:mojo',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../third_party/WebKit/public/all.gyp:all_blink',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+        }],
+        ['OS=="win"', {
+          'dependencies': [
+            '../chrome/chrome.gyp:crash_service',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            # ../chrome/test/mini_installer requires mini_installer.
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests',
+            '../content/content_shell_and_tests.gyp:copy_test_netscape_plugin',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../sandbox/sandbox.gyp:sbox_integration_tests',
+            '../sandbox/sandbox.gyp:sbox_unittests',
+            '../sandbox/sandbox.gyp:sbox_validation_tests',
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+          ],
+          'conditions': [
+            # remoting_host_installation uses lots of non-trivial GYP that tends
+            # to break because of differences between ninja and msbuild. Make
+            # sure this target is built by the builders on the main waterfall.
+            # See http://crbug.com/180600.
+            ['wix_exists == "True" and sas_dll_exists == "True"', {
+              'dependencies': [
+                '../remoting/remoting.gyp:remoting_host_installation',
+              ],
+            }],
+            ['syzyasan==1', {
+              'variables': {
+                # Disable incremental linking for all modules.
+                # 0: inherit, 1: disabled, 2: enabled.
+                'msvs_debug_link_incremental': '1',
+                'msvs_large_module_debug_link_mode': '1',
+                # Disable RTC. Syzygy explicitly doesn't support RTC
+                # instrumented binaries for now.
+                'win_debug_RuntimeChecks': '0',
+              },
+              'defines': [
+                # Disable iterator debugging (huge speed boost).
+                '_HAS_ITERATOR_DEBUGGING=0',
+              ],
+              'msvs_settings': {
+                'VCLinkerTool': {
+                  # Enable profile information (necessary for SyzyAsan
+                  # instrumentation). This is incompatible with incremental
+                  # linking.
+                  'Profile': 'true',
+                },
+              }
+            }],
+          ],
+        }],
+        ['OS=="linux"', {
+          'dependencies': [
+            '../dbus/dbus.gyp:dbus_unittests',
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests',
+          ],
+        }],
+        ['OS=="mac"', {
+          'dependencies': [
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+            '../ui/message_center/message_center.gyp:*',
+          ],
+        }],
+        ['test_isolation_mode != "noop"', {
+          'dependencies': [
+            'chromium_swarm_tests',
+          ],
+        }],
+        ['OS!="android"', {
+          'dependencies': [
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+          ],
+        }],
+        ['enable_printing!=0', {
+          'dependencies': [
+            '../printing/printing.gyp:printing_unittests',
+          ],
+        }],
+        ['use_aura==1', {
+          'dependencies': [
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+            '../ui/aura/aura.gyp:aura_unittests',
+            '../ui/compositor/compositor.gyp:compositor_unittests',
+            '../ui/keyboard/keyboard.gyp:keyboard_unittests',
+            '../ui/views/views.gyp:views_unittests',
+          ],
+        }],
+        ['use_aura==1 or toolkit_views==1', {
+          'dependencies': [
+            '../ui/events/events.gyp:events_unittests',
+          ],
+        }],
+        ['use_ash==1', {
+          'dependencies': [
+            '../ash/ash.gyp:ash_unittests',
+          ],
+        }],
+        ['disable_nacl==0', {
+          'dependencies': [
+            '../components/nacl.gyp:nacl_loader_unittests',
+          ],
+        }],
+        ['disable_nacl==0 and disable_nacl_untrusted==0', {
+          'dependencies': [
+            '../mojo/mojo_nacl.gyp:*',
+            '../testing/gtest_nacl.gyp:*',
+          ],
+        }],
+      ],
+    }, # target_name: chromium_builder_tests
+  ],
+  'conditions': [
+    ['OS!="ios"', {
+      'targets': [
+        {
+          'target_name': 'blink_tests',
+          'type': 'none',
+          'dependencies': [
+            '../third_party/WebKit/public/all.gyp:all_blink',
+          ],
+          'conditions': [
+            ['OS=="android"', {
+              'dependencies': [
+                '../content/content_shell_and_tests.gyp:content_shell_apk',
+                '../breakpad/breakpad.gyp:dump_syms#host',
+                '../breakpad/breakpad.gyp:minidump_stackwalk#host',
+              ],
+            }, {  # OS!="android"
+              'dependencies': [
+                '../content/content_shell_and_tests.gyp:content_shell',
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../content/content_shell_and_tests.gyp:content_shell_crash_service',
+                '../content/content_shell_and_tests.gyp:layout_test_helper',
+              ],
+            }],
+            ['OS!="win" and OS!="android"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="mac"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:dump_syms#host',
+                '../content/content_shell_and_tests.gyp:layout_test_helper',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:dump_syms',
+              ],
+            }],
+          ],
+        }, # target_name: blink_tests
+      ],
+    }], # OS!=ios
+    ['OS!="ios" and OS!="android"', {
+      'targets': [
+        {
+          'target_name': 'chromium_builder_nacl_win_integration',
+          'type': 'none',
+          'dependencies': [
+            'chromium_builder_qa', # needed for pyauto
+            'chromium_builder_tests',
+          ],
+        }, # target_name: chromium_builder_nacl_win_integration
+        {
+          'target_name': 'chromium_builder_perf',
+          'type': 'none',
+          'dependencies': [
+            '../cc/cc_tests.gyp:cc_perftests',
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:load_library_perf_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:sync_performance_tests',
+            '../media/media.gyp:media_perftests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+          'conditions': [
+            ['OS!="ios" and OS!="win"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_symbols'
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_builder_perf
+        {
+          'target_name': 'chromium_gpu_builder',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_gl_tests',
+            '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test',
+            '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test',
+            '../gpu/gpu.gyp:gl_tests',
+            '../gpu/gpu.gyp:angle_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+          'conditions': [
+            ['OS!="ios" and OS!="win"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_symbols'
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_gpu_builder
+        {
+          'target_name': 'chromium_gpu_debug_builder',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_gl_tests',
+            '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test',
+            '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test',
+            '../gpu/gpu.gyp:gl_tests',
+            '../gpu/gpu.gyp:angle_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+          'conditions': [
+            ['OS!="ios" and OS!="win"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_symbols'
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_gpu_debug_builder
+        {
+          'target_name': 'chromium_builder_qa',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+            # Dependencies of pyauto_functional tests.
+            '../remoting/remoting.gyp:remoting_webapp',
+          ],
+          'conditions': [
+            ['OS=="mac"', {
+              'dependencies': [
+                '../remoting/remoting.gyp:remoting_me2me_host_archive',
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+            ['OS=="win" and component != "shared_library" and wix_exists == "True" and sas_dll_exists == "True"', {
+              'dependencies': [
+                '../remoting/remoting.gyp:remoting_host_installation',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_builder_qa
+        {
+          'target_name': 'chromium_builder_perf_av',
+          'type': 'none',
+          'dependencies': [
+            'blink_tests', # to run layout tests
+            'chromium_builder_qa',  # needed for perf pyauto tests
+          ],
+        },  # target_name: chromium_builder_perf_av
+        {
+          # This target contains everything we need to run tests on the special
+          # device-equipped WebRTC bots. We have device-requiring tests in
+          # browser_tests and content_browsertests.
+          'target_name': 'chromium_builder_webrtc',
+          'type': 'none',
+          'dependencies': [
+            'chromium_builder_perf',
+            '../chrome/chrome.gyp:browser_tests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../media/media.gyp:media_unittests',
+            '../third_party/webrtc/tools/tools.gyp:frame_analyzer',
+            '../third_party/webrtc/tools/tools.gyp:rgba_to_i420_converter',
+          ],
+        },  # target_name: chromium_builder_webrtc
+        {
+          'target_name': 'chromium_builder_chromedriver',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chromedriver',
+            '../chrome/chrome.gyp:chromedriver_tests',
+            '../chrome/chrome.gyp:chromedriver_unittests',
+          ],
+        },  # target_name: chromium_builder_chromedriver
+        {
+          'target_name': 'chromium_builder_asan',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+
+            # We refer to content_shell directly rather than blink_tests
+            # because we don't want the _unittests binaries.
+            '../content/content_shell_and_tests.gyp:content_shell',
+          ],
+          'conditions': [
+            ['OS!="win"', {
+              'dependencies': [
+                '../net/net.gyp:hpack_fuzz_wrapper',
+                '../net/net.gyp:dns_fuzz_stub',
+                '../skia/skia.gyp:filter_fuzz_stub',
+              ],
+            }],
+            ['enable_ipc_fuzzer==1 and OS=="linux" and component!="shared_library"', {
+              'dependencies': [
+                '../tools/ipc_fuzzer/ipc_fuzzer.gyp:*',
+              ],
+            }],
+            ['chromeos==0', {
+              'dependencies': [
+                '../v8/src/d8.gyp:d8#host',
+                '../third_party/pdfium/samples/samples.gyp:pdfium_test',
+              ],
+            }],
+            ['internal_filter_fuzzer==1', {
+              'dependencies': [
+                '../skia/tools/clusterfuzz-data/fuzzers/filter_fuzzer/filter_fuzzer.gyp:filter_fuzzer',
+              ],
+            }], # internal_filter_fuzzer
+            ['OS=="win" and fastbuild==0 and target_arch=="ia32" and syzyasan==1', {
+              'dependencies': [
+                '../chrome/chrome_syzygy.gyp:chrome_dll_syzygy',
+                '../content/content_shell_and_tests.gyp:content_shell_syzyasan',
+                '../pdf/pdf.gyp:pdf_syzyasan',
+              ],
+              'conditions': [
+                ['chrome_multiple_dll==1', {
+                  'dependencies': [
+                    '../chrome/chrome_syzygy.gyp:chrome_child_dll_syzygy',
+                  ],
+                }],
+              ],
+            }],
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_nacl_sdk',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+          ],
+          'conditions': [
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:chrome_nacl_win64',
+              ]
+            }],
+          ],
+        },  # target_name: chromium_builder_nacl_sdk
+      ],  # targets
+    }], # OS!=ios and OS!=android
+    ['OS=="android"', {
+      'targets': [
+        {
+          # The current list of tests for Android. This is temporary until
+          # the full set is supported. If you add a new test here, please
+          # also add it to build/android/pylib/gtest/gtest_config.py, or the
+          # test will not be run.
+          #
+          # WARNING:
+          # Do not add targets here without communicating the implications
+          # for tryserver triggers and load. Please discuss with
+          # chrome-infrastructure-team first.
+          'target_name': 'android_builder_tests',
+          'type': 'none',
+          'dependencies': [
+            '../android_webview/android_webview.gyp:android_webview_unittests',
+            '../base/android/jni_generator/jni_generator.gyp:jni_generator_tests',
+            '../base/base.gyp:base_unittests',
+            '../breakpad/breakpad.gyp:breakpad_unittests_stripped',
+            # Also compile the tools needed to deal with minidumps, they are
+            # needed to run minidump tests upstream.
+            '../breakpad/breakpad.gyp:dump_syms#host',
+            '../breakpad/breakpad.gyp:symupload#host',
+            '../breakpad/breakpad.gyp:minidump_dump#host',
+            '../breakpad/breakpad.gyp:minidump_stackwalk#host',
+            '../build/android/tests/multiple_proguards/multiple_proguards.gyp:multiple_proguards_test_apk',
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_perftests_apk',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_gl_tests',
+            '../content/content_shell_and_tests.gyp:chromium_linker_test_apk',
+            '../content/content_shell_and_tests.gyp:content_shell_test_apk',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../gpu/gpu.gyp:gl_tests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../media/media.gyp:media_perftests_apk',
+            '../media/media.gyp:media_unittests',
+            '../net/net.gyp:net_unittests',
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests_stripped',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../testing/android/junit/junit_test.gyp:junit_unit_tests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/WebKit/public/all.gyp:*',
+            '../tools/android/android_tools.gyp:android_tools',
+            '../tools/android/android_tools.gyp:memconsumer',
+            '../tools/android/findbugs_plugin/findbugs_plugin.gyp:findbugs_plugin_test',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_unittests',
+            '../ui/events/events.gyp:events_unittests',
+            # Unit test bundles packaged as an apk.
+            '../android_webview/android_webview.gyp:android_webview_test_apk',
+            '../android_webview/android_webview.gyp:android_webview_unittests_apk',
+            '../base/base.gyp:base_unittests_apk',
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests_apk',
+            '../cc/cc_tests.gyp:cc_unittests_apk',
+            '../chrome/chrome.gyp:chrome_shell_test_apk',
+            '../chrome/chrome.gyp:chrome_sync_shell_test_apk',
+            '../chrome/chrome.gyp:chrome_shell_uiautomator_tests',
+            '../chrome/chrome.gyp:unit_tests_apk',
+            '../components/components_tests.gyp:components_unittests_apk',
+            '../components/devtools_bridge.gyp:devtools_bridge_tests_apk',
+            '../content/content_shell_and_tests.gyp:content_browsertests_apk',
+            '../content/content_shell_and_tests.gyp:content_gl_tests_apk',
+            '../content/content_shell_and_tests.gyp:content_unittests_apk',
+            '../content/content_shell_and_tests.gyp:video_decode_accelerator_unittest_apk',
+            '../gpu/gpu.gyp:gl_tests_apk',
+            '../gpu/gpu.gyp:gpu_unittests_apk',
+            '../ipc/ipc.gyp:ipc_tests_apk',
+            '../media/media.gyp:media_unittests_apk',
+            '../net/net.gyp:net_unittests_apk',
+            '../sandbox/sandbox.gyp:sandbox_linux_jni_unittests_apk',
+            '../sql/sql.gyp:sql_unittests_apk',
+            '../sync/sync.gyp:sync_unit_tests_apk',
+            '../tools/android/heap_profiler/heap_profiler.gyp:heap_profiler_unittests_apk',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests_apk',
+            '../ui/base/ui_base_tests.gyp:ui_unittests_apk',
+            '../ui/events/events.gyp:events_unittests_apk',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests_apk',
+          ],
+        },
+        {
+          # WebRTC Chromium tests to run on Android.
+          'target_name': 'android_builder_chromium_webrtc',
+          'type': 'none',
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../tools/android/android_tools.gyp:android_tools',
+            '../tools/android/android_tools.gyp:memconsumer',
+            # Unit test bundles packaged as an apk.
+            '../content/content_shell_and_tests.gyp:content_browsertests_apk',
+          ],
+          'conditions': [
+            ['"<(libpeer_target_type)"=="static_library"', {
+              'dependencies': [
+                '../third_party/libjingle/libjingle.gyp:libjingle_peerconnection_javalib',
+              ],
+            }],
+          ],
+        },  # target_name: android_builder_chromium_webrtc
+      ], # targets
+    }], # OS="android"
+    ['OS=="mac"', {
+      'targets': [
+        {
+          # Target to build everything plus the dmg.  We don't put the dmg
+          # in the All target because developers really don't need it.
+          'target_name': 'all_and_dmg',
+          'type': 'none',
+          'dependencies': [
+            'All',
+            '../chrome/chrome.gyp:build_app_dmg',
+          ],
+        },
+        # These targets are here so the build bots can use them to build
+        # subsets of a full tree for faster cycle times.
+        {
+          'target_name': 'chromium_builder_dbg',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../rlz/rlz.gyp:*',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_rel',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_tsan_mac',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_valgrind_mac',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../net/net.gyp:net_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+      ],  # targets
+    }], # OS="mac"
+    ['OS=="win"', {
+      'targets': [
+        # These targets are here so the build bots can use them to build
+        # subsets of a full tree for faster cycle times.
+        {
+          'target_name': 'chromium_builder',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:crash_service',
+            '../chrome/chrome.gyp:gcapi_test',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../content/content_shell_and_tests.gyp:copy_test_netscape_plugin',
+            # ../chrome/test/mini_installer requires mini_installer.
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_unittests',
+            '../ui/events/events.gyp:events_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/views/views.gyp:views_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+          'conditions': [
+            ['target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_win_cf',
+          'type': 'none',
+        },
+        {
+          'target_name': 'chromium_builder_dbg_tsan_win',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_lkgr_drmemory_win',
+          'type': 'none',
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:content_shell',
+            '../content/content_shell_and_tests.gyp:content_shell_crash_service',
+            '../content/content_shell_and_tests.gyp:layout_test_helper',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_drmemory_win',
+          'type': 'none',
+          'dependencies': [
+            '../ash/ash.gyp:ash_shell_unittests',
+            '../ash/ash.gyp:ash_unittests',
+            '../base/base.gyp:base_unittests',
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chrome_app_unittests',
+            '../chrome/chrome.gyp:chromedriver_unittests',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_shell',
+            '../content/content_shell_and_tests.gyp:content_shell_crash_service',
+            '../content/content_shell_and_tests.gyp:layout_test_helper',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../extensions/extensions.gyp:extensions_unittests',
+            '../gin/gin.gyp:gin_shell',
+            '../gin/gin.gyp:gin_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../google_apis/google_apis.gyp:google_apis_unittests',
+            '../gpu/gpu.gyp:angle_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/cast/cast.gyp:cast_unittests',
+            '../media/media.gyp:media_unittests',
+            '../mojo/mojo.gyp:mojo',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests',
+            '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests',
+            '../ui/accessibility/accessibility.gyp:accessibility_unittests',
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+            '../ui/aura/aura.gyp:aura_unittests',
+            '../ui/compositor/compositor.gyp:compositor_unittests',
+            '../ui/display/display.gyp:display_unittests',
+            '../ui/events/events.gyp:events_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/keyboard/keyboard.gyp:keyboard_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'webkit_builder_win',
+          'type': 'none',
+          'dependencies': [
+            'blink_tests',
+          ],
+        },
+      ],  # targets
+      'conditions': [
+        ['branding=="Chrome"', {
+          'targets': [
+            {
+              'target_name': 'chrome_official_builder',
+              'type': 'none',
+              'dependencies': [
+                '../base/base.gyp:base_unittests',
+                '../chrome/chrome.gyp:browser_tests',
+                '../chrome/chrome.gyp:sync_integration_tests',
+                '../chrome/chrome.gyp:crash_service',
+                '../chrome/chrome.gyp:gcapi_dll',
+                '../chrome/chrome.gyp:pack_policy_templates',
+                '../chrome/installer/mini_installer.gyp:mini_installer',
+                '../cloud_print/cloud_print.gyp:cloud_print',
+                '../courgette/courgette.gyp:courgette',
+                '../courgette/courgette.gyp:courgette64',
+                '../ipc/ipc.gyp:ipc_tests',
+                '../media/media.gyp:media_unittests',
+                '../net/net.gyp:net_unittests_run',
+                '../printing/printing.gyp:printing_unittests',
+                '../remoting/remoting.gyp:remoting_webapp',
+                '../sql/sql.gyp:sql_unittests',
+                '../sync/sync.gyp:sync_unit_tests',
+                '../third_party/widevine/cdm/widevine_cdm.gyp:widevinecdmadapter',
+                '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+                '../ui/base/ui_base_tests.gyp:ui_unittests',
+                '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+                '../ui/views/views.gyp:views_unittests',
+                '../url/url.gyp:url_unittests',
+              ],
+              'conditions': [
+                ['target_arch=="ia32"', {
+                  'dependencies': [
+                    '../chrome/chrome.gyp:crash_service_win64',
+                  ],
+                }],
+                ['component != "shared_library" and wix_exists == "True" and \
+                    sas_dll_exists == "True"', {
+                  'dependencies': [
+                    '../remoting/remoting.gyp:remoting_host_installation',
+                  ],
+                }], # component != "shared_library"
+              ]
+            },
+          ], # targets
+        }], # branding=="Chrome"
+      ],  # conditions
+    }], # OS="win"
+    ['use_aura==1', {
+      'targets': [
+        {
+          'target_name': 'aura_builder',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../ui/app_list/app_list.gyp:*',
+            '../ui/aura/aura.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_unittests',
+            '../ui/compositor/compositor.gyp:*',
+            '../ui/display/display.gyp:display_unittests',
+            '../ui/events/events.gyp:*',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/keyboard/keyboard.gyp:*',
+            '../ui/message_center/message_center.gyp:*',
+            '../ui/snapshot/snapshot.gyp:snapshot_unittests',
+            '../ui/views/examples/examples.gyp:views_examples_with_content_exe',
+            '../ui/views/views.gyp:views',
+            '../ui/views/views.gyp:views_unittests',
+            '../ui/wm/wm.gyp:*',
+            'blink_tests',
+          ],
+          'conditions': [
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+            ['use_ash==1', {
+              'dependencies': [
+                '../ash/ash.gyp:ash_shell',
+                '../ash/ash.gyp:ash_unittests',
+              ],
+            }],
+            ['OS=="linux"', {
+              # Tests that currently only work on Linux.
+              'dependencies': [
+                '../base/base.gyp:base_unittests',
+                '../ipc/ipc.gyp:ipc_tests',
+                '../sql/sql.gyp:sql_unittests',
+                '../sync/sync.gyp:sync_unit_tests',
+              ],
+            }],
+            ['chromeos==1', {
+              'dependencies': [
+                '../chromeos/chromeos.gyp:chromeos_unittests',
+                '../athena/main/athena_main.gyp:*',
+              ],
+            }],
+            ['use_ozone==1', {
+              'dependencies': [
+                '../ui/ozone/ozone.gyp:*',
+                '../ui/ozone/demo/ozone_demos.gyp:*',
+              ],
+            }],
+          ],
+        },
+      ],  # targets
+    }, {
+      'conditions': [
+        ['OS=="linux"', {
+          # TODO(thakis): Remove this once the linux gtk bot no longer references
+          # it (probably after the first aura release on linux), see r249162
+          'targets': [
+            {
+              'target_name': 'aura_builder',
+              'type': 'none',
+              'dependencies': [
+                '../chrome/chrome.gyp:chrome',
+              ],
+            },
+          ],  # targets
+      }]], # OS=="linux"
+    }], # "use_aura==1"
+    ['test_isolation_mode != "noop"', {
+      'targets': [
+        {
+          'target_name': 'chromium_swarm_tests',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests_run',
+            '../chrome/chrome.gyp:browser_tests_run',
+            '../chrome/chrome.gyp:interactive_ui_tests_run',
+            '../chrome/chrome.gyp:sync_integration_tests_run',
+            '../chrome/chrome.gyp:unit_tests_run',
+            '../content/content_shell_and_tests.gyp:content_browsertests_run',
+            '../content/content_shell_and_tests.gyp:content_unittests_run',
+            '../net/net.gyp:net_unittests_run',
+          ],
+        }, # target_name: chromium_swarm_tests
+      ],
+    }],
+    ['archive_chromoting_tests==1', {
+      'targets': [
+        {
+          'target_name': 'chromoting_swarm_tests',
+          'type': 'none',
+          'dependencies': [
+            '../testing/chromoting/integration_tests.gyp:chromoting_integration_tests_run',
+          ],
+        }, # target_name: chromoting_swarm_tests
+      ]
+    }],
+    ['OS=="mac" and toolkit_views==1', {
+      'targets': [
+        {
+          'target_name': 'macviews_builder',
+          'type': 'none',
+          'dependencies': [
+            '../ui/views/examples/examples.gyp:views_examples_with_content_exe',
+            '../ui/views/views.gyp:views',
+            '../ui/views/views.gyp:views_unittests',
+          ],
+        },  # target_name: macviews_builder
+      ],  # targets
+    }],  # OS=="mac" and toolkit_views==1
+  ],  # conditions
+}
diff --git a/build/android/AndroidManifest.xml b/build/android/AndroidManifest.xml
new file mode 100644
index 0000000..5dacfa9
--- /dev/null
+++ b/build/android/AndroidManifest.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.  Use of this
+  source code is governed by a BSD-style license that can be found in the
+  LICENSE file.
+-->
+
+<!--
+  This is a dummy manifest which is required by:
+  1. aapt when generating R.java in java.gypi:
+     Nothing in the manifest is used, but it is still required by aapt.
+  2. lint: [min|target]SdkVersion are required by lint and should
+     be kept up-to-date.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="dummy.package">
+
+    <uses-sdk android:minSdkVersion="14" android:targetSdkVersion="20" />
+
+</manifest>
diff --git a/build/android/CheckInstallApk-debug.apk b/build/android/CheckInstallApk-debug.apk
new file mode 100644
index 0000000..3dc3191
--- /dev/null
+++ b/build/android/CheckInstallApk-debug.apk
Binary files differ
diff --git a/build/android/PRESUBMIT.py b/build/android/PRESUBMIT.py
new file mode 100644
index 0000000..bb57e54
--- /dev/null
+++ b/build/android/PRESUBMIT.py
@@ -0,0 +1,78 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for android buildbot.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API built into gcl.
+"""
+
+_DELETIONS_ONLY_FILES = (
+    'build/android/findbugs_filter/findbugs_known_bugs.txt',
+)
+
+
+def _CheckDeletionsOnlyFiles(input_api, output_api):
+  """Check that a certain listed files only have deletions.
+  """
+  warnings = []
+  for f in input_api.AffectedFiles():
+    if f.LocalPath() in _DELETIONS_ONLY_FILES:
+      if f.ChangedContents():
+        warnings.append(f.LocalPath())
+  results = []
+  if warnings:
+    results.append(output_api.PresubmitPromptWarning(
+        'The following files should only contain deletions.', warnings))
+  return results
+
+
+def CommonChecks(input_api, output_api):
+  output = []
+
+  def J(*dirs):
+    """Returns a path relative to presubmit directory."""
+    return input_api.os_path.join(input_api.PresubmitLocalPath(), *dirs)
+
+  output.extend(input_api.canned_checks.RunPylint(
+      input_api,
+      output_api,
+      black_list=[r'pylib/symbols/.*\.py$', r'gyp/.*\.py$', r'gn/.*\.py'],
+      extra_paths_list=[
+          J(), J('..', '..', 'third_party', 'android_testrunner'),
+          J('buildbot')]))
+  output.extend(input_api.canned_checks.RunPylint(
+      input_api,
+      output_api,
+      white_list=[r'gyp/.*\.py$', r'gn/.*\.py'],
+      extra_paths_list=[J('gyp'), J('gn')]))
+
+  # Disabled due to http://crbug.com/410936
+  #output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
+  #input_api, output_api, J('buildbot', 'tests')))
+
+  pylib_test_env = dict(input_api.environ)
+  pylib_test_env.update({
+      'PYTHONPATH': input_api.PresubmitLocalPath(),
+      'PYTHONDONTWRITEBYTECODE': '1',
+  })
+  output.extend(input_api.canned_checks.RunUnitTests(
+      input_api,
+      output_api,
+      unit_tests=[
+          J('pylib', 'device', 'device_utils_test.py'),
+          J('pylib', 'gtest', 'test_package_test.py'),
+          J('pylib', 'instrumentation', 'test_runner_test.py'),
+      ],
+      env=pylib_test_env))
+  output.extend(_CheckDeletionsOnlyFiles(input_api, output_api))
+  return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return CommonChecks(input_api, output_api)
diff --git a/build/android/adb_android_webview_command_line b/build/android/adb_android_webview_command_line
new file mode 100755
index 0000000..947cfb1
--- /dev/null
+++ b/build/android/adb_android_webview_command_line
@@ -0,0 +1,37 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current content shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the content shell
+# flags. For example:
+#   adb_android_webview_command_line --enable-webgl
+#
+# To remove all content shell flags, pass an empty string for the flags:
+#   adb_android_webview_command_line ""
+
+CMD_LINE_FILE=/data/local/tmp/android-webview-command-line
+
+if [ $# -eq 0 ] ; then
+  # If nothing specified, print the command line (stripping off "content_shell")
+  tempfile=$(tempfile)
+  adb pull $CMD_LINE_FILE $tempfile 2>/dev/null
+  if [ $? -eq 0 ] ; then
+    rm $tempfile
+    adb shell cat $CMD_LINE_FILE | cut -d " " -f "2-" 2>/dev/null
+  fi
+elif [ $# -eq 1 ] && [ "$1" = '' ] ; then
+  # If given an empty string, delete the command line.
+  set -x
+  adb shell rm $CMD_LINE_FILE >/dev/null
+else
+  # Else set it.
+  set -x
+  adb shell "echo 'android_webview $*' > $CMD_LINE_FILE"
+  # Prevent other apps from modifying flags -- this can create security issues.
+  adb shell chmod 0664 $CMD_LINE_FILE
+fi
+
diff --git a/build/android/adb_chrome_shell_command_line b/build/android/adb_chrome_shell_command_line
new file mode 100755
index 0000000..1e2bd38
--- /dev/null
+++ b/build/android/adb_chrome_shell_command_line
@@ -0,0 +1,37 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current chrome shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the chrome shell
+# flags. For example:
+#   adb_chrome_shell_command_line --enable-webgl
+#
+# To remove all chrome shell flags, pass an empty string for the flags:
+#   adb_chrome_shell_command_line ""
+
+CMD_LINE_FILE=/data/local/tmp/chrome-shell-command-line
+
+if [ $# -eq 0 ] ; then
+  # If nothing specified, print the command line (stripping off "chrome_shell")
+  tempfile=$(tempfile)
+  adb pull $CMD_LINE_FILE $tempfile 2>/dev/null
+  if [ $? -eq 0 ] ; then
+    rm $tempfile
+    adb shell cat $CMD_LINE_FILE | cut -d " " -f "2-" 2>/dev/null
+  fi
+elif [ $# -eq 1 ] && [ "$1" = '' ] ; then
+  # If given an empty string, delete the command line.
+  set -x
+  adb shell rm $CMD_LINE_FILE >/dev/null
+else
+  # Else set it.
+  set -x
+  adb shell "echo 'chrome_shell $*' > $CMD_LINE_FILE"
+  # Prevent other apps from modifying flags -- this can create security issues.
+  adb shell chmod 0664 $CMD_LINE_FILE
+fi
+
diff --git a/build/android/adb_content_shell_command_line b/build/android/adb_content_shell_command_line
new file mode 100755
index 0000000..f3c1d4f
--- /dev/null
+++ b/build/android/adb_content_shell_command_line
@@ -0,0 +1,37 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current content shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the content shell
+# flags. For example:
+#   adb_content_shell_command_line --enable-webgl
+#
+# To remove all content shell flags, pass an empty string for the flags:
+#   adb_content_shell_command_line ""
+
+CMD_LINE_FILE=/data/local/tmp/content-shell-command-line
+
+if [ $# -eq 0 ] ; then
+  # If nothing specified, print the command line (stripping off "content_shell")
+  tempfile=$(tempfile)
+  adb pull $CMD_LINE_FILE $tempfile 2>/dev/null
+  if [ $? -eq 0 ] ; then
+    rm $tempfile
+    adb shell cat $CMD_LINE_FILE | cut -d " " -f "2-" 2>/dev/null
+  fi
+elif [ $# -eq 1 ] && [ "$1" = '' ] ; then
+  # If given an empty string, delete the command line.
+  set -x
+  adb shell rm $CMD_LINE_FILE >/dev/null
+else
+  # Else set it.
+  set -x
+  adb shell "echo 'content_shell $*' > $CMD_LINE_FILE"
+  # Prevent other apps from modifying flags -- this can create security issues.
+  adb shell chmod 0664 $CMD_LINE_FILE
+fi
+
diff --git a/build/android/adb_device_functions.sh b/build/android/adb_device_functions.sh
new file mode 100755
index 0000000..66cc32f
--- /dev/null
+++ b/build/android/adb_device_functions.sh
@@ -0,0 +1,139 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# A collection of functions useful for maintaining android devices
+
+
+# Run an adb command on all connected device in parallel.
+# Usage: adb_all command line to eval.  Quoting is optional.
+#
+# Examples:
+#  adb_all install Chrome.apk
+#  adb_all 'shell cat /path/to/file'
+#
+adb_all() {
+  if [[ $# == 0 ]]; then
+    echo "Usage: adb_all <adb command>.  Quoting is optional."
+    echo "Example: adb_all install Chrome.apk"
+    return 1
+  fi
+  local DEVICES=$(adb_get_devices -b)
+  local NUM_DEVICES=$(echo $DEVICES | wc -w)
+  if (( $NUM_DEVICES > 1 )); then
+    echo "Looping over $NUM_DEVICES devices"
+  fi
+  _adb_multi "$DEVICES" "$*"
+}
+
+
+# Run a command on each connected device.  Quoting the command is suggested
+# but not required.  The script sets up the variable DEVICE to hold the
+# current serial number.  Intended for complex one-liners that don't work
+# with adb_all.
+# Usage: adb_device_loop 'command line to eval'
+adb_device_loop() {
+  if [[ $# == 0 ]]; then
+    echo "Intended for more complex one-liners that cannot be done with" \
+        "adb_all."
+    echo 'Usage: adb_device_loop "echo $DEVICE: $(adb root &&' \
+        'adb shell cat /data/local.prop)"'
+    return 1
+  fi
+  local DEVICES=$(adb_get_devices)
+  if [[ -z $DEVICES ]]; then
+    return
+  fi
+  # Do not change DEVICE variable name - part of api
+  for DEVICE in $DEVICES; do
+    DEV_TYPE=$(adb -s $DEVICE shell getprop ro.product.device | sed 's/\r//')
+    echo "Running on $DEVICE ($DEV_TYPE)"
+    ANDROID_SERIAL=$DEVICE eval "$*"
+  done
+}
+
+# Erases data from any devices visible on adb.  To preserve a device,
+# disconnect it or:
+#  1) Reboot it into fastboot with 'adb reboot bootloader'
+#  2) Run wipe_all_devices to wipe remaining devices
+#  3) Restore the device with 'fastboot reboot'
+#
+#  Usage: wipe_all_devices [-f]
+#
+wipe_all_devices() {
+  if [[ -z $(which adb) || -z $(which fastboot) ]]; then
+    echo "aborting: adb and fastboot not in path"
+    return 1
+  elif ! $(groups | grep -q 'plugdev'); then
+    echo "If fastboot fails, run: 'sudo adduser $(whoami) plugdev'"
+  fi
+
+  local DEVICES=$(adb_get_devices -b)
+
+  if [[ $1 != '-f' ]]; then
+    echo "This will ERASE ALL DATA from $(echo $DEVICES | wc -w) device."
+    read -p "Hit enter to continue"
+  fi
+
+  _adb_multi "$DEVICES" "reboot bootloader"
+  # Subshell to isolate job list
+  (
+  for DEVICE in $DEVICES; do
+    fastboot_erase $DEVICE &
+  done
+  wait
+  )
+
+  # Reboot devices together
+  for DEVICE in $DEVICES; do
+    fastboot -s $DEVICE reboot
+  done
+}
+
+# Wipe a device in fastboot.
+# Usage fastboot_erase [serial]
+fastboot_erase() {
+  if [[ -n $1 ]]; then
+    echo "Wiping $1"
+    local SERIAL="-s $1"
+  else
+    if [ -z "$(fastboot devices)" ]; then
+      echo "No devices in fastboot, aborting."
+      echo "Check whether wipe_all_devices is sufficient for your needs."
+      echo "You can put a device in fastboot using 'adb reboot bootloader'."
+      return 1
+    fi
+    local SERIAL=""
+  fi
+  fastboot $SERIAL erase cache
+  fastboot $SERIAL erase userdata
+}
+
+# Get list of devices connected via adb
+# Args: -b block until adb detects a device
+adb_get_devices() {
+  local DEVICES="$(adb devices | grep 'device$')"
+  if [[ -z $DEVICES && $1 == '-b' ]]; then
+    echo '- waiting for device -' >&2
+    local DEVICES="$(adb wait-for-device devices | grep 'device$')"
+  fi
+  echo "$DEVICES" | awk -vORS=' ' '{print $1}' | sed 's/ $/\n/'
+}
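+# Note: adb_get_devices prints the serials space-separated on one line,
+# e.g. "0123456789ABCDEF 0123456789ABCDF0" (hypothetical serials).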
+
+###################################################
+## HELPER FUNCTIONS
+###################################################
+
+# Run an adb command in parallel over a device list
+_adb_multi() {
+  local DEVICES=$1
+  local ADB_ARGS=$2
+  (
+    for DEVICE in $DEVICES; do
+      adb -s $DEVICE $ADB_ARGS &
+    done
+    wait
+  )
+}
diff --git a/build/android/adb_gdb b/build/android/adb_gdb
new file mode 100755
index 0000000..07178c2
--- /dev/null
+++ b/build/android/adb_gdb
@@ -0,0 +1,1014 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+
+# A generic script used to attach to a running Chromium process and
+# debug it. Most users should not use this directly, but one of the
+# wrapper scripts like adb_gdb_content_shell
+#
+# Use --help to print full usage instructions.
+#
+
+PROGNAME=$(basename "$0")
+PROGDIR=$(dirname "$0")
+
+# Location of Chromium-top-level sources.
+CHROMIUM_SRC=$(cd "$PROGDIR"/../.. >/dev/null && pwd 2>/dev/null)
+
+# Location of Chromium out/ directory.
+if [ -z "$CHROMIUM_OUT_DIR" ]; then
+  CHROMIUM_OUT_DIR=out
+fi
+
+TMPDIR=
+GDBSERVER_PIDFILE=
+TARGET_GDBSERVER=
+
+clean_exit () {
+  if [ "$TMPDIR" ]; then
+    GDBSERVER_PID=$(cat $GDBSERVER_PIDFILE 2>/dev/null)
+    if [ "$GDBSERVER_PID" ]; then
+      log "Killing background gdbserver process: $GDBSERVER_PID"
+      kill -9 $GDBSERVER_PID >/dev/null 2>&1
+    fi
+    if [ "$TARGET_GDBSERVER" ]; then
+      log "Removing target gdbserver binary: $TARGET_GDBSERVER."
+      "$ADB" shell rm "$TARGET_GDBSERVER" >/dev/null 2>&1
+    fi
+    log "Cleaning up: $TMPDIR"
+    rm -rf "$TMPDIR"
+  fi
+  trap "" EXIT
+  exit $1
+}
+
+# Ensure clean exit on Ctrl-C or normal exit.
+trap "clean_exit 1" INT HUP QUIT TERM
+trap "clean_exit \$?" EXIT
+
+panic () {
+  echo "ERROR: $@" >&2
+  exit 1
+}
+
+fail_panic () {
+  if [ $? != 0 ]; then panic "$@"; fi
+}
+
+log () {
+  if [ "$VERBOSE" -gt 0 ]; then
+    echo "$@"
+  fi
+}
+
+DEFAULT_PULL_LIBS_DIR=/tmp/$USER-adb-gdb-libs
+
+# NOTE: Allow wrapper scripts to set various defaults through ADB_GDB_XXX
+# environment variables. This is only for cosmetic reasons, i.e. to
+# display the proper default values in the --help output.
+
+# Allow wrapper scripts to set the default activity through
+# the ADB_GDB_ACTIVITY variable. Users are still able to change the
+# final activity name through --activity=<name> option.
+#
+# This is only for cosmetic reasons, i.e. to display the proper default
+# in the --help output.
+#
+DEFAULT_ACTIVITY=${ADB_GDB_ACTIVITY:-".Main"}
+
+# Allow wrapper scripts to set the program name through ADB_GDB_PROGNAME
+PROGNAME=${ADB_GDB_PROGNAME:-$(basename "$0")}
+
+ACTIVITY=$DEFAULT_ACTIVITY
+ADB=
+ANNOTATE=
+# Note: Ignore BUILDTYPE variable, because the Ninja build doesn't use it.
+BUILDTYPE=
+FORCE=
+GDBEXEPOSTFIX=gdb
+GDBINIT=
+GDBSERVER=
+HELP=
+NDK_DIR=
+NO_PULL_LIBS=
+PACKAGE_NAME=
+PID=
+PROGRAM_NAME="activity"
+PULL_LIBS=
+PULL_LIBS_DIR=
+SANDBOXED=
+SANDBOXED_INDEX=
+START=
+SU_PREFIX=
+SYMBOL_DIR=
+TARGET_ARCH=
+TOOLCHAIN=
+VERBOSE=0
+
+for opt; do
+  optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)')
+  case $opt in
+    --adb=*)
+      ADB=$optarg
+      ;;
+    --activity=*)
+      ACTIVITY=$optarg
+      ;;
+    --annotate=*)
+      ANNOTATE=$optarg
+      ;;
+    --force)
+      FORCE=true
+      ;;
+    --gdbserver=*)
+      GDBSERVER=$optarg
+      ;;
+    --gdb=*)
+      GDB=$optarg
+      ;;
+    --help|-h|-?)
+      HELP=true
+      ;;
+    --ndk-dir=*)
+      NDK_DIR=$optarg
+      ;;
+    --no-pull-libs)
+      NO_PULL_LIBS=true
+      ;;
+    --package-name=*)
+      PACKAGE_NAME=$optarg
+      ;;
+    --pid=*)
+      PID=$optarg
+      ;;
+    --program-name=*)
+      PROGRAM_NAME=$optarg
+      ;;
+    --pull-libs)
+      PULL_LIBS=true
+      ;;
+    --pull-libs-dir=*)
+      PULL_LIBS_DIR=$optarg
+      ;;
+    --sandboxed)
+      SANDBOXED=true
+      ;;
+    --sandboxed=*)
+      SANDBOXED=true
+      SANDBOXED_INDEX=$optarg
+      ;;
+    --script=*)
+      GDBINIT=$optarg
+      ;;
+    --start)
+      START=true
+      ;;
+    --su-prefix=*)
+      SU_PREFIX=$optarg
+      ;;
+    --symbol-dir=*)
+      SYMBOL_DIR=$optarg
+      ;;
+    --out-dir=*)
+      CHROMIUM_OUT_DIR=$optarg
+      ;;
+    --target-arch=*)
+      TARGET_ARCH=$optarg
+      ;;
+    --toolchain=*)
+      TOOLCHAIN=$optarg
+      ;;
+    --ui)
+      GDBEXEPOSTFIX=gdbtui
+      ;;
+    --verbose)
+      VERBOSE=$(( $VERBOSE + 1 ))
+      ;;
+    --debug)
+      BUILDTYPE=Debug
+      ;;
+    --release)
+      BUILDTYPE=Release
+      ;;
+    -*)
+      panic "Unknown option $OPT, see --help." >&2
+      ;;
+    *)
+      if [ "$PACKAGE_NAME" ]; then
+        panic "You can only provide a single package name as argument!\
+ See --help."
+      fi
+      PACKAGE_NAME=$opt
+      ;;
+  esac
+done
+
+print_help_options () {
+  cat <<EOF
+EOF
+}
+
+if [ "$HELP" ]; then
+  if [ "$ADB_GDB_PROGNAME" ]; then
+    # Assume wrapper scripts all provide a default package name.
+    cat <<EOF
+Usage: $PROGNAME [options]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+EOF
+  else
+    # Assume this is a direct call to adb_gdb
+  cat <<EOF
+Usage: $PROGNAME [options] [<package-name>]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+
+If provided, <package-name> must be the name of the Android application's
+package name to be debugged. You can also use --package-name=<name> to
+specify it.
+EOF
+  fi
+
+  cat <<EOF
+
+This script is used to debug a running $PROGRAM_NAME process.
+This can be a regular Android application process, or a sandboxed
+service, if you use the --sandboxed or --sandboxed=<num> option.
+
+This script needs several things to work properly. It will try to pick
+them up automatically for you though:
+
+   - target gdbserver binary
+   - host gdb client (e.g. arm-linux-androideabi-gdb)
+   - directory with symbol versions of $PROGRAM_NAME's shared libraries.
+
+You can also use --ndk-dir=<path> to specify an alternative NDK installation
+directory.
+
+The script tries to find the most recent debug versions of the
+shared libraries under one of the following directories:
+
+  \$CHROMIUM_SRC/<out>/Release/lib/           (used by Ninja builds)
+  \$CHROMIUM_SRC/<out>/Debug/lib/             (used by Ninja builds)
+  \$CHROMIUM_SRC/<out>/Release/lib.target/    (used by Make builds)
+  \$CHROMIUM_SRC/<out>/Debug/lib.target/      (used by Make builds)
+
+Where <out> is 'out' by default, unless the --out-dir=<path> option is used or
+the CHROMIUM_OUT_DIR environment variable is defined.
+
+You can restrict this search by using --release or --debug to specify the
+build type, or simply use --symbol-dir=<path> to specify the directory manually.
+
+The script tries to extract the target architecture from your GYP_DEFINES,
+but if this fails, it will default to 'arm'. Use --target-arch=<name> to
+force its value.
+
+Otherwise, the script will complain, but you can use the --gdbserver,
+--gdb and --symbol-dir options to specify everything manually.
+
+An alternative to --gdb=<file> is to use --toolchain=<path> to specify
+the path to the host target-specific cross-toolchain.
+
+You will also need the 'adb' tool in your path. Otherwise, use the --adb
+option. The script will complain if there is more than one device connected
+and ANDROID_SERIAL is not defined.
+
+The first time you use it on a device, the script will pull many system
+libraries required by the process into a temporary directory. This
+greatly improves the debugging experience, e.g. by allowing readable
+thread stacks. The libraries are copied to the following
+directory by default:
+
+  $DEFAULT_PULL_LIBS_DIR/
+
+But you can use the --pull-libs-dir=<path> option to specify an
+alternative. The script can detect when you change the connected device,
+and will re-pull the libraries only in this case. You can however force it
+with the --pull-libs option.
+
+Any local .gdbinit script will be ignored, but it is possible to pass a
+gdb command script with the --script=<file> option. Note that its commands
+will be passed to gdb after the remote connection and library symbol
+loading have completed.
+
+Valid options:
+  --help|-h|-?          Print this message.
+  --verbose             Increase verbosity.
+
+  --sandboxed           Debug first sandboxed process we find.
+  --sandboxed=<num>     Debug specific sandboxed process.
+  --symbol-dir=<path>   Specify directory with symbol shared libraries.
+  --out-dir=<path>      Specify the out directory.
+  --package-name=<name> Specify package name (alternative to 1st argument).
+  --program-name=<name> Specify program name (cosmetic only).
+  --pid=<pid>           Specify application process pid.
+  --force               Kill any previous debugging session, if any.
+  --start               Start package's activity on device.
+  --ui                  Use gdbtui instead of gdb.
+  --activity=<name>     Activity name for --start [$DEFAULT_ACTIVITY].
+  --annotate=<num>      Enable gdb annotation.
+  --script=<file>       Specify extra GDB init script.
+
+  --gdbserver=<file>    Specify target gdbserver binary.
+  --gdb=<file>          Specify host gdb client binary.
+  --target-arch=<name>  Specify NDK target arch.
+  --adb=<file>          Specify host ADB binary.
+
+  --su-prefix=<prefix>  Prepend <prefix> to 'adb shell' commands that are
+                        run by this script. This can be useful to use
+                        the 'su' program on rooted production devices.
+                        e.g. --su-prefix="su -c"
+
+  --pull-libs           Force system libraries extraction.
+  --no-pull-libs        Do not extract any system library.
+  --pull-libs-dir=<path> Specify system libraries extraction directory.
+
+  --debug               Use libraries under out/Debug.
+  --release             Use libraries under out/Release.
+
+EOF
+  exit 0
+fi
+
+if [ -z "$PACKAGE_NAME" ]; then
+  panic "Please specify a package name on the command line. See --help."
+fi
+
+if [ -z "$NDK_DIR" ]; then
+  ANDROID_NDK_ROOT=$(PYTHONPATH=build/android python -c \
+'from pylib.constants import ANDROID_NDK_ROOT; print ANDROID_NDK_ROOT,')
+else
+  if [ ! -d "$NDK_DIR" ]; then
+    panic "Invalid directory: $NDK_DIR"
+  fi
+  if [ ! -f "$NDK_DIR/ndk-build" ]; then
+    panic "Not a valid NDK directory: $NDK_DIR"
+  fi
+  ANDROID_NDK_ROOT=$NDK_DIR
+fi
+
+if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then
+  panic "Unknown --script file: $GDBINIT"
+fi
+
+# Find the target architecture from our $GYP_DEFINES
+# This returns an NDK-compatible architecture name.
+# out: NDK Architecture name, or empty string.
+get_gyp_target_arch () {
+  local ARCH=$(echo $GYP_DEFINES | tr ' ' '\n' | grep '^target_arch=' |\
+               cut -d= -f2)
+  case $ARCH in
+    ia32|i?86|x86) echo "x86";;
+    mips|arm|arm64|x86_64) echo "$ARCH";;
+    *) echo "";
+  esac
+}
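+# Example: with GYP_DEFINES="OS=android target_arch=ia32", this prints
+# "x86", which is the NDK name for that architecture.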
+
+if [ -z "$TARGET_ARCH" ]; then
+  TARGET_ARCH=$(get_gyp_target_arch)
+  if [ -z "$TARGET_ARCH" ]; then
+    TARGET_ARCH=arm
+  fi
+else
+  # Nit: accept Chromium's 'ia32' as a valid target architecture. This
+  # script prefers the NDK 'x86' name instead because it uses that name
+  # to find NDK-specific files (e.g. the host gdb).
+  if [ "$TARGET_ARCH" = "ia32" ]; then
+    TARGET_ARCH=x86
+    log "Auto-config: --arch=$TARGET_ARCH  (equivalent to ia32)"
+  fi
+fi
+
+# Detect the NDK system name, i.e. the name used to identify the host.
+# out: NDK system name (e.g. 'linux' or 'darwin')
+get_ndk_host_system () {
+  local HOST_OS
+  if [ -z "$NDK_HOST_SYSTEM" ]; then
+    HOST_OS=$(uname -s)
+    case $HOST_OS in
+      Linux) NDK_HOST_SYSTEM=linux;;
+      Darwin) NDK_HOST_SYSTEM=darwin;;
+      *) panic "You can't run this script on this system: $HOST_OS";;
+    esac
+  fi
+  echo "$NDK_HOST_SYSTEM"
+}
+
+# Detect the NDK host architecture name.
+# out: NDK arch name (e.g. 'x86' or 'x86_64')
+get_ndk_host_arch () {
+  local HOST_ARCH HOST_OS
+  if [ -z "$NDK_HOST_ARCH" ]; then
+    HOST_OS=$(get_ndk_host_system)
+    HOST_ARCH=$(uname -p)
+    case $HOST_ARCH in
+      i?86) NDK_HOST_ARCH=x86;;
+      x86_64|amd64) NDK_HOST_ARCH=x86_64;;
+      *) panic "You can't run this script on this host architecture: $HOST_ARCH";;
+    esac
+    # Darwin trick: "uname -p" always returns i386 on 64-bit installations.
+    if [ "$HOST_OS" = darwin -a "$NDK_HOST_ARCH" = "x86" ]; then
+      # Use '/usr/bin/file', not just 'file' to avoid buggy MacPorts
+      # implementations of the tool. See http://b.android.com/53769
+      HOST_64BITS=$(/usr/bin/file -L "$SHELL" | grep -e "x86[_-]64")
+      if [ "$HOST_64BITS" ]; then
+        NDK_HOST_ARCH=x86_64
+      fi
+    fi
+  fi
+  echo "$NDK_HOST_ARCH"
+}
+
+# Convert an NDK architecture name into a GNU configure triplet.
+# $1: NDK architecture name (e.g. 'arm')
+# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi')
+get_arch_gnu_config () {
+  case $1 in
+    arm)
+      echo "arm-linux-androideabi"
+      ;;
+    arm64)
+      echo "aarch64-linux-android"
+      ;;
+    x86)
+      echo "i686-linux-android"
+      ;;
+    x86_64)
+      echo "x86_64-linux-android"
+      ;;
+    mips)
+      echo "mipsel-linux-android"
+      ;;
+    *)
+      echo "$ARCH-linux-android"
+      ;;
+  esac
+}
+
+# Convert an NDK architecture name into a toolchain name prefix
+# $1: NDK architecture name (e.g. 'arm')
+# Out: NDK toolchain name prefix (e.g. 'arm-linux-androideabi')
+get_arch_toolchain_prefix () {
+  # Return the configure triplet, except for x86!
+  if [ "$1" = "x86" ]; then
+    echo "$1"
+  else
+    get_arch_gnu_config $1
+  fi
+}
+
+# Find a NDK toolchain prebuilt file or sub-directory.
+# This will probe the various arch-specific toolchain directories
+# in the NDK for the needed file.
+# $1: NDK install path
+# $2: NDK architecture name
+# $3: prebuilt sub-path to look for.
+# Out: file path, or empty if none is found.
+get_ndk_toolchain_prebuilt () {
+  local NDK_DIR="${1%/}"
+  local ARCH="$2"
+  local SUBPATH="$3"
+  local NAME="$(get_arch_toolchain_prefix $ARCH)"
+  local FILE TARGET
+  FILE=$NDK_DIR/toolchains/$NAME-4.9/prebuilt/$SUBPATH
+  if [ ! -f "$FILE" ]; then
+    FILE=$NDK_DIR/toolchains/$NAME-4.8/prebuilt/$SUBPATH
+    if [ ! -f "$FILE" ]; then
+      FILE=
+    fi
+  fi
+  echo "$FILE"
+}
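+# Example (hypothetical NDK layout): calling get_ndk_toolchain_prebuilt
+# with "$NDK_DIR" arm linux-x86_64/bin/arm-linux-androideabi-gcc first
+# probes toolchains/arm-linux-androideabi-4.9/prebuilt/... and then falls
+# back to the -4.8 toolchain directory.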
+
+# Find the path to an NDK's toolchain full prefix for a given architecture
+# $1: NDK install path
+# $2: NDK target architecture name
+# Out: install path + binary prefix (e.g.
+#      ".../path/to/bin/arm-linux-androideabi-")
+get_ndk_toolchain_fullprefix () {
+  local NDK_DIR="$1"
+  local ARCH="$2"
+  local TARGET NAME HOST_OS HOST_ARCH GCC CONFIG
+
+  # NOTE: This will need to be updated if the NDK changes the names or moves
+  #        the location of its prebuilt toolchains.
+  #
+  GCC=
+  HOST_OS=$(get_ndk_host_system)
+  HOST_ARCH=$(get_ndk_host_arch)
+  CONFIG=$(get_arch_gnu_config $ARCH)
+  GCC=$(get_ndk_toolchain_prebuilt \
+        "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-gcc")
+  if [ -z "$GCC" -a "$HOST_ARCH" = "x86_64" ]; then
+    GCC=$(get_ndk_toolchain_prebuilt \
+          "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-gcc")
+  fi
+  if [ ! -f "$GCC" -a "$ARCH" = "x86" ]; then
+    # Special case, the x86 toolchain used to be incorrectly
+    # named i686-android-linux-gcc!
+    GCC=$(get_ndk_toolchain_prebuilt \
+          "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-gcc")
+  fi
+  if [ -z "$GCC" ]; then
+    panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \
+Please verify your NDK installation!"
+  fi
+  echo "${GCC%%gcc}"
+}
+
+# $1: NDK install path
+# $2: target architecture.
+get_ndk_gdbserver () {
+  local NDK_DIR="$1"
+  local ARCH=$2
+  local BINARY
+
+  # The location has moved after NDK r8
+  BINARY=$NDK_DIR/prebuilt/android-$ARCH/gdbserver/gdbserver
+  if [ ! -f "$BINARY" ]; then
+    BINARY=$(get_ndk_toolchain_prebuilt "$NDK_DIR" "$ARCH" gdbserver)
+  fi
+  echo "$BINARY"
+}
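+# Example: for the 'arm' architecture this typically resolves to
+#   $NDK_DIR/prebuilt/android-arm/gdbserver/gdbserver
+# on NDK releases newer than r8.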
+
+# Check/probe the path to the Android toolchain installation. Always
+# use the NDK versions of gdb and gdbserver. They must match, otherwise
+# the two binaries may not speak the same wire protocol.
+#
+if [ -z "$TOOLCHAIN" ]; then
+  ANDROID_TOOLCHAIN=$(get_ndk_toolchain_fullprefix \
+                      "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  ANDROID_TOOLCHAIN=$(dirname "$ANDROID_TOOLCHAIN")
+  log "Auto-config: --toolchain=$ANDROID_TOOLCHAIN"
+else
+  # Be flexible, allow one to specify either the install path or the bin
+  # sub-directory in --toolchain:
+  #
+  if [ -d "$TOOLCHAIN/bin" ]; then
+    TOOLCHAIN=$TOOLCHAIN/bin
+  fi
+  ANDROID_TOOLCHAIN=$TOOLCHAIN
+fi
+
+# Cosmetic: Remove trailing directory separator.
+ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/}
+
+# Find host GDB client binary
+if [ -z "$GDB" ]; then
+  GDB=$(which $ANDROID_TOOLCHAIN/*-$GDBEXEPOSTFIX 2>/dev/null | head -1)
+  if [ -z "$GDB" ]; then
+    panic "Can't find Android gdb client in your path, check your \
+--toolchain or --gdb path."
+  fi
+  log "Host gdb client: $GDB"
+fi
+
+# Find the gdbserver binary; we will later push it to /data/local/tmp.
+# This ensures that both gdbserver and $GDB talk the same binary protocol,
+# otherwise weird problems will appear.
+#
+if [ -z "$GDBSERVER" ]; then
+  GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  if [ -z "$GDBSERVER" ]; then
+    panic "Can't find NDK gdbserver binary. use --gdbserver to specify \
+valid one!"
+  fi
+  log "Auto-config: --gdbserver=$GDBSERVER"
+fi
+
+
+
+# Check that ADB is in our path
+if [ -z "$ADB" ]; then
+  ADB=$(which adb 2>/dev/null)
+  if [ -z "$ADB" ]; then
+    panic "Can't find 'adb' tool in your path. Install it or use \
+--adb=<file>"
+  fi
+  log "Auto-config: --adb=$ADB"
+fi
+
+# Check that it works minimally
+ADB_VERSION=$($ADB version 2>/dev/null)
+echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge"
+if [ $? != 0 ]; then
+  panic "Your 'adb' tool seems invalid, use --adb=<file> to specify a \
+different one: $ADB"
+fi
+
+# If more than one device is connected and ANDROID_SERIAL is not
+# defined, print an error message.
+NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l)
+if [ "$NUM_DEVICES_PLUS2" -gt 3 -a -z "$ANDROID_SERIAL" ]; then
+  echo "ERROR: There is more than one Android device connected to ADB."
+  echo "Please define ANDROID_SERIAL to specify which one to use."
+  exit 1
+fi
+
+# A unique ID for this script's session. This needs to be the same in all
+# sub-shell commands we're going to launch, so take the PID of the launcher
+# process.
+TMP_ID=$$
+
+# Temporary directory, will get cleaned up on exit.
+TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID
+mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/*
+
+GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid
+
+# Run a command through adb shell, strip the extra \r from the output
+# and return the correct status code to detect failures. This assumes
+# that the adb shell command prints a final \n to stdout.
+# $1+: command to run
+# Out: command's stdout
+# Return: command's status
+# Note: the command's stderr is lost
+adb_shell () {
+  local TMPOUT="$(mktemp)"
+  local LASTLINE RET
+  local ADB=${ADB:-adb}
+
+  # The weird sed rule is to strip the final \r on each output line
+  # Since 'adb shell' never returns the command's proper exit/status code,
+  # we force it to print it as '%%<status>' in the temporary output file,
+  # which we will later strip from it.
+  $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \
+      sed -e 's![[:cntrl:]]!!g' > $TMPOUT
+  # Get last line in log, which contains the exit code from the command
+  LASTLINE=$(sed -e '$!d' $TMPOUT)
+  # Extract the status code from the end of the line, which must
+  # be '%%<code>'.
+  RET=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
+  # Remove the status code from the last line. Note that this may result
+  # in an empty line.
+  LASTLINE=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
+  # The output itself: all lines except the status code.
+  sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE"
+  # Remove temp file.
+  rm -f $TMPOUT
+  # Exit with the appropriate status.
+  return $RET
+}
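+# Example: since a plain 'adb shell' always exits with 0, use adb_shell to
+# branch on the on-device exit status, e.g.:
+#   if adb_shell ls /system/bin/app_process64 >/dev/null; then ...; fi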
+
+# If --force is specified, try to kill any gdbserver process started by the
+# same user on the device. Normally, these are killed automatically by the
+# script on exit, but there are a few corner cases where this would still
+# be needed.
+if [ "$FORCE" ]; then
+  GDBSERVER_PIDS=$(adb_shell ps | awk '$9 ~ /gdbserver/ { print $2; }')
+  for GDB_PID in $GDBSERVER_PIDS; do
+    log "Killing previous gdbserver (PID=$GDB_PID)"
+    adb_shell kill -9 $GDB_PID
+  done
+fi
+
+if [ "$START" ]; then
+  log "Starting $PROGRAM_NAME on device."
+  adb_shell am start -n $PACKAGE_NAME/$ACTIVITY 2>/dev/null
+  adb_shell ps | grep -q $PACKAGE_NAME
+  fail_panic "Could not start $PROGRAM_NAME on device. Are you sure the \
+package is installed?"
+fi
+
+# Return the timestamp of a given file, as a number of seconds since the epoch.
+# $1: file path
+# Out: file timestamp
+get_file_timestamp () {
+  stat -c %Y "$1" 2>/dev/null
+}
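+# Example (hypothetical path): get_file_timestamp out/Release/lib could
+# print something like "1412345678".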
+
+# Detect the build type and symbol directory. This is done by finding
+# the most recent sub-directory containing debug shared libraries under
+# $CHROMIUM_SRC/$CHROMIUM_OUT_DIR/
+#
+# $1: $BUILDTYPE value, can be empty
+# Out: nothing, but this sets SYMBOL_DIR
+#
+detect_symbol_dir () {
+  local SUBDIRS SUBDIR LIST DIR DIR_LIBS TSTAMP
+  # Note: Ninja places debug libraries under out/$BUILDTYPE/lib/, while
+  # Make places them under out/$BUILDTYPE/lib.target.
+  if [ "$1" ]; then
+    SUBDIRS="$1/lib $1/lib.target"
+  else
+    SUBDIRS="Release/lib Debug/lib Release/lib.target Debug/lib.target"
+  fi
+  LIST=$TMPDIR/scan-subdirs-$$.txt
+  printf "" > "$LIST"
+  for SUBDIR in $SUBDIRS; do
+    DIR=$CHROMIUM_SRC/$CHROMIUM_OUT_DIR/$SUBDIR
+    if [ -d "$DIR" ]; then
+      # Ignore build directories that don't contain symbol versions
+      # of the shared libraries.
+      DIR_LIBS=$(ls "$DIR"/lib*.so 2>/dev/null)
+      if [ -z "$DIR_LIBS" ]; then
+        echo "No shared libs: $DIR"
+        continue
+      fi
+      TSTAMP=$(get_file_timestamp "$DIR")
+      printf "%s %s\n" "$TSTAMP" "$SUBDIR" >> "$LIST"
+    fi
+  done
+  SUBDIR=$(cat $LIST | sort -r | head -1 | cut -d" " -f2)
+  rm -f "$LIST"
+
+  if [ -z "$SUBDIR" ]; then
+    if [ -z "$1" ]; then
+      panic "Could not find any build directory under \
+$CHROMIUM_SRC/$CHROMIUM_OUT_DIR. Please build the program first!"
+    else
+      panic "Could not find any $1 directory under \
+$CHROMIUM_SRC/$CHROMIUM_OUT_DIR. Check your build type!"
+    fi
+  fi
+
+  SYMBOL_DIR=$CHROMIUM_SRC/$CHROMIUM_OUT_DIR/$SUBDIR
+  log "Auto-config: --symbol-dir=$SYMBOL_DIR"
+}
+
+if [ -z "$SYMBOL_DIR" ]; then
+  detect_symbol_dir "$BUILDTYPE"
+fi
+
+# Allow several concurrent debugging sessions
+TARGET_GDBSERVER=/data/local/tmp/gdbserver-adb-gdb-$TMP_ID
+
+# Return the build fingerprint contained in a build.prop file.
+# $1: path to build.prop file
+get_build_fingerprint_from () {
+  cat "$1" | grep -e '^ro.build.fingerprint=' | cut -d= -f2
+}
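+# Example of a build.prop line this matches (hypothetical device):
+#   ro.build.fingerprint=google/hammerhead/hammerhead:4.4.4/KTU84P/1227136:user/release-keys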
+
+
+ORG_PULL_LIBS_DIR=$PULL_LIBS_DIR
+PULL_LIBS_DIR=${PULL_LIBS_DIR:-$DEFAULT_PULL_LIBS_DIR}
+
+HOST_FINGERPRINT=
+DEVICE_FINGERPRINT=$(adb_shell getprop ro.build.fingerprint)
+log "Device build fingerprint: $DEVICE_FINGERPRINT"
+
+# If --pull-libs-dir is not specified, and this is a platform build, check
+# whether we can use the symbolic libraries under
+# $ANDROID_PRODUCT_OUT/symbols/ directly, i.e. when the build fingerprint
+# matches the device.
+if [ -z "$ORG_PULL_LIBS_DIR" -a \
+     "$ANDROID_PRODUCT_OUT" -a \
+     -f "$ANDROID_PRODUCT_OUT/system/build.prop" ]; then
+  ANDROID_FINGERPRINT=$(get_build_fingerprint_from \
+                        "$ANDROID_PRODUCT_OUT"/system/build.prop)
+  log "Android build fingerprint:  $ANDROID_FINGERPRINT"
+  if [ "$ANDROID_FINGERPRINT" = "$DEVICE_FINGERPRINT" ]; then
+    log "Perfect match!"
+    PULL_LIBS_DIR=$ANDROID_PRODUCT_OUT/symbols
+    HOST_FINGERPRINT=$ANDROID_FINGERPRINT
+    if [ "$PULL_LIBS" ]; then
+      log "Ignoring --pull-libs since the device and platform build \
+fingerprints match."
+      NO_PULL_LIBS=true
+    fi
+  fi
+fi
+
+# If neither --pull-libs nor --no-pull-libs was specified, compare the build
+# fingerprints of the device and of the cached system libraries on the host.
+#
+if [ -z "$NO_PULL_LIBS" -a -z "$PULL_LIBS" ]; then
+  if [ ! -f "$PULL_LIBS_DIR/build.prop" ]; then
+    log "Auto-config: --pull-libs  (no cached libraries)"
+    PULL_LIBS=true
+  else
+    HOST_FINGERPRINT=$(get_build_fingerprint_from "$PULL_LIBS_DIR/build.prop")
+    log "Host build fingerprint:   $HOST_FINGERPRINT"
+    if [ "$HOST_FINGERPRINT" == "$DEVICE_FINGERPRINT" ]; then
+      log "Auto-config: --no-pull-libs (fingerprint match)"
+      NO_PULL_LIBS=true
+    else
+      log "Auto-config: --pull-libs  (fingerprint mismatch)"
+      PULL_LIBS=true
+    fi
+  fi
+fi
+
+# Extract the system libraries from the device if necessary.
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
+  echo "Extracting system libraries into: $PULL_LIBS_DIR"
+fi
+
+mkdir -p "$PULL_LIBS_DIR"
+fail_panic "Can't create --libs-dir directory: $PULL_LIBS_DIR"
+
+# If requested, support Emacs' M-x gdb. The gdb indirections make it
+# difficult to pass --annotate=3 to the gdb binary itself.
+GDB_ARGS=
+if [ "$ANNOTATE" ]; then
+  GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE"
+fi
+
+# Get the PID from the --pid option or else find the PID of the
+# browser process.
+if [ -z "$PID" ]; then
+  PROCESSNAME=$PACKAGE_NAME
+  if [ "$SANDBOXED_INDEX" ]; then
+    PROCESSNAME=$PROCESSNAME:sandboxed_process$SANDBOXED_INDEX
+  elif [ "$SANDBOXED" ]; then
+    PROCESSNAME=$PROCESSNAME:sandboxed_process
+    PID=$(adb_shell ps | \
+          awk '$9 ~ /^'$PROCESSNAME'/ { print $2; }' | head -1)
+  fi
+  if [ -z "$PID" ]; then
+    PID=$(adb_shell ps | \
+          awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1)
+  fi
+  if [ -z "$PID" ]; then
+    if [ "$START" ]; then
+      panic "Can't find application process PID, did it crash?"
+    else
+      panic "Can't find application process PID, are you sure it is \
+running? Try using --start."
+    fi
+  fi
+  log "Found process PID: $PID"
+elif [ "$SANDBOXED" ]; then
+  echo "WARNING: --sandboxed option ignored due to use of --pid."
+fi
+
+# Determine if 'adb shell' runs as root or not.
+# If so, we can launch gdbserver directly, otherwise, we have to
+# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable.
+#
+if [ "$SU_PREFIX" ]; then
+  # Need to check that this works properly.
+  SU_PREFIX_TEST_LOG=$TMPDIR/su-prefix.log
+  adb_shell $SU_PREFIX echo "foo" > $SU_PREFIX_TEST_LOG 2>&1
+  if [ $? != 0 -o "$(cat $SU_PREFIX_TEST_LOG)" != "foo" ]; then
+    echo "ERROR: Cannot use '$SU_PREFIX' as a valid su prefix:"
+    echo "$ adb shell $SU_PREFIX echo foo"
+    cat $SU_PREFIX_TEST_LOG
+    exit 1
+  fi
+  COMMAND_PREFIX="$SU_PREFIX"
+else
+  SHELL_UID=$(adb shell cat /proc/self/status | \
+              awk '$1 == "Uid:" { print $2; }')
+  log "Shell UID: $SHELL_UID"
+  if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then
+    COMMAND_PREFIX="run-as $PACKAGE_NAME"
+  else
+    COMMAND_PREFIX=
+  fi
+fi
+log "Command prefix: '$COMMAND_PREFIX'"
+
+# Pull device's system libraries that are mapped by our process.
+# Pulling all system libraries would take too long, so determine which
+# ones we need by looking at /proc/$PID/maps instead.
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
+  echo "Extracting system libraries into: $PULL_LIBS_DIR"
+  rm -f $PULL_LIBS_DIR/build.prop
+  MAPPINGS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps)
+  if [ $? != 0 ]; then
+    echo "ERROR: Could not list process's memory mappings."
+    if [ "$SU_PREFIX" ]; then
+      panic "Are you sure your --su-prefix is correct?"
+    else
+      panic "Use --su-prefix if the application is not debuggable."
+    fi
+  fi
+  SYSTEM_LIBS=$(echo "$MAPPINGS" | \
+      awk '$6 ~ /\/system\/.*\.so$/ { print $6; }' | sort -u)
+  for SYSLIB in /system/bin/linker $SYSTEM_LIBS; do
+    echo "Pulling from device: $SYSLIB"
+    DST_FILE=$PULL_LIBS_DIR$SYSLIB
+    DST_DIR=$(dirname "$DST_FILE")
+    mkdir -p "$DST_DIR" && adb pull $SYSLIB "$DST_FILE" 2>/dev/null
+    fail_panic "Could not pull $SYSLIB from device !?"
+  done
+  echo "Pulling device build.prop"
+  adb pull /system/build.prop $PULL_LIBS_DIR/build.prop
+  fail_panic "Could not pull device build.prop !?"
+fi
+
+# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4
+# so we can add them to solib-search-path later.
+SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \
+             grep -v "^$" | tr '\n' ':')
+
+# This is a re-implementation of gdbclient, where we use compatible
+# versions of gdbserver and $GDBNAME to ensure that everything works
+# properly.
+#
+
+# Push gdbserver to the device
+log "Pushing gdbserver $GDBSERVER to $TARGET_GDBSERVER"
+adb push $GDBSERVER $TARGET_GDBSERVER &>/dev/null
+fail_panic "Could not copy gdbserver to the device!"
+
+PORT=5039
+HOST_PORT=$PORT
+TARGET_PORT=$PORT
+
+# Select correct app_process for architecture.
+case $TARGET_ARCH in
+      arm|x86|mips) GDBEXEC=app_process;;
+      arm64|x86_64) GDBEXEC=app_process64;;
+      *) fail_panic "Unknown app_process for architecture!";;
+esac
+
+# Detect AddressSanitizer setup on the device. In that case app_process is a
+# script, and the real executable is app_process.real.
+GDBEXEC_ASAN=app_process.real
+adb_shell ls /system/bin/$GDBEXEC_ASAN
+if [ $? == 0 ]; then
+    GDBEXEC=$GDBEXEC_ASAN
+fi
+
+# Pull the app_process binary from the device.
+log "Pulling $GDBEXEC from device"
+adb pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null
+fail_panic "Could not retrieve $GDBEXEC from the device!"
+
+# Setup network redirection
+log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_PORT)"
+adb forward tcp:$HOST_PORT tcp:$TARGET_PORT
+fail_panic "Could not setup network redirection from \
+host:localhost:$HOST_PORT to device:localhost:$TARGET_PORT!"
+
+# Start gdbserver in the background
+# Note that using run-as requires the package to be debuggable.
+#
+# If not, this will fail horribly. The alternative is to run the
+# program as root, which of course requires root privileges.
+# Maybe we should add a --root option to enable this?
+#
+log "Starting gdbserver in the background:"
+GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log
+log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \
+--attach $PID"
+("$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \
+ --attach $PID > $GDBSERVER_LOG 2>&1) &
+GDBSERVER_PID=$!
+echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE
+log "background job pid: $GDBSERVER_PID"
+
+# Check that it is still running after a few seconds. If not, this means we
+# could not properly attach to it.
+sleep 2
+log "Job control: $(jobs -l)"
+STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }')
+if [ "$STATE" != "Running" ]; then
+  echo "ERROR: GDBServer could not attach to PID $PID!"
+  if [ "$(adb_shell su -c getenforce)" != "Permissive" ]; then
+    echo "Device is in enforcing mode. Changing it to permissive mode."
+    adb_shell su -c setenforce 0
+    if [ "$(adb_shell su -c getenforce)" != "Permissive" ]; then
+      echo "ERROR: Failed to change the device to permissive mode."
+      echo "Failure log (use --verbose for more information):"
+      cat $GDBSERVER_LOG
+      exit 1
+    fi
+  else
+    echo "Failure log (use --verbose for more information):"
+    cat $GDBSERVER_LOG
+    exit 1
+  fi
+fi
+
+# Generate a file containing useful GDB initialization commands
+readonly COMMANDS=$TMPDIR/gdb.init
+log "Generating GDB initialization commands file: $COMMANDS"
+echo -n "" > $COMMANDS
+echo "set print pretty 1" >> $COMMANDS
+echo "python" >> $COMMANDS
+echo "import sys" >> $COMMANDS
+echo "sys.path.insert(0, '$CHROMIUM_SRC/tools/gdb/')" >> $COMMANDS
+echo "try:" >> $COMMANDS
+echo "  import gdb_chrome" >> $COMMANDS
+echo "finally:" >> $COMMANDS
+echo "  sys.path.pop(0)" >> $COMMANDS
+echo "end" >> $COMMANDS
+echo "file $TMPDIR/$GDBEXEC" >> $COMMANDS
+echo "directory $CHROMIUM_SRC" >> $COMMANDS
+echo "set solib-absolute-prefix $PULL_LIBS_DIR" >> $COMMANDS
+echo "set solib-search-path $SOLIB_DIRS:$PULL_LIBS_DIR:$SYMBOL_DIR" \
+    >> $COMMANDS
+echo "echo Attaching and reading symbols, this may take a while.." \
+    >> $COMMANDS
+echo "target remote :$HOST_PORT" >> $COMMANDS
+
+if [ "$GDBINIT" ]; then
+  cat "$GDBINIT" >> $COMMANDS
+fi
+
+if [ "$VERBOSE" -gt 0 ]; then
+  echo "### START $COMMANDS"
+  cat $COMMANDS
+  echo "### END $COMMANDS"
+fi
+
+log "Launching gdb client: $GDB $GDB_ARGS -x $COMMANDS"
+$GDB $GDB_ARGS -x $COMMANDS &&
+rm -f "$GDBSERVER_PIDFILE"
diff --git a/build/android/adb_gdb_android_webview_shell b/build/android/adb_gdb_android_webview_shell
new file mode 100755
index 0000000..f685fda
--- /dev/null
+++ b/build/android/adb_gdb_android_webview_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start an Android WebView shell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.AwShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=AwShellApplication \
+    --package-name=org.chromium.android_webview.shell \
+    "$@"
diff --git a/build/android/adb_gdb_chrome_shell b/build/android/adb_gdb_chrome_shell
new file mode 100755
index 0000000..e5c8a30
--- /dev/null
+++ b/build/android/adb_gdb_chrome_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a ChromeShell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.ChromeShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=ChromeShell \
+    --package-name=org.chromium.chrome.shell \
+    "$@"
diff --git a/build/android/adb_gdb_content_shell b/build/android/adb_gdb_content_shell
new file mode 100755
index 0000000..18e1a61
--- /dev/null
+++ b/build/android/adb_gdb_content_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a ContentShell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.ContentShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=ContentShell \
+    --package-name=org.chromium.content_shell_apk \
+    "$@"
diff --git a/build/android/adb_gdb_cronet_sample b/build/android/adb_gdb_cronet_sample
new file mode 100755
index 0000000..8d0c864
--- /dev/null
+++ b/build/android/adb_gdb_cronet_sample
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a CronetSample process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.CronetSampleActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=CronetSample \
+    --package-name=org.chromium.cronet_sample_apk \
+    "$@"
diff --git a/build/android/adb_gdb_mojo_shell b/build/android/adb_gdb_mojo_shell
new file mode 100755
index 0000000..ba91149
--- /dev/null
+++ b/build/android/adb_gdb_mojo_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a MojoShell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.MojoShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=MojoShell \
+    --package-name=org.chromium.mojo_shell_apk \
+    "$@"
diff --git a/build/android/adb_install_apk.py b/build/android/adb_install_apk.py
new file mode 100755
index 0000000..5d0fd17
--- /dev/null
+++ b/build/android/adb_install_apk.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility script to install APKs from the command line quickly."""
+
+import optparse
+import os
+import sys
+
+from pylib import android_commands
+from pylib import constants
+from pylib.device import device_utils
+
+
+def AddInstallAPKOption(option_parser):
+  """Adds apk option used to install the APK to the OptionParser."""
+  option_parser.add_option('--apk',
+                           help=('DEPRECATED The name of the apk containing the'
+                                 ' application (with the .apk extension).'))
+  option_parser.add_option('--apk_package',
+                           help=('DEPRECATED The package name used by the apk '
+                                 'containing the application.'))
+  option_parser.add_option('--keep_data',
+                           action='store_true',
+                           default=False,
+                           help=('Keep the package data when installing '
+                                 'the application.'))
+  option_parser.add_option('--debug', action='store_const', const='Debug',
+                           dest='build_type',
+                           default=os.environ.get('BUILDTYPE', 'Debug'),
+                           help='If set, run test suites under out/Debug. '
+                           'Default is env var BUILDTYPE or Debug')
+  option_parser.add_option('--release', action='store_const', const='Release',
+                           dest='build_type',
+                           help='If set, run test suites under out/Release. '
+                           'Default is env var BUILDTYPE or Debug.')
+  option_parser.add_option('-d', '--device', dest='device',
+                           help='Target device for apk to install on.')
+
+
+def ValidateInstallAPKOption(option_parser, options, args):
+  """Validates the apk option and potentially qualifies the path."""
+  if not options.apk:
+    if len(args) > 1:
+      options.apk = args[1]
+    else:
+      option_parser.error('apk target not specified.')
+
+  if not options.apk.endswith('.apk'):
+    options.apk += '.apk'
+
+  if not os.path.exists(options.apk):
+    options.apk = os.path.join(constants.GetOutDirectory(), 'apks',
+                               options.apk)
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.set_usage("usage: %prog [options] target")
+  AddInstallAPKOption(parser)
+  options, args = parser.parse_args(argv)
+
+  if len(args) > 1 and options.apk:
+    parser.error("Appending the apk as argument can't be used with --apk.")
+  elif len(args) > 2:
+    parser.error("Too many arguments.")
+
+  constants.SetBuildType(options.build_type)
+  ValidateInstallAPKOption(parser, options, args)
+
+  devices = android_commands.GetAttachedDevices()
+
+  if options.device:
+    if options.device not in devices:
+      raise Exception('Error: %s not in attached devices %s' % (options.device,
+                      ','.join(devices)))
+    devices = [options.device]
+
+  if not devices:
+    raise Exception('Error: no connected devices')
+
+  device_utils.DeviceUtils.parallel(devices).Install(
+      options.apk, reinstall=options.keep_data)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
+
diff --git a/build/android/adb_kill_content_shell b/build/android/adb_kill_content_shell
new file mode 100755
index 0000000..e379dd4
--- /dev/null
+++ b/build/android/adb_kill_content_shell
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Kill a running content shell.
+#
+# Assumes you have sourced the build/android/envsetup.sh script.
+
+SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.content_shell_apk')
+if [ -z "$SHELL_PID_LINES" ] ; then
+   echo "Content shell is not running."
+else
+   SHELL_PID=$(echo "$SHELL_PID_LINES" | awk '{print $2}')
+   if [ "$SHELL_PID" != "" ] ; then
+      set -x
+      adb shell kill $SHELL_PID
+      set -
+   else
+     echo "Content shell does not appear to be running."
+   fi
+fi
diff --git a/build/android/adb_logcat_monitor.py b/build/android/adb_logcat_monitor.py
new file mode 100755
index 0000000..a2acfd1
--- /dev/null
+++ b/build/android/adb_logcat_monitor.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Saves logcats from all connected devices.
+
+Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>]
+
+This script will repeatedly poll adb for new devices and save logcats
+inside the <base_dir> directory, which it attempts to create.  The
+script will run until killed by an external signal.  To test, run the
+script in a shell and <Ctrl>-C it after a while.  It should be
+resilient across phone disconnects and reconnects and start the logcat
+early enough to not miss anything.
+"""
+
+import logging
+import os
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import time
+
+# Map from device_id -> (process, logcat_num)
+devices = {}
+
+
+class TimeoutException(Exception):
+  """Exception used to signal a timeout."""
+  pass
+
+
+class SigtermError(Exception):
+  """Exception used to catch a sigterm."""
+  pass
+
+
+def StartLogcatIfNecessary(device_id, adb_cmd, base_dir):
+  """Spawns a adb logcat process if one is not currently running."""
+  process, logcat_num = devices[device_id]
+  if process:
+    if process.poll() is None:
+      # Logcat process is still happily running
+      return
+    else:
+      logging.info('Logcat for device %s has died', device_id)
+      error_filter = re.compile('- waiting for device -')
+      for line in process.stderr:
+        if not error_filter.match(line):
+          logging.error(device_id + ':   ' + line)
+
+  logging.info('Starting logcat %d for device %s', logcat_num,
+               device_id)
+  logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num)
+  logcat_file = open(os.path.join(base_dir, logcat_filename), 'w')
+  process = subprocess.Popen([adb_cmd, '-s', device_id,
+                              'logcat', '-v', 'threadtime'],
+                             stdout=logcat_file,
+                             stderr=subprocess.PIPE)
+  devices[device_id] = (process, logcat_num + 1)
+
+
+def GetAttachedDevices(adb_cmd):
+  """Gets the device list from adb.
+
+  We use an alarm in this function to avoid deadlocking from an external
+  dependency.
+
+  Args:
+    adb_cmd: binary to run adb
+
+  Returns:
+    list of devices or an empty list on timeout
+  """
+  signal.alarm(2)
+  try:
+    out, err = subprocess.Popen([adb_cmd, 'devices'],
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE).communicate()
+    if err:
+      logging.warning('adb device error %s', err.strip())
+    return re.findall('^(\w+)\tdevice$', out, re.MULTILINE)
+  except TimeoutException:
+    logging.warning('"adb devices" command timed out')
+    return []
+  except (IOError, OSError):
+    logging.exception('Exception from "adb devices"')
+    return []
+  finally:
+    signal.alarm(0)
+
+
+def main(base_dir, adb_cmd='adb'):
+  """Monitor adb forever.  Expects a SIGINT (Ctrl-C) to kill."""
+  # We create the directory to ensure 'run once' semantics
+  if os.path.exists(base_dir):
+    print 'adb_logcat_monitor: %s already exists? Cleaning' % base_dir
+    shutil.rmtree(base_dir, ignore_errors=True)
+
+  os.makedirs(base_dir)
+  logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'),
+                      level=logging.INFO,
+                      format='%(asctime)-2s %(levelname)-8s %(message)s')
+
+  # Set up the alarm for calling 'adb devices'. This is to ensure
+  # our script doesn't get stuck waiting for a process response
+  def TimeoutHandler(_signum, _unused_frame):
+    raise TimeoutException()
+  signal.signal(signal.SIGALRM, TimeoutHandler)
+
+  # Handle SIGTERMs to ensure clean shutdown
+  def SigtermHandler(_signum, _unused_frame):
+    raise SigtermError()
+  signal.signal(signal.SIGTERM, SigtermHandler)
+
+  logging.info('Started with pid %d', os.getpid())
+  pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+
+  try:
+    with open(pid_file_path, 'w') as f:
+      f.write(str(os.getpid()))
+    while True:
+      for device_id in GetAttachedDevices(adb_cmd):
+        if not device_id in devices:
+          subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c'])
+          devices[device_id] = (None, 0)
+
+      for device in devices:
+        # This will spawn logcat watchers for any device ever detected
+        StartLogcatIfNecessary(device, adb_cmd, base_dir)
+
+      time.sleep(5)
+  except SigtermError:
+    logging.info('Received SIGTERM, shutting down')
+  except:
+    logging.exception('Unexpected exception in main.')
+  finally:
+    for process, _ in devices.itervalues():
+      if process:
+        try:
+          process.terminate()
+        except OSError:
+          pass
+    os.remove(pid_file_path)
+
+
+if __name__ == '__main__':
+  if 2 <= len(sys.argv) <= 3:
+    print 'adb_logcat_monitor: Initializing'
+    sys.exit(main(*sys.argv[1:3]))
+
+  print 'Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0]
diff --git a/build/android/adb_logcat_printer.py b/build/android/adb_logcat_printer.py
new file mode 100755
index 0000000..f79a9e4
--- /dev/null
+++ b/build/android/adb_logcat_printer.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shutdown adb_logcat_monitor and print accumulated logs.
+
+To test, call './adb_logcat_printer.py <base_dir>' where
+<base_dir> contains 'adb logcat -v threadtime' files named as
+logcat_<deviceID>_<sequenceNum>
+
+The script will print the files to stdout (or to --output-path), and will
+combine multiple logcats from a single device if there is overlap.
+
+Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
+will attempt to terminate the contained PID by sending a SIGTERM and
+monitoring for the deletion of the aforementioned file.
+"""
+# pylint: disable=W0702
+
+import cStringIO
+import logging
+import optparse
+import os
+import re
+import signal
+import sys
+import time
+
+
+# Set this to debug for more verbose output
+LOG_LEVEL = logging.INFO
+
+
+def CombineLogFiles(list_of_lists, logger):
+  """Splices together multiple logcats from the same device.
+
+  Args:
+    list_of_lists: list of pairs (filename, list of timestamped lines)
+    logger: handler to log events
+
+  Returns:
+    list of lines with duplicates removed
+  """
+  cur_device_log = ['']
+  for cur_file, cur_file_lines in list_of_lists:
+    # Ignore files with just the logcat header
+    if len(cur_file_lines) < 2:
+      continue
+    common_index = 0
+    # Skip this step if list just has empty string
+    if len(cur_device_log) > 1:
+      try:
+        line = cur_device_log[-1]
+        # Used to make sure we only splice on a timestamped line
+        if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} ', line):
+          common_index = cur_file_lines.index(line)
+        else:
+          logger.warning('splice error - no timestamp in "%s"?', line.strip())
+      except ValueError:
+        # The last line was valid but wasn't found in the next file
+        cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
+        logger.info('Unable to splice %s. Incomplete logcat?', cur_file)
+
+    cur_device_log += ['*'*30 + '  %s' % cur_file]
+    cur_device_log.extend(cur_file_lines[common_index:])
+
+  return cur_device_log
+
+
+def FindLogFiles(base_dir):
+  """Search a directory for logcat files.
+
+  Args:
+    base_dir: directory to search
+
+  Returns:
+    Mapping of device_id to a sorted list of file paths for a given device
+  """
+  logcat_filter = re.compile(r'^logcat_(\w+)_(\d+)$')
+  # list of tuples (<device_id>, <seq num>, <full file path>)
+  filtered_list = []
+  for cur_file in os.listdir(base_dir):
+    matcher = logcat_filter.match(cur_file)
+    if matcher:
+      filtered_list += [(matcher.group(1), int(matcher.group(2)),
+                         os.path.join(base_dir, cur_file))]
+  filtered_list.sort()
+  file_map = {}
+  for device_id, _, cur_file in filtered_list:
+    if device_id not in file_map:
+      file_map[device_id] = []
+
+    file_map[device_id] += [cur_file]
+  return file_map
+
+
+def GetDeviceLogs(log_filenames, logger):
+  """Read log files, combine and format.
+
+  Args:
+    log_filenames: mapping of device_id to sorted list of file paths
+    logger: logger handle for logging events
+
+  Returns:
+    list of formatted device logs, one for each device.
+  """
+  device_logs = []
+
+  for device, device_files in log_filenames.iteritems():
+    logger.debug('%s: %s', device, str(device_files))
+    device_file_lines = []
+    for cur_file in device_files:
+      with open(cur_file) as f:
+        device_file_lines += [(cur_file, f.read().splitlines())]
+    combined_lines = CombineLogFiles(device_file_lines, logger)
+    # Prepend each line with a short unique ID so it's easy to see
+    # when the device changes.  We don't use the start of the device
+    # ID because it can be the same among devices.  Example lines:
+    # AB324:  foo
+    # AB324:  blah
+    device_logs += [('\n' + device[-5:] + ':  ').join(combined_lines)]
+  return device_logs
+
+
+def ShutdownLogcatMonitor(base_dir, logger):
+  """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
+  try:
+    monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+    with open(monitor_pid_path) as f:
+      monitor_pid = int(f.readline())
+
+    logger.info('Sending SIGTERM to %d', monitor_pid)
+    os.kill(monitor_pid, signal.SIGTERM)
+    i = 0
+    while True:
+      time.sleep(.2)
+      if not os.path.exists(monitor_pid_path):
+        return
+      if not os.path.exists('/proc/%d' % monitor_pid):
+        logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
+        return
+      logger.info('Waiting for logcat process to terminate.')
+      i += 1
+      if i >= 10:
+        logger.warning('Monitor pid did not terminate. Continuing anyway.')
+        return
+
+  except (ValueError, IOError, OSError):
+    logger.exception('Error signaling logcat monitor - continuing')
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
+  parser.add_option('--output-path',
+                    help='Output file path (if unspecified, prints to stdout)')
+  options, args = parser.parse_args(argv)
+  if len(args) != 1:
+    parser.error('Wrong number of unparsed args')
+  base_dir = args[0]
+  if options.output_path:
+    output_file = open(options.output_path, 'w')
+  else:
+    output_file = sys.stdout
+
+  log_stringio = cStringIO.StringIO()
+  logger = logging.getLogger('LogcatPrinter')
+  logger.setLevel(LOG_LEVEL)
+  sh = logging.StreamHandler(log_stringio)
+  sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
+                                    ' %(message)s'))
+  logger.addHandler(sh)
+
+  try:
+    # Wait at least 5 seconds after base_dir is created before printing.
+    #
+    # The idea is that 'adb logcat > file' output consists of 2 phases:
+    #  1 Dump all the saved logs to the file
+    #  2 Stream log messages as they are generated
+    #
+    # We want to give enough time for phase 1 to complete.  There's no
+    # good method to tell how long to wait, but it usually only takes a
+    # second.  On most bots, this code path won't occur at all, since the
+    # adb_logcat_monitor.py command will have been spawned more than 5
+    # seconds prior to calling this script.
+    try:
+      sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
+    except OSError:
+      sleep_time = 5
+    if sleep_time > 0:
+      logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
+      time.sleep(sleep_time)
+
+    assert os.path.exists(base_dir), '%s does not exist' % base_dir
+    ShutdownLogcatMonitor(base_dir, logger)
+    separator = '\n' + '*' * 80 + '\n\n'
+    for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
+      output_file.write(log)
+      output_file.write(separator)
+    with open(os.path.join(base_dir, 'eventlog')) as f:
+      output_file.write('\nLogcat Monitor Event Log\n')
+      output_file.write(f.read())
+  except:
+    logger.exception('Unexpected exception')
+
+  logger.info('Done.')
+  sh.flush()
+  output_file.write('\nLogcat Printer Event Log\n')
+  output_file.write(log_stringio.getvalue())
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/adb_profile_chrome b/build/android/adb_profile_chrome
new file mode 100755
index 0000000..21f6faf
--- /dev/null
+++ b/build/android/adb_profile_chrome
@@ -0,0 +1,8 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling in chrome.
+exec "$(dirname "$0")"/../../tools/profile_chrome.py "$@"
diff --git a/build/android/adb_reverse_forwarder.py b/build/android/adb_reverse_forwarder.py
new file mode 100755
index 0000000..700e4e9
--- /dev/null
+++ b/build/android/adb_reverse_forwarder.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command line tool for forwarding ports from a device to the host.
+
+Allows an Android device to connect to services running on the host machine,
+i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
+to be built.
+"""
+
+import optparse
+import sys
+import time
+
+from pylib import constants, forwarder
+from pylib.device import device_utils
+from pylib.utils import run_tests_helper
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='Usage: %prog [options] device_port '
+                                 'host_port [device_port_2 host_port_2] ...',
+                                 description=__doc__)
+  parser.add_option('-v',
+                    '--verbose',
+                    dest='verbose_count',
+                    default=0,
+                    action='count',
+                    help='Verbose level (multiple times for more)')
+  parser.add_option('--device',
+                    help='Serial number of device we should use.')
+  parser.add_option('--debug', action='store_const', const='Debug',
+                    dest='build_type', default='Release',
+                    help='Use Debug build of host tools instead of Release.')
+
+  options, args = parser.parse_args(argv)
+  run_tests_helper.SetLogLevel(options.verbose_count)
+
+  if len(args) < 2 or not len(args) % 2:
+    parser.error('Need an even number of ports (device_port/host_port pairs)')
+    sys.exit(1)
+
+  try:
+    port_pairs = map(int, args[1:])
+    port_pairs = zip(port_pairs[::2], port_pairs[1::2])
+  except ValueError:
+    parser.error('Bad port number')
+    sys.exit(1)
+
+  device = device_utils.DeviceUtils(options.device)
+  constants.SetBuildType(options.build_type)
+  try:
+    forwarder.Forwarder.Map(port_pairs, device)
+    while True:
+      time.sleep(60)
+  except KeyboardInterrupt:
+    sys.exit(0)
+  finally:
+    forwarder.Forwarder.UnmapAllDevicePorts(device)
+
+if __name__ == '__main__':
+  main(sys.argv)
diff --git a/build/android/adb_run_android_webview_shell b/build/android/adb_run_android_webview_shell
new file mode 100755
index 0000000..cc9f6d2
--- /dev/null
+++ b/build/android/adb_run_android_webview_shell
@@ -0,0 +1,15 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [ $# -gt 0 ] ; then
+   INTENT_ARGS="-d \"$1\""  # e.g. a URL
+fi
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.android_webview.shell/.AwShellActivity \
+  $INTENT_ARGS
+
diff --git a/build/android/adb_run_chrome_shell b/build/android/adb_run_chrome_shell
new file mode 100755
index 0000000..46558b3
--- /dev/null
+++ b/build/android/adb_run_chrome_shell
@@ -0,0 +1,14 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [ $# -gt 0 ] ; then
+   INTENT_ARGS="-d \"$1\""  # e.g. a URL
+fi
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.chrome.shell/.ChromeShellActivity \
+  $INTENT_ARGS
diff --git a/build/android/adb_run_content_shell b/build/android/adb_run_content_shell
new file mode 100755
index 0000000..17a734c
--- /dev/null
+++ b/build/android/adb_run_content_shell
@@ -0,0 +1,14 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [ $# -gt 0 ] ; then
+   INTENT_ARGS="-d \"$1\""  # e.g. a URL
+fi
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.content_shell_apk/.ContentShellActivity \
+  $INTENT_ARGS
diff --git a/build/android/adb_run_mojo_shell b/build/android/adb_run_mojo_shell
new file mode 100755
index 0000000..6f55fb2
--- /dev/null
+++ b/build/android/adb_run_mojo_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [ $# -gt 0 ] ; then
+   INTENT_ARGS="-d \"$1\""  # e.g. a URL
+fi
+
+adb logcat -c
+adb shell am start -S \
+  -a android.intent.action.VIEW \
+  -n org.chromium.mojo_shell_apk/.MojoShellActivity \
+  $INTENT_ARGS
+adb logcat -s MojoShellApplication MojoShellActivity chromium
diff --git a/build/android/android_exports.gyp b/build/android/android_exports.gyp
new file mode 100644
index 0000000..c259eee
--- /dev/null
+++ b/build/android/android_exports.gyp
@@ -0,0 +1,39 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'android_exports',
+      'type': 'none',
+      'inputs': [
+        '<(DEPTH)/build/android/android_exports.lst',
+      ],
+      'outputs': [
+        '<(android_linker_script)',
+      ],
+      'copies': [
+        {
+          'destination': '<(SHARED_INTERMEDIATE_DIR)',
+          'files': [
+            '<@(_inputs)',
+         ],
+        },
+      ],
+      'conditions': [
+        ['component=="static_library"', {
+          'link_settings': {
+            'ldflags': [
+              # Only export symbols that are specified in version script.
+              '-Wl,--version-script=<(android_linker_script)',
+            ],
+            'ldflags!': [
+              '-Wl,--exclude-libs=ALL',
+            ],
+          },
+        }],
+      ],
+    },
+  ],
+}
diff --git a/build/android/android_exports.lst b/build/android/android_exports.lst
new file mode 100644
index 0000000..6eee232
--- /dev/null
+++ b/build/android/android_exports.lst
@@ -0,0 +1,15 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Default exports specification for chromium shared libraries on android.
+# Check ld version script manual:
+# https://sourceware.org/binutils/docs-2.24/ld/VERSION.html#VERSION
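+#
+# For example, a JNI symbol such as Java_org_chromium_example_Foo_nativeInit
+# (hypothetical) matches "Java_*_native*" below and stays exported, while all
+# unlisted symbols become local.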
+
+{
+  global:
+    Java_*_native*;
+    JNI_OnLoad;
+    __gcov_*;
+  local: *;
+};
diff --git a/build/android/ant/apk-package.xml b/build/android/ant/apk-package.xml
new file mode 100644
index 0000000..99279a4
--- /dev/null
+++ b/build/android/ant/apk-package.xml
@@ -0,0 +1,96 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Copyright (C) 2005-2008 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+         http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+
+<project default="-package">
+  <property name="verbose" value="false" />
+  <property name="out.dir" location="${OUT_DIR}" />
+  <property name="out.absolute.dir" location="${out.dir}" />
+
+  <property name="sdk.dir" location="${ANDROID_SDK_ROOT}"/>
+  <property name="emma.device.jar" location="${EMMA_DEVICE_JAR}" />
+
+  <condition property="emma.enabled" value="true" else="false">
+    <equals arg1="${EMMA_INSTRUMENT}" arg2="1"/>
+  </condition>
+
+  <!-- jar file from where the tasks are loaded -->
+  <path id="android.antlibs">
+    <pathelement path="${sdk.dir}/tools/lib/ant-tasks.jar" />
+  </path>
+
+  <!-- Custom tasks -->
+  <taskdef resource="anttasks.properties" classpathref="android.antlibs" />
+
+  <condition property="build.target" value="release" else="debug">
+    <equals arg1="${CONFIGURATION_NAME}" arg2="Release" />
+  </condition>
+  <condition property="build.is.packaging.debug" value="true" else="false">
+    <equals arg1="${build.target}" arg2="debug" />
+  </condition>
+
+  <!-- Disables automatic signing. -->
+  <property name="build.is.signing.debug" value="false"/>
+
+  <!-- SDK tools assume that out.packaged.file is signed and name it "...-unaligned" -->
+  <property name="out.packaged.file" value="${UNSIGNED_APK_PATH}" />
+
+  <property name="native.libs.absolute.dir" location="${NATIVE_LIBS_DIR}" />
+
+  <!-- Intermediate files -->
+  <property name="resource.package.file.name" value="${RESOURCE_PACKAGED_APK_NAME}" />
+
+  <property name="intermediate.dex.file" location="${DEX_FILE_PATH}" />
+
+  <!-- Macro that enables passing a variable list of external jar files
+       to ApkBuilder. -->
+  <macrodef name="package-helper">
+    <element name="extra-jars" optional="yes" />
+    <sequential>
+      <apkbuilder
+          outfolder="${out.absolute.dir}"
+          resourcefile="${resource.package.file.name}"
+          apkfilepath="${out.packaged.file}"
+          debugpackaging="${build.is.packaging.debug}"
+          debugsigning="${build.is.signing.debug}"
+          verbose="${verbose}"
+          hascode="true"
+          previousBuildType="/"
+          buildType="${build.is.packaging.debug}/${build.is.signing.debug}">
+        <dex path="${intermediate.dex.file}"/>
+        <nativefolder path="${native.libs.absolute.dir}" />
+        <extra-jars/>
+      </apkbuilder>
+    </sequential>
+  </macrodef>
+
+
+  <!-- Packages the application. -->
+  <target name="-package">
+    <if condition="${emma.enabled}">
+      <then>
+        <package-helper>
+          <extra-jars>
+            <jarfile path="${emma.device.jar}" />
+          </extra-jars>
+        </package-helper>
+      </then>
+      <else>
+        <package-helper />
+      </else>
+    </if>
+  </target>
+</project>
diff --git a/build/android/ant/chromium-debug.keystore b/build/android/ant/chromium-debug.keystore
new file mode 100644
index 0000000..67eb0aa
--- /dev/null
+++ b/build/android/ant/chromium-debug.keystore
Binary files differ
diff --git a/build/android/ant/empty/res/.keep b/build/android/ant/empty/res/.keep
new file mode 100644
index 0000000..1fd038b
--- /dev/null
+++ b/build/android/ant/empty/res/.keep
@@ -0,0 +1,2 @@
+# This empty res folder can be passed to aapt while building Java libraries or
+# APKs that don't have any resources.
diff --git a/build/android/asan_symbolize.py b/build/android/asan_symbolize.py
new file mode 100755
index 0000000..10087a6
--- /dev/null
+++ b/build/android/asan_symbolize.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import collections
+import optparse
+import os
+import re
+import sys
+
+from pylib import constants
+
+# Uses symbol.py from third_party/android_platform, not python's.
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                            'third_party/android_platform/development/scripts'))
+import symbol
+
+
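+# Matches ASan stack-frame lines such as (illustrative):
+#   I/asan ( 1234): #0 0xabc12345 (/system/lib/libc.so+0x1234)
+# capturing the log prefix, frame number, address, library path, and offset.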
+_RE_ASAN = re.compile(r'(.*?)(#\S*?) (\S*?) \((.*?)\+(.*?)\)')
+
+def _ParseAsanLogLine(line):
+  m = re.match(_RE_ASAN, line)
+  if not m:
+    return None
+  return {
+      'prefix': m.group(1),
+      'library': m.group(4),
+      'pos': m.group(2),
+      'rel_address': '%08x' % int(m.group(5), 16),
+  }
+
+
+def _FindASanLibraries():
+  asan_lib_dir = os.path.join(constants.DIR_SOURCE_ROOT,
+                              'third_party', 'llvm-build',
+                              'Release+Asserts', 'lib')
+  asan_libs = []
+  for src_dir, _, files in os.walk(asan_lib_dir):
+    asan_libs += [os.path.relpath(os.path.join(src_dir, f))
+                  for f in files
+                  if f.endswith('.so')]
+  return asan_libs
+
+
+def _TranslateLibPath(library, asan_libs):
+  for asan_lib in asan_libs:
+    if os.path.basename(library) == os.path.basename(asan_lib):
+      return '/' + asan_lib
+  return symbol.TranslateLibPath(library)
+
+
+def _Symbolize(asan_input):
+  asan_libs = _FindASanLibraries()
+  libraries = collections.defaultdict(list)
+  asan_lines = []
+  for asan_log_line in [a.rstrip() for a in asan_input]:
+    m = _ParseAsanLogLine(asan_log_line)
+    if m:
+      libraries[m['library']].append(m)
+    asan_lines.append({'raw_log': asan_log_line, 'parsed': m})
+
+  all_symbols = collections.defaultdict(dict)
+  for library, items in libraries.iteritems():
+    libname = _TranslateLibPath(library, asan_libs)
+    lib_relative_addrs = set([i['rel_address'] for i in items])
+    info_dict = symbol.SymbolInformationForSet(libname,
+                                               lib_relative_addrs,
+                                               True)
+    if info_dict:
+      all_symbols[library]['symbols'] = info_dict
+
+  for asan_log_line in asan_lines:
+    m = asan_log_line['parsed']
+    if not m:
+      print asan_log_line['raw_log']
+      continue
+    if (m['library'] in all_symbols and
+        m['rel_address'] in all_symbols[m['library']]['symbols']):
+      s = all_symbols[m['library']]['symbols'][m['rel_address']][0]
+      print '%s%s %s %s' % (m['prefix'], m['pos'], s[0], s[1])
+    else:
+      print asan_log_line['raw_log']
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-l', '--logcat',
+                    help='File containing adb logcat output with ASan stacks. '
+                         'Use stdin if not specified.')
+  options, _ = parser.parse_args()
+  if options.logcat:
+    asan_input = open(options.logcat, 'r')
+  else:
+    asan_input = sys.stdin
+  _Symbolize(asan_input.readlines())
+
+
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/build/android/avd.py b/build/android/avd.py
new file mode 100755
index 0000000..ddff11a
--- /dev/null
+++ b/build/android/avd.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Launches Android Virtual Devices with a set configuration for testing Chrome.
+
+The script will launch a specified number of Android Virtual Devices (AVDs).
+"""
+
+
+import install_emulator_deps
+import logging
+import optparse
+import os
+import re
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import emulator
+
+
+def main(argv):
+  # ANDROID_SDK_ROOT needs to be set to the location of the SDK used to launch
+  # the emulator to find the system images upon launch.
+  emulator_sdk = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk')
+  os.environ['ANDROID_SDK_ROOT'] = emulator_sdk
+
+  opt_parser = optparse.OptionParser(description='AVD script.')
+  opt_parser.add_option('--name', help='Optionally, name of existing AVD to '
+                        'launch. If not specified, new AVDs will be created.')
+  opt_parser.add_option('-n', '--num', dest='emulator_count',
+                        help='Number of emulators to launch (default is 1).',
+                        type='int', default='1')
+  opt_parser.add_option('--abi', default='x86',
+                        help='Platform of emulators to launch (x86 default).')
+  opt_parser.add_option('--api-level', dest='api_level',
+                        help='API level for the image, e.g. 19 for Android 4.4',
+                        type='int', default=constants.ANDROID_SDK_VERSION)
+
+  options, _ = opt_parser.parse_args(argv[1:])
+
+  logging.basicConfig(level=logging.INFO,
+                      format='# %(asctime)-15s: %(message)s')
+  logging.root.setLevel(logging.INFO)
+
+  # Check if KVM is enabled for x86 AVDs and check for x86 system images.
+  # TODO(andrewhayden) Since we can fix all of these with install_emulator_deps
+  # why don't we just run it?
+  if options.abi == 'x86':
+    if not install_emulator_deps.CheckKVM():
+      logging.critical('ERROR: KVM must be enabled in BIOS, and installed. '
+                       'Enable KVM in BIOS and run install_emulator_deps.py')
+      return 1
+    elif not install_emulator_deps.CheckX86Image(options.api_level):
+      logging.critical('ERROR: System image for x86 AVD not installed. Run '
+                       'install_emulator_deps.py')
+      return 1
+
+  if not install_emulator_deps.CheckSDK():
+    logging.critical('ERROR: Emulator SDK not installed. Run '
+                     'install_emulator_deps.py.')
+    return 1
+
+  # If AVD is specified, check that the SDK has the required target. If not,
+  # check that the SDK has the desired target for the temporary AVDs.
+  api_level = options.api_level
+  if options.name:
+    android = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk', 'tools',
+                           'android')
+    avds_output = cmd_helper.GetCmdOutput([android, 'list', 'avd'])
+    names = re.findall('Name: (\w+)', avds_output)
+    api_levels = re.findall('API level (\d+)', avds_output)
+    try:
+      avd_index = names.index(options.name)
+    except ValueError:
+      logging.critical('ERROR: Specified AVD %s does not exist.' % options.name)
+      return 1
+    api_level = int(api_levels[avd_index])
+
+  if not install_emulator_deps.CheckSDKPlatform(api_level):
+    logging.critical('ERROR: Emulator SDK missing required target for API %d. '
+                     'Run install_emulator_deps.py.', api_level)
+    return 1
+
+  if options.name:
+    emulator.LaunchEmulator(options.name, options.abi)
+  else:
+    emulator.LaunchTempEmulators(options.emulator_count, options.abi,
+                                 options.api_level, True)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/bb_run_sharded_steps.py b/build/android/bb_run_sharded_steps.py
new file mode 100755
index 0000000..6aeba5b
--- /dev/null
+++ b/build/android/bb_run_sharded_steps.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""DEPRECATED!
+TODO(bulach): remove me once all other repositories reference
+'test_runner.py perf' directly.
+"""
+
+import optparse
+import sys
+
+from pylib import cmd_helper
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--steps',
+                    help='A JSON file containing all the steps to be '
+                         'sharded.')
+  parser.add_option('--flaky_steps',
+                    help='A JSON file containing steps that are flaky and '
+                         'will have their exit codes ignored.')
+  parser.add_option('-p', '--print_results',
+                    help='Only prints the results for the previously '
+                         'executed step, do not run it again.')
+  options, _ = parser.parse_args(argv)
+  if options.print_results:
+    return cmd_helper.RunCmd(['build/android/test_runner.py', 'perf',
+                              '--print-step', options.print_results])
+  flaky_options = []
+  if options.flaky_steps:
+    flaky_options = ['--flaky-steps', options.flaky_steps]
+  return cmd_helper.RunCmd(['build/android/test_runner.py', 'perf', '-v',
+                            '--steps', options.steps] + flaky_options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/buildbot/OWNERS b/build/android/buildbot/OWNERS
new file mode 100644
index 0000000..425f1d9
--- /dev/null
+++ b/build/android/buildbot/OWNERS
@@ -0,0 +1,9 @@
+set noparent
+
+cmp@chromium.org
+craigdh@chromium.org
+frankf@chromium.org
+navabi@chromium.org
+
+# backup
+ilevy@chromium.org
diff --git a/build/android/buildbot/bb_annotations.py b/build/android/buildbot/bb_annotations.py
new file mode 100644
index 0000000..059d673
--- /dev/null
+++ b/build/android/buildbot/bb_annotations.py
@@ -0,0 +1,46 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to print buildbot messages."""
+
+def PrintLink(label, url):
+  """Adds a link with name |label| linking to |url| to current buildbot step.
+
+  Args:
+    label: A string with the name of the label.
+    url: A string of the URL.
+  """
+  print '@@@STEP_LINK@%s@%s@@@' % (label, url)
+
+
+def PrintMsg(msg):
+  """Appends |msg| to the current buildbot step text.
+
+  Args:
+    msg: String to be appended.
+  """
+  print '@@@STEP_TEXT@%s@@@' % msg
+
+
+def PrintSummaryText(msg):
+  """Appends |msg| to main build summary. Visible from waterfall.
+
+  Args:
+    msg: String to be appended.
+  """
+  print '@@@STEP_SUMMARY_TEXT@%s@@@' % msg
+
+
+def PrintError():
+  """Marks the current step as failed."""
+  print '@@@STEP_FAILURE@@@'
+
+
+def PrintWarning():
+  """Marks the current step with a warning."""
+  print '@@@STEP_WARNINGS@@@'
+
+
+def PrintNamedStep(step):
+  print '@@@BUILD_STEP %s@@@' % step
diff --git a/build/android/buildbot/bb_device_status_check.py b/build/android/buildbot/bb_device_status_check.py
new file mode 100755
index 0000000..2eb3626
--- /dev/null
+++ b/build/android/buildbot/bb_device_status_check.py
@@ -0,0 +1,391 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A class to keep track of devices across builds and report state."""
+import json
+import logging
+import optparse
+import os
+import psutil
+import re
+import signal
+import smtplib
+import subprocess
+import sys
+import time
+import urllib
+
+import bb_annotations
+import bb_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__),
+                             os.pardir, os.pardir, 'util', 'lib',
+                             'common'))
+import perf_tests_results_helper  # pylint: disable=F0401
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import android_commands
+from pylib import constants
+from pylib.cmd_helper import GetCmdOutput
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+from pylib.device import device_list
+from pylib.device import device_utils
+
+def DeviceInfo(serial, options):
+  """Gathers info on a device via various adb calls.
+
+  Args:
+    serial: The serial of the attached device to construct info about.
+    options: Command line options; the provisioning check can be skipped via
+             --no-provisioning-check.
+
+  Returns:
+    Tuple of device type, build id, battery level, report as a string, error
+    messages, and a boolean indicating whether the device can be used for
+    testing.
+  """
+
+  device_adb = device_utils.DeviceUtils(serial)
+  device_type = device_adb.GetProp('ro.build.product')
+  device_build = device_adb.GetProp('ro.build.id')
+  device_build_type = device_adb.GetProp('ro.build.type')
+  device_product_name = device_adb.GetProp('ro.product.name')
+
+  try:
+    battery_info = device_adb.old_interface.GetBatteryInfo()
+  except Exception as e:
+    battery_info = {}
+    logging.error('Unable to obtain battery info for %s, %s', serial, e)
+
+  def _GetData(re_expression, line, lambda_function=lambda x:x):
+    if not line:
+      return 'Unknown'
+    found = re.findall(re_expression, line)
+    if found and len(found):
+      return lambda_function(found[0])
+    return 'Unknown'
+
+  battery_level = int(battery_info.get('level', 100))
+  imei_slice = _GetData('Device ID = (\d+)',
+                        device_adb.old_interface.GetSubscriberInfo(),
+                        lambda x: x[-6:])
+  report = ['Device %s (%s)' % (serial, device_type),
+            '  Build: %s (%s)' %
+              (device_build, device_adb.GetProp('ro.build.fingerprint')),
+            '  Current Battery Service state: ',
+            '\n'.join(['    %s: %s' % (k, v)
+                       for k, v in battery_info.iteritems()]),
+            '  IMEI slice: %s' % imei_slice,
+            '  Wifi IP: %s' % device_adb.GetProp('dhcp.wlan0.ipaddress'),
+            '']
+
+  errors = []
+  dev_good = True
+  if battery_level < 15:
+    errors += ['Device critically low in battery. Turning off device.']
+    dev_good = False
+  if not options.no_provisioning_check:
+    setup_wizard_disabled = (
+        device_adb.GetProp('ro.setupwizard.mode') == 'DISABLED')
+    if not setup_wizard_disabled and device_build_type != 'user':
+      errors += ['Setup wizard not disabled. Was it provisioned correctly?']
+  if (device_product_name == 'mantaray' and
+      battery_info.get('AC powered', None) != 'true'):
+    errors += ['Mantaray device not connected to AC power.']
+
+  # Turn off devices with low battery.
+  if battery_level < 15:
+    try:
+      device_adb.EnableRoot()
+    except device_errors.CommandFailedError as e:
+      # Attempt shutdown anyway.
+      # TODO(jbudorick) Handle this exception appropriately after interface
+      #                 conversions are finished.
+      logging.error(str(e))
+    device_adb.old_interface.Shutdown()
+  full_report = '\n'.join(report)
+  return device_type, device_build, battery_level, full_report, errors, dev_good
+
+
+def CheckForMissingDevices(options, adb_online_devs):
+  """Uses file of previous online devices to detect broken phones.
+
+  Args:
+    options: out_dir parameter of options argument is used as the base
+             directory to load and update the cache file.
+    adb_online_devs: A list of serial numbers of the currently visible
+                     and online attached devices.
+
+  Returns:
+    A list of error messages to surface to the sheriff, or None if all
+    previously seen devices are still attached.
+  """
+  # TODO(navabi): remove this once the bug that causes different number
+  # of devices to be detected between calls is fixed.
+  logger = logging.getLogger()
+  logger.setLevel(logging.INFO)
+
+  out_dir = os.path.abspath(options.out_dir)
+
+  # last_devices denotes all known devices prior to this run
+  last_devices_path = os.path.join(out_dir, device_list.LAST_DEVICES_FILENAME)
+  last_missing_devices_path = os.path.join(out_dir,
+      device_list.LAST_MISSING_DEVICES_FILENAME)
+  try:
+    last_devices = device_list.GetPersistentDeviceList(last_devices_path)
+  except IOError:
+    # Ignore error, file might not exist
+    last_devices = []
+
+  try:
+    last_missing_devices = device_list.GetPersistentDeviceList(
+        last_missing_devices_path)
+  except IOError:
+    last_missing_devices = []
+
+  missing_devs = list(set(last_devices) - set(adb_online_devs))
+  new_missing_devs = list(set(missing_devs) - set(last_missing_devices))
+
+  if new_missing_devs and os.environ.get('BUILDBOT_SLAVENAME'):
+    logging.info('new_missing_devs %s' % new_missing_devs)
+    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
+    bb_annotations.PrintSummaryText(devices_missing_msg)
+
+    from_address = 'chrome-bot@chromium.org'
+    to_addresses = ['chrome-labs-tech-ticket@google.com',
+                    'chrome-android-device-alert@google.com']
+    cc_addresses = ['chrome-android-device-alert@google.com']
+    subject = 'Devices offline on %s, %s, %s' % (
+      os.environ.get('BUILDBOT_SLAVENAME'),
+      os.environ.get('BUILDBOT_BUILDERNAME'),
+      os.environ.get('BUILDBOT_BUILDNUMBER'))
+    msg = ('Please reboot the following devices:\n%s' %
+           '\n'.join(map(str, new_missing_devs)))
+    SendEmail(from_address, to_addresses, cc_addresses, subject, msg)
+
+  all_known_devices = list(set(adb_online_devs) | set(last_devices))
+  device_list.WritePersistentDeviceList(last_devices_path, all_known_devices)
+  device_list.WritePersistentDeviceList(last_missing_devices_path, missing_devs)
+
+  if not all_known_devices:
+    # This can happen if for some reason the .last_devices file is not
+    # present or if it was empty.
+    return ['No online devices. Have any devices been plugged in?']
+  if missing_devs:
+    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
+    bb_annotations.PrintSummaryText(devices_missing_msg)
+
+    # TODO(navabi): Debug by printing both output from GetCmdOutput and
+    # GetAttachedDevices to compare results.
+    crbug_link = ('https://code.google.com/p/chromium/issues/entry?summary='
+                  '%s&comment=%s&labels=Restrict-View-Google,OS-Android,Infra' %
+                  (urllib.quote('Device Offline'),
+                   urllib.quote('Buildbot: %s %s\n'
+                                'Build: %s\n'
+                                '(please don\'t change any labels)' %
+                                (os.environ.get('BUILDBOT_BUILDERNAME'),
+                                 os.environ.get('BUILDBOT_SLAVENAME'),
+                                 os.environ.get('BUILDBOT_BUILDNUMBER')))))
+    return ['Current online devices: %s' % adb_online_devs,
+            '%s are no longer visible. Were they removed?\n' % missing_devs,
+            'SHERIFF:\n',
+            '@@@STEP_LINK@Click here to file a bug@%s@@@\n' % crbug_link,
+            'Cache file: %s\n\n' % last_devices_path,
+            'adb devices: %s' % GetCmdOutput(['adb', 'devices']),
+            'adb devices(GetAttachedDevices): %s' % adb_online_devs]
+  else:
+    new_devs = set(adb_online_devs) - set(last_devices)
+    if new_devs and os.path.exists(last_devices_path):
+      bb_annotations.PrintWarning()
+      bb_annotations.PrintSummaryText(
+          '%d new devices detected' % len(new_devs))
+      print ('New devices detected %s. And now back to your '
+             'regularly scheduled program.' % list(new_devs))
+
+
+def SendEmail(from_address, to_addresses, cc_addresses, subject, msg):
+  msg_body = '\r\n'.join(['From: %s' % from_address,
+                          'To: %s' % ', '.join(to_addresses),
+                          'CC: %s' % ', '.join(cc_addresses),
+                          'Subject: %s' % subject, '', msg])
+  try:
+    server = smtplib.SMTP('localhost')
+    server.sendmail(from_address, to_addresses, msg_body)
+    server.quit()
+  except Exception as e:
+    print 'Failed to send alert email. Error: %s' % e
+
+
+def RestartUsb():
+  if not os.path.isfile('/usr/bin/restart_usb'):
+    print ('ERROR: Could not restart usb. /usr/bin/restart_usb not installed '
+           'on host (see BUG=305769).')
+    return False
+
+  lsusb_proc = bb_utils.SpawnCmd(['lsusb'], stdout=subprocess.PIPE)
+  lsusb_output, _ = lsusb_proc.communicate()
+  if lsusb_proc.returncode:
+    print ('Error: Could not get the list of USB devices (lsusb failed).')
+    # Return False rather than the non-zero (truthy) return code so that the
+    # caller's "if not RestartUsb()" check correctly detects the failure.
+    return False
+
+  usb_devices = [re.findall('Bus (\d\d\d) Device (\d\d\d)', lsusb_line)[0]
+                 for lsusb_line in lsusb_output.strip().split('\n')]
+
+  all_restarted = True
+  # Walk USB devices from leaves up (i.e reverse sorted) restarting the
+  # connection. If a parent node (e.g. usb hub) is restarted before the
+  # devices connected to it, the (bus, dev) for the hub can change, making the
+  # output we have wrong. This way we restart the devices before the hub.
+  for (bus, dev) in reversed(sorted(usb_devices)):
+    # Can not restart root usb connections
+    if dev != '001':
+      return_code = bb_utils.RunCmd(['/usr/bin/restart_usb', bus, dev])
+      if return_code:
+        print 'Error restarting USB device /dev/bus/usb/%s/%s' % (bus, dev)
+        all_restarted = False
+      else:
+        print 'Restarted USB device /dev/bus/usb/%s/%s' % (bus, dev)
+
+  return all_restarted
+
+
+def KillAllAdb():
+  def GetAllAdb():
+    for p in psutil.process_iter():
+      try:
+        if 'adb' in p.name:
+          yield p
+      except (psutil.error.NoSuchProcess, psutil.error.AccessDenied):
+        pass
+
+  for sig in [signal.SIGTERM, signal.SIGQUIT, signal.SIGKILL]:
+    for p in GetAllAdb():
+      try:
+        print 'kill %d %d (%s [%s])' % (sig, p.pid, p.name,
+            ' '.join(p.cmdline))
+        p.send_signal(sig)
+      except (psutil.error.NoSuchProcess, psutil.error.AccessDenied):
+        pass
+  for p in GetAllAdb():
+    try:
+      print 'Unable to kill %d (%s [%s])' % (p.pid, p.name, ' '.join(p.cmdline))
+    except (psutil.error.NoSuchProcess, psutil.error.AccessDenied):
+      pass
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('', '--out-dir',
+                    help='Directory where the device path is stored',
+                    default=os.path.join(constants.DIR_SOURCE_ROOT, 'out'))
+  parser.add_option('--no-provisioning-check', action='store_true',
+                    help='Will not check if devices are provisioned properly.')
+  parser.add_option('--device-status-dashboard', action='store_true',
+                    help='Output device status data for dashboard.')
+  parser.add_option('--restart-usb', action='store_true',
+                    help='Restart USB ports before running device check.')
+  parser.add_option('--json-output',
+                    help='Output JSON information into a specified file.')
+
+  options, args = parser.parse_args()
+  if args:
+    parser.error('Unknown options %s' % args)
+
+  # Remove the last build's "bad devices" before checking device statuses.
+  device_blacklist.ResetBlacklist()
+
+  try:
+    expected_devices = device_list.GetPersistentDeviceList(
+        os.path.join(options.out_dir, device_list.LAST_DEVICES_FILENAME))
+  except IOError:
+    expected_devices = []
+  devices = android_commands.GetAttachedDevices()
+  # Only restart usb if devices are missing.
+  if set(expected_devices) != set(devices):
+    print 'expected_devices: %s, devices: %s' % (expected_devices, devices)
+    KillAllAdb()
+    retries = 5
+    usb_restarted = True
+    if options.restart_usb:
+      if not RestartUsb():
+        usb_restarted = False
+        bb_annotations.PrintWarning()
+        print 'USB reset stage failed, wait for any device to come back.'
+    while retries:
+      print 'retry adb devices...'
+      time.sleep(1)
+      devices = android_commands.GetAttachedDevices()
+      if set(expected_devices) == set(devices):
+        # All devices are online, keep going.
+        break
+      if not usb_restarted and devices:
+        # The USB wasn't restarted, but there's at least one device online.
+        # No point in trying to wait for all devices.
+        break
+      retries -= 1
+
+  # TODO(navabi): Test to make sure this fails and then fix call
+  offline_devices = android_commands.GetAttachedDevices(
+      hardware=False, emulator=False, offline=True)
+
+  types, builds, batteries, reports, errors = [], [], [], [], []
+  fail_step_lst = []
+  if devices:
+    types, builds, batteries, reports, errors, fail_step_lst = (
+        zip(*[DeviceInfo(dev, options) for dev in devices]))
+
+  err_msg = CheckForMissingDevices(options, devices) or []
+
+  unique_types = list(set(types))
+  unique_builds = list(set(builds))
+
+  bb_annotations.PrintMsg('Online devices: %d. Device types %s, builds %s'
+                           % (len(devices), unique_types, unique_builds))
+  print '\n'.join(reports)
+
+  for serial, dev_errors in zip(devices, errors):
+    if dev_errors:
+      err_msg += ['%s errors:' % serial]
+      err_msg += ['    %s' % error for error in dev_errors]
+
+  if err_msg:
+    bb_annotations.PrintWarning()
+    msg = '\n'.join(err_msg)
+    print msg
+    from_address = 'buildbot@chromium.org'
+    to_addresses = ['chromium-android-device-alerts@google.com']
+    bot_name = os.environ.get('BUILDBOT_BUILDERNAME')
+    slave_name = os.environ.get('BUILDBOT_SLAVENAME')
+    subject = 'Device status check errors on %s, %s.' % (slave_name, bot_name)
+    SendEmail(from_address, to_addresses, [], subject, msg)
+
+  if options.device_status_dashboard:
+    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OnlineDevices',
+                                              [len(devices)], 'devices')
+    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OfflineDevices',
+                                              [len(offline_devices)], 'devices',
+                                              'unimportant')
+    for serial, battery in zip(devices, batteries):
+      perf_tests_results_helper.PrintPerfResult('DeviceBattery', serial,
+                                                [battery], '%',
+                                                'unimportant')
+
+  if options.json_output:
+    with open(options.json_output, 'wb') as f:
+      f.write(json.dumps({
+        'online_devices': devices,
+        'offline_devices': offline_devices,
+        'expected_devices': expected_devices,
+        'unique_types': unique_types,
+        'unique_builds': unique_builds,
+      }))
+
+  if False in fail_step_lst:
+    # TODO(navabi): Build fails on device status check step if there exists any
+    # devices with critically low battery. Remove those devices from testing,
+    # allowing build to continue with good devices.
+    return 2
+
+  if not devices:
+    return 1
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/buildbot/bb_device_steps.py b/build/android/buildbot/bb_device_steps.py
new file mode 100755
index 0000000..c0f9fe7
--- /dev/null
+++ b/build/android/buildbot/bb_device_steps.py
@@ -0,0 +1,741 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import glob
+import hashlib
+import json
+import os
+import random
+import re
+import shutil
+import sys
+
+import bb_utils
+import bb_annotations
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+import provision_devices
+from pylib import android_commands
+from pylib import constants
+from pylib.device import device_utils
+from pylib.gtest import gtest_config
+
+CHROME_SRC_DIR = bb_utils.CHROME_SRC
+DIR_BUILD_ROOT = os.path.dirname(CHROME_SRC_DIR)
+CHROME_OUT_DIR = bb_utils.CHROME_OUT_DIR
+BLINK_SCRIPTS_DIR = 'third_party/WebKit/Tools/Scripts'
+
+SLAVE_SCRIPTS_DIR = os.path.join(bb_utils.BB_BUILD_DIR, 'scripts', 'slave')
+LOGCAT_DIR = os.path.join(bb_utils.CHROME_OUT_DIR, 'logcat')
+GS_URL = 'https://storage.googleapis.com'
+GS_AUTH_URL = 'https://storage.cloud.google.com'
+
+# Describes an instrumentation test suite:
+#   test: Name of test we're running.
+#   apk: apk to be installed.
+#   apk_package: package for the apk to be installed.
+#   test_apk: apk to run tests on.
+#   test_data: data folder in format destination:source.
+#   host_driven_root: The host-driven test root directory.
+#   annotation: Annotation of the tests to include.
+#   exclude_annotation: The annotation of the tests to exclude.
+I_TEST = collections.namedtuple('InstrumentationTest', [
+    'name', 'apk', 'apk_package', 'test_apk', 'test_data', 'host_driven_root',
+    'annotation', 'exclude_annotation', 'extra_flags'])
+
+
+def SrcPath(*path):
+  return os.path.join(CHROME_SRC_DIR, *path)
+
+
+def I(name, apk, apk_package, test_apk, test_data, host_driven_root=None,
+      annotation=None, exclude_annotation=None, extra_flags=None):
+  return I_TEST(name, apk, apk_package, test_apk, test_data, host_driven_root,
+                annotation, exclude_annotation, extra_flags)
+
+INSTRUMENTATION_TESTS = dict((suite.name, suite) for suite in [
+    I('ContentShell',
+      'ContentShell.apk',
+      'org.chromium.content_shell_apk',
+      'ContentShellTest',
+      'content:content/test/data/android/device_files'),
+    I('ChromeShell',
+      'ChromeShell.apk',
+      'org.chromium.chrome.shell',
+      'ChromeShellTest',
+      'chrome:chrome/test/data/android/device_files',
+      constants.CHROME_SHELL_HOST_DRIVEN_DIR),
+    I('AndroidWebView',
+      'AndroidWebView.apk',
+      'org.chromium.android_webview.shell',
+      'AndroidWebViewTest',
+      'webview:android_webview/test/data/device_files'),
+    ])
+
+VALID_TESTS = set(['chromedriver', 'chrome_proxy', 'gpu', 'mojo', 'sync',
+                   'telemetry_perf_unittests', 'ui', 'unit', 'webkit',
+                   'webkit_layout'])
+
+RunCmd = bb_utils.RunCmd
+
+
+def _GetRevision(options):
+  """Get the SVN revision number.
+
+  Args:
+    options: options object.
+
+  Returns:
+    The revision number.
+  """
+  revision = options.build_properties.get('got_revision')
+  if not revision:
+    revision = options.build_properties.get('revision', 'testing')
+  return revision
+
+
+def _RunTest(options, cmd, suite):
+  """Run test command with runtest.py.
+
+  Args:
+    options: options object.
+    cmd: the command to run.
+    suite: test name.
+  """
+  property_args = bb_utils.EncodeProperties(options)
+  args = [os.path.join(SLAVE_SCRIPTS_DIR, 'runtest.py')] + property_args
+  args += ['--test-platform', 'android']
+  if options.factory_properties.get('generate_gtest_json'):
+    args.append('--generate-json-file')
+    args += ['-o', 'gtest-results/%s' % suite,
+             '--annotate', 'gtest',
+             '--build-number', str(options.build_properties.get('buildnumber',
+                                                                '')),
+             '--builder-name', options.build_properties.get('buildername', '')]
+  if options.target == 'Release':
+    args += ['--target', 'Release']
+  else:
+    args += ['--target', 'Debug']
+  args += cmd
+  RunCmd(args, cwd=DIR_BUILD_ROOT)
+
+
+def RunTestSuites(options, suites, suites_options=None):
+  """Manages an invocation of test_runner.py for gtests.
+
+  Args:
+    options: options object.
+    suites: List of suite names to run.
+    suites_options: Command line options dictionary for particular suites.
+                    For example,
+                    {'content_browsertests': ['--num_retries=1', '--release']}
+                    will add the options only to content_browsertests.
+  """
+
+  if not suites_options:
+    suites_options = {}
+
+  args = ['--verbose']
+  if options.target == 'Release':
+    args.append('--release')
+  if options.asan:
+    args.append('--tool=asan')
+  if options.gtest_filter:
+    args.append('--gtest-filter=%s' % options.gtest_filter)
+
+  for suite in suites:
+    bb_annotations.PrintNamedStep(suite)
+    cmd = [suite] + args
+    cmd += suites_options.get(suite, [])
+    if suite == 'content_browsertests':
+      cmd.append('--num_retries=1')
+    _RunTest(options, cmd, suite)
+
+
+def RunChromeDriverTests(options):
+  """Run all the steps for running chromedriver tests."""
+  bb_annotations.PrintNamedStep('chromedriver_annotation')
+  RunCmd(['chrome/test/chromedriver/run_buildbot_steps.py',
+          '--android-packages=%s,%s,%s,%s' %
+          ('chrome_shell',
+           'chrome_stable',
+           'chrome_beta',
+           'chromedriver_webview_shell'),
+          '--revision=%s' % _GetRevision(options),
+          '--update-log'])
+
+def RunChromeProxyTests(options):
+  """Run the chrome_proxy tests.
+
+  Args:
+    options: options object.
+  """
+  InstallApk(options, INSTRUMENTATION_TESTS['ChromeShell'], False)
+  args = ['--browser', 'android-chrome-shell']
+  devices = android_commands.GetAttachedDevices()
+  if devices:
+    args = args + ['--device', devices[0]]
+  bb_annotations.PrintNamedStep('chrome_proxy')
+  RunCmd(['tools/chrome_proxy/run_tests'] + args)
+
+def RunChromeSyncShellTests(options):
+  """Run the chrome sync shell tests"""
+  test = I('ChromeSyncShell',
+           'ChromeSyncShell.apk',
+           'org.chromium.chrome.browser.sync',
+           'ChromeSyncShellTest.apk',
+           'chrome:chrome/test/data/android/device_files')
+  RunInstrumentationSuite(options, test)
+
+def RunTelemetryPerfUnitTests(options):
+  """Runs the telemetry perf unit tests.
+
+  Args:
+    options: options object.
+  """
+  InstallApk(options, INSTRUMENTATION_TESTS['ChromeShell'], False)
+  args = ['--browser', 'android-chrome-shell']
+  devices = android_commands.GetAttachedDevices()
+  if devices:
+    args = args + ['--device', devices[0]]
+  bb_annotations.PrintNamedStep('telemetry_perf_unittests')
+  RunCmd(['tools/perf/run_tests'] + args)
+
+
+def RunMojoTests(options):
+  """Runs the mojo unit tests.
+
+  Args:
+    options: options object.
+  """
+  test = I('MojoTest',
+           None,
+           'org.chromium.mojo.tests',
+           'MojoTest',
+           'bindings:mojo/public/interfaces/bindings/tests/data')
+  RunInstrumentationSuite(options, test)
+
+
+def InstallApk(options, test, print_step=False):
+  """Install an apk to all phones.
+
+  Args:
+    options: options object
+    test: An I_TEST namedtuple
+    print_step: Print a buildbot step
+  """
+  if print_step:
+    bb_annotations.PrintNamedStep('install_%s' % test.name.lower())
+
+  args = ['--apk_package', test.apk_package]
+  if options.target == 'Release':
+    args.append('--release')
+  args.append(test.apk)
+
+  RunCmd(['build/android/adb_install_apk.py'] + args, halt_on_failure=True)
+
+
+def RunInstrumentationSuite(options, test, flunk_on_failure=True,
+                            python_only=False, official_build=False):
+  """Manages an invocation of test_runner.py for instrumentation tests.
+
+  Args:
+    options: options object
+    test: An I_TEST namedtuple
+    flunk_on_failure: Flunk the step if tests fail.
+    python_only: Run only host-driven Python tests.
+    official_build: Run official-build tests.
+  """
+  bb_annotations.PrintNamedStep('%s_instrumentation_tests' % test.name.lower())
+
+  if test.apk:
+    InstallApk(options, test)
+  args = ['--test-apk', test.test_apk, '--verbose']
+  if test.test_data:
+    args.extend(['--test_data', test.test_data])
+  if options.target == 'Release':
+    args.append('--release')
+  if options.asan:
+    args.append('--tool=asan')
+  if options.flakiness_server:
+    args.append('--flakiness-dashboard-server=%s' %
+                options.flakiness_server)
+  if options.coverage_bucket:
+    args.append('--coverage-dir=%s' % options.coverage_dir)
+  if test.host_driven_root:
+    args.append('--host-driven-root=%s' % test.host_driven_root)
+  if test.annotation:
+    args.extend(['-A', test.annotation])
+  if test.exclude_annotation:
+    args.extend(['-E', test.exclude_annotation])
+  if test.extra_flags:
+    args.extend(test.extra_flags)
+  if python_only:
+    args.append('-p')
+  if official_build:
+    # The option needs to be assigned 'True' as it does not have an action
+    # associated with it.
+    args.append('--official-build')
+
+  RunCmd(['build/android/test_runner.py', 'instrumentation'] + args,
+         flunk_on_failure=flunk_on_failure)
+
+
+def RunWebkitLint():
+  """Lint WebKit's TestExpectation files."""
+  bb_annotations.PrintNamedStep('webkit_lint')
+  RunCmd([SrcPath(os.path.join(BLINK_SCRIPTS_DIR, 'lint-test-expectations'))])
+
+
+def RunWebkitLayoutTests(options):
+  """Run layout tests on an actual device."""
+  bb_annotations.PrintNamedStep('webkit_tests')
+  cmd_args = [
+      '--no-show-results',
+      '--no-new-test-results',
+      '--full-results-html',
+      '--clobber-old-results',
+      '--exit-after-n-failures', '5000',
+      '--exit-after-n-crashes-or-timeouts', '100',
+      '--debug-rwt-logging',
+      '--results-directory', '../layout-test-results',
+      '--target', options.target,
+      '--builder-name', options.build_properties.get('buildername', ''),
+      '--build-number', str(options.build_properties.get('buildnumber', '')),
+      '--master-name', 'ChromiumWebkit',  # TODO: Get this from the cfg.
+      '--build-name', options.build_properties.get('buildername', ''),
+      '--platform=android']
+
+  for flag in 'test_results_server', 'driver_name', 'additional_drt_flag':
+    if flag in options.factory_properties:
+      cmd_args.extend(['--%s' % flag.replace('_', '-'),
+                       options.factory_properties.get(flag)])
+
+  for f in options.factory_properties.get('additional_expectations', []):
+    cmd_args.extend(
+        ['--additional-expectations=%s' % os.path.join(CHROME_SRC_DIR, *f)])
+
+  # TODO(dpranke): Remove this block after
+  # https://codereview.chromium.org/12927002/ lands.
+  for f in options.factory_properties.get('additional_expectations_files', []):
+    cmd_args.extend(
+        ['--additional-expectations=%s' % os.path.join(CHROME_SRC_DIR, *f)])
+
+  exit_code = RunCmd(
+      [SrcPath(os.path.join(BLINK_SCRIPTS_DIR, 'run-webkit-tests'))] + cmd_args)
+  if exit_code == 255: # test_run_results.UNEXPECTED_ERROR_EXIT_STATUS
+    bb_annotations.PrintMsg('?? (crashed or hung)')
+  elif exit_code == 254: # test_run_results.NO_DEVICES_EXIT_STATUS
+    bb_annotations.PrintMsg('?? (no devices found)')
+  elif exit_code == 253: # test_run_results.NO_TESTS_EXIT_STATUS
+    bb_annotations.PrintMsg('?? (no tests found)')
+  else:
+    full_results_path = os.path.join('..', 'layout-test-results',
+                                     'full_results.json')
+    if os.path.exists(full_results_path):
+      full_results = json.load(open(full_results_path))
+      unexpected_passes, unexpected_failures, unexpected_flakes = (
+          _ParseLayoutTestResults(full_results))
+      if unexpected_failures:
+        _PrintDashboardLink('failed', unexpected_failures,
+                            max_tests=25)
+      elif unexpected_passes:
+        _PrintDashboardLink('unexpected passes', unexpected_passes,
+                            max_tests=10)
+      if unexpected_flakes:
+        _PrintDashboardLink('unexpected flakes', unexpected_flakes,
+                            max_tests=10)
+
+      if exit_code == 0 and (unexpected_passes or unexpected_flakes):
+        # If exit_code != 0, RunCmd() will have already printed an error.
+        bb_annotations.PrintWarning()
+    else:
+      bb_annotations.PrintError()
+      bb_annotations.PrintMsg('?? (results missing)')
+
+  if options.factory_properties.get('archive_webkit_results', False):
+    bb_annotations.PrintNamedStep('archive_webkit_results')
+    base = 'https://storage.googleapis.com/chromium-layout-test-archives'
+    builder_name = options.build_properties.get('buildername', '')
+    build_number = str(options.build_properties.get('buildnumber', ''))
+    results_link = '%s/%s/%s/layout-test-results/results.html' % (
+        base, EscapeBuilderName(builder_name), build_number)
+    bb_annotations.PrintLink('results', results_link)
+    bb_annotations.PrintLink('(zip)', '%s/%s/%s/layout-test-results.zip' % (
+        base, EscapeBuilderName(builder_name), build_number))
+    gs_bucket = 'gs://chromium-layout-test-archives'
+    RunCmd([os.path.join(SLAVE_SCRIPTS_DIR, 'chromium',
+                         'archive_layout_test_results.py'),
+            '--results-dir', '../../layout-test-results',
+            '--build-number', build_number,
+            '--builder-name', builder_name,
+            '--gs-bucket', gs_bucket],
+            cwd=DIR_BUILD_ROOT)
+
+
+def _ParseLayoutTestResults(results):
+  """Extract the failures from the test run."""
+  # Cloned from third_party/WebKit/Tools/Scripts/print-json-test-results
+  tests = _ConvertTrieToFlatPaths(results['tests'])
+  failures = {}
+  flakes = {}
+  passes = {}
+  for (test, result) in tests.iteritems():
+    if result.get('is_unexpected'):
+      actual_results = result['actual'].split()
+      expected_results = result['expected'].split()
+      if len(actual_results) > 1:
+        # We report the first failure type back, even if the second
+        # was more severe.
+        if actual_results[1] in expected_results:
+          flakes[test] = actual_results[0]
+        else:
+          failures[test] = actual_results[0]
+      elif actual_results[0] == 'PASS':
+        passes[test] = result
+      else:
+        failures[test] = actual_results[0]
+
+  return (passes, failures, flakes)
+
+
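+# Illustrative example (input shape assumed from full_results.json): calling
+# _ConvertTrieToFlatPaths({'fast': {'html': {'a.html': {'actual': 'PASS',
+# 'expected': 'PASS'}}}}) returns
+# {'fast/html/a.html': {'actual': 'PASS', 'expected': 'PASS'}}.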
+def _ConvertTrieToFlatPaths(trie, prefix=None):
+  """Flatten the trie of failures into a list."""
+  # Cloned from third_party/WebKit/Tools/Scripts/print-json-test-results
+  result = {}
+  for name, data in trie.iteritems():
+    if prefix:
+      name = prefix + '/' + name
+
+    if len(data) and 'actual' not in data and 'expected' not in data:
+      result.update(_ConvertTrieToFlatPaths(data, name))
+    else:
+      result[name] = data
+
+  return result
+
+
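+# E.g. (illustrative) _PrintDashboardLink('failed', ['fast/a.html'], 25)
+# emits a link titled '1 failed: fast/a.html' to the flakiness dashboard.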
+def _PrintDashboardLink(link_text, tests, max_tests):
+  """Add a link to the flakiness dashboard in the step annotations."""
+  # |tests| may be a dict (test -> result); convert it to a sorted list so
+  # it can be sliced below.
+  tests = sorted(tests)
+  if len(tests) > max_tests:
+    test_list_text = ' '.join(tests[:max_tests]) + ' and more'
+  else:
+    test_list_text = ' '.join(tests)
+
+  dashboard_base = ('http://test-results.appspot.com'
+                    '/dashboards/flakiness_dashboard.html#'
+                    'master=ChromiumWebkit&tests=')
+
+  bb_annotations.PrintLink('%d %s: %s' %
+                           (len(tests), link_text, test_list_text),
+                           dashboard_base + ','.join(tests))
+
+
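+# E.g. EscapeBuilderName('Android Tests (dbg)') returns 'Android_Tests__dbg_',
+# which is safe to embed in URLs and Google Storage paths.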
+def EscapeBuilderName(builder_name):
+  return re.sub('[ ()]', '_', builder_name)
+
+
+def SpawnLogcatMonitor():
+  shutil.rmtree(LOGCAT_DIR, ignore_errors=True)
+  bb_utils.SpawnCmd([
+      os.path.join(CHROME_SRC_DIR, 'build', 'android', 'adb_logcat_monitor.py'),
+      LOGCAT_DIR])
+
+  # Wait for logcat_monitor to pull existing logcat
+  RunCmd(['sleep', '5'])
+
+
+def ProvisionDevices(options):
+  bb_annotations.PrintNamedStep('provision_devices')
+
+  if not bb_utils.TESTING:
+    # Restart adb to work around bugs, sleep to wait for usb discovery.
+    device_utils.RestartServer()
+    RunCmd(['sleep', '1'])
+  provision_cmd = ['build/android/provision_devices.py', '-t', options.target]
+  if options.auto_reconnect:
+    provision_cmd.append('--auto-reconnect')
+  if options.skip_wipe:
+    provision_cmd.append('--skip-wipe')
+  RunCmd(provision_cmd, halt_on_failure=True)
+
+
+def DeviceStatusCheck(options):
+  bb_annotations.PrintNamedStep('device_status_check')
+  cmd = ['build/android/buildbot/bb_device_status_check.py']
+  if options.restart_usb:
+    cmd.append('--restart-usb')
+  RunCmd(cmd, halt_on_failure=True)
+
+
+def GetDeviceSetupStepCmds():
+  return [
+      ('device_status_check', DeviceStatusCheck),
+      ('provision_devices', ProvisionDevices),
+  ]
+
+
+def RunUnitTests(options):
+  suites = gtest_config.STABLE_TEST_SUITES
+  if options.asan:
+    suites = [s for s in suites
+              if s not in gtest_config.ASAN_EXCLUDED_TEST_SUITES]
+  RunTestSuites(options, suites)
+
+
+def RunInstrumentationTests(options):
+  for test in INSTRUMENTATION_TESTS.itervalues():
+    RunInstrumentationSuite(options, test)
+
+
+def RunWebkitTests(options):
+  RunTestSuites(options, ['webkit_unit_tests', 'blink_heap_unittests'])
+  RunWebkitLint()
+
+
+def RunGPUTests(options):
+  revision = _GetRevision(options)
+  builder_name = options.build_properties.get('buildername', 'noname')
+
+  bb_annotations.PrintNamedStep('pixel_tests')
+  RunCmd(['content/test/gpu/run_gpu_test.py',
+          'pixel',
+          '--browser',
+          'android-content-shell',
+          '--build-revision',
+          str(revision),
+          '--upload-refimg-to-cloud-storage',
+          '--refimg-cloud-storage-bucket',
+          'chromium-gpu-archive/reference-images',
+          '--os-type',
+          'android',
+          '--test-machine-name',
+          EscapeBuilderName(builder_name)])
+
+  bb_annotations.PrintNamedStep('webgl_conformance_tests')
+  RunCmd(['content/test/gpu/run_gpu_test.py',
+          '--browser=android-content-shell', 'webgl_conformance',
+          '--webgl-conformance-version=1.0.1'])
+
+  bb_annotations.PrintNamedStep('gpu_rasterization_tests')
+  RunCmd(['content/test/gpu/run_gpu_test.py',
+          'gpu_rasterization',
+          '--browser',
+          'android-content-shell',
+          '--build-revision',
+          str(revision),
+          '--test-machine-name',
+          EscapeBuilderName(builder_name)])
+
+
+def GetTestStepCmds():
+  return [
+      ('chromedriver', RunChromeDriverTests),
+      ('chrome_proxy', RunChromeProxyTests),
+      ('gpu', RunGPUTests),
+      ('mojo', RunMojoTests),
+      ('sync', RunChromeSyncShellTests),
+      ('telemetry_perf_unittests', RunTelemetryPerfUnitTests),
+      ('ui', RunInstrumentationTests),
+      ('unit', RunUnitTests),
+      ('webkit', RunWebkitTests),
+      ('webkit_layout', RunWebkitLayoutTests),
+  ]
+
+
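+# Builds a GS object path of the form <gs_base_dir>/<buildername>/<revision>/
+# <sha1-of-a-random-number>, e.g. (illustrative)
+# 'chromium-android/logcat_dumps/main-tests/291560/da39a3ee...'.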
+def MakeGSPath(options, gs_base_dir):
+  revision = _GetRevision(options)
+  bot_id = options.build_properties.get('buildername', 'testing')
+  randhash = hashlib.sha1(str(random.random())).hexdigest()
+  gs_path = '%s/%s/%s/%s' % (gs_base_dir, bot_id, revision, randhash)
+  # Remove double slashes (e.g. from a blank revision); they confuse gsutil.
+  gs_path = re.sub('/+', '/', gs_path)
+  return gs_path
+
+def UploadHTML(options, gs_base_dir, dir_to_upload, link_text,
+               link_rel_path='index.html', gs_url=GS_URL):
+  """Uploads directory at |dir_to_upload| to Google Storage and output a link.
+
+  Args:
+    options: Command line options.
+    gs_base_dir: The Google Storage base directory (e.g.
+      'chromium-code-coverage/java')
+    dir_to_upload: Absolute path to the directory to be uploaded.
+    link_text: Link text to be displayed on the step.
+    link_rel_path: Link path relative to |dir_to_upload|.
+    gs_url: Google storage URL.
+  """
+  gs_path = MakeGSPath(options, gs_base_dir)
+  RunCmd([bb_utils.GSUTIL_PATH, 'cp', '-R', dir_to_upload, 'gs://%s' % gs_path])
+  bb_annotations.PrintLink(link_text,
+                           '%s/%s/%s' % (gs_url, gs_path, link_rel_path))
+
+
+def GenerateJavaCoverageReport(options):
+  """Generates an HTML coverage report using EMMA and uploads it."""
+  bb_annotations.PrintNamedStep('java_coverage_report')
+
+  coverage_html = os.path.join(options.coverage_dir, 'coverage_html')
+  RunCmd(['build/android/generate_emma_html.py',
+          '--coverage-dir', options.coverage_dir,
+          '--metadata-dir', os.path.join(CHROME_OUT_DIR, options.target),
+          '--cleanup',
+          '--output', os.path.join(coverage_html, 'index.html')])
+  return coverage_html
+
+
+def LogcatDump(options):
+  # Print logcat, kill logcat monitor
+  bb_annotations.PrintNamedStep('logcat_dump')
+  logcat_file = os.path.join(CHROME_OUT_DIR, options.target, 'full_log.txt')
+  RunCmd([SrcPath('build' , 'android', 'adb_logcat_printer.py'),
+          '--output-path', logcat_file, LOGCAT_DIR])
+  gs_path = MakeGSPath(options, 'chromium-android/logcat_dumps')
+  RunCmd([bb_utils.GSUTIL_PATH, 'cp', '-z', 'txt', logcat_file,
+          'gs://%s' % gs_path])
+  bb_annotations.PrintLink('logcat dump', '%s/%s' % (GS_AUTH_URL, gs_path))
+
+
+def RunStackToolSteps(options):
+  """Run stack tool steps.
+
+  Stack tool is run for logcat dump, optionally for ASAN.
+  """
+  bb_annotations.PrintNamedStep('Run stack tool with logcat dump')
+  logcat_file = os.path.join(CHROME_OUT_DIR, options.target, 'full_log.txt')
+  RunCmd([os.path.join(CHROME_SRC_DIR, 'third_party', 'android_platform',
+          'development', 'scripts', 'stack'),
+          '--more-info', logcat_file])
+  if options.asan_symbolize:
+    bb_annotations.PrintNamedStep('Run stack tool for ASAN')
+    RunCmd([
+        os.path.join(CHROME_SRC_DIR, 'build', 'android', 'asan_symbolize.py'),
+        '-l', logcat_file])
+
+
+def GenerateTestReport(options):
+  bb_annotations.PrintNamedStep('test_report')
+  for report in glob.glob(
+      os.path.join(CHROME_OUT_DIR, options.target, 'test_logs', '*.log')):
+    RunCmd(['cat', report])
+    os.remove(report)
+
+
+def MainTestWrapper(options):
+  try:
+    # Spawn logcat monitor
+    SpawnLogcatMonitor()
+
+    # Run all device setup steps
+    for _, cmd in GetDeviceSetupStepCmds():
+      cmd(options)
+
+    if options.install:
+      test_obj = INSTRUMENTATION_TESTS[options.install]
+      InstallApk(options, test_obj, print_step=True)
+
+    if options.test_filter:
+      bb_utils.RunSteps(options.test_filter, GetTestStepCmds(), options)
+
+    if options.coverage_bucket:
+      coverage_html = GenerateJavaCoverageReport(options)
+      UploadHTML(options, '%s/java' % options.coverage_bucket, coverage_html,
+                 'Coverage Report')
+      shutil.rmtree(coverage_html, ignore_errors=True)
+
+    if options.experimental:
+      RunTestSuites(options, gtest_config.EXPERIMENTAL_TEST_SUITES)
+
+  finally:
+    # Run all post test steps
+    LogcatDump(options)
+    if not options.disable_stack_tool:
+      RunStackToolSteps(options)
+    GenerateTestReport(options)
+    # KillHostHeartbeat() has logic to check if heartbeat process is running,
+    # and kills only if it finds the process is running on the host.
+    provision_devices.KillHostHeartbeat()
+    if options.cleanup:
+      shutil.rmtree(os.path.join(CHROME_OUT_DIR, options.target),
+          ignore_errors=True)
+
+
+def GetDeviceStepsOptParser():
+  parser = bb_utils.GetParser()
+  parser.add_option('--experimental', action='store_true',
+                    help='Run experimental tests.')
+  parser.add_option('-f', '--test-filter', metavar='<filter>', default=[],
+                    action='append',
+                    help=('Run a test suite. Test suites: "%s"' %
+                          '", "'.join(VALID_TESTS)))
+  parser.add_option('--gtest-filter',
+                    help='Filter for running a subset of tests of a gtest test')
+  parser.add_option('--asan', action='store_true', help='Run tests with asan.')
+  parser.add_option('--install', metavar='<apk name>',
+                    help='Install an apk by name')
+  parser.add_option('--no-reboot', action='store_true',
+                    help='Do not reboot devices during provisioning.')
+  parser.add_option('--coverage-bucket',
+                    help=('Bucket name to store coverage results. Coverage is '
+                          'only run if this is set.'))
+  parser.add_option('--restart-usb', action='store_true',
+                    help='Restart usb ports before device status check.')
+  parser.add_option(
+      '--flakiness-server',
+      help=('The flakiness dashboard server to which the results should be '
+            'uploaded.'))
+  parser.add_option(
+      '--auto-reconnect', action='store_true',
+      help='Push script to device which restarts adbd on disconnections.')
+  parser.add_option('--skip-wipe', action='store_true',
+                    help='Do not wipe devices during provisioning.')
+  parser.add_option(
+      '--logcat-dump-output',
+      help='The logcat dump output will be "tee"-ed into this file')
+  # While processing perf bisects, a separate working directory is created and
+  # builds are produced under it, so we should look for the relevant output
+  # files under that directory
+  # (/b/build/slave/<slave_name>/build/bisect/src/out).
+  parser.add_option(
+      '--chrome-output-dir',
+      help='Chrome output directory to be used while bisecting.')
+
+  parser.add_option('--disable-stack-tool', action='store_true',
+      help='Do not run stack tool.')
+  parser.add_option('--asan-symbolize', action='store_true',
+      help='Run stack tool for ASAN.')
+  parser.add_option('--cleanup', action='store_true',
+      help='Delete out/<target> directory at the end of the run.')
+  return parser
+
+
+def main(argv):
+  parser = GetDeviceStepsOptParser()
+  options, args = parser.parse_args(argv[1:])
+
+  if args:
+    return sys.exit('Unused args %s' % args)
+
+  unknown_tests = set(options.test_filter) - VALID_TESTS
+  if unknown_tests:
+    return sys.exit('Unknown tests %s' % list(unknown_tests))
+
+  setattr(options, 'target', options.factory_properties.get('target', 'Debug'))
+
+  if options.chrome_output_dir:
+    global CHROME_OUT_DIR
+    global LOGCAT_DIR
+    CHROME_OUT_DIR = options.chrome_output_dir
+    LOGCAT_DIR = os.path.join(CHROME_OUT_DIR, 'logcat')
+
+  if options.coverage_bucket:
+    setattr(options, 'coverage_dir',
+            os.path.join(CHROME_OUT_DIR, options.target, 'coverage'))
+
+  MainTestWrapper(options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/buildbot/bb_host_steps.py b/build/android/buildbot/bb_host_steps.py
new file mode 100755
index 0000000..4041ccd
--- /dev/null
+++ b/build/android/buildbot/bb_host_steps.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+import bb_utils
+import bb_annotations
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import constants
+
+
+SLAVE_SCRIPTS_DIR = os.path.join(bb_utils.BB_BUILD_DIR, 'scripts', 'slave')
+VALID_HOST_TESTS = set(['check_webview_licenses', 'findbugs'])
+
+DIR_BUILD_ROOT = os.path.dirname(constants.DIR_SOURCE_ROOT)
+
+# Short hand for RunCmd which is used extensively in this file.
+RunCmd = bb_utils.RunCmd
+
+
+def SrcPath(*path):
+  return os.path.join(constants.DIR_SOURCE_ROOT, *path)
+
+
+def CheckWebViewLicenses(_):
+  bb_annotations.PrintNamedStep('check_licenses')
+  RunCmd([SrcPath('android_webview', 'tools', 'webview_licenses.py'), 'scan'],
+         warning_code=1)
+
+
+def RunHooks(build_type):
+  RunCmd([SrcPath('build', 'landmines.py')])
+  build_path = SrcPath('out', build_type)
+  landmine_path = os.path.join(build_path, '.landmines_triggered')
+  clobber_env = os.environ.get('BUILDBOT_CLOBBER')
+  if clobber_env or os.path.isfile(landmine_path):
+    bb_annotations.PrintNamedStep('Clobber')
+    if not clobber_env:
+      print 'Clobbering due to triggered landmines:'
+      with open(landmine_path) as f:
+        print f.read()
+    RunCmd(['rm', '-rf', build_path])
+
+  bb_annotations.PrintNamedStep('runhooks')
+  RunCmd(['gclient', 'runhooks'], halt_on_failure=True)
+
+
+def Compile(options):
+  RunHooks(options.target)
+  cmd = [os.path.join(SLAVE_SCRIPTS_DIR, 'compile.py'),
+         '--build-tool=ninja',
+         '--compiler=goma',
+         '--target=%s' % options.target,
+         '--goma-dir=%s' % bb_utils.GOMA_DIR]
+  bb_annotations.PrintNamedStep('compile')
+  if options.build_targets:
+    build_targets = options.build_targets.split(',')
+    cmd += ['--build-args', ' '.join(build_targets)]
+  RunCmd(cmd, halt_on_failure=True, cwd=DIR_BUILD_ROOT)
+
+
+def ZipBuild(options):
+  bb_annotations.PrintNamedStep('zip_build')
+  RunCmd([
+      os.path.join(SLAVE_SCRIPTS_DIR, 'zip_build.py'),
+      '--src-dir', constants.DIR_SOURCE_ROOT,
+      '--exclude-files', 'lib.target,gen,android_webview,jingle_unittests']
+      + bb_utils.EncodeProperties(options), cwd=DIR_BUILD_ROOT)
+
+
+def ExtractBuild(options):
+  bb_annotations.PrintNamedStep('extract_build')
+  RunCmd([os.path.join(SLAVE_SCRIPTS_DIR, 'extract_build.py')]
+         + bb_utils.EncodeProperties(options), cwd=DIR_BUILD_ROOT)
+
+
+def FindBugs(options):
+  bb_annotations.PrintNamedStep('findbugs')
+  build_type = []
+  if options.target == 'Release':
+    build_type = ['--release-build']
+  RunCmd([SrcPath('build', 'android', 'findbugs_diff.py')] + build_type)
+  RunCmd([SrcPath(
+      'tools', 'android', 'findbugs_plugin', 'test',
+      'run_findbugs_plugin_tests.py')] + build_type)
+
+
+def BisectPerfRegression(options):
+  args = []
+  if options.extra_src:
+    args = ['--extra_src', options.extra_src]
+  RunCmd([SrcPath('tools', 'prepare-bisect-perf-regression.py'),
+          '-w', os.path.join(constants.DIR_SOURCE_ROOT, os.pardir)])
+  RunCmd([SrcPath('tools', 'run-bisect-perf-regression.py'),
+          '-w', os.path.join(constants.DIR_SOURCE_ROOT, os.pardir)] + args)
+
+
+def GetHostStepCmds():
+  return [
+      ('compile', Compile),
+      ('extract_build', ExtractBuild),
+      ('check_webview_licenses', CheckWebViewLicenses),
+      ('bisect_perf_regression', BisectPerfRegression),
+      ('findbugs', FindBugs),
+      ('zip_build', ZipBuild)
+  ]
+
+
+def GetHostStepsOptParser():
+  parser = bb_utils.GetParser()
+  parser.add_option('--steps', help='Comma separated list of host tests.')
+  parser.add_option('--build-targets', default='',
+                    help='Comma separated list of build targets.')
+  parser.add_option('--experimental', action='store_true',
+                    help='Indicate whether to compile experimental targets.')
+  parser.add_option('--extra_src', default='',
+                    help='Path to extra source file. If this is supplied, '
+                    'bisect script will use it to override default behavior.')
+
+  return parser
+
+
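+# Example (illustrative):
+#   build/android/buildbot/bb_host_steps.py --steps=compile,zip_build \
+#       --factory-properties='{"target": "Release"}'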
+def main(argv):
+  parser = GetHostStepsOptParser()
+  options, args = parser.parse_args(argv[1:])
+  if args:
+    return sys.exit('Unused args %s' % args)
+
+  setattr(options, 'target', options.factory_properties.get('target', 'Debug'))
+  setattr(options, 'extra_src',
+          options.factory_properties.get('extra_src', ''))
+
+  if options.steps:
+    bb_utils.RunSteps(options.steps.split(','), GetHostStepCmds(), options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/buildbot/bb_run_bot.py b/build/android/buildbot/bb_run_bot.py
new file mode 100755
index 0000000..586287b
--- /dev/null
+++ b/build/android/buildbot/bb_run_bot.py
@@ -0,0 +1,310 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import copy
+import json
+import os
+import pipes
+import re
+import subprocess
+import sys
+
+import bb_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import constants
+
+
+CHROMIUM_COVERAGE_BUCKET = 'chromium-code-coverage'
+
+_BotConfig = collections.namedtuple(
+    'BotConfig', ['bot_id', 'host_obj', 'test_obj'])
+
+HostConfig = collections.namedtuple(
+    'HostConfig',
+    ['script', 'host_steps', 'extra_args', 'extra_gyp_defines', 'target_arch'])
+
+TestConfig = collections.namedtuple('TestConfig',
+                                    ['script', 'tests', 'extra_args'])
+
+
+def BotConfig(bot_id, host_object, test_object=None):
+  return _BotConfig(bot_id, host_object, test_object)
+
+
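+# E.g. (illustrative) DictDiff({'A': '1'}, {'A': '2', 'B': '3'}) returns the
+# three lines '- A=1', '+ A=2' and '+ B=3'.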
+def DictDiff(d1, d2):
+  diff = []
+  for key in sorted(set(d1.keys() + d2.keys())):
+    if key in d1 and d1[key] != d2.get(key):
+      diff.append('- %s=%s' % (key, pipes.quote(d1[key])))
+    if key in d2 and d2[key] != d1.get(key):
+      diff.append('+ %s=%s' % (key, pipes.quote(d2[key])))
+  return '\n'.join(diff)
+
+
+def GetEnvironment(host_obj, testing, extra_env_vars=None):
+  init_env = dict(os.environ)
+  init_env['GYP_GENERATORS'] = 'ninja'
+  if extra_env_vars:
+    init_env.update(extra_env_vars)
+  envsetup_cmd = '. build/android/envsetup.sh'
+  if testing:
+    # Skip envsetup to avoid presubmit dependence on android deps.
+    print 'Testing mode - skipping "%s"' % envsetup_cmd
+    envsetup_cmd = ':'
+  else:
+    print 'Running %s' % envsetup_cmd
+  proc = subprocess.Popen(['bash', '-exc',
+    envsetup_cmd + ' >&2; python build/android/buildbot/env_to_json.py'],
+    stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+    cwd=bb_utils.CHROME_SRC, env=init_env)
+  json_env, envsetup_output = proc.communicate()
+  if proc.returncode != 0:
+    print >> sys.stderr, 'FATAL Failure in envsetup.'
+    print >> sys.stderr, envsetup_output
+    sys.exit(1)
+  env = json.loads(json_env)
+  env['GYP_DEFINES'] = env.get('GYP_DEFINES', '') + \
+      ' OS=android fastbuild=1 use_goma=1 gomadir=%s' % bb_utils.GOMA_DIR
+  if host_obj.target_arch:
+    env['GYP_DEFINES'] += ' target_arch=%s' % host_obj.target_arch
+  extra_gyp = host_obj.extra_gyp_defines
+  if extra_gyp:
+    env['GYP_DEFINES'] += ' %s' % extra_gyp
+    if re.search('(asan|clang)=1', extra_gyp):
+      env.pop('CXX_target', None)
+
+  # Bots check out chrome in /b/build/slave/<name>/build/src.
+  build_internal_android = os.path.abspath(os.path.join(
+      bb_utils.CHROME_SRC, '..', '..', '..', '..', '..', 'build_internal',
+      'scripts', 'slave', 'android'))
+  if os.path.exists(build_internal_android):
+    env['PATH'] = os.pathsep.join([build_internal_android, env['PATH']])
+  return env
+
+
+def GetCommands(options, bot_config):
+  """Get a formatted list of commands.
+
+  Args:
+    options: Options object.
+    bot_config: A BotConfig named tuple.
+  Returns:
+    List of command argv lists: the host step command, followed by the device
+    step command when the bot config defines a test object.
+  """
+  property_args = bb_utils.EncodeProperties(options)
+  commands = [[bot_config.host_obj.script,
+               '--steps=%s' % ','.join(bot_config.host_obj.host_steps)] +
+              property_args + (bot_config.host_obj.extra_args or [])]
+
+  test_obj = bot_config.test_obj
+  if test_obj:
+    run_test_cmd = [test_obj.script] + property_args
+    for test in test_obj.tests:
+      run_test_cmd.extend(['-f', test])
+    if test_obj.extra_args:
+      run_test_cmd.extend(test_obj.extra_args)
+    commands.append(run_test_cmd)
+  return commands
+
+
+def GetBotStepMap():
+  compile_step = ['compile']
+  chrome_proxy_tests = ['chrome_proxy']
+  chrome_sync_shell_tests = ['sync']
+  std_host_tests = ['check_webview_licenses', 'findbugs']
+  emma_coverage_tests = [x for x in std_host_tests if x != 'findbugs']
+  std_build_steps = ['compile', 'zip_build']
+  std_test_steps = ['extract_build']
+  std_tests = ['ui', 'unit', 'mojo']
+  telemetry_tests = ['telemetry_perf_unittests']
+  flakiness_server = (
+      '--flakiness-server=%s' % constants.UPSTREAM_FLAKINESS_SERVER)
+  experimental = ['--experimental']
+  bisect_chrome_output_dir = os.path.abspath(
+      os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+                   os.pardir, 'bisect', 'src', 'out'))
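+  # Shorthand used below: B(bot_id, host_config[, test_config]) describes a
+  # bot; H(...) makes a HostConfig run via bb_host_steps.py and T(...) a
+  # TestConfig run via bb_device_steps.py.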
+  B = BotConfig
+  H = (lambda steps, extra_args=None, extra_gyp=None, target_arch=None :
+       HostConfig('build/android/buildbot/bb_host_steps.py', steps, extra_args,
+                  extra_gyp, target_arch))
+  T = (lambda tests, extra_args=None :
+       TestConfig('build/android/buildbot/bb_device_steps.py', tests,
+                  extra_args))
+
+  bot_configs = [
+      # Main builders
+      B('main-builder-dbg', H(std_build_steps + std_host_tests)),
+      B('main-builder-rel', H(std_build_steps)),
+      B('main-clang-builder',
+        H(compile_step, extra_gyp='clang=1 component=shared_library')),
+      B('main-clobber', H(compile_step)),
+      B('main-tests-rel', H(std_test_steps),
+        T(std_tests + telemetry_tests + chrome_proxy_tests,
+          ['--cleanup', flakiness_server])),
+      B('main-tests', H(std_test_steps),
+        T(std_tests, ['--cleanup', flakiness_server])),
+
+      # Other waterfalls
+      B('asan-builder-tests', H(compile_step,
+                                extra_gyp='asan=1 component=shared_library'),
+        T(std_tests, ['--asan', '--asan-symbolize'])),
+      B('blink-try-builder', H(compile_step)),
+      B('chromedriver-fyi-tests-dbg', H(std_test_steps),
+        T(['chromedriver'], ['--install=ChromeShell', '--skip-wipe',
+          '--cleanup'])),
+      B('fyi-x86-builder-dbg',
+        H(compile_step + std_host_tests, experimental, target_arch='ia32')),
+      B('fyi-builder-dbg',
+        H(std_build_steps + emma_coverage_tests, experimental,
+          extra_gyp='emma_coverage=1')),
+      B('x86-builder-dbg',
+        H(compile_step + std_host_tests, target_arch='ia32')),
+      B('fyi-builder-rel', H(std_build_steps, experimental)),
+      B('fyi-tests', H(std_test_steps),
+        T(std_tests + chrome_sync_shell_tests,
+                      ['--experimental', flakiness_server,
+                      '--coverage-bucket', CHROMIUM_COVERAGE_BUCKET,
+                      '--cleanup'])),
+      B('fyi-component-builder-tests-dbg',
+        H(compile_step, extra_gyp='component=shared_library'),
+        T(std_tests, ['--experimental', flakiness_server])),
+      B('gpu-builder-tests-dbg',
+        H(compile_step),
+        T(['gpu'], ['--install=ContentShell'])),
+      # Pass empty T([]) so that logcat monitor and device status check are run.
+      B('perf-bisect-builder-tests-dbg',
+        H(['bisect_perf_regression']),
+        T([], ['--chrome-output-dir', bisect_chrome_output_dir])),
+      B('perf-tests-rel', H(std_test_steps),
+        T([], ['--install=ChromeShell', '--cleanup'])),
+      B('webkit-latest-webkit-tests', H(std_test_steps),
+        T(['webkit_layout', 'webkit'], ['--cleanup', '--auto-reconnect'])),
+      B('webkit-latest-contentshell', H(compile_step),
+        T(['webkit_layout'], ['--auto-reconnect'])),
+      B('builder-unit-tests', H(compile_step), T(['unit'])),
+
+      # Generic builder config (for substring match).
+      B('builder', H(std_build_steps)),
+  ]
+
+  bot_map = dict((config.bot_id, config) for config in bot_configs)
+
+  # These bots have identical configuration to ones defined earlier.
+  copy_map = [
+      ('lkgr-clobber', 'main-clobber'),
+      ('try-builder-dbg', 'main-builder-dbg'),
+      ('try-builder-rel', 'main-builder-rel'),
+      ('try-clang-builder', 'main-clang-builder'),
+      ('try-fyi-builder-dbg', 'fyi-builder-dbg'),
+      ('try-x86-builder-dbg', 'x86-builder-dbg'),
+      ('try-tests-rel', 'main-tests-rel'),
+      ('try-tests', 'main-tests'),
+      ('try-fyi-tests', 'fyi-tests'),
+      ('webkit-latest-tests', 'main-tests'),
+  ]
+  for to_id, from_id in copy_map:
+    assert to_id not in bot_map
+    # pylint: disable=W0212
+    bot_map[to_id] = copy.deepcopy(bot_map[from_id])._replace(bot_id=to_id)
+
+    # Trybots do not upload to flakiness dashboard. They should be otherwise
+    # identical in configuration to their trunk building counterparts.
+    test_obj = bot_map[to_id].test_obj
+    if to_id.startswith('try') and test_obj:
+      extra_args = test_obj.extra_args
+      if extra_args and flakiness_server in extra_args:
+        extra_args.remove(flakiness_server)
+  return bot_map
+
+
+# Return an object from the map, looking first for an exact id match.
+# If this fails, look for an id which is a substring of the specified id.
+# Choose the longest of all substring matches.
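+# E.g. (illustrative) with ids 'builder' and 'main-builder-dbg' in the map,
+# the id 'main-builder-dbg-try' matches 'main-builder-dbg', while
+# 'fyi-builder' matches only the generic 'builder'.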
+# pylint: disable=W0622
+def GetBestMatch(id_map, id):
+  config = id_map.get(id)
+  if not config:
+    substring_matches = filter(lambda x: x in id, id_map.iterkeys())
+    if substring_matches:
+      max_id = max(substring_matches, key=len)
+      print 'Using config from id="%s" (substring match).' % max_id
+      config = id_map[max_id]
+  return config
+
+
+def GetRunBotOptParser():
+  parser = bb_utils.GetParser()
+  parser.add_option('--bot-id', help='Specify bot id directly.')
+  parser.add_option('--testing', action='store_true',
+                    help='For testing: print, but do not run commands')
+
+  return parser
+
+
+def GetBotConfig(options, bot_step_map):
+  bot_id = options.bot_id or options.factory_properties.get('android_bot_id')
+  if not bot_id:
+    print >> sys.stderr, (
+        'A bot id must be specified through option or factory_props.')
+    return
+
+  bot_config = GetBestMatch(bot_step_map, bot_id)
+  if not bot_config:
+    print 'Error: config for id="%s" cannot be inferred.' % bot_id
+  return bot_config
+
+
+def RunBotCommands(options, commands, env):
+  print 'Environment changes:'
+  print DictDiff(dict(os.environ), env)
+
+  for command in commands:
+    print bb_utils.CommandToString(command)
+    sys.stdout.flush()
+    if options.testing:
+      env['BUILDBOT_TESTING'] = '1'
+    return_code = subprocess.call(command, cwd=bb_utils.CHROME_SRC, env=env)
+    if return_code != 0:
+      return return_code
+
+
+def main(argv):
+  proc = subprocess.Popen(
+      ['/bin/hostname', '-f'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  hostname_stdout, hostname_stderr = proc.communicate()
+  if proc.returncode == 0:
+    print 'Running on: ' + hostname_stdout
+  else:
+    print >> sys.stderr, 'WARNING: failed to run hostname'
+    print >> sys.stderr, hostname_stdout
+    print >> sys.stderr, hostname_stderr
+    sys.exit(1)
+
+  parser = GetRunBotOptParser()
+  options, args = parser.parse_args(argv[1:])
+  if args:
+    parser.error('Unused args: %s' % args)
+
+  bot_config = GetBotConfig(options, GetBotStepMap())
+  if not bot_config:
+    sys.exit(1)
+
+  print 'Using config:', bot_config
+
+  commands = GetCommands(options, bot_config)
+  for command in commands:
+    print 'Will run: ', bb_utils.CommandToString(command)
+  print
+
+  env = GetEnvironment(bot_config.host_obj, options.testing)
+  return RunBotCommands(options, commands, env)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/buildbot/bb_utils.py b/build/android/buildbot/bb_utils.py
new file mode 100644
index 0000000..3c16cc2
--- /dev/null
+++ b/build/android/buildbot/bb_utils.py
@@ -0,0 +1,100 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import optparse
+import os
+import pipes
+import subprocess
+import sys
+
+import bb_annotations
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import constants
+
+
+TESTING = 'BUILDBOT_TESTING' in os.environ
+
+BB_BUILD_DIR = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+    os.pardir, os.pardir, os.pardir, os.pardir))
+
+CHROME_SRC = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..', '..', '..'))
+
+# TODO: Figure out how to merge this with pylib.cmd_helper.OutDirectory().
+CHROME_OUT_DIR = os.path.join(CHROME_SRC, 'out')
+
+GOMA_DIR = os.environ.get('GOMA_DIR', os.path.join(BB_BUILD_DIR, 'goma'))
+
+GSUTIL_PATH = os.path.join(BB_BUILD_DIR, 'third_party', 'gsutil', 'gsutil')
+
+def CommandToString(command):
+  """Returns quoted command that can be run in bash shell."""
+  return ' '.join(map(pipes.quote, command))
+
+
+def SpawnCmd(command, stdout=None, cwd=CHROME_SRC):
+  """Spawn a process without waiting for termination."""
+  print '>', CommandToString(command)
+  sys.stdout.flush()
+  if TESTING:
+    class MockPopen(object):
+      @staticmethod
+      def wait():
+        return 0
+      @staticmethod
+      def communicate():
+        return '', ''
+    return MockPopen()
+  return subprocess.Popen(command, cwd=cwd, stdout=stdout)
+
+
+def RunCmd(command, flunk_on_failure=True, halt_on_failure=False,
+           warning_code=constants.WARNING_EXIT_CODE, stdout=None,
+           cwd=CHROME_SRC):
+  """Run a command relative to the chrome source root."""
+  code = SpawnCmd(command, stdout, cwd).wait()
+  print '<', CommandToString(command)
+  if code != 0:
+    print 'ERROR: process exited with code %d' % code
+    if code != warning_code and flunk_on_failure:
+      bb_annotations.PrintError()
+    else:
+      bb_annotations.PrintWarning()
+    # Allow steps to have both halting (i.e. 1) and non-halting exit codes.
+    if code != warning_code and halt_on_failure:
+      print 'FATAL %d != %d' % (code, warning_code)
+      sys.exit(1)
+  return code
+
+
+def GetParser():
+  def ConvertJson(option, _, value, parser):
+    setattr(parser.values, option.dest, json.loads(value))
+  parser = optparse.OptionParser()
+  parser.add_option('--build-properties', action='callback',
+                    callback=ConvertJson, type='string', default={},
+                    help='build properties in JSON format')
+  parser.add_option('--factory-properties', action='callback',
+                    callback=ConvertJson, type='string', default={},
+                    help='factory properties in JSON format')
+  return parser
+
+
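+# Example (illustrative): EncodeProperties(options) yields arguments such as
+#   --build-properties={"buildername": "main-tests", "buildnumber": 42}
+# which GetParser()'s ConvertJson callback decodes back into dictionaries.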
+def EncodeProperties(options):
+  return ['--factory-properties=%s' % json.dumps(options.factory_properties),
+          '--build-properties=%s' % json.dumps(options.build_properties)]
+
+
+def RunSteps(steps, step_cmds, options):
+  unknown_steps = set(steps) - set(step for step, _ in step_cmds)
+  if unknown_steps:
+    print >> sys.stderr, 'FATAL: Unknown steps %s' % list(unknown_steps)
+    sys.exit(1)
+
+  for step, cmd in step_cmds:
+    if step in steps:
+      cmd(options)
diff --git a/build/android/buildbot/env_to_json.py b/build/android/buildbot/env_to_json.py
new file mode 100755
index 0000000..f9a7a44
--- /dev/null
+++ b/build/android/buildbot/env_to_json.py
@@ -0,0 +1,11 @@
+#!/usr/bin/python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Encode current environment into json.
+
+import json
+import os
+
+print json.dumps(dict(os.environ))
diff --git a/build/android/buildbot/tests/bb_run_bot_test.py b/build/android/buildbot/tests/bb_run_bot_test.py
new file mode 100755
index 0000000..810c60d
--- /dev/null
+++ b/build/android/buildbot/tests/bb_run_bot_test.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import subprocess
+import sys
+
+BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(BUILDBOT_DIR)
+import bb_run_bot
+
+def RunBotProcesses(bot_process_map):
+  code = 0
+  for bot, proc in bot_process_map:
+    _, err = proc.communicate()
+    code |= proc.returncode
+    if proc.returncode != 0:
+      print 'Error running the bot script with id="%s"' % bot, err
+
+  return code
+
+
+def main():
+  procs = [
+      (bot, subprocess.Popen(
+          [os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
+          '--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
+      for bot in bb_run_bot.GetBotStepMap()]
+  return RunBotProcesses(procs)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/chrome_with_libs.gyp b/build/android/chrome_with_libs.gyp
new file mode 100644
index 0000000..690be88
--- /dev/null
+++ b/build/android/chrome_with_libs.gyp
@@ -0,0 +1,82 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to add more loadable libs into Chrome_apk.
+#
+# This is useful when building Chrome_apk with loadable modules that are not
+# included in it by default.
+# As an example, when building Chrome_apk with
+# libpeer_target_type=loadable_module,
+# the libpeerconnection.so is not included in Chrome_apk. To add the missing
+# lib, follow the steps below:
+# - Run gyp:
+#     GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" CHROMIUM_GYP_FILE="build/android/chrome_with_libs.gyp" build/gyp_chromium
+# - Build chrome_with_libs:
+#     ninja (or make) chrome_with_libs
+#
+# This tool also allows replacing the loadable module with a new one via the
+# following steps:
+# - Build Chrome_apk with the gyp define:
+#     GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" build/gyp_chromium
+#     ninja (or make) Chrome_apk
+# - Replace libpeerconnection.so with a new one:
+#     cp the_new_one path/to/libpeerconnection.so
+# - Run gyp:
+#     GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" CHROMIUM_GYP_FILE="build/android/chrome_with_libs.gyp" build/gyp_chromium
+# - Build chrome_with_libs:
+#     ninja (or make) chrome_with_libs
+{
+  'targets': [
+    {
+      # An "All" target is required for a top-level gyp-file.
+      'target_name': 'All',
+      'type': 'none',
+      'dependencies': [
+        'chrome_with_libs',
+      ],
+    },
+    {
+      'target_name': 'chrome_with_libs',
+      'type': 'none',
+      'variables': {
+        'intermediate_dir': '<(PRODUCT_DIR)/prebuilt_libs/',
+        'chrome_unsigned_path': '<(PRODUCT_DIR)/chrome_apk/Chrome-unsigned.apk',
+        'chrome_with_libs_unsigned': '<(intermediate_dir)/Chrome-with-libs-unsigned.apk',
+        'chrome_with_libs_final': '<(PRODUCT_DIR)/apks/Chrome-with-libs.apk',
+      },
+      'dependencies': [
+        '<(DEPTH)/clank/native/framework/clank.gyp:chrome_apk'
+      ],
+      'copies': [
+        {
+          'destination': '<(intermediate_dir)/lib/<(android_app_abi)',
+          'files': [
+            '<(PRODUCT_DIR)/libpeerconnection.so',
+          ],
+        },
+      ],
+      'actions': [
+        {
+          'action_name': 'put_libs_in_chrome',
+          'variables': {
+            'inputs': [
+              '<(intermediate_dir)/lib/<(android_app_abi)/libpeerconnection.so',
+            ],
+            'input_apk_path': '<(chrome_unsigned_path)',
+            'output_apk_path': '<(chrome_with_libs_unsigned)',
+            'libraries_top_dir%': '<(intermediate_dir)',
+          },
+          'includes': [ 'create_standalone_apk_action.gypi' ],
+        },
+        {
+          'action_name': 'finalize_chrome_with_libs',
+          'variables': {
+            'input_apk_path': '<(chrome_with_libs_unsigned)',
+            'output_apk_path': '<(chrome_with_libs_final)',
+          },
+          'includes': [ 'finalize_apk_action.gypi'],
+        },
+      ],
+    }],
+}
diff --git a/build/android/cpufeatures.gypi b/build/android/cpufeatures.gypi
new file mode 100644
index 0000000..c08e956
--- /dev/null
+++ b/build/android/cpufeatures.gypi
@@ -0,0 +1,31 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Depend on the Android NDK's cpu feature detection. The WebView build is part
+# of the system and the library already exists; for the normal build there is a
+# gyp file in the checked-in NDK to build it.
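+#
+# Typical use (illustrative): a target that calls the NDK's
+# android_getCpuFeatures() adds
+#   'includes': [ '../build/android/cpufeatures.gypi' ],
+# to its target dict, and this file supplies the library dependency.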
+{
+  'conditions': [
+    ['android_webview_build == 1', {
+      # This is specified twice intentionally: Android provides include paths
+      # to targets automatically if they depend on libraries, so we add this
+      # library to every target that includes this .gypi to make the headers
+      # available, then also add it to targets that link those targets via
+      # link_settings to ensure it ends up being linked even if the main target
+      # doesn't include this .gypi.
+      'libraries': [
+        'cpufeatures.a',
+      ],
+      'link_settings': {
+        'libraries': [
+          'cpufeatures.a',
+        ],
+      },
+    }, {
+      'dependencies': [
+        '<(android_ndk_root)/android_tools_ndk.gyp:cpu_features',
+      ],
+    }],
+  ],
+}
diff --git a/build/android/create_standalone_apk_action.gypi b/build/android/create_standalone_apk_action.gypi
new file mode 100644
index 0000000..d17af7c
--- /dev/null
+++ b/build/android/create_standalone_apk_action.gypi
@@ -0,0 +1,41 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide an action that
+# combines a directory of shared libraries and an incomplete APK into a
+# standalone APK.
+#
+# To use this, create a gyp action with the following form:
+#  {
+#    'action_name': 'some descriptive action name',
+#    'variables': {
+#      'inputs': [ 'input_path1', 'input_path2' ],
+#      'input_apk_path': '<(unsigned_apk_path)',
+#      'output_apk_path': '<(unsigned_standalone_apk_path)',
+#      'libraries_top_dir': '<(libraries_top_dir)',
+#    },
+#    'includes': [ 'relative/path/to/create_standalone_apk_action.gypi' ],
+#  },
+
+{
+  'message': 'Creating standalone APK: <(output_apk_path)',
+  'variables': {
+    'inputs': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/create_standalone_apk.py',
+    '<(input_apk_path)',
+    '>@(inputs)',
+  ],
+  'outputs': [
+    '<(output_apk_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/create_standalone_apk.py',
+    '--libraries-top-dir=<(libraries_top_dir)',
+    '--input-apk-path=<(input_apk_path)',
+    '--output-apk-path=<(output_apk_path)',
+  ],
+}
diff --git a/build/android/developer_recommended_flags.gypi b/build/android/developer_recommended_flags.gypi
new file mode 100644
index 0000000..3a3db0a
--- /dev/null
+++ b/build/android/developer_recommended_flags.gypi
@@ -0,0 +1,60 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is the set of recommended gyp variable settings for Chrome for Android development.
+#
+# These can be used by copying this file to $CHROME_SRC/chrome/supplement.gypi.
+#
+# Even better, create chrome/supplement.gypi containing the following:
+#   {
+#     'includes': [ '../build/android/developer_recommended_flags.gypi' ]
+#   }
+# and you'll get new settings automatically.
+# When using this method, you can override individual settings by setting them unconditionally (with
+# no %) in chrome/supplement.gypi.
+# I.e. to disable gyp_managed_install but use everything else:
+#   {
+#     'variables': {
+#       'gyp_managed_install': 0,
+#     },
+#     'includes': [ '../build/android/developer_recommended_flags.gypi' ]
+#   }
+
+{
+  'variables': {
+    'variables': {
+      # Set component to 'shared_library' to enable the component build. This builds native code as
+      # many small shared libraries instead of one monolithic library. This slightly reduces the time
+      # required for incremental builds.
+      'component%': 'shared_library',
+    },
+    'component%': '<(component)',
+
+    # When gyp_managed_install is set to 1, building an APK will install that APK on the connected
+    # device(/emulator). To install on multiple devices (or onto a new device), build the APK once
+    # with each device attached. This greatly reduces the time required for incremental builds.
+    #
+    # This comes with some caveats:
+    #   Only works with a single device connected (it will print a warning if
+    #     zero or multiple devices are attached).
+    #   Some actions are always run (i.e. ninja will never say "no work to do").
+    'gyp_managed_install%': 1,
+
+    # With gyp_managed_install, we do not necessarily need a standalone APK.
+    # When create_standalone_apk is set to 1, we will build a standalone APK
+    # anyway. For even faster builds, you can set create_standalone_apk to 0.
+    'create_standalone_apk%': 1,
+
+    # Set clang to 1 to use the clang compiler. Clang has much (much, much) better warning/error
+    # messages than gcc.
+    # TODO(cjhopman): Enable this when http://crbug.com/156420 is addressed. Until then, users can
+    # set clang to 1, but Android stack traces will sometimes be incomplete.
+    #'clang%': 1,
+
+    # Set fastbuild to 1 to build with less debugging information. This can greatly decrease linking
+    # time. The downside is that stack traces will be missing useful information (like line
+    # numbers).
+    #'fastbuild%': 1,
+  },
+}
diff --git a/build/android/dex_action.gypi b/build/android/dex_action.gypi
new file mode 100644
index 0000000..9ea3e71
--- /dev/null
+++ b/build/android/dex_action.gypi
@@ -0,0 +1,59 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that dexes
+# compiled java files. If proguard_enabled == "true" and CONFIGURATION_NAME ==
+# "Release", then it will dex the proguard_enabled_input_path instead of the
+# normal dex_input_paths/dex_generated_input_paths.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'actions': [
+#      {
+#        'action_name': 'some name for the action',
+#        'variables': {
+#          'dex_input_paths': [ 'files to dex (when proguard is not used) and add to input paths' ],
+#          'dex_generated_input_dirs': [ 'dirs that contain generated files to dex' ],
+#
+#          # For targets that use proguard:
+#          'proguard_enabled': 'true',
+#          'proguard_enabled_input_path': 'path to dex when using proguard',
+#        },
+#        'includes': [ 'relative/path/to/dex_action.gypi' ],
+#      },
+#    ],
+#  },
+#
+
+{
+  'message': 'Creating dex file: <(output_path)',
+  'variables': {
+    'dex_input_paths': [],
+    'dex_generated_input_dirs': [],
+    'proguard_enabled%': 'false',
+    'proguard_enabled_input_path%': '',
+    'dex_no_locals%': 0,
+    'dex_additional_options': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/util/md5_check.py',
+    '<(DEPTH)/build/android/gyp/dex.py',
+    '>@(dex_input_paths)',
+  ],
+  'outputs': [
+    '<(output_path)',
+    '<(output_path).inputs',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/dex.py',
+    '--dex-path=<(output_path)',
+    '--android-sdk-tools=<(android_sdk_tools)',
+    '--configuration-name=<(CONFIGURATION_NAME)',
+    '--proguard-enabled=>(proguard_enabled)',
+    '--proguard-enabled-input-path=<(proguard_enabled_input_path)',
+    '--no-locals=>(dex_no_locals)',
+    '>@(dex_additional_options)',
+    '>@(dex_input_paths)',
+    '>@(dex_generated_input_dirs)',
+  ]
+}
diff --git a/build/android/empty/src/.keep b/build/android/empty/src/.keep
new file mode 100644
index 0000000..0f710b6
--- /dev/null
+++ b/build/android/empty/src/.keep
@@ -0,0 +1,6 @@
+This is a file that needs to live here until http://crbug.com/158155 has
+been fixed.
+
+The ant build system requires that a src folder always be present, but for
+some of our targets that is not the case. Giving ant an empty src folder
+works nicely though.
diff --git a/build/android/empty_proguard.flags b/build/android/empty_proguard.flags
new file mode 100644
index 0000000..53484fe
--- /dev/null
+++ b/build/android/empty_proguard.flags
@@ -0,0 +1 @@
+# Used for apk targets that do not need proguard. See build/java_apk.gypi.
diff --git a/build/android/enable_asserts.py b/build/android/enable_asserts.py
new file mode 100755
index 0000000..0e30bc2
--- /dev/null
+++ b/build/android/enable_asserts.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Enables dalvik vm asserts in the android device."""
+
+from pylib import android_commands
+from pylib.device import device_utils
+import optparse
+import sys
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--enable_asserts', dest='set_asserts',
+      action='store_true', default=None,
+      help='Sets the dalvik.vm.enableassertions property to "all"')
+  option_parser.add_option('--disable_asserts', dest='set_asserts',
+      action='store_false', default=None,
+      help='Removes the dalvik.vm.enableassertions property')
+  options, _ = option_parser.parse_args(argv)
+
+  # TODO(jbudorick): Accept optional serial number and run only for the
+  # specified device when present.
+  devices = android_commands.GetAttachedDevices()
+  for device in [device_utils.DeviceUtils(serial) for serial in devices]:
+    if options.set_asserts != None:
+      if device.SetJavaAsserts(options.set_asserts):
+        # TODO(jbudorick) How to best do shell restarts after the
+        #                 android_commands refactor?
+        device.RunShellCommand('stop')
+        device.RunShellCommand('start')
+
+
+if __name__ == '__main__':
+  main(sys.argv)
diff --git a/build/android/envsetup.sh b/build/android/envsetup.sh
new file mode 100755
index 0000000..26960ac
--- /dev/null
+++ b/build/android/envsetup.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Sets up environment for building Chromium on Android.
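+#
+# Usage (illustrative; the script must be sourced, not executed):
+#   . build/android/envsetup.sh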
+
+# Make sure we're being sourced (possibly by another script). Check for bash
+# since zsh sets $0 when sourcing.
+if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
+  echo "ERROR: envsetup must be sourced."
+  exit 1
+fi
+
+# This only exists to set local variables. Don't call this manually.
+android_envsetup_main() {
+  local SCRIPT_PATH="$1"
+  local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"
+
+  local CURRENT_DIR="$(readlink -f "${SCRIPT_DIR}/../../")"
+  if [[ -z "${CHROME_SRC}" ]]; then
+    # If $CHROME_SRC was not set, assume current directory is CHROME_SRC.
+    local CHROME_SRC="${CURRENT_DIR}"
+  fi
+
+  if [[ "${CURRENT_DIR/"${CHROME_SRC}"/}" == "${CURRENT_DIR}" ]]; then
+    # If the current directory is not under $CHROME_SRC, $CHROME_SRC might be
+    # set for another source tree. If $CHROME_SRC was set correctly and we are
+    # in the right directory, "${CURRENT_DIR/"${CHROME_SRC}"/}" will be "".
+    # Otherwise, it will be equal to "${CURRENT_DIR}".
+    echo "Warning: Current directory is outside of CHROME_SRC; it may not be \
+  the one you want."
+    echo "${CHROME_SRC}"
+  fi
+
+  # Allow the caller to override a few environment variables. If any of them is
+  # unset, we default to a sane value that's known to work. This allows for
+  # experimentation with a custom SDK.
+  if [[ -z "${ANDROID_SDK_ROOT}" || ! -d "${ANDROID_SDK_ROOT}" ]]; then
+    local ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_tools/sdk/"
+  fi
+
+  # Add Android SDK tools to system path.
+  export PATH=$PATH:${ANDROID_SDK_ROOT}/platform-tools
+
+  # Add Chromium Android development scripts to system path.
+  # Must be after CHROME_SRC is set.
+  export PATH=$PATH:${CHROME_SRC}/build/android
+
+  export ENVSETUP_GYP_CHROME_SRC=${CHROME_SRC}  # TODO(thakis): Remove.
+}
+# In zsh, $0 is the name of the file being sourced.
+android_envsetup_main "${BASH_SOURCE:-$0}"
+unset -f android_envsetup_main
+
+android_gyp() {
+  echo "Please call build/gyp_chromium instead. android_gyp is going away."
+  "${ENVSETUP_GYP_CHROME_SRC}/build/gyp_chromium" --depth="${ENVSETUP_GYP_CHROME_SRC}" --check "$@"
+}
diff --git a/build/android/finalize_apk_action.gypi b/build/android/finalize_apk_action.gypi
new file mode 100644
index 0000000..6187239
--- /dev/null
+++ b/build/android/finalize_apk_action.gypi
@@ -0,0 +1,56 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide an action that
+# signs and zipaligns an APK.
+#
+# To use this, create a gyp action with the following form:
+#  {
+#    'action_name': 'some descriptive action name',
+#    'variables': {
+#      'input_apk_path': 'relative/path/to/input.apk',
+#      'output_apk_path': 'relative/path/to/output.apk',
+#    },
+#    'includes': [ '../../build/android/finalize_apk_action.gypi' ],
+#  },
+#
+
+{
+  'message': 'Signing/aligning <(_target_name) APK: <(input_apk_path)',
+  'variables': {
+    'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore',
+    'keystore_name%': 'chromiumdebugkey',
+    'keystore_password%': 'chromium',
+    'conditions': [
+        # Webview doesn't use zipalign or rezip_apk_jar.
+        ['android_webview_build==0', {
+          'zipalign_path%': ['<!@(find <(android_sdk_root) -name zipalign)'],
+          'rezip_apk_jar_path%': '<(PRODUCT_DIR)/lib.java/rezip_apk.jar'
+        }, {
+          'zipalign_path%': "",
+          'rezip_apk_jar_path%': "",
+        }],
+    ],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/finalize_apk.py',
+    '<(keystore_path)',
+    '<(input_apk_path)',
+  ],
+  'outputs': [
+    '<(output_apk_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/finalize_apk.py',
+    '--zipalign-path=<(zipalign_path)',
+    '--unsigned-apk-path=<(input_apk_path)',
+    '--final-apk-path=<(output_apk_path)',
+    '--key-path=<(keystore_path)',
+    '--key-name=<(keystore_name)',
+    '--key-passwd=<(keystore_password)',
+    '--load-library-from-zip-file=<(load_library_from_zip_file)',
+    '--rezip-apk-jar-path=<(rezip_apk_jar_path)',
+  ],
+}
diff --git a/build/android/findbugs_diff.py b/build/android/findbugs_diff.py
new file mode 100755
index 0000000..28224f1
--- /dev/null
+++ b/build/android/findbugs_diff.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs findbugs, and returns an error code if there are new warnings.
+This runs findbugs with an additional flag to exclude known bugs.
+To update the list of known bugs, do this:
+
+   findbugs_diff.py --rebaseline
+
+Note that this is separate from findbugs_exclude.xml. The "exclude" file has
+false positives that we do not plan to fix. The "known bugs" file has real
+bugs that we *do* plan to fix (but haven't done so yet).
+
+Other options
+  --only-analyze used to only analyze the class you are interested.
+  --relase-build analyze the classes in out/Release directory.
+  --findbugs-args used to passin other findbugs's options.
+
+Run
+  $CHROM_SRC/third_party/findbugs/bin/findbugs -textui for details.
+
+"""
+
+import os
+import sys
+
+from pylib import constants
+from pylib.utils import findbugs
+
+
+def main():
+  parser = findbugs.GetCommonParser()
+
+  options, _ = parser.parse_args()
+
+  if not options.base_dir:
+    options.base_dir = os.path.join(constants.DIR_SOURCE_ROOT, 'build',
+                                    'android', 'findbugs_filter')
+  if not options.only_analyze:
+    options.only_analyze = 'org.chromium.-'
+
+  return findbugs.Run(options)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/findbugs_filter/findbugs_exclude.xml b/build/android/findbugs_filter/findbugs_exclude.xml
new file mode 100644
index 0000000..59aa92f
--- /dev/null
+++ b/build/android/findbugs_filter/findbugs_exclude.xml
@@ -0,0 +1,106 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+
+<!--
+Documentation: http://findbugs.sourceforge.net/manual/filter.html
+In particular, ~ at the start of a string means it's a regex.
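+For example, <Class name="~org\.chromium\..*\.R(\$\w+)?" /> matches the
+generated R classes (and their nested classes), as in the first Match below.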
+-->
+<FindBugsFilter>
+  <!-- Skip the generated resource classes (including nested classes). -->
+  <Match>
+    <Class name="~org\.chromium\..*\.R(\$\w+)?" />
+  </Match>
+  <Match>
+    <Class name="~org\.chromium\..*\.Manifest(\$\w+)?" />
+  </Match>
+  <!-- Ignore bugs in NativeLibraries.java (the auto-generation confuses findbugs). -->
+  <Match>
+    <Class name="~org\.chromium\.base\..*\.NativeLibraries.*?" />
+  </Match>
+  <!--
+  Ignore bugs in CleanupReferenceTest.java (redundant null check)
+  TODO(joth): Group all GC related tests and filter them out, since the null
+  check is necessary to make sure the nullification is flushed to memory.
+  -->
+  <Match>
+    <Class name="~org\.chromium\.content\..*\.CleanupReferenceTest.*?" />
+  </Match>
+  <!-- Ignore errors in JavaBridge due to reflection. -->
+  <Match>
+    <Class name="~.*\.JavaBridge.*"/>
+    <Bug code="UuF,UrF,UMAC" />
+  </Match>
+  <!-- "Struct" like classes expect to have unused public data members -->
+  <Match>
+    <Class name="~.*android_webview.*FileChooserParams"/>
+    <Bug code="UrF" />
+  </Match>
+  <!-- Ignore "reliance on default String encoding" warnings, as we're not multi-platform -->
+  <Bug pattern="DM_DEFAULT_ENCODING" />
+  <!-- Ignore bugs that are often false-positives in test code -->
+  <Match>
+    <Class name="~org\.chromium\..*Test(\$\w+)?" />
+    <Or>
+      <Bug code="DLS,UrF" />
+      <Bug pattern="DM_GC" />
+    </Or>
+  </Match>
+  <!--
+  Ignore calls to System.exit() following errors during loading the native library.
+  There is no way to recover from such errors without restarting the application,
+  so System.exit() is the best solution.
+  -->
+  <Match>
+    <Class name="~org\.chromium\.chrome\..*\.ChromiumSyncAdapter.*" />
+    <Method name="run" />
+    <Bug code="Dm" />
+  </Match>
+  <Match>
+    <Class name="~org\.chromium\.chrome\..*\.ChromiumSyncAdapter" />
+    <Method name="startBrowserProcessesSync" />
+    <Bug code="Dm" />
+  </Match>
+  <Match>
+    <Class name="~org\.chromium\.chrome\..*\.ChromeShellActivity" />
+    <Method name="onCreate" />
+    <Bug code="Dm" />
+  </Match>
+  <Match>
+    <Class name="~org\.chromium\.chrome\..*\.AccountsChangedReceiver.*" />
+    <Method name="run" />
+    <Bug code="Dm" />
+  </Match>
+  <Match>
+    <Class name="~org\.chromium\.content\..*\.ChildProcessService.*" />
+    <Method name="run" />
+    <Bug code="Dm" />
+  </Match>
+  <Match>
+    <Class name="~org\.chromium\..*ContentBrowserTestsActivity" />
+    <Method name="onCreate" />
+    <Bug code="Dm" />
+  </Match>
+  <Match>
+    <Class name="~org\.chromium\..*ContentShellActivity" />
+    <Method name="onCreate" />
+    <Bug code="Dm" />
+  </Match>
+  <Match>
+    <Class name="~org\.chromium\.components\.gcm_driver\..*\.GCMDriver" />
+    <Method name="launchNativeThen" />
+    <Bug code="Dm" />
+  </Match>
+  <!--
+  Ignore write to static field in GCMDriver, as it's the cleanest way to mark
+  the singleton as null when the native counterpart is destroyed.
+  -->
+  <Match>
+    <Class name="~org\.chromium\.components\.gcm_driver\..*\.GCMDriver" />
+    <Method name="destroy" />
+    <Bug code="ST" />
+  </Match>
+</FindBugsFilter>
diff --git a/build/android/findbugs_filter/findbugs_known_bugs.txt b/build/android/findbugs_filter/findbugs_known_bugs.txt
new file mode 100644
index 0000000..c82e62b
--- /dev/null
+++ b/build/android/findbugs_filter/findbugs_known_bugs.txt
@@ -0,0 +1,24 @@
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeArrayCoercionTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeArrayTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeBasicsTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeChildFrameTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeCoercionTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeFieldsTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeReturnValuesTest.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At JavaBridgeTestBase.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At PerfTraceEvent.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At SimpleSynchronizedMethod.java
+M C CSM: Shouldn't use synchronized method, please narrow down the synchronization scope.  At SimpleSynchronizedStaticMethod.java
+M C CST: Shouldn't use synchronized(this), please narrow down the synchronization scope.  At SimpleSynchronizedThis.java
+M D DMI: Hard coded reference to an absolute pathname in org.chromium.android_webview.test.ArchiveTest.testAutoBadPath()  At ArchiveTest.java
+M D DMI: Hard coded reference to an absolute pathname in org.chromium.android_webview.test.ArchiveTest.testExplicitBadPath()  At ArchiveTest.java
+M D SF: Switch statement found in org.chromium.chrome.browser.ChromeBrowserProvider.insert(Uri, ContentValues) where one case falls through to the next case  At ChromeBrowserProvider.java
+M M UG: org.chromium.content.browser.JavaBridgeReturnValuesTest$TestObject.getBooleanValue() is unsynchronized, org.chromium.content.browser.JavaBridgeReturnValuesTest$TestObject.setBooleanValue(boolean) is synchronized  At JavaBridgeReturnValuesTest.java
+M M UG: org.chromium.content.browser.JavaBridgeReturnValuesTest$TestObject.getStringValue() is unsynchronized, org.chromium.content.browser.JavaBridgeReturnValuesTest$TestObject.setStringValue(String) is synchronized  At JavaBridgeReturnValuesTest.java
+M V EI2: org.chromium.chrome.browser.ChromeBrowserProvider$BookmarkNode.setFavicon(byte[]) may expose internal representation by storing an externally mutable object into ChromeBrowserProvider$BookmarkNode.mFavicon  At ChromeBrowserProvider.java
+M V EI2: org.chromium.chrome.browser.ChromeBrowserProvider$BookmarkNode.setThumbnail(byte[]) may expose internal representation by storing an externally mutable object into ChromeBrowserProvider$BookmarkNode.mThumbnail  At ChromeBrowserProvider.java
+M V EI: org.chromium.chrome.browser.ChromeBrowserProvider$BookmarkNode.favicon() may expose internal representation by returning ChromeBrowserProvider$BookmarkNode.mFavicon  At ChromeBrowserProvider.java
+M V EI: org.chromium.chrome.browser.ChromeBrowserProvider$BookmarkNode.thumbnail() may expose internal representation by returning ChromeBrowserProvider$BookmarkNode.mThumbnail  At ChromeBrowserProvider.java
+M M LI: Incorrect lazy initialization of static field org.chromium.chrome.browser.sync.ProfileSyncService.sSyncSetupManager in org.chromium.chrome.browser.sync.ProfileSyncService.get(Context)  At ProfileSyncService.java
+M V EI2: org.chromium.content_public.browser.LoadUrlParams.setPostData(byte[]) may expose internal representation by storing an externally mutable object into LoadUrlParams.mPostData  At LoadUrlParams.java
+M V EI: org.chromium.content_public.browser.LoadUrlParams.getPostData() may expose internal representation by returning LoadUrlParams.mPostData  At LoadUrlParams.java
diff --git a/build/android/generate_emma_html.py b/build/android/generate_emma_html.py
new file mode 100755
index 0000000..93b0b0e
--- /dev/null
+++ b/build/android/generate_emma_html.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Aggregates EMMA coverage files to produce html output."""
+
+import fnmatch
+import json
+import optparse
+import os
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+
+
+def _GetFilesWithExt(root_dir, ext):
+  """Gets all files with a given extension.
+
+  Args:
+    root_dir: Directory in which to search for files.
+    ext: Extension to look for (including dot)
+
+  Returns:
+    A list of absolute paths to files that match.
+  """
+  files = []
+  for root, _, filenames in os.walk(root_dir):
+    basenames = fnmatch.filter(filenames, '*.' + ext)
+    files.extend([os.path.join(root, basename)
+                  for basename in basenames])
+
+  return files
+
+
+def main():
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--output', help='HTML output filename.')
+  option_parser.add_option('--coverage-dir', default=None,
+                           help=('Root of the directory in which to search for '
+                                 'coverage data (.ec) files.'))
+  option_parser.add_option('--metadata-dir', default=None,
+                           help=('Root of the directory in which to search for '
+                                 'coverage metadata (.em) files.'))
+  option_parser.add_option('--cleanup', action='store_true',
+                           help=('If set, removes coverage files generated at '
+                                 'runtime.'))
+  options, _ = option_parser.parse_args()
+
+  if not (options.coverage_dir and options.metadata_dir and options.output):
+    option_parser.error('One or more mandatory options are missing.')
+
+  coverage_files = _GetFilesWithExt(options.coverage_dir, 'ec')
+  metadata_files = _GetFilesWithExt(options.metadata_dir, 'em')
+  print 'Found coverage files: %s' % str(coverage_files)
+  print 'Found metadata files: %s' % str(metadata_files)
+
+  sources = []
+  for f in metadata_files:
+    sources_file = os.path.splitext(f)[0] + '_sources.txt'
+    with open(sources_file, 'r') as sf:
+      sources.extend(json.load(sf))
+  sources = [os.path.join(constants.DIR_SOURCE_ROOT, s) for s in sources]
+  print 'Sources: %s' % sources
+
+  input_args = []
+  for f in coverage_files + metadata_files:
+    input_args.append('-in')
+    input_args.append(f)
+
+  output_args = ['-Dreport.html.out.file', options.output]
+  source_args = ['-sp', ','.join(sources)]
+
+  exit_code = cmd_helper.RunCmd(
+      ['java', '-cp',
+       os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'lib', 'emma.jar'),
+       'emma', 'report', '-r', 'html']
+      + input_args + output_args + source_args)
+
+  if options.cleanup:
+    for f in coverage_files:
+      os.remove(f)
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gn/zip.py b/build/android/gn/zip.py
new file mode 100755
index 0000000..5050ea0
--- /dev/null
+++ b/build/android/gn/zip.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Archives a set of files.
+"""
+
+import ast
+import optparse
+import os
+import sys
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+from util import build_utils
+
+def DoZip(inputs, output, base_dir):
+  with zipfile.ZipFile(output, 'w') as outfile:
+    for f in inputs:
+      outfile.write(f, os.path.relpath(f, base_dir))
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--inputs', help='List of files to archive.')
+  parser.add_option('--output', help='Path to output archive.')
+  parser.add_option('--base-dir',
+                    help='If provided, the paths in the archive will be '
+                    'relative to this directory', default='.')
+
+  options, _ = parser.parse_args()
+
+  inputs = ast.literal_eval(options.inputs)
+  output = options.output
+  base_dir = options.base_dir
+
+  DoZip(inputs, output, base_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/aidl.py b/build/android/gyp/aidl.py
new file mode 100755
index 0000000..d5aa546
--- /dev/null
+++ b/build/android/gyp/aidl.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Invokes Android's aidl
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(option_parser)
+  option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
+  option_parser.add_option('--imports', help='Files to import.')
+  option_parser.add_option('--includes',
+                           help='Directories to add as import search paths.')
+  option_parser.add_option('--srcjar', help='Path for srcjar output.')
+  options, args = option_parser.parse_args(argv[1:])
+
+  with build_utils.TempDir() as temp_dir:
+    for f in args:
+      classname = os.path.splitext(os.path.basename(f))[0]
+      output = os.path.join(temp_dir, classname + '.java')
+      aidl_cmd = [options.aidl_path]
+      aidl_cmd += [
+        '-p' + s for s in build_utils.ParseGypList(options.imports)
+      ]
+      if options.includes is not None:
+        aidl_cmd += [
+          '-I' + s for s in build_utils.ParseGypList(options.includes)
+        ]
+      aidl_cmd += [
+        f,
+        output
+      ]
+      build_utils.CheckOutput(aidl_cmd)
+
+    build_utils.ZipDir(options.srcjar, temp_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/ant.py b/build/android/gyp/ant.py
new file mode 100755
index 0000000..5394b9e
--- /dev/null
+++ b/build/android/gyp/ant.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An Ant wrapper that suppresses useless Ant output.
+
+Ant build scripts output "BUILD SUCCESSFUL" and build timing at the end of
+every build. In the Android build, this just adds a lot of useless noise to the
+build output. This script forwards its arguments to ant, and prints Ant's
+output up until the BUILD SUCCESSFUL line.
+
+Also, when a command fails, this script will re-run that ant command with the
+'-verbose' argument so that the failure is easier to debug.
+"""
+
+import optparse
+import sys
+import traceback
+
+from util import build_utils
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(option_parser)
+  options, args = option_parser.parse_args(argv[1:])
+
+  try:
+    stdout = build_utils.CheckOutput(['ant'] + args)
+  except build_utils.CalledProcessError:
+    # It is very difficult to diagnose ant failures without the '-verbose'
+    # argument. So, when an ant command fails, re-run it with '-verbose' so that
+    # the cause of the failure is easier to identify.
+    verbose_args = ['-verbose'] + [a for a in args if a != '-quiet']
+    try:
+      stdout = build_utils.CheckOutput(['ant'] + verbose_args)
+    except build_utils.CalledProcessError:
+      traceback.print_exc()
+      sys.exit(1)
+
+    # If this did sys.exit(1), building again would succeed (which would be
+    # awkward). Instead, just print a big warning.
+    build_utils.PrintBigWarning(
+        'This is unexpected. `ant ' + ' '.join(args) + '` failed. ' +
+        'But running `ant ' + ' '.join(verbose_args) + '` passed.')
+
+  stdout = stdout.strip().split('\n')
+  for line in stdout:
+    if line.strip() == 'BUILD SUCCESSFUL':
+      break
+    print line
+
+  if options.depfile:
+    assert '-buildfile' in args
+    ant_buildfile = args[args.index('-buildfile') + 1]
+
+    build_utils.WriteDepfile(
+        options.depfile,
+        [ant_buildfile] + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/apk_install.py b/build/android/gyp/apk_install.py
new file mode 100755
index 0000000..19a217c
--- /dev/null
+++ b/build/android/gyp/apk_install.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Installs an APK.
+
+"""
+
+import optparse
+import os
+import re
+import sys
+
+from util import build_device
+from util import build_utils
+from util import md5_check
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import constants
+from pylib.utils import apk_helper
+
+def GetNewMetadata(device, apk_package):
+  """Gets the metadata on the device for the apk_package apk."""
+  output = device.RunShellCommand('ls -l /data/app/')
+  # Matches lines like:
+  # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+  # org.chromium.chrome.shell.apk
+  # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+  # org.chromium.chrome.shell-1.apk
+  apk_matcher = lambda s: re.match('.*%s(-[0-9]*)?(\.apk)?$' % apk_package, s)
+  matches = filter(apk_matcher, output)
+  return matches[0] if matches else None
+
+def HasInstallMetadataChanged(device, apk_package, metadata_path):
+  """Checks if the metadata on the device for apk_package has changed."""
+  if not os.path.exists(metadata_path):
+    return True
+
+  with open(metadata_path, 'r') as expected_file:
+    return expected_file.read() != device.GetInstallMetadata(apk_package)
+
+
+def RecordInstallMetadata(device, apk_package, metadata_path):
+  """Records the metadata from the device for apk_package."""
+  metadata = GetNewMetadata(device, apk_package)
+  if not metadata:
+    raise Exception('APK install failed unexpectedly.')
+
+  with open(metadata_path, 'w') as outfile:
+    outfile.write(metadata)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--apk-path',
+      help='Path to .apk to install.')
+  parser.add_option('--install-record',
+      help='Path to install record (touched only when APK is installed).')
+  parser.add_option('--build-device-configuration',
+      help='Path to build device configuration.')
+  parser.add_option('--stamp',
+      help='Path to touch on success.')
+  parser.add_option('--configuration-name',
+      help='The build CONFIGURATION_NAME')
+  options, _ = parser.parse_args()
+
+  device = build_device.GetBuildDeviceFromPath(
+      options.build_device_configuration)
+  if not device:
+    return
+
+  constants.SetBuildType(options.configuration_name)
+
+  serial_number = device.GetSerialNumber()
+  apk_package = apk_helper.GetPackageName(options.apk_path)
+
+  metadata_path = '%s.%s.device.time.stamp' % (options.apk_path, serial_number)
+
+  # If the APK on the device does not match the one that was last installed by
+  # the build, then the APK has to be installed (regardless of the md5 record).
+  force_install = HasInstallMetadataChanged(device, apk_package, metadata_path)
+
+  def Install():
+    device.Install(options.apk_path, reinstall=True)
+    RecordInstallMetadata(device, apk_package, metadata_path)
+    build_utils.Touch(options.install_record)
+
+
+  record_path = '%s.%s.md5.stamp' % (options.apk_path, serial_number)
+  md5_check.CallAndRecordIfStale(
+      Install,
+      record_path=record_path,
+      input_paths=[options.apk_path],
+      force=force_install)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/apk_obfuscate.py b/build/android/gyp/apk_obfuscate.py
new file mode 100755
index 0000000..fec70c8
--- /dev/null
+++ b/build/android/gyp/apk_obfuscate.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates the obfuscated jar and test jar for an apk.
+
+If proguard is not enabled or the configuration name is not 'Release',
+obfuscation will be a no-op.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+def ParseArgs(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--android-sdk', help='path to the Android SDK folder')
+  parser.add_option('--android-sdk-tools',
+                    help='path to the Android SDK build tools folder')
+  parser.add_option('--android-sdk-jar',
+                    help='path to Android SDK\'s android.jar')
+  parser.add_option('--proguard-jar-path',
+                    help='Path to proguard.jar in the sdk')
+  parser.add_option('--input-jars-paths',
+                    help='Path to jars to include in obfuscated jar')
+
+  parser.add_option('--proguard-configs',
+                    help='Paths to proguard config files')
+
+  parser.add_option('--configuration-name',
+                    help='Gyp configuration name (i.e. Debug, Release)')
+  parser.add_option('--proguard-enabled', action='store_true',
+                    help='Set if proguard is enabled for this target.')
+
+  parser.add_option('--obfuscated-jar-path',
+                    help='Output path for obfuscated jar.')
+
+  parser.add_option('--testapp', action='store_true',
+                    help='Set this if building an instrumentation test apk')
+  parser.add_option('--tested-apk-obfuscated-jar-path',
+                    help='Path to the obfuscated jar of the tested apk')
+  parser.add_option('--test-jar-path',
+                    help='Output path for jar containing all the test apk\'s '
+                    'code.')
+
+  parser.add_option('--stamp', help='File to touch on success')
+
+  (options, args) = parser.parse_args(argv)
+
+  if args:
+    parser.error('No positional arguments should be given. ' + str(args))
+
+  # Check that required options have been provided.
+  required_options = (
+      'android_sdk',
+      'android_sdk_tools',
+      'android_sdk_jar',
+      'proguard_jar_path',
+      'input_jars_paths',
+      'configuration_name',
+      'obfuscated_jar_path',
+      )
+
+  if options.testapp:
+    required_options += (
+        'test_jar_path',
+        )
+
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  return options, args
+
+
+def main(argv):
+  options, _ = ParseArgs(argv)
+
+  library_classpath = [options.android_sdk_jar]
+  input_jars = build_utils.ParseGypList(options.input_jars_paths)
+
+  dependency_class_filters = [
+      '*R.class', '*R$*.class', '*Manifest.class', '*BuildConfig.class']
+
+  if options.testapp:
+    build_utils.MergeZips(
+        options.test_jar_path, input_jars, dependency_class_filters)
+
+  if options.configuration_name == 'Release' and options.proguard_enabled:
+    proguard_cmd = [
+        'java', '-jar', options.proguard_jar_path,
+        '-forceprocessing',
+        '-libraryjars', ':'.join(library_classpath),
+        '-dump', options.obfuscated_jar_path + '.dump',
+        '-printseeds', options.obfuscated_jar_path + '.seeds',
+        '-printusage', options.obfuscated_jar_path + '.usage',
+        '-printmapping', options.obfuscated_jar_path + '.mapping',
+        ]
+
+    exclude_paths = []
+    configs = build_utils.ParseGypList(options.proguard_configs)
+    if (options.tested_apk_obfuscated_jar_path and
+        options.tested_apk_obfuscated_jar_path != '/'):
+      # configs should only contain the process_resources.py generated config.
+      assert len(configs) == 1, (
+          'test apks should not have custom proguard configs: ' + str(configs))
+      tested_jar_info = build_utils.ReadJson(
+          options.tested_apk_obfuscated_jar_path + '.info')
+      exclude_paths = tested_jar_info['inputs']
+      configs = tested_jar_info['configs']
+      proguard_cmd += [
+          '-dontobfuscate',
+          '-dontoptimize',
+          '-dontshrink',
+          '-dontskipnonpubliclibraryclassmembers',
+          '-libraryjars', options.tested_apk_obfuscated_jar_path,
+          '-applymapping', options.tested_apk_obfuscated_jar_path + '.mapping',
+          ]
+
+    proguard_injars = [p for p in input_jars if p not in exclude_paths]
+    proguard_cmd += ['-injars', ':'.join(proguard_injars)]
+
+    for config_file in configs:
+      proguard_cmd += ['-include', config_file]
+
+    # The output jar must be specified after inputs.
+    proguard_cmd += ['-outjars', options.obfuscated_jar_path]
+
+    build_utils.CheckOutput(proguard_cmd)
+
+    this_info = {
+      'inputs': proguard_injars,
+      'configs': configs
+    }
+
+    build_utils.WriteJson(
+        this_info, options.obfuscated_jar_path + '.info')
+  else:
+    output_files = [
+        options.obfuscated_jar_path,
+        options.obfuscated_jar_path + '.info',
+        options.obfuscated_jar_path + '.dump',
+        options.obfuscated_jar_path + '.seeds',
+        options.obfuscated_jar_path + '.usage',
+        options.obfuscated_jar_path + '.mapping']
+    for f in output_files:
+      if os.path.exists(f):
+        os.remove(f)
+      build_utils.Touch(f)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/copy_ex.py b/build/android/gyp/copy_ex.py
new file mode 100755
index 0000000..eee3d19
--- /dev/null
+++ b/build/android/gyp/copy_ex.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies files to a directory."""
+
+import optparse
+import shutil
+import sys
+
+from util import build_utils
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--dest', help='Directory to copy files to.')
+  parser.add_option('--files', action='append',
+                    help='List of files to copy.')
+  parser.add_option('--clear', action='store_true',
+                    help='If set, the destination directory will be deleted '
+                    'before copying files to it. This is highly recommended to '
+                    'ensure that no stale files are left in the directory.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args(args)
+
+  if options.clear:
+    build_utils.DeleteDirectory(options.dest)
+    build_utils.MakeDirectory(options.dest)
+
+  files = []
+  for file_arg in options.files:
+    files += build_utils.ParseGypList(file_arg)
+
+  for f in files:
+    shutil.copy(f, options.dest)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        files + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
+
diff --git a/build/android/gyp/create_device_library_links.py b/build/android/gyp/create_device_library_links.py
new file mode 100755
index 0000000..30e050c
--- /dev/null
+++ b/build/android/gyp/create_device_library_links.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates symlinks to native libraries for an APK.
+
+The native libraries should have previously been pushed to the device (in
+options.target_dir). This script then creates links in an apk's lib/ folder to
+those native libraries.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_device
+from util import build_utils
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import constants
+from pylib.utils import apk_helper
+
+def RunShellCommand(device, cmd):
+  output = device.RunShellCommand(cmd)
+
+  if output:
+    raise Exception(
+        'Unexpected output running command: ' + cmd + '\n' +
+        '\n'.join(output))
+
+
+def CreateSymlinkScript(options):
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  link_cmd = (
+      'rm $APK_LIBRARIES_DIR/%(lib_basename)s > /dev/null 2>&1 \n'
+      'ln -s $STRIPPED_LIBRARIES_DIR/%(lib_basename)s '
+        '$APK_LIBRARIES_DIR/%(lib_basename)s \n'
+      )
+
+  script = '#!/bin/sh \n'
+
+  for lib in libraries:
+    script += link_cmd % { 'lib_basename': lib }
+
+  with open(options.script_host_path, 'w') as scriptfile:
+    scriptfile.write(script)
+
+
+def TriggerSymlinkScript(options):
+  device = build_device.GetBuildDeviceFromPath(
+      options.build_device_configuration)
+  if not device:
+    return
+
+  apk_package = apk_helper.GetPackageName(options.apk)
+  apk_libraries_dir = '/data/data/%s/lib' % apk_package
+
+  device_dir = os.path.dirname(options.script_device_path)
+  mkdir_cmd = ('if [ ! -e %(dir)s ]; then mkdir -p %(dir)s; fi ' %
+      { 'dir': device_dir })
+  RunShellCommand(device, mkdir_cmd)
+  device.PushChangedFiles(options.script_host_path, options.script_device_path)
+
+  trigger_cmd = (
+      'APK_LIBRARIES_DIR=%(apk_libraries_dir)s; '
+      'STRIPPED_LIBRARIES_DIR=%(target_dir)s; '
+      '. %(script_device_path)s'
+      ) % {
+          'apk_libraries_dir': apk_libraries_dir,
+          'target_dir': options.target_dir,
+          'script_device_path': options.script_device_path
+          }
+  RunShellCommand(device, trigger_cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  parser.add_option('--apk', help='Path to the apk.')
+  parser.add_option('--script-host-path',
+      help='Path on the host for the symlink script.')
+  parser.add_option('--script-device-path',
+      help='Path on the device to push the created symlink script.')
+  parser.add_option('--libraries',
+      help='List of native libraries.')
+  parser.add_option('--target-dir',
+      help='Device directory that contains the target libraries for symlinks.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--build-device-configuration',
+      help='Path to build device configuration.')
+  parser.add_option('--configuration-name',
+      help='The build CONFIGURATION_NAME')
+  options, _ = parser.parse_args(args)
+
+  required_options = ['apk', 'libraries', 'script_host_path',
+      'script_device_path', 'target_dir', 'configuration_name']
+  build_utils.CheckOptions(options, parser, required=required_options)
+  constants.SetBuildType(options.configuration_name)
+
+  CreateSymlinkScript(options)
+  TriggerSymlinkScript(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_dist_jar.py b/build/android/gyp/create_dist_jar.py
new file mode 100755
index 0000000..0d31c5d
--- /dev/null
+++ b/build/android/gyp/create_dist_jar.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merges a list of jars into a single jar."""
+
+import optparse
+import sys
+
+from util import build_utils
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--output', help='Path to output jar.')
+  parser.add_option('--inputs', action='append', help='List of jar inputs.')
+  options, _ = parser.parse_args(args)
+  build_utils.CheckOptions(options, parser, ['output', 'inputs'])
+
+  input_jars = []
+  for inputs_arg in options.inputs:
+    input_jars.extend(build_utils.ParseGypList(inputs_arg))
+
+  build_utils.MergeZips(options.output, input_jars)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        input_jars + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_placeholder_files.py b/build/android/gyp/create_placeholder_files.py
new file mode 100755
index 0000000..103e1df
--- /dev/null
+++ b/build/android/gyp/create_placeholder_files.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create placeholder files.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '--dest-lib-dir',
+      help='Destination directory in which to create placeholder files.')
+  parser.add_option(
+      '--stamp',
+      help='Path to touch on success')
+
+  options, args = parser.parse_args()
+
+  for name in args:
+    target_path = os.path.join(options.dest_lib_dir, name)
+    build_utils.Touch(target_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/create_standalone_apk.py b/build/android/gyp/create_standalone_apk.py
new file mode 100755
index 0000000..c560599
--- /dev/null
+++ b/build/android/gyp/create_standalone_apk.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Combines stripped libraries and incomplete APK into single standalone APK.
+
+"""
+
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+from util import md5_check
+
+def CreateStandaloneApk(options):
+  def DoZip():
+    with tempfile.NamedTemporaryFile(suffix='.zip') as intermediate_file:
+      intermediate_path = intermediate_file.name
+      shutil.copy(options.input_apk_path, intermediate_path)
+      apk_path_abs = os.path.abspath(intermediate_path)
+      build_utils.CheckOutput(
+          ['zip', '-r', '-1', apk_path_abs, 'lib'],
+          cwd=options.libraries_top_dir)
+      shutil.copy(intermediate_path, options.output_apk_path)
+
+  input_paths = [options.input_apk_path, options.libraries_top_dir]
+  record_path = '%s.standalone.stamp' % options.input_apk_path
+  md5_check.CallAndRecordIfStale(
+      DoZip,
+      record_path=record_path,
+      input_paths=input_paths)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--libraries-top-dir',
+      help='Top directory that contains libraries '
+      '(i.e. library paths are like '
+      'libraries_top_dir/lib/android_app_abi/foo.so).')
+  parser.add_option('--input-apk-path', help='Path to incomplete APK.')
+  parser.add_option('--output-apk-path', help='Path for standalone APK.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  options, _ = parser.parse_args()
+
+  required_options = ['libraries_top_dir', 'input_apk_path', 'output_apk_path']
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  CreateStandaloneApk(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/delete_files.py b/build/android/gyp/delete_files.py
new file mode 100755
index 0000000..2fd945c
--- /dev/null
+++ b/build/android/gyp/delete_files.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Delete files in directories matching a pattern.
+"""
+
+import glob
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '--pattern',
+      help='Pattern for matching files to delete.')
+  parser.add_option(
+      '--keep',
+      help='Files to keep even if they match the pattern.')
+  parser.add_option(
+      '--stamp',
+      help='Path to touch on success')
+
+  options, args = parser.parse_args()
+
+  if not options.pattern or not args:
+    print 'No --pattern or target directories given'
+    return
+
+  for target_dir in args:
+    target_pattern = os.path.join(target_dir, options.pattern)
+    matching_files = glob.glob(target_pattern)
+
+    # --keep is optional; guard against passing None to os.path.join.
+    files_to_keep = []
+    if options.keep:
+      keep_pattern = os.path.join(target_dir, options.keep)
+      files_to_keep = glob.glob(keep_pattern)
+
+    for target_file in matching_files:
+      if target_file in files_to_keep:
+        continue
+
+      if os.path.isfile(target_file):
+        os.remove(target_file)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/dex.py b/build/android/gyp/dex.py
new file mode 100755
index 0000000..d89c7c2
--- /dev/null
+++ b/build/android/gyp/dex.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
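+"""Creates a dex file from Java inputs using the Android dx tool.
+
+A sketch of the underlying invocation this wraps (paths are illustrative):
+  <android-sdk-tools>/dx --dex --force-jumbo --output <dex-path> input.jar
+"""
+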
+import optparse
+import os
+import sys
+
+from util import build_utils
+from util import md5_check
+
+
+def DoDex(options, paths):
+  dx_binary = os.path.join(options.android_sdk_tools, 'dx')
+  # See http://crbug.com/272064 for context on --force-jumbo.
+  dex_cmd = [dx_binary, '--dex', '--force-jumbo', '--output', options.dex_path]
+  if options.no_locals != '0':
+    dex_cmd.append('--no-locals')
+
+  dex_cmd += paths
+
+  record_path = '%s.md5.stamp' % options.dex_path
+  md5_check.CallAndRecordIfStale(
+      lambda: build_utils.CheckOutput(dex_cmd, print_stderr=False),
+      record_path=record_path,
+      input_paths=paths,
+      input_strings=dex_cmd,
+      force=not os.path.exists(options.dex_path))
+  build_utils.WriteJson(paths, options.dex_path + '.inputs')
+
+
+def main():
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--android-sdk-tools',
+                    help='Android sdk build tools directory.')
+  parser.add_option('--dex-path', help='Dex output path.')
+  parser.add_option('--configuration-name',
+                    help='The build CONFIGURATION_NAME.')
+  parser.add_option('--proguard-enabled',
+                    help='"true" if proguard is enabled.')
+  parser.add_option('--proguard-enabled-input-path',
+                    help=('Path to dex in Release mode when proguard '
+                          'is enabled.'))
+  parser.add_option('--no-locals',
+                    help='Exclude locals list from the dex file.')
+  parser.add_option('--inputs', help='A list of additional input paths.')
+  parser.add_option('--excluded-paths-file',
+                    help='Path to a file containing a list of paths to exclude '
+                    'from the dex file.')
+
+  options, paths = parser.parse_args(args)
+
+  required_options = ('android_sdk_tools',)
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  if (options.proguard_enabled == 'true'
+      and options.configuration_name == 'Release'):
+    paths = [options.proguard_enabled_input_path]
+
+  if options.excluded_paths_file:
+    exclude_paths = build_utils.ReadJson(options.excluded_paths_file)
+    paths = [p for p in paths if p not in exclude_paths]
+
+  if options.inputs:
+    paths += build_utils.ParseGypList(options.inputs)
+
+  DoDex(options, paths)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        paths + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/emma_instr.py b/build/android/gyp/emma_instr.py
new file mode 100755
index 0000000..6f3555a
--- /dev/null
+++ b/build/android/gyp/emma_instr.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Instruments classes and jar files.
+
+This script corresponds to the 'emma_instr' action in the java build process.
+Depending on whether emma_instrument is set, the 'emma_instr' action will either
+call one of the instrument commands, or the copy command.
+
+Possible commands are:
+- instrument_jar: Accepts a jar and instruments it using emma.jar.
+- instrument_classes: Accepts a directory containing java classes and
+      instruments it using emma.jar.
+- copy: Called when EMMA coverage is not enabled. This allows us to make
+      this a required step without necessarily instrumenting on every build.
+      Also removes any stale coverage files.
+"""
+
+import collections
+import json
+import os
+import shutil
+import sys
+import tempfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.utils import command_option_parser
+
+from util import build_utils
+
+
+def _AddCommonOptions(option_parser):
+  """Adds common options to |option_parser|."""
+  option_parser.add_option('--input-path',
+                           help=('Path to input file(s). Either the classes '
+                                 'directory, or the path to a jar.'))
+  option_parser.add_option('--output-path',
+                           help=('Path to output final file(s) to. Either the '
+                                 'final classes directory, or the directory in '
+                                 'which to place the instrumented/copied jar.'))
+  option_parser.add_option('--stamp', help='Path to touch when done.')
+  option_parser.add_option('--coverage-file',
+                           help='File to create with coverage metadata.')
+  option_parser.add_option('--sources-file',
+                           help='File to create with the list of sources.')
+
+
+def _AddInstrumentOptions(option_parser):
+  """Adds options related to instrumentation to |option_parser|."""
+  _AddCommonOptions(option_parser)
+  option_parser.add_option('--sources',
+                           help='Space separated list of sources.')
+  option_parser.add_option('--src-root',
+                           help='Root of the src repository.')
+  option_parser.add_option('--emma-jar',
+                           help='Path to emma.jar.')
+  option_parser.add_option(
+      '--filter-string', default='',
+      help=('Filter string consisting of a list of inclusion/exclusion '
+            'patterns separated with whitespace and/or comma.'))
+
+
+def _RunCopyCommand(_command, options, _, option_parser):
+  """Copies the jar from input to output locations.
+
+  Also removes any old coverage/sources file.
+
+  Args:
+    command: String indicating the command that was received to trigger
+        this function.
+    options: optparse options dictionary.
+    args: List of extra args from optparse.
+    option_parser: optparse.OptionParser object.
+
+  Returns:
+    An exit code.
+  """
+  if not (options.input_path and options.output_path and
+          options.coverage_file and options.sources_file):
+    option_parser.error('All arguments are required.')
+
+  coverage_file = os.path.join(os.path.dirname(options.output_path),
+                               options.coverage_file)
+  sources_file = os.path.join(os.path.dirname(options.output_path),
+                              options.sources_file)
+  if os.path.exists(coverage_file):
+    os.remove(coverage_file)
+  if os.path.exists(sources_file):
+    os.remove(sources_file)
+
+  if os.path.isdir(options.input_path):
+    shutil.rmtree(options.output_path, ignore_errors=True)
+    shutil.copytree(options.input_path, options.output_path)
+  else:
+    shutil.copy(options.input_path, options.output_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+def _CreateSourcesFile(sources_string, sources_file, src_root):
+  """Adds all normalized source directories to |sources_file|.
+
+  Args:
+    sources_string: String generated from gyp containing the list of sources.
+    sources_file: File into which to write the JSON list of sources.
+    src_root: Root which sources added to the file should be relative to.
+
+  Returns:
+    An exit code.
+  """
+  src_root = os.path.abspath(src_root)
+  sources = build_utils.ParseGypList(sources_string)
+  relative_sources = []
+  for s in sources:
+    abs_source = os.path.abspath(s)
+    if abs_source[:len(src_root)] != src_root:
+      print ('Error: found source directory not under repository root: %s %s'
+             % (abs_source, src_root))
+      return 1
+    rel_source = os.path.relpath(abs_source, src_root)
+
+    relative_sources.append(rel_source)
+
+  with open(sources_file, 'w') as f:
+    json.dump(relative_sources, f)
+
+
+def _RunInstrumentCommand(command, options, _, option_parser):
+  """Instruments the classes/jar files using EMMA.
+
+  Args:
+    command: 'instrument_jar' or 'instrument_classes'. This distinguishes
+        whether we copy the output from the created lib/ directory, or classes/
+        directory.
+    options: optparse options dictionary.
+    args: List of extra args from optparse.
+    option_parser: optparse.OptionParser object.
+
+  Returns:
+    An exit code.
+  """
+  if not (options.input_path and options.output_path and
+          options.coverage_file and options.sources_file and options.sources and
+          options.src_root and options.emma_jar):
+    option_parser.error('All arguments are required.')
+
+  coverage_file = os.path.join(os.path.dirname(options.output_path),
+                               options.coverage_file)
+  sources_file = os.path.join(os.path.dirname(options.output_path),
+                              options.sources_file)
+  if os.path.exists(coverage_file):
+    os.remove(coverage_file)
+  temp_dir = tempfile.mkdtemp()
+  try:
+    cmd = ['java', '-cp', options.emma_jar,
+           'emma', 'instr',
+           '-ip', options.input_path,
+           '-ix', options.filter_string,
+           '-d', temp_dir,
+           '-out', coverage_file,
+           '-m', 'fullcopy']
+    build_utils.CheckOutput(cmd)
+
+    if command == 'instrument_jar':
+      for jar in os.listdir(os.path.join(temp_dir, 'lib')):
+        shutil.copy(os.path.join(temp_dir, 'lib', jar),
+                    options.output_path)
+    else:  # 'instrument_classes'
+      if os.path.isdir(options.output_path):
+        shutil.rmtree(options.output_path, ignore_errors=True)
+      shutil.copytree(os.path.join(temp_dir, 'classes'),
+                      options.output_path)
+  finally:
+    shutil.rmtree(temp_dir)
+
+  _CreateSourcesFile(options.sources, sources_file, options.src_root)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+CommandFunctionTuple = collections.namedtuple(
+    'CommandFunctionTuple', ['add_options_func', 'run_command_func'])
+VALID_COMMANDS = {
+    'copy': CommandFunctionTuple(_AddCommonOptions,
+                                 _RunCopyCommand),
+    'instrument_jar': CommandFunctionTuple(_AddInstrumentOptions,
+                                           _RunInstrumentCommand),
+    'instrument_classes': CommandFunctionTuple(_AddInstrumentOptions,
+                                               _RunInstrumentCommand),
+}
+
+
+def main():
+  option_parser = command_option_parser.CommandOptionParser(
+      commands_dict=VALID_COMMANDS)
+  command_option_parser.ParseAndExecute(option_parser)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/finalize_apk.py b/build/android/gyp/finalize_apk.py
new file mode 100755
index 0000000..5416008
--- /dev/null
+++ b/build/android/gyp/finalize_apk.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and zipaligns APK.
+
+"""
+
+import optparse
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+
+def RenameInflateAndAddPageAlignment(
+    rezip_apk_jar_path, in_zip_file, out_zip_file):
+  rezip_apk_cmd = [
+      'java',
+      '-classpath',
+      rezip_apk_jar_path,
+      'RezipApk',
+      'renamealign',
+      in_zip_file,
+      out_zip_file,
+    ]
+  build_utils.CheckOutput(rezip_apk_cmd)
+
+
+def ReorderAndAlignApk(rezip_apk_jar_path, in_zip_file, out_zip_file):
+  rezip_apk_cmd = [
+      'java',
+      '-classpath',
+      rezip_apk_jar_path,
+      'RezipApk',
+      'reorder',
+      in_zip_file,
+      out_zip_file,
+    ]
+  build_utils.CheckOutput(rezip_apk_cmd)
+
+
+def JarSigner(key_path, key_name, key_passwd, unsigned_path, signed_path):
+  shutil.copy(unsigned_path, signed_path)
+  sign_cmd = [
+      'jarsigner',
+      '-sigalg', 'MD5withRSA',
+      '-digestalg', 'SHA1',
+      '-keystore', key_path,
+      '-storepass', key_passwd,
+      signed_path,
+      key_name,
+    ]
+  build_utils.CheckOutput(sign_cmd)
+
+
+def AlignApk(zipalign_path, unaligned_path, final_path):
+  align_cmd = [
+      zipalign_path,
+      '-f', '4',  # 4 bytes
+      unaligned_path,
+      final_path,
+      ]
+  build_utils.CheckOutput(align_cmd)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--rezip-apk-jar-path',
+                    help='Path to the RezipApk jar file.')
+  parser.add_option('--zipalign-path', help='Path to the zipalign tool.')
+  parser.add_option('--unsigned-apk-path', help='Path to input unsigned APK.')
+  parser.add_option('--final-apk-path',
+      help='Path to output signed and aligned APK.')
+  parser.add_option('--key-path', help='Path to keystore for signing.')
+  parser.add_option('--key-passwd', help='Keystore password')
+  parser.add_option('--key-name', help='Keystore name')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--load-library-from-zip-file', type='int',
+      help='If non-zero, build the APK such that the library can be loaded ' +
+           'directly from the zip file using the crazy linker. The library ' +
+           'will be renamed, uncompressed and page aligned.')
+
+  options, _ = parser.parse_args()
+
+  with tempfile.NamedTemporaryFile() as signed_apk_path_tmp, \
+      tempfile.NamedTemporaryFile() as apk_to_sign_tmp:
+
+    if options.load_library_from_zip_file:
+      # We alter the name of the library so that the Android Package Manager
+      # does not extract it into a separate file. This must be done before
+      # signing, as the filename is part of the signed manifest. At the same
+      # time we uncompress the library, which is necessary so that it can be
+      # loaded directly from the APK.
+      # Move the library to a page boundary by adding a page alignment file.
+      apk_to_sign = apk_to_sign_tmp.name
+      RenameInflateAndAddPageAlignment(
+          options.rezip_apk_jar_path, options.unsigned_apk_path, apk_to_sign)
+    else:
+      apk_to_sign = options.unsigned_apk_path
+
+    signed_apk_path = signed_apk_path_tmp.name
+    JarSigner(options.key_path, options.key_name, options.key_passwd,
+              apk_to_sign, signed_apk_path)
+
+    if options.load_library_from_zip_file:
+      # Reorder the contents of the APK. This re-establishes the canonical
+      # order which means the library will be back at its page aligned location.
+      # This step also aligns uncompressed items to 4 bytes.
+      ReorderAndAlignApk(
+          options.rezip_apk_jar_path, signed_apk_path, options.final_apk_path)
+    else:
+      # Align uncompressed items to 4 bytes
+      AlignApk(options.zipalign_path, signed_apk_path, options.final_apk_path)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile, build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/find.py b/build/android/gyp/find.py
new file mode 100755
index 0000000..07132c7
--- /dev/null
+++ b/build/android/gyp/find.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Finds files in directories.
+"""
+
+import fnmatch
+import optparse
+import os
+import sys
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--pattern', default='*', help='File pattern to match.')
+  options, directories = parser.parse_args(argv)
+
+  for d in directories:
+    for root, _, filenames in os.walk(d):
+      for f in fnmatch.filter(filenames, options.pattern):
+        print os.path.join(root, f)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/gcc_preprocess.py b/build/android/gyp/gcc_preprocess.py
new file mode 100755
index 0000000..03becf9
--- /dev/null
+++ b/build/android/gyp/gcc_preprocess.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+def DoGcc(options):
+  build_utils.MakeDirectory(os.path.dirname(options.output))
+
+  gcc_cmd = [ 'gcc' ]  # invoke host gcc.
+  if options.defines:
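+    # Interleave '-D' before each macro, e.g. (illustrative values)
+    # ['FOO', 'BAR=1'] -> ['-D', 'FOO', '-D', 'BAR=1'].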
+    gcc_cmd.extend(sum(map(lambda w: ['-D', w], options.defines), []))
+  gcc_cmd.extend([
+      '-E',                  # stop after preprocessing.
+      '-D', 'ANDROID',       # Specify ANDROID define for pre-processor.
+      '-x', 'c-header',      # treat sources as C header files
+      '-P',                  # disable line markers, i.e. '#line 309'
+      '-I', options.include_path,
+      '-o', options.output,
+      options.template
+      ])
+
+  build_utils.CheckOutput(gcc_cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--include-path', help='Include path for gcc.')
+  parser.add_option('--template', help='Path to template.')
+  parser.add_option('--output', help='Path for generated file.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--defines', help='Preprocessor macro to define. May be '
+                    'repeated.', action='append')
+
+  options, _ = parser.parse_args(args)
+
+  DoGcc(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/generate_v14_compatible_resources.py b/build/android/gyp/generate_v14_compatible_resources.py
new file mode 100755
index 0000000..1961622
--- /dev/null
+++ b/build/android/gyp/generate_v14_compatible_resources.py
@@ -0,0 +1,349 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Convert Android xml resources to API 14 compatible.
+
+There are two reasons that we cannot just use API 17 attributes,
+so we are generating another set of resources by this script.
+
+1. paddingStart attribute can cause a crash on Galaxy Tab 2.
+2. There is a bug that paddingStart does not override paddingLeft on
+   JB-MR1. This is fixed on JB-MR2.
+
+Therefore, this resource generation script can be removed when
+we drop the support for JB-MR1.
+
+Please refer to http://crbug.com/235118 for the details.
+"""
+
+import optparse
+import os
+import re
+import shutil
+import sys
+import xml.dom.minidom as minidom
+
+from util import build_utils
+
+# Note that we are assuming 'android:' is an alias of
+# the namespace 'http://schemas.android.com/apk/res/android'.
+
+GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity')
+
+# Almost all the attributes that have "Start" or "End" in
+# their names should be mapped.
+ATTRIBUTES_TO_MAP = {'paddingStart' : 'paddingLeft',
+                     'drawableStart' : 'drawableLeft',
+                     'layout_alignStart' : 'layout_alignLeft',
+                     'layout_marginStart' : 'layout_marginLeft',
+                     'layout_alignParentStart' : 'layout_alignParentLeft',
+                     'layout_toStartOf' : 'layout_toLeftOf',
+                     'paddingEnd' : 'paddingRight',
+                     'drawableEnd' : 'drawableRight',
+                     'layout_alignEnd' : 'layout_alignRight',
+                     'layout_marginEnd' : 'layout_marginRight',
+                     'layout_alignParentEnd' : 'layout_alignParentRight',
+                     'layout_toEndOf' : 'layout_toRightOf'}
+
+ATTRIBUTES_TO_MAP = dict(['android:' + k, 'android:' + v] for k, v
+                         in ATTRIBUTES_TO_MAP.iteritems())
+
+ATTRIBUTES_TO_MAP_REVERSED = dict([v, k] for k, v
+                                  in ATTRIBUTES_TO_MAP.iteritems())
+
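+# Illustrative effect of the mapping on a layout attribute (made-up values):
+#   <TextView android:paddingStart="10dp" />   in the original v17 resource
+#   <TextView android:paddingLeft="10dp" />    in the generated v14 resource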
+
+def IterateXmlElements(node):
+  """minidom helper function that iterates all the element nodes.
+  Iteration order is pre-order depth-first."""
+  if node.nodeType == node.ELEMENT_NODE:
+    yield node
+  for child_node in node.childNodes:
+    for child_node_element in IterateXmlElements(child_node):
+      yield child_node_element
+
+
+def AssertNotDeprecatedAttribute(name, value, filename):
+  """Raises an exception if the given attribute is deprecated."""
+  msg = None
+  if name in ATTRIBUTES_TO_MAP_REVERSED:
+    msg = '{0} should use {1} instead of {2}'.format(filename,
+        ATTRIBUTES_TO_MAP_REVERSED[name], name)
+  elif name in GRAVITY_ATTRIBUTES and ('left' in value or 'right' in value):
+    msg = '{0} should use start/end instead of left/right for {1}'.format(
+        filename, name)
+
+  if msg:
+    msg += ('\nFor background, see: http://android-developers.blogspot.com/'
+            '2013/03/native-rtl-support-in-android-42.html\n'
+            'If you have a legitimate need for this attribute, discuss with '
+            'kkimlabs@chromium.org or newt@chromium.org')
+    raise Exception(msg)
+
+
+def WriteDomToFile(dom, filename):
+  """Write the given dom to filename."""
+  build_utils.MakeDirectory(os.path.dirname(filename))
+  with open(filename, 'w') as f:
+    dom.writexml(f, '', '  ', '\n', encoding='utf-8')
+
+
+def HasStyleResource(dom):
+  """Return True if the dom is a style resource, False otherwise."""
+  root_node = IterateXmlElements(dom).next()
+  return bool(root_node.nodeName == 'resources' and
+              list(root_node.getElementsByTagName('style')))
+
+
+def ErrorIfStyleResourceExistsInDir(input_dir):
+  """If a style resource is in input_dir, raises an exception."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    dom = minidom.parse(input_filename)
+    if HasStyleResource(dom):
+      raise Exception('error: style file ' + input_filename +
+                      ' should be under ' + input_dir +
+                      '-v17 directory. Please refer to '
+                      'http://crbug.com/243952 for the details.')
+
+
+def GenerateV14LayoutResourceDom(dom, filename, assert_not_deprecated=True):
+  """Convert layout resource to API 14 compatible layout resource.
+
+  Args:
+    dom: Parsed minidom object to be modified.
+    filename: Filename that the DOM was parsed from.
+    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+                           cause an exception to be thrown.
+
+  Returns:
+    True if dom is modified, False otherwise.
+  """
+  is_modified = False
+
+  # Iterate all the elements' attributes to find attributes to convert.
+  for element in IterateXmlElements(dom):
+    for name, value in list(element.attributes.items()):
+      # Convert any API 17 Start/End attributes to Left/Right attributes.
+      # For example, from paddingStart="10dp" to paddingLeft="10dp"
+      # Note: gravity attributes are not necessary to convert because
+      # start/end values are backward-compatible. Explained at
+      # https://plus.sandbox.google.com/+RomanNurik/posts/huuJd8iVVXY?e=Showroom
+      if name in ATTRIBUTES_TO_MAP:
+        element.setAttribute(ATTRIBUTES_TO_MAP[name], value)
+        del element.attributes[name]
+        is_modified = True
+      elif assert_not_deprecated:
+        AssertNotDeprecatedAttribute(name, value, filename)
+
+  return is_modified
+
+
+def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True):
+  """Convert style resource to API 14 compatible style resource.
+
+  Args:
+    dom: Parsed minidom object to be modified.
+    filename: Filename that the DOM was parsed from.
+    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+                           cause an exception to be thrown.
+
+  Returns:
+    True if dom is modified, False otherwise.
+  """
+  is_modified = False
+
+  for style_element in dom.getElementsByTagName('style'):
+    for item_element in style_element.getElementsByTagName('item'):
+      name = item_element.attributes['name'].value
+      value = item_element.childNodes[0].nodeValue
+      if name in ATTRIBUTES_TO_MAP:
+        item_element.attributes['name'].value = ATTRIBUTES_TO_MAP[name]
+        is_modified = True
+      elif assert_not_deprecated:
+        AssertNotDeprecatedAttribute(name, value, filename)
+
+  return is_modified
+
+
+def GenerateV14LayoutResource(input_filename, output_v14_filename,
+                              output_v17_filename):
+  """Convert API 17 layout resource to API 14 compatible layout resource.
+
+  It's mostly a simple replacement, s/Start/Left s/End/Right,
+  on the attribute names.
+  If the generated resource is identical to the original resource,
+  don't do anything. If not, write the generated resource to
+  output_v14_filename, and copy the original resource to output_v17_filename.
+  """
+  dom = minidom.parse(input_filename)
+  is_modified = GenerateV14LayoutResourceDom(dom, input_filename)
+
+  if is_modified:
+    # Write the generated resource.
+    WriteDomToFile(dom, output_v14_filename)
+
+    # Copy the original resource.
+    build_utils.MakeDirectory(os.path.dirname(output_v17_filename))
+    shutil.copy2(input_filename, output_v17_filename)
+
+
+def GenerateV14StyleResource(input_filename, output_v14_filename):
+  """Convert API 17 style resources to API 14 compatible style resource.
+
+  Write the generated style resource to output_v14_filename.
+  It's mostly a simple replacement, s/Start/Left s/End/Right,
+  on the attribute names.
+  """
+  dom = minidom.parse(input_filename)
+  GenerateV14StyleResourceDom(dom, input_filename)
+
+  # Write the generated resource.
+  WriteDomToFile(dom, output_v14_filename)
+
+
+def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir):
+  """Convert layout resources to API 14 compatible resources in input_dir."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    rel_filename = os.path.relpath(input_filename, input_dir)
+    output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+    output_v17_filename = os.path.join(output_v17_dir, rel_filename)
+    GenerateV14LayoutResource(input_filename, output_v14_filename,
+                              output_v17_filename)
+
+
+def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir):
+  """Convert style resources to API 14 compatible resources in input_dir."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    rel_filename = os.path.relpath(input_filename, input_dir)
+    output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+    GenerateV14StyleResource(input_filename, output_v14_filename)
+
+
+def VerifyV14ResourcesInDir(input_dir, resource_type):
+  """Verify that the resources in input_dir is compatible with v14, i.e., they
+  don't use attributes that cause crashes on certain devices. Print an error if
+  they have."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    exception_message = ('error: ' + input_filename + ' has an RTL attribute, '
+                        'i.e., an attribute with "start" or "end" in its '
+                        'name. Pre-v17 resources should not include such '
+                        'attributes because they can cause crashes on certain '
+                        'devices. Please refer to http://crbug.com/243952 '
+                        'for details.')
+    dom = minidom.parse(input_filename)
+    if resource_type in ('layout', 'xml'):
+      if GenerateV14LayoutResourceDom(dom, input_filename, False):
+        raise Exception(exception_message)
+    elif resource_type == 'values':
+      if GenerateV14StyleResourceDom(dom, input_filename, False):
+        raise Exception(exception_message)
+
+
+def AssertNoDeprecatedAttributesInDir(input_dir, resource_type):
+  """Raises an exception if resources in input_dir have deprecated attributes,
+  e.g., paddingLeft, paddingRight"""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    dom = minidom.parse(input_filename)
+    if resource_type in ('layout', 'xml'):
+      GenerateV14LayoutResourceDom(dom, input_filename)
+    elif resource_type == 'values':
+      GenerateV14StyleResourceDom(dom, input_filename)
+
+
+def ParseArgs():
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionsParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  parser.add_option('--res-dir',
+                    help='directory containing resources '
+                         'used to generate v14 compatible resources')
+  parser.add_option('--res-v14-compatibility-dir',
+                    help='output directory into which '
+                         'v14 compatible resources will be generated')
+  parser.add_option('--stamp', help='File to touch on success')
+  parser.add_option('--verify-only', action="store_true", help='Do not generate'
+      ' v14 resources. Instead, just verify that the resources are already '
+      "compatible with v14, i.e. they don't use attributes that cause crashes "
+      'on certain devices.')
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('res_dir', 'res_v14_compatibility_dir')
+  build_utils.CheckOptions(options, parser, required=required_options)
+  return options
+
+def GenerateV14Resources(res_dir, res_v14_dir, verify_only):
+  for name in os.listdir(res_dir):
+    if not os.path.isdir(os.path.join(res_dir, name)):
+      continue
+
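+    # A resource directory name encodes the resource type plus qualifiers,
+    # e.g. (illustrative) 'values-v17' -> resource_type 'values',
+    # qualifiers ['v17'].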
+    dir_pieces = name.split('-')
+    resource_type = dir_pieces[0]
+    qualifiers = dir_pieces[1:]
+
+    api_level_qualifier_index = -1
+    api_level_qualifier = ''
+    for index, qualifier in enumerate(qualifiers):
+      if re.match('v[0-9]+$', qualifier):
+        api_level_qualifier_index = index
+        api_level_qualifier = qualifier
+        break
+
+    # Android pre-v17 API doesn't support RTL. Skip.
+    if 'ldrtl' in qualifiers:
+      continue
+
+    input_dir = os.path.abspath(os.path.join(res_dir, name))
+
+    if verify_only:
+      if not api_level_qualifier or int(api_level_qualifier[1:]) < 17:
+        VerifyV14ResourcesInDir(input_dir, resource_type)
+      else:
+        AssertNoDeprecatedAttributesInDir(input_dir, resource_type)
+    else:
+      # We also need to copy the original v17 resource to *-v17 directory
+      # because the generated v14 resource will hide the original resource.
+      output_v14_dir = os.path.join(res_v14_dir, name)
+      output_v17_dir = os.path.join(res_v14_dir, name + '-v17')
+
+      # We only convert layout resources under layout*/, xml*/,
+      # and style resources under values*/.
+      if resource_type in ('layout', 'xml'):
+        if not api_level_qualifier:
+          GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir,
+                                          output_v17_dir)
+      elif resource_type == 'values':
+        if api_level_qualifier == 'v17':
+          output_qualifiers = qualifiers[:]
+          del output_qualifiers[api_level_qualifier_index]
+          output_v14_dir = os.path.join(res_v14_dir,
+                                        '-'.join([resource_type] +
+                                                 output_qualifiers))
+          GenerateV14StyleResourcesInDir(input_dir, output_v14_dir)
+        elif not api_level_qualifier:
+          ErrorIfStyleResourceExistsInDir(input_dir)
+
+def main():
+  options = ParseArgs()
+
+  res_v14_dir = options.res_v14_compatibility_dir
+
+  build_utils.DeleteDirectory(res_v14_dir)
+  build_utils.MakeDirectory(res_v14_dir)
+
+  GenerateV14Resources(options.res_dir, res_v14_dir, options.verify_only)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/get_device_configuration.py b/build/android/gyp/get_device_configuration.py
new file mode 100755
index 0000000..390eb2f
--- /dev/null
+++ b/build/android/gyp/get_device_configuration.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Gets and writes the configurations of the attached devices.
+
+This configuration is used by later build steps to determine which devices to
+install to and what needs to be installed to those devices.
+"""
+
+import optparse
+import sys
+
+from util import build_utils
+from util import build_device
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--stamp', action='store')
+  parser.add_option('--output', action='store')
+  options, _ = parser.parse_args(argv)
+
+  devices = build_device.GetAttachedDevices()
+
+  device_configurations = []
+  for d in devices:
+    configuration, is_online, has_root = (
+        build_device.GetConfigurationForDevice(d))
+
+    if not is_online:
+      build_utils.PrintBigWarning(
+          '%s is not online. Skipping managed install for this device. '
+          'Try rebooting the device to fix this warning.' % d)
+      continue
+
+    if not has_root:
+      build_utils.PrintBigWarning(
+          '"adb root" failed on device: %s\n'
+          'Skipping managed install for this device.'
+          % configuration['description'])
+      continue
+
+    device_configurations.append(configuration)
+
+  if len(device_configurations) == 0:
+    build_utils.PrintBigWarning(
+        'No valid devices attached. Skipping managed install steps.')
+  elif len(devices) > 1:
+    # Note that this checks len(devices) and not len(device_configurations).
+    # This way, any time there are multiple devices attached it is
+    # explicitly stated which device we will install things to even if all but
+    # one device were rejected for other reasons (e.g. two devices attached,
+    # one w/o root).
+    build_utils.PrintBigWarning(
+        'Multiple devices attached. '
+        'Installing to the preferred device: '
+        '%(id)s (%(description)s)' % (device_configurations[0]))
+
+
+  build_device.WriteConfigurations(device_configurations, options.output)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/insert_chromium_version.py b/build/android/gyp/insert_chromium_version.py
new file mode 100755
index 0000000..f858225
--- /dev/null
+++ b/build/android/gyp/insert_chromium_version.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Insert a version string into a library as a section '.chromium.version'.
+"""
+
+import optparse
+import os
+import sys
+import tempfile
+
+from util import build_utils
+
+def InsertChromiumVersion(android_objcopy,
+                          library_path,
+                          version_string):
+  # Remove existing .chromium.version section from .so
+  objcopy_command = [android_objcopy,
+                     '--remove-section=.chromium.version',
+                     library_path]
+  build_utils.CheckOutput(objcopy_command)
+
+  # Add a .chromium.version section.
+  with tempfile.NamedTemporaryFile() as stream:
+    stream.write(version_string)
+    stream.flush()
+    objcopy_command = [android_objcopy,
+                       '--add-section', '.chromium.version=%s' % stream.name,
+                       library_path]
+    build_utils.CheckOutput(objcopy_command)
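+
+# The embedded string can be inspected later with, e.g. (illustrative library
+# name):
+#   readelf -p .chromium.version libfoo.so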
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+
+  parser.add_option('--android-objcopy',
+      help='Path to the toolchain\'s objcopy binary')
+  parser.add_option('--libraries-source-dir',
+      help='Directory of native libraries')
+  parser.add_option('--libraries',
+      help='List of libraries')
+  parser.add_option('--version-string',
+      help='Version string to be inserted')
+  parser.add_option('--stamp', help='Path to touch on success')
+
+  options, _ = parser.parse_args(args)
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  for library in libraries:
+    library_path = os.path.join(options.libraries_source_dir, library)
+
+    InsertChromiumVersion(options.android_objcopy,
+                          library_path,
+                          options.version_string)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/jar.py b/build/android/gyp/jar.py
new file mode 100755
index 0000000..17f968c
--- /dev/null
+++ b/build/android/gyp/jar.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import optparse
+import os
+import sys
+
+from util import build_utils
+from util import md5_check
+
+
+def Jar(class_files, classes_dir, jar_path, manifest_file=None):
+  jar_path = os.path.abspath(jar_path)
+
+  # The paths of the files in the jar will be the same as they are passed in to
+  # the command. Because of this, the command should be run in
+  # options.classes_dir so the .class file paths in the jar are correct.
+  jar_cwd = classes_dir
+  class_files_rel = [os.path.relpath(f, jar_cwd) for f in class_files]
+  jar_cmd = ['jar', 'cf0', jar_path]
+  if manifest_file:
+    jar_cmd[1] += 'm'
+    jar_cmd.append(os.path.abspath(manifest_file))
+  jar_cmd.extend(class_files_rel)
+
+  record_path = '%s.md5.stamp' % jar_path
+  md5_check.CallAndRecordIfStale(
+      lambda: build_utils.CheckOutput(jar_cmd, cwd=jar_cwd),
+      record_path=record_path,
+      input_paths=class_files,
+      input_strings=jar_cmd,
+      force=not os.path.exists(jar_path),
+      )
+
+  build_utils.Touch(jar_path, fail_if_missing=True)
+
+
+def JarDirectory(classes_dir, excluded_classes, jar_path, manifest_file=None):
+  class_files = build_utils.FindInDirectory(classes_dir, '*.class')
+  for exclude in excluded_classes:
+    class_files = filter(
+        lambda f: not fnmatch.fnmatch(f, exclude), class_files)
+
+  Jar(class_files, classes_dir, jar_path, manifest_file=manifest_file)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--classes-dir', help='Directory containing .class files.')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option('--excluded-classes',
+      help='List of .class file patterns to exclude from the jar.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  if options.excluded_classes:
+    excluded_classes = build_utils.ParseGypList(options.excluded_classes)
+  else:
+    excluded_classes = []
+  JarDirectory(options.classes_dir,
+               excluded_classes,
+               options.jar_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/jar_toc.py b/build/android/gyp/jar_toc.py
new file mode 100755
index 0000000..3cafd6e
--- /dev/null
+++ b/build/android/gyp/jar_toc.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a TOC file from a Java jar.
+
+The TOC file contains the non-package API of the jar. This includes all
+public/protected/package classes/functions/members and the values of static
+final variables (members with package access are kept because in some cases we
+have multiple libraries with the same package, particularly test+non-test). Some
+other information (major/minor javac version) is also included.
+
+This TOC file then can be used to determine if a dependent library should be
+rebuilt when this jar changes. I.e. any change to the jar that would require a
+rebuild will have a corresponding change in the TOC file.
+"""
+
+import optparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import md5_check
+
+
+def GetClassesInZipFile(zip_file):
+  classes = []
+  files = zip_file.namelist()
+  for f in files:
+    if f.endswith('.class'):
+      # f is of the form org/chromium/base/Class$Inner.class
+      classes.append(f.replace('/', '.')[:-6])
+  return classes
+
+
+def CallJavap(classpath, classes):
+  javap_cmd = [
+      'javap',
+      '-package',  # Show public/protected/package.
+      # -verbose is required to get constant values (which can be inlined in
+      # dependents).
+      '-verbose',
+      '-classpath', classpath
+      ] + classes
+  return build_utils.CheckOutput(javap_cmd)
+
+
+def ExtractToc(disassembled_classes):
+  # javap output is structured by indent (2-space) levels.
+  good_patterns = [
+      '^[^ ]', # This includes all class/function/member signatures.
+      '^  SourceFile:',
+      '^  minor version:',
+      '^  major version:',
+      '^  Constant value:',
+      ]
+  bad_patterns = [
+      '^const #', # Matches the constant pool (i.e. literals used in the class).
+    ]
+
+  def JavapFilter(line):
+    return (re.match('|'.join(good_patterns), line) and
+        not re.match('|'.join(bad_patterns), line))
+  toc = filter(JavapFilter, disassembled_classes.split('\n'))
+
+  return '\n'.join(toc)
+
+
+def UpdateToc(jar_path, toc_path):
+  classes = GetClassesInZipFile(zipfile.ZipFile(jar_path))
+  javap_output = CallJavap(classpath=jar_path, classes=classes)
+  toc = ExtractToc(javap_output)
+
+  with open(toc_path, 'w') as tocfile:
+    tocfile.write(toc)
+
+
+def DoJarToc(options):
+  jar_path = options.jar_path
+  toc_path = options.toc_path
+  record_path = '%s.md5.stamp' % toc_path
+  md5_check.CallAndRecordIfStale(
+      lambda: UpdateToc(jar_path, toc_path),
+      record_path=record_path,
+      input_paths=[jar_path],
+      force=not os.path.exists(toc_path),
+      )
+  build_utils.Touch(toc_path, fail_if_missing=True)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--jar-path', help='Input .jar path.')
+  parser.add_option('--toc-path', help='Output .jar.TOC path.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  DoJarToc(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/jarjar_resources.py b/build/android/gyp/jarjar_resources.py
new file mode 100755
index 0000000..67b510b
--- /dev/null
+++ b/build/android/gyp/jarjar_resources.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Transforms direct Java class references in Android layout .xml files
+according to the specified JarJar rules."""
+
+import optparse
+import os
+import shutil
+import sys
+from xml.dom import minidom
+
+from util import build_utils
+
+
+class JarJarRules(object):
+  def __init__(self, jarjar_rules):
+    self._rules = []
+    for line in jarjar_rules.splitlines():
+      rule = line.split()
+      if rule[0] != 'rule':
+        continue
+      _, src, dest = rule
+      if src.endswith('**'):
+        src_real_name = src[:-2]
+      else:
+        assert not '*' in src
+        src_real_name = src
+
+      if dest.endswith('@0'):
+        self._rules.append((src, dest[:-2] + src_real_name))
+      elif dest.endswith('@1'):
+        assert '**' in src
+        self._rules.append((src, dest[:-2]))
+      else:
+        assert not '@' in dest
+        self._rules.append((src, dest))
+
+  def RenameClass(self, class_name):
+    for old, new in self._rules:
+      if old.endswith('**') and old[:-2] in class_name:
+        return class_name.replace(old[:-2], new, 1)
+      if '*' not in old and class_name.endswith(old):
+        return class_name.replace(old, new, 1)
+    return class_name
+
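+# Example rules accepted by JarJarRules and their effect (made-up packages):
+#   rule org.example.** com.wrapped.@1   e.g. org.example.ui.Foo
+#                                             -> com.wrapped.ui.Foo
+#   rule org.other.Bar org.other.Baz     e.g. org.other.Bar -> org.other.Baz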
+
+def RenameNodes(node, rules):
+  if node.nodeType == node.ELEMENT_NODE:
+    if node.tagName.lower() == 'view' and node.attributes.has_key('class'):
+      node.attributes['class'] = rules.RenameClass(node.attributes['class'])
+    else:
+      node.tagName = rules.RenameClass(node.tagName)
+  for child in node.childNodes:
+    RenameNodes(child, rules)
+
+
+def ProcessLayoutFile(path, rules):
+  xmldoc = minidom.parse(path)
+  RenameNodes(xmldoc.documentElement, rules)
+  with open(path, 'w') as f:
+    xmldoc.writexml(f)
+
+
+def LayoutFilesFilter(src, names):
+  if os.path.basename(src).lower() != 'layout':
+    return []
+  else:
+    return filter(lambda n: n.endswith('.xml'), names)
+
+
+def ProcessResources(options):
+  with open(options.rules_path) as f:
+    rules = JarJarRules(f.read())
+
+  build_utils.DeleteDirectory(options.output_dir)
+  for input_dir in options.input_dir:
+    shutil.copytree(input_dir, options.output_dir)
+
+  for root, _dirnames, filenames in os.walk(options.output_dir):
+    layout_files = LayoutFilesFilter(root, filenames)
+    for layout_file in layout_files:
+      ProcessLayoutFile(os.path.join(root, layout_file), rules)
+
+
+def ParseArgs():
+  parser = optparse.OptionParser()
+  parser.add_option('--input-dir', action='append',
+                    help='Path to the resources folder to process.')
+  parser.add_option('--output-dir',
+                    help=('Directory to hold processed resources. Note: the ' +
+                          'directory will be clobbered on every invocation.'))
+  parser.add_option('--rules-path',
+                    help='Path to the jarjar rules file.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('input_dir', 'output_dir', 'rules_path')
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  return options
+
+
+def main():
+  options = ParseArgs()
+
+  ProcessResources(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/java_cpp_enum.py b/build/android/gyp/java_cpp_enum.py
new file mode 100755
index 0000000..ad09742
--- /dev/null
+++ b/build/android/gyp/java_cpp_enum.py
@@ -0,0 +1,237 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import re
+import optparse
+import os
+from string import Template
+import sys
+
+from util import build_utils
+
+class EnumDefinition(object):
+  def __init__(self, class_name=None, class_package=None, entries=None):
+    self.class_name = class_name
+    self.class_package = class_package
+    self.entries = collections.OrderedDict(entries or [])
+    self.prefix_to_strip = ''
+
+  def AppendEntry(self, key, value):
+    if key in self.entries:
+      raise Exception('Multiple definitions of key %s found.' % key)
+    self.entries[key] = value
+
+  def Finalize(self):
+    self._Validate()
+    self._AssignEntryIndices()
+    self._StripPrefix()
+
+  def _Validate(self):
+    assert self.class_name
+    assert self.class_package
+    assert self.entries
+
+  def _AssignEntryIndices(self):
+    # Supporting the same set of enum value assignments the compiler does is
+    # rather complicated, so we limit ourselves to these cases:
+    # - all the enum constants have values assigned,
+    # - enum constants reference other enum constants or have no value assigned.
+
+    if not all(self.entries.values()):
+      index = 0
+      for key, value in self.entries.iteritems():
+        if not value:
+          self.entries[key] = index
+          index = index + 1
+        elif value in self.entries:
+          self.entries[key] = self.entries[value]
+        else:
+          raise Exception('You can only reference other enum constants unless '
+                          'you assign values to all of the constants.')
+
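+  # When no explicit prefix is given, one is derived from the class name, e.g.
+  # (illustrative) 'ClassName' -> 'CLASS_NAME_', so 'CLASS_NAME_A' becomes
+  # 'A'. The derived prefix is stripped only if every entry starts with it.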
+  def _StripPrefix(self):
+    if not self.prefix_to_strip:
+      prefix_to_strip = re.sub('(?!^)([A-Z]+)', r'_\1', self.class_name).upper()
+      prefix_to_strip += '_'
+      if not all([w.startswith(prefix_to_strip) for w in self.entries.keys()]):
+        prefix_to_strip = ''
+    else:
+      prefix_to_strip = self.prefix_to_strip
+    entries = ((k.replace(prefix_to_strip, '', 1), v) for (k, v) in
+               self.entries.iteritems())
+    self.entries = collections.OrderedDict(entries)
+
+class HeaderParser(object):
+  single_line_comment_re = re.compile(r'\s*//')
+  multi_line_comment_start_re = re.compile(r'\s*/\*')
+  enum_start_re = re.compile(r'^\s*enum\s+(\w+)\s+{\s*$')
+  enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?\s*$')
+  enum_end_re = re.compile(r'^\s*}\s*;\s*$')
+  generator_directive_re = re.compile(
+      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$')
+
+  def __init__(self, lines):
+    self._lines = lines
+    self._enum_definitions = []
+    self._in_enum = False
+    self._current_definition = None
+    self._generator_directives = {}
+
+  def ParseDefinitions(self):
+    for line in self._lines:
+      self._ParseLine(line)
+    return self._enum_definitions
+
+  def _ParseLine(self, line):
+    if not self._in_enum:
+      self._ParseRegularLine(line)
+    else:
+      self._ParseEnumLine(line)
+
+  def _ParseEnumLine(self, line):
+    if HeaderParser.single_line_comment_re.match(line):
+      return
+    if HeaderParser.multi_line_comment_start_re.match(line):
+      raise Exception('Multi-line comments in enums are not supported.')
+    enum_end = HeaderParser.enum_end_re.match(line)
+    enum_entry = HeaderParser.enum_line_re.match(line)
+    if enum_end:
+      self._ApplyGeneratorDirectives()
+      self._current_definition.Finalize()
+      self._enum_definitions.append(self._current_definition)
+      self._in_enum = False
+    elif enum_entry:
+      enum_key = enum_entry.groups()[0]
+      enum_value = enum_entry.groups()[2]
+      self._current_definition.AppendEntry(enum_key, enum_value)
+
+  def _GetCurrentEnumPackageName(self):
+    return self._generator_directives.get('ENUM_PACKAGE')
+
+  def _GetCurrentEnumPrefixToStrip(self):
+    return self._generator_directives.get('PREFIX_TO_STRIP', '')
+
+  def _ApplyGeneratorDirectives(self):
+    current_definition = self._current_definition
+    current_definition.class_package = self._GetCurrentEnumPackageName()
+    current_definition.prefix_to_strip = self._GetCurrentEnumPrefixToStrip()
+    self._generator_directives = {}
+
+  def _ParseRegularLine(self, line):
+    enum_start = HeaderParser.enum_start_re.match(line)
+    generator_directive = HeaderParser.generator_directive_re.match(line)
+    if enum_start:
+      if not self._GetCurrentEnumPackageName():
+        return
+      self._current_definition = EnumDefinition()
+      self._current_definition.class_name = enum_start.groups()[0]
+      self._in_enum = True
+    elif generator_directive:
+      directive_name = generator_directive.groups()[0]
+      directive_value = generator_directive.groups()[1]
+      self._generator_directives[directive_name] = directive_value
+
+
+def GetScriptName():
+  script_components = os.path.abspath(sys.argv[0]).split(os.path.sep)
+  build_index = script_components.index('build')
+  return os.sep.join(script_components[build_index:])
+
+
+def DoGenerate(options, source_paths):
+  output_paths = []
+  for source_path in source_paths:
+    enum_definitions = DoParseHeaderFile(source_path)
+    for enum_definition in enum_definitions:
+      package_path = enum_definition.class_package.replace('.', os.path.sep)
+      file_name = enum_definition.class_name + '.java'
+      output_path = os.path.join(options.output_dir, package_path, file_name)
+      output_paths.append(output_path)
+      if not options.print_output_only:
+        build_utils.MakeDirectory(os.path.dirname(output_path))
+        DoWriteOutput(source_path, output_path, enum_definition)
+  return output_paths
+
+
+def DoParseHeaderFile(path):
+  with open(path) as f:
+    return HeaderParser(f.readlines()).ParseDefinitions()
+
+
+def GenerateOutput(source_path, enum_definition):
+  template = Template("""
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     ${SCRIPT_NAME}
+// From
+//     ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+public class ${CLASS_NAME} {
+${ENUM_ENTRIES}
+}
+""")
+
+  enum_template = Template('  public static final int ${NAME} = ${VALUE};')
+  enum_entries_string = []
+  for enum_name, enum_value in enum_definition.entries.iteritems():
+    values = {
+        'NAME': enum_name,
+        'VALUE': enum_value,
+    }
+    enum_entries_string.append(enum_template.substitute(values))
+  enum_entries_string = '\n'.join(enum_entries_string)
+
+  values = {
+      'CLASS_NAME': enum_definition.class_name,
+      'ENUM_ENTRIES': enum_entries_string,
+      'PACKAGE': enum_definition.class_package,
+      'SCRIPT_NAME': GetScriptName(),
+      'SOURCE_PATH': source_path,
+  }
+  return template.substitute(values)
+
+
+def DoWriteOutput(source_path, output_path, enum_definition):
+  with open(output_path, 'w') as out_file:
+    out_file.write(GenerateOutput(source_path, enum_definition))
+
+def AssertFilesList(output_paths, assert_files_list):
+  actual = set(output_paths)
+  expected = set(assert_files_list)
+  if not actual == expected:
+    need_to_add = list(actual - expected)
+    need_to_remove = list(expected - actual)
+    raise Exception('Output files list does not match expectations. Please '
+                    'add %s and remove %s.' % (need_to_add, need_to_remove))
+
+def DoMain(argv):
+  parser = optparse.OptionParser()
+
+  parser.add_option('--assert_file', action="append", default=[],
+                    dest="assert_files_list", help='Assert that the given '
+                    'file is an output. There can be multiple occurrences of '
+                    'this flag.')
+  parser.add_option('--output_dir', help='Base path for generated files.')
+  parser.add_option('--print_output_only', help='Only print output paths.',
+                    action='store_true')
+
+  options, args = parser.parse_args(argv)
+
+  output_paths = DoGenerate(options, args)
+
+  if options.assert_files_list:
+    AssertFilesList(output_paths, options.assert_files_list)
+
+  return " ".join(output_paths)
+
+if __name__ == '__main__':
+  DoMain(sys.argv[1:])
diff --git a/build/android/gyp/java_cpp_enum_tests.py b/build/android/gyp/java_cpp_enum_tests.py
new file mode 100755
index 0000000..24da05f
--- /dev/null
+++ b/build/android/gyp/java_cpp_enum_tests.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for enum_preprocess.py.
+
+This test suite containss various tests for the C++ -> Java enum generator.
+"""
+
+import collections
+import unittest
+from java_cpp_enum import EnumDefinition, GenerateOutput, HeaderParser
+
+class TestPreprocess(unittest.TestCase):
+  def testOutput(self):
+    definition = EnumDefinition(class_name='ClassName',
+                                class_package='some.package',
+                                entries=[('E1', 1), ('E2', '2 << 2')])
+    output = GenerateOutput('path/to/file', definition)
+    expected = """
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     build/android/gyp/java_cpp_enum_tests.py
+// From
+//     path/to/file
+
+package some.package;
+
+public class ClassName {
+  public static final int E1 = 1;
+  public static final int E2 = 2 << 2;
+}
+"""
+    self.assertEqual(expected, output)
+
+  def testParseSimpleEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        VALUE_ZERO,
+        VALUE_ONE,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumName', definition.class_name)
+    self.assertEqual('test.namespace', definition.class_package)
+    self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0),
+                                              ('VALUE_ONE', 1)]),
+                     definition.entries)
+
+  def testParseTwoEnums(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumOne {
+        ENUM_ONE_A = 1,
+        // Comment there
+        ENUM_ONE_B = A,
+      };
+
+      enum EnumIgnore {
+        C, D, E
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+      enum EnumTwo {
+        P_A,
+        P_B
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumOne', definition.class_name)
+    self.assertEqual('test.namespace', definition.class_package)
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', 'A')]),
+                     definition.entries)
+
+    definition = definitions[1]
+    self.assertEqual('EnumTwo', definition.class_name)
+    self.assertEqual('other.package', definition.class_package)
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentNoneDefined(self):
+    definition = EnumDefinition('c', 'p', [])
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('C', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentAllDefined(self):
+    definition = EnumDefinition('c', 'p', [])
+    definition.AppendEntry('A', '1')
+    definition.AppendEntry('B', '2')
+    definition.AppendEntry('C', '3')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', '2'),
+                                              ('C', '3')]),
+                     definition.entries)
+
+  def testEnumValueAssignmentReferences(self):
+    definition = EnumDefinition('c', 'p', [])
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'A')
+    definition.AppendEntry('C', None)
+    definition.AppendEntry('D', 'C')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 0),
+                                              ('C', 1),
+                                              ('D', 1)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentRaises(self):
+    definition = EnumDefinition('c', 'p', [])
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', '1')
+    definition.AppendEntry('C', None)
+    with self.assertRaises(Exception):
+      definition.Finalize()
+
+  def testExplicitPrefixStripping(self):
+    definition = EnumDefinition('c', 'p', [])
+    definition.AppendEntry('P_A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('P_C', None)
+    definition.prefix_to_strip = 'P_'
+    definition.Finalize()
+    self.assertEqual(['A', 'B', 'C'], definition.entries.keys())
+
+  def testImplicitPrefixStripping(self):
+    definition = EnumDefinition('ClassName', 'p', [])
+    definition.AppendEntry('CLASS_NAME_A', None)
+    definition.AppendEntry('CLASS_NAME_B', None)
+    definition.AppendEntry('CLASS_NAME_C', None)
+    definition.Finalize()
+    self.assertEqual(['A', 'B', 'C'], definition.entries.keys())
+
+  def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self):
+    definition = EnumDefinition('Name', 'p', [])
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('NAME_LAST', None)
+    definition.Finalize()
+    self.assertEqual(['A', 'B', 'NAME_LAST'], definition.entries.keys())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/gyp/javac.py b/build/android/gyp/javac.py
new file mode 100755
index 0000000..b0ef7fd
--- /dev/null
+++ b/build/android/gyp/javac.py
@@ -0,0 +1,266 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import optparse
+import os
+import shutil
+import re
+import sys
+import textwrap
+
+from util import build_utils
+from util import md5_check
+
+import jar
+
+sys.path.append(build_utils.COLORAMA_ROOT)
+import colorama
+
+
+def ColorJavacOutput(output):
+  fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)'
+  warning_re = re.compile(
+      fileline_prefix + r'(?P<full_message> warning: (?P<message>.*))$')
+  error_re = re.compile(
+      fileline_prefix + r'(?P<full_message> (?P<message>.*))$')
+  marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')
+
+  warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM]
+  error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT]
+  marker_color = ['marker',  colorama.Fore.BLUE + colorama.Style.BRIGHT]
+
+  def Colorize(line, regex, color):
+    match = regex.match(line)
+    start = match.start(color[0])
+    end = match.end(color[0])
+    return (line[:start]
+            + color[1] + line[start:end]
+            + colorama.Fore.RESET + colorama.Style.RESET_ALL
+            + line[end:])
+
+  def ApplyColor(line):
+    if warning_re.match(line):
+      line = Colorize(line, warning_re, warning_color)
+    elif error_re.match(line):
+      line = Colorize(line, error_re, error_color)
+    elif marker_re.match(line):
+      line = Colorize(line, marker_re, marker_color)
+    return line
+
+  return '\n'.join(map(ApplyColor, output.split('\n')))
+
+
+def DoJavac(
+    classpath, classes_dir, chromium_code, java_files):
+  """Runs javac.
+
+  Builds |java_files| with the provided |classpath| and puts the generated
+  .class files into |classes_dir|. If |chromium_code| is true, extra lint
+  checking will be enabled.
+  """
+
+  jar_inputs = []
+  for path in classpath:
+    if os.path.exists(path + '.TOC'):
+      jar_inputs.append(path + '.TOC')
+    else:
+      jar_inputs.append(path)
+
+  javac_args = [
+      '-g',
+      '-source', '1.7',
+      '-target', '1.7',
+      '-classpath', ':'.join(classpath),
+      '-d', classes_dir]
+  if chromium_code:
+    javac_args.extend(['-Xlint:unchecked'])
+    # TODO(aurimas): re-enable this after the L SDK is launched and make
+    # everyone fix new deprecation warnings correctly.
+    # http://crbug.com/405174,398669,411361,411366,411367,411376,416041
+    # '-Xlint:deprecation'
+  else:
+    # XDignore.symbol.file makes javac compile against rt.jar instead of
+    # ct.sym. This means that using a java internal package/class will not
+    # trigger a compile warning or error.
+    javac_args.extend(['-XDignore.symbol.file'])
+
+  javac_cmd = ['javac'] + javac_args + java_files
+
+  def Compile():
+    build_utils.CheckOutput(
+        javac_cmd,
+        print_stdout=chromium_code,
+        stderr_filter=ColorJavacOutput)
+
+  record_path = os.path.join(classes_dir, 'javac.md5.stamp')
+  md5_check.CallAndRecordIfStale(
+      Compile,
+      record_path=record_path,
+      input_paths=java_files + jar_inputs,
+      input_strings=javac_cmd)
+
+
+_MAX_MANIFEST_LINE_LEN = 72
+
+
+def CreateManifest(manifest_path, classpath, main_class=None):
+  """Creates a manifest file with the given parameters.
+
+  This generates a manifest file that complies with the spec found at
+  http://docs.oracle.com/javase/7/docs/technotes/guides/jar/jar.html#JAR_Manifest
+
+  Args:
+    manifest_path: The path to the manifest file that should be created.
+    classpath: The JAR files that should be listed on the manifest file's
+      classpath.
+    main_class: If present, the class containing the main() function.
+
+  """
+  output = ['Manifest-Version: 1.0']
+  if main_class:
+    output.append('Main-Class: %s' % main_class)
+  if classpath:
+    sanitized_paths = []
+    for path in classpath:
+      sanitized_paths.append(os.path.basename(path.strip('"')))
+    output.append('Class-Path: %s' % ' '.join(sanitized_paths))
+  output.append('Created-By: ')
+  output.append('')
+
+  wrapper = textwrap.TextWrapper(break_long_words=True,
+                                 drop_whitespace=False,
+                                 subsequent_indent=' ',
+                                 width=_MAX_MANIFEST_LINE_LEN - 2)
+  output = '\r\n'.join(w for l in output for w in wrapper.wrap(l))
+
+  with open(manifest_path, 'w') as f:
+    f.write(output)
+
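+# Illustrative output for made-up jars (long lines wrap at 72 columns, each
+# continuation line starting with a single space):
+#   Manifest-Version: 1.0
+#   Main-Class: org.example.Main
+#   Class-Path: guava.jar base.jar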
+
+def main(argv):
+  colorama.init()
+
+  argv = build_utils.ExpandFileArgs(argv)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option(
+      '--src-gendirs',
+      help='Directories containing generated java files.')
+  parser.add_option(
+      '--java-srcjars',
+      action='append',
+      default=[],
+      help='List of srcjars to include in compilation.')
+  parser.add_option(
+      '--classpath',
+      action='append',
+      help='Classpath for javac. If this is specified multiple times, the '
+      'values will all be appended to construct the classpath.')
+  parser.add_option(
+      '--javac-includes',
+      help='A list of file patterns. If provided, only java files that match '
+      'one of the patterns will be compiled.')
+  parser.add_option(
+      '--jar-excluded-classes',
+      default='',
+      help='List of .class file patterns to exclude from the jar.')
+
+  parser.add_option(
+      '--chromium-code',
+      type='int',
+      help='Whether code being compiled should be built with stricter '
+      'warnings for chromium code.')
+
+  parser.add_option(
+      '--classes-dir',
+      help='Directory for compiled .class files.')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option(
+      '--main-class',
+      help='The class containing the main method.')
+
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, args = parser.parse_args(argv)
+
+  if options.main_class and not options.jar_path:
+    parser.error('--main-class requires --jar-path')
+
+  classpath = []
+  for arg in options.classpath:
+    classpath += build_utils.ParseGypList(arg)
+
+  java_srcjars = []
+  for arg in options.java_srcjars:
+    java_srcjars += build_utils.ParseGypList(arg)
+
+  java_files = args
+  if options.src_gendirs:
+    src_gendirs = build_utils.ParseGypList(options.src_gendirs)
+    java_files += build_utils.FindInDirectories(src_gendirs, '*.java')
+
+  input_files = classpath + java_srcjars + java_files
+  with build_utils.TempDir() as temp_dir:
+    classes_dir = os.path.join(temp_dir, 'classes')
+    os.makedirs(classes_dir)
+    if java_srcjars:
+      java_dir = os.path.join(temp_dir, 'java')
+      os.makedirs(java_dir)
+      for srcjar in java_srcjars:
+        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
+      java_files += build_utils.FindInDirectory(java_dir, '*.java')
+
+    if options.javac_includes:
+      javac_includes = build_utils.ParseGypList(options.javac_includes)
+      filtered_java_files = []
+      for f in java_files:
+        for include in javac_includes:
+          if fnmatch.fnmatch(f, include):
+            filtered_java_files.append(f)
+            break
+      java_files = filtered_java_files
+
+    DoJavac(
+        classpath,
+        classes_dir,
+        options.chromium_code,
+        java_files)
+
+    if options.jar_path:
+      if options.main_class:
+        manifest_file = os.path.join(temp_dir, 'manifest')
+        CreateManifest(manifest_file, classpath,
+                       options.main_class)
+      else:
+        manifest_file = None
+      jar.JarDirectory(classes_dir,
+                       build_utils.ParseGypList(options.jar_excluded_classes),
+                       options.jar_path,
+                       manifest_file=manifest_file)
+
+    if options.classes_dir:
+      # Delete the old classes directory. This ensures that all .class files in
+      # the output are actually from the input .java files. For example, if a
+      # .java file is deleted or an inner class is removed, the classes
+      # directory should not contain the corresponding old .class file after
+      # running this action.
+      build_utils.DeleteDirectory(options.classes_dir)
+      shutil.copytree(classes_dir, options.classes_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        input_files + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/lint.py b/build/android/gyp/lint.py
new file mode 100755
index 0000000..c8aef92
--- /dev/null
+++ b/build/android/gyp/lint.py
@@ -0,0 +1,208 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs Android's lint tool."""
+
+
+import optparse
+import os
+import sys
+from xml.dom import minidom
+
+from util import build_utils
+
+
+_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                         '..', '..', '..'))
+
+
+def _RunLint(lint_path, config_path, processed_config_path, manifest_path,
+             result_path, product_dir, sources, jar_path):
+
+  def _RelativizePath(path):
+    """Returns relative path to top-level src dir.
+
+    Args:
+      path: A path relative to cwd.
+    """
+    return os.path.relpath(os.path.abspath(path), _SRC_ROOT)
+
+  def _ProcessConfigFile():
+    if not build_utils.IsTimeStale(processed_config_path, [config_path]):
+      return
+
+    with open(config_path, 'rb') as f:
+      content = f.read().replace(
+          'PRODUCT_DIR', _RelativizePath(product_dir))
+
+    with open(processed_config_path, 'wb') as f:
+      f.write(content)
+
+  def _ProcessResultFile():
+    with open(result_path, 'rb') as f:
+      content = f.read().replace(
+          _RelativizePath(product_dir), 'PRODUCT_DIR')
+
+    with open(result_path, 'wb') as f:
+      f.write(content)
+
+  def _ParseAndShowResultFile():
+    dom = minidom.parse(result_path)
+    issues = dom.getElementsByTagName('issue')
+    print >> sys.stderr
+    for issue in issues:
+      issue_id = issue.attributes['id'].value
+      message = issue.attributes['message'].value
+      location_elem = issue.getElementsByTagName('location')[0]
+      path = location_elem.attributes['file'].value
+      line = location_elem.getAttribute('line')
+      if line:
+        error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
+      else:
+        # Issues in class files don't have a line number.
+        error = '%s %s: %s [warning]' % (path, message, issue_id)
+      print >> sys.stderr, error
+      for attr in ['errorLine1', 'errorLine2']:
+        error_line = issue.getAttribute(attr)
+        if error_line:
+          print >> sys.stderr, error_line
+    return len(issues)
+
+  with build_utils.TempDir() as temp_dir:
+    _ProcessConfigFile()
+
+    cmd = [
+        _RelativizePath(lint_path), '-Werror', '--exitcode', '--showall',
+        '--config', _RelativizePath(processed_config_path),
+        '--classpath', _RelativizePath(jar_path),
+        '--xml', _RelativizePath(result_path),
+    ]
+
+    # There may be multiple source files with the same basename (but in
+    # different directories). It is difficult to determine what part of the path
+    # corresponds to the java package, and so instead just link the source files
+    # into temporary directories (creating a new one whenever there is a name
+    # conflict).
+    src_dirs = []
+    def NewSourceDir():
+      new_dir = os.path.join(temp_dir, str(len(src_dirs)))
+      os.mkdir(new_dir)
+      src_dirs.append(new_dir)
+      cmd.extend(['--sources', _RelativizePath(new_dir)])
+      return new_dir
+
+    def PathInDir(d, src):
+      return os.path.join(d, os.path.basename(src))
+
+    for src in sources:
+      src_dir = None
+      for d in src_dirs:
+        if not os.path.exists(PathInDir(d, src)):
+          src_dir = d
+          break
+      if not src_dir:
+        src_dir = NewSourceDir()
+      os.symlink(os.path.abspath(src), PathInDir(src_dir, src))
+
+    cmd.append(_RelativizePath(os.path.join(manifest_path, os.pardir)))
+
+    if os.path.exists(result_path):
+      os.remove(result_path)
+
+    try:
+      build_utils.CheckOutput(cmd, cwd=_SRC_ROOT)
+    except build_utils.CalledProcessError as e:
+      # If lint exited without writing a result file, the lint invocation
+      # itself is broken.
+      if not os.path.exists(result_path):
+        print 'Something is wrong:'
+        print e
+        return 0
+
+      # Otherwise the result file describes actual lint issues.
+      else:
+        try:
+          num_issues = _ParseAndShowResultFile()
+        except Exception:
+          print 'Lint created an unparseable xml file...'
+          print 'File contents:'
+          with open(result_path) as f:
+            print f.read()
+          return 0
+
+        _ProcessResultFile()
+        msg = ('\nLint found %d new issues.\n'
+               ' - For full explanation refer to %s\n'
+               ' - Want to suppress these issues?\n'
+               '    1. Read comment in %s\n'
+               '    2. Run "python %s %s"\n' %
+               (num_issues,
+                _RelativizePath(result_path),
+                _RelativizePath(config_path),
+                _RelativizePath(os.path.join(_SRC_ROOT, 'build', 'android',
+                                             'lint', 'suppress.py')),
+                _RelativizePath(result_path)))
+        print >> sys.stderr, msg
+        # Lint errors do not fail the build.
+        return 0
+
+  return 0
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--lint-path', help='Path to lint executable.')
+  parser.add_option('--config-path', help='Path to lint suppressions file.')
+  parser.add_option('--processed-config-path',
+                    help='Path to processed lint suppressions file.')
+  parser.add_option('--manifest-path', help='Path to AndroidManifest.xml')
+  parser.add_option('--result-path', help='Path to XML lint result file.')
+  parser.add_option('--product-dir', help='Path to product dir.')
+  parser.add_option('--src-dirs', help='Directories containing java files.')
+  parser.add_option('--java-files', help='Paths to java files.')
+  parser.add_option('--jar-path', help='Jar file containing class files.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--enable', action='store_true',
+                    help='Run lint instead of just touching stamp.')
+
+  options, _ = parser.parse_args()
+
+  build_utils.CheckOptions(
+      options, parser, required=['lint_path', 'config_path',
+                                 'processed_config_path', 'manifest_path',
+                                 'result_path', 'product_dir',
+                                 'jar_path'])
+
+  rc = 0
+
+  if options.enable:
+    sources = []
+    if options.src_dirs:
+      src_dirs = build_utils.ParseGypList(options.src_dirs)
+      sources = build_utils.FindInDirectories(src_dirs, '*.java')
+    elif options.java_files:
+      sources = build_utils.ParseGypList(options.java_files)
+    else:
+      print 'One of --src-dirs or --java-files must be specified.'
+      return 1
+    rc = _RunLint(options.lint_path, options.config_path,
+                  options.processed_config_path,
+                  options.manifest_path, options.result_path,
+                  options.product_dir, sources, options.jar_path)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  if options.stamp and not rc:
+    build_utils.Touch(options.stamp)
+
+  return rc
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/pack_arm_relocations.py b/build/android/gyp/pack_arm_relocations.py
new file mode 100755
index 0000000..d650927
--- /dev/null
+++ b/build/android/gyp/pack_arm_relocations.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pack ARM relative relocations in a library (or copy unchanged).
+
+If --enable-packing is 1 and --configuration-name is 'Release', invoke the
+relocation_packer tool to pack the .rel.dyn section in the given library
+files.  This step is inserted after the libraries are stripped.  Packing
+adds a new .android.rel.dyn section to the file and reduces the size of
+.rel.dyn accordingly.
+
+Currently packing only understands ARM32 shared libraries.  For all other
+architectures --enable-packing should be set to zero.  In this case the
+script copies files verbatim, with no attempt to pack relative relocations.
+
+Any library listed in --exclude-packing-list is also copied verbatim,
+irrespective of any --enable-packing setting.  Typically this would be
+'libchromium_android_linker.so'.
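+
+A typical invocation (illustrative paths only) looks like:
+  pack_arm_relocations.py --enable-packing=1 --configuration-name=Release \
+      --android-pack-relocations=out/relocation_packer \
+      --android-objcopy=arm-linux-androideabi-objcopy \
+      --stripped-libraries-dir=out/stripped --packed-libraries-dir=out/packed \
+      --libraries='libchrome.so' --stamp=out/pack.stamp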
+"""
+
+import optparse
+import os
+import shlex
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+
+def PackArmLibraryRelocations(android_pack_relocations,
+                              android_objcopy,
+                              library_path,
+                              output_path):
+  if not build_utils.IsTimeStale(output_path, [library_path]):
+    return
+
+  # Copy and add a 'NULL' .android.rel.dyn section for the packing tool.
+  with tempfile.NamedTemporaryFile() as stream:
+    stream.write('NULL')
+    stream.flush()
+    objcopy_command = [android_objcopy,
+                       '--add-section', '.android.rel.dyn=%s' % stream.name,
+                       library_path, output_path]
+    build_utils.CheckOutput(objcopy_command)
+
+  # Pack R_ARM_RELATIVE relocations.
+  pack_command = [android_pack_relocations, output_path]
+  build_utils.CheckOutput(pack_command)
+
+
+def CopyArmLibraryUnchanged(library_path, output_path):
+  if not build_utils.IsTimeStale(output_path, [library_path]):
+    return
+
+  shutil.copy(library_path, output_path)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+
+  parser.add_option('--configuration-name',
+      default='Release',
+      help='Gyp configuration name (i.e. Debug, Release)')
+  parser.add_option('--enable-packing',
+      choices=['0', '1'],
+      help=('Pack relocations if 1 and configuration name is \'Release\','
+            ' otherwise plain file copy'))
+  parser.add_option('--exclude-packing-list',
+      default='',
+      help='Names of any libraries explicitly not packed')
+  parser.add_option('--android-pack-relocations',
+      help='Path to the ARM relocations packer binary')
+  parser.add_option('--android-objcopy',
+      help='Path to the toolchain\'s objcopy binary')
+  parser.add_option('--stripped-libraries-dir',
+      help='Directory for stripped libraries')
+  parser.add_option('--packed-libraries-dir',
+      help='Directory for packed libraries')
+  parser.add_option('--libraries',
+      help='List of libraries')
+  parser.add_option('--stamp', help='Path to touch on success')
+
+  options, _ = parser.parse_args(args)
+  enable_packing = (options.enable_packing == '1' and
+                    options.configuration_name == 'Release')
+  exclude_packing_set = set(shlex.split(options.exclude_packing_list))
+
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  build_utils.MakeDirectory(options.packed_libraries_dir)
+
+  for library in libraries:
+    library_path = os.path.join(options.stripped_libraries_dir, library)
+    output_path = os.path.join(options.packed_libraries_dir, library)
+
+    if enable_packing and library not in exclude_packing_set:
+      PackArmLibraryRelocations(options.android_pack_relocations,
+                                options.android_objcopy,
+                                library_path,
+                                output_path)
+    else:
+      CopyArmLibraryUnchanged(library_path, output_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/package_resources.py b/build/android/gyp/package_resources.py
new file mode 100755
index 0000000..f34578f
--- /dev/null
+++ b/build/android/gyp/package_resources.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=C0301
+"""Package resources into an apk.
+
+See https://android.googlesource.com/platform/tools/base/+/master/legacy/ant-tasks/src/main/java/com/android/ant/AaptExecTask.java
+and
+https://android.googlesource.com/platform/sdk/+/master/files/ant/build.xml
+"""
+# pylint: enable=C0301
+
+import optparse
+import os
+import shutil
+
+from util import build_utils
+
+def ParseArgs():
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--android-sdk', help='path to the Android SDK folder')
+  parser.add_option('--android-sdk-tools',
+                    help='path to the Android SDK build tools folder')
+
+  parser.add_option('--configuration-name',
+                    help='Gyp\'s configuration name (Debug or Release).')
+
+  parser.add_option('--android-manifest', help='AndroidManifest.xml path')
+  parser.add_option('--version-code', help='Version code for apk.')
+  parser.add_option('--version-name', help='Version name for apk.')
+  parser.add_option('--resource-zips',
+                    help='zip files containing resources to be packaged')
+  parser.add_option('--asset-dir',
+                    help='directories containing assets to be packaged')
+  parser.add_option('--no-compress', help='disables compression for the '
+                    'given comma separated list of extensions')
+
+  parser.add_option('--apk-path',
+                    help='Path to output (partial) apk.')
+
+  (options, args) = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('android_sdk', 'android_sdk_tools', 'configuration_name',
+                      'android_manifest', 'version_code', 'version_name',
+                      'resource_zips', 'asset_dir', 'apk_path')
+
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  return options
+
+
+def MoveImagesToNonMdpiFolders(res_root):
+  """Move images from drawable-*-mdpi-* folders to drawable-* folders.
+
+  Why? http://crbug.com/289843
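+
+  For example, res/drawable-v21-mdpi/foo.png moves to res/drawable-v21/foo.png.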
+  """
+  for src_dir_name in os.listdir(res_root):
+    src_components = src_dir_name.split('-')
+    if src_components[0] != 'drawable' or 'mdpi' not in src_components:
+      continue
+    src_dir = os.path.join(res_root, src_dir_name)
+    if not os.path.isdir(src_dir):
+      continue
+    dst_components = [c for c in src_components if c != 'mdpi']
+    assert dst_components != src_components
+    dst_dir_name = '-'.join(dst_components)
+    dst_dir = os.path.join(res_root, dst_dir_name)
+    build_utils.MakeDirectory(dst_dir)
+    for src_file_name in os.listdir(src_dir):
+      if not src_file_name.endswith('.png'):
+        continue
+      src_file = os.path.join(src_dir, src_file_name)
+      dst_file = os.path.join(dst_dir, src_file_name)
+      assert not os.path.lexists(dst_file)
+      shutil.move(src_file, dst_file)
+
+
+def PackageArgsForExtractedZip(d):
+  """Returns the aapt args for an extracted resources zip.
+
+  A resources zip either contains the resources for a single target or for
+  multiple targets. If it is multiple targets merged into one, the actual
+  resource directories will be contained in the subdirectories 0, 1, 2, ...
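+
+  For example, an extracted merged zip with subdirectories 0 and 1 yields
+  ['-S', '<d>/0', '-S', '<d>/1'].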
+  """
+  res_dirs = []
+  subdirs = [os.path.join(d, s) for s in os.listdir(d)]
+  subdirs = sorted([s for s in subdirs if os.path.isdir(s)])
+  if subdirs and os.path.basename(subdirs[0]) == '0':
+    res_dirs = subdirs
+  else:
+    res_dirs = [d]
+  package_command = []
+  for res_dir in res_dirs:
+    MoveImagesToNonMdpiFolders(res_dir)
+    package_command += ['-S', res_dir]
+  return package_command
+
+
+def main():
+  options = ParseArgs()
+  android_jar = os.path.join(options.android_sdk, 'android.jar')
+  aapt = os.path.join(options.android_sdk_tools, 'aapt')
+
+  with build_utils.TempDir() as temp_dir:
+    package_command = [aapt,
+                       'package',
+                       '--version-code', options.version_code,
+                       '--version-name', options.version_name,
+                       '-M', options.android_manifest,
+                       '--no-crunch',
+                       '-f',
+                       '--auto-add-overlay',
+
+                       '-I', android_jar,
+                       '-F', options.apk_path,
+                       ]
+
+    if options.no_compress:
+      for ext in options.no_compress.split(','):
+        package_command += ['-0', ext]
+
+    if os.path.exists(options.asset_dir):
+      package_command += ['-A', options.asset_dir]
+
+    dep_zips = build_utils.ParseGypList(options.resource_zips)
+    for z in dep_zips:
+      subdir = os.path.join(temp_dir, os.path.basename(z))
+      if os.path.exists(subdir):
+        raise Exception('Resource zip name conflict: ' + os.path.basename(z))
+      build_utils.ExtractAll(z, path=subdir)
+      package_command += PackageArgsForExtractedZip(subdir)
+
+    if 'Debug' in options.configuration_name:
+      package_command += ['--debug-mode']
+
+    build_utils.CheckOutput(
+        package_command, print_stdout=False, print_stderr=False)
+
+    if options.depfile:
+      build_utils.WriteDepfile(
+          options.depfile,
+          build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/process_resources.py b/build/android/gyp/process_resources.py
new file mode 100755
index 0000000..6f35388
--- /dev/null
+++ b/build/android/gyp/process_resources.py
@@ -0,0 +1,286 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android resources to generate R.java, and prepare for packaging.
+
+This will crunch images and generate v14 compatible resources
+(see generate_v14_compatible_resources.py).
+"""
+
+import optparse
+import os
+import re
+import shutil
+import sys
+import zipfile
+
+import generate_v14_compatible_resources
+
+from util import build_utils
+
+def ParseArgs(args):
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--android-sdk', help='path to the Android SDK folder')
+  parser.add_option('--android-sdk-tools',
+                    help='path to the Android SDK build tools folder')
+  parser.add_option('--non-constant-id', action='store_true')
+
+  parser.add_option('--android-manifest', help='AndroidManifest.xml path')
+  parser.add_option('--custom-package', help='Java package for R.java')
+
+  parser.add_option('--resource-dirs',
+                    help='Directories containing resources of this target.')
+  parser.add_option('--dependencies-res-zips',
+                    help='Resource zip archives from dependencies.')
+
+  parser.add_option('--resource-zip-out',
+                    help='Path for output zipped resources.')
+
+  parser.add_option('--R-dir',
+                    help='directory to hold generated R.java.')
+  parser.add_option('--srcjar-out',
+                    help='Path to srcjar to contain generated R.java.')
+
+  parser.add_option('--proguard-file',
+                    help='Path to proguard.txt generated file')
+
+  parser.add_option(
+      '--v14-verify-only',
+      action='store_true',
+      help='Do not generate v14 resources. Instead, just verify that the '
+      'resources are already compatible with v14, i.e. they don\'t use '
+      'attributes that cause crashes on certain devices.')
+
+  parser.add_option(
+      '--extra-res-packages',
+      help='Additional package names to generate R.java files for')
+  # TODO(cjhopman): Actually use --extra-r-text-files. We currently include all
+  # the resources in all R.java files for a particular apk.
+  parser.add_option(
+      '--extra-r-text-files',
+      help='For each additional package, the R.txt file should contain a '
+      'list of resources to be included in the R.java file in the format '
+      'generated by aapt')
+
+  parser.add_option(
+      '--all-resources-zip-out',
+      help='Path for output of all resources. This includes resources in '
+      'dependencies.')
+
+  parser.add_option('--stamp', help='File to touch on success')
+
+  (options, args) = parser.parse_args(args)
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = (
+      'android_sdk',
+      'android_sdk_tools',
+      'android_manifest',
+      'dependencies_res_zips',
+      'resource_dirs',
+      'resource_zip_out',
+      )
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  if (options.R_dir is None) == (options.srcjar_out is None):
+    raise Exception('Exactly one of --R-dir or --srcjar-out must be specified.')
+
+  return options
+
+
+def CreateExtraRJavaFiles(r_dir, extra_packages):
+  java_files = build_utils.FindInDirectory(r_dir, "R.java")
+  if len(java_files) != 1:
+    return
+  r_java_file = java_files[0]
+  r_java_contents = open(r_java_file).read()
+
+  for package in extra_packages:
+    package_r_java_dir = os.path.join(r_dir, *package.split('.'))
+    build_utils.MakeDirectory(package_r_java_dir)
+    package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
+    open(package_r_java_path, 'w').write(
+        re.sub(r'package [.\w]*;', 'package %s;' % package, r_java_contents))
+    # TODO(cjhopman): These extra package's R.java files should be filtered to
+    # only contain the resources listed in their R.txt files. At this point, we
+    # have already compiled those other libraries, so doing this would only
+    # affect how the code in this .apk target could refer to the resources.
+
+
+def FilterCrunchStderr(stderr):
+  """Filters out lines from aapt crunch's stderr that can safely be ignored."""
+  filtered_lines = []
+  for line in stderr.splitlines(True):
+    # Ignore this libpng warning, which is a known non-error condition.
+    # http://crbug.com/364355
+    if ('libpng warning: iCCP: Not recognizing known sRGB profile that has '
+        + 'been edited' in line):
+      continue
+    filtered_lines.append(line)
+  return ''.join(filtered_lines)
+
+
+def DidCrunchFail(returncode, stderr):
+  """Determines whether aapt crunch failed from its return code and output.
+
+  Because aapt's return code cannot be trusted, any output to stderr is
+  an indication that aapt has failed (http://crbug.com/314885).
+  """
+  return returncode != 0 or stderr
+
+
+def ZipResources(resource_dirs, zip_path):
+  # Python zipfile does not provide a way to replace a file (it just writes
+  # another file with the same name). So, first collect all the files to put
+  # in the zip (with proper overriding), and then zip them.
+  files_to_zip = dict()
+  for d in resource_dirs:
+    for root, _, files in os.walk(d):
+      for f in files:
+        archive_path = os.path.join(os.path.relpath(root, d), f)
+        path = os.path.join(root, f)
+        files_to_zip[archive_path] = path
+  with zipfile.ZipFile(zip_path, 'w') as outzip:
+    for archive_path, path in files_to_zip.iteritems():
+      outzip.write(path, archive_path)
+
+
+def CombineZips(zip_files, output_path):
+  # When packaging resources, if the top-level directories in the zip file are
+  # of the form 0, 1, ..., then each subdirectory will be passed to aapt as a
+  # resources directory. While some resources just clobber others (image files,
+  # etc), other resources (particularly .xml files) need to be more
+  # intelligently merged. That merging is left up to aapt.
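+  # For example, combining [a.zip, b.zip] prefixes a.zip's entries with '0/'
+  # and b.zip's entries with '1/'.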
+  with zipfile.ZipFile(output_path, 'w') as outzip:
+    for i, z in enumerate(zip_files):
+      with zipfile.ZipFile(z, 'r') as inzip:
+        for name in inzip.namelist():
+          new_name = '%d/%s' % (i, name)
+          outzip.writestr(new_name, inzip.read(name))
+
+
+def main():
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+
+  options = ParseArgs(args)
+  android_jar = os.path.join(options.android_sdk, 'android.jar')
+  aapt = os.path.join(options.android_sdk_tools, 'aapt')
+
+  input_files = []
+
+  with build_utils.TempDir() as temp_dir:
+    deps_dir = os.path.join(temp_dir, 'deps')
+    build_utils.MakeDirectory(deps_dir)
+    v14_dir = os.path.join(temp_dir, 'v14')
+    build_utils.MakeDirectory(v14_dir)
+
+    gen_dir = os.path.join(temp_dir, 'gen')
+    build_utils.MakeDirectory(gen_dir)
+
+    input_resource_dirs = build_utils.ParseGypList(options.resource_dirs)
+
+    for resource_dir in input_resource_dirs:
+      generate_v14_compatible_resources.GenerateV14Resources(
+          resource_dir,
+          v14_dir,
+          options.v14_verify_only)
+
+    dep_zips = build_utils.ParseGypList(options.dependencies_res_zips)
+    input_files += dep_zips
+    dep_subdirs = []
+    for z in dep_zips:
+      subdir = os.path.join(deps_dir, os.path.basename(z))
+      if os.path.exists(subdir):
+        raise Exception('Resource zip name conflict: ' + os.path.basename(z))
+      build_utils.ExtractAll(z, path=subdir)
+      dep_subdirs.append(subdir)
+
+    # Generate R.java. This R.java contains non-final constants and is used only
+    # while compiling the library jar (e.g. chromium_content.jar). When building
+    # an apk, a new R.java file with the correct resource -> ID mappings will be
+    # generated by merging the resources from all libraries and the main apk
+    # project.
+    package_command = [aapt,
+                       'package',
+                       '-m',
+                       '-M', options.android_manifest,
+                       '--auto-add-overlay',
+                       '-I', android_jar,
+                       '--output-text-symbols', gen_dir,
+                       '-J', gen_dir]
+
+    for d in input_resource_dirs:
+      package_command += ['-S', d]
+
+    for d in dep_subdirs:
+      package_command += ['-S', d]
+
+    if options.non_constant_id:
+      package_command.append('--non-constant-id')
+    if options.custom_package:
+      package_command += ['--custom-package', options.custom_package]
+    if options.proguard_file:
+      package_command += ['-G', options.proguard_file]
+    build_utils.CheckOutput(package_command, print_stderr=False)
+
+    if options.extra_res_packages:
+      CreateExtraRJavaFiles(
+          gen_dir,
+          build_utils.ParseGypList(options.extra_res_packages))
+
+    # This is the list of directories with resources to put in the final .zip
+    # file. The order of these is important so that crunched/v14 resources
+    # override the normal ones.
+    zip_resource_dirs = input_resource_dirs + [v14_dir]
+
+    base_crunch_dir = os.path.join(temp_dir, 'crunch')
+
+    # Crunch image resources. This shrinks png files and is necessary for
+    # 9-patch images to display correctly. 'aapt crunch' accepts only a single
+    # directory at a time and deletes everything in the output directory.
+    for idx, d in enumerate(input_resource_dirs):
+      crunch_dir = os.path.join(base_crunch_dir, str(idx))
+      build_utils.MakeDirectory(crunch_dir)
+      zip_resource_dirs.append(crunch_dir)
+      aapt_cmd = [aapt,
+                  'crunch',
+                  '-C', crunch_dir,
+                  '-S', d]
+      build_utils.CheckOutput(aapt_cmd, stderr_filter=FilterCrunchStderr,
+                              fail_func=DidCrunchFail)
+
+    ZipResources(zip_resource_dirs, options.resource_zip_out)
+
+    if options.all_resources_zip_out:
+      CombineZips([options.resource_zip_out] + dep_zips,
+                  options.all_resources_zip_out)
+
+    if options.R_dir:
+      build_utils.DeleteDirectory(options.R_dir)
+      shutil.copytree(gen_dir, options.R_dir)
+    else:
+      build_utils.ZipDir(options.srcjar_out, gen_dir)
+
+  if options.depfile:
+    input_files += build_utils.GetPythonDependencies()
+    build_utils.WriteDepfile(options.depfile, input_files)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/proguard.py b/build/android/gyp/proguard.py
new file mode 100755
index 0000000..ca58770
--- /dev/null
+++ b/build/android/gyp/proguard.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+def DoProguard(options):
+  injars = options.input_path
+  outjars = options.output_path
+  classpath = []
+  for arg in options.classpath:
+    classpath += build_utils.ParseGypList(arg)
+  classpath = list(set(classpath))
+  libraryjars = ':'.join(classpath)
+  # proguard does its own dependency checking, which can be avoided by deleting
+  # the output.
+  if os.path.exists(options.output_path):
+    os.remove(options.output_path)
+  proguard_cmd = ['java', '-jar',
+                  options.proguard_path,
+                  '-injars', injars,
+                  '-outjars', outjars,
+                  '-libraryjars', libraryjars,
+                  '@' + options.proguard_config]
+  build_utils.CheckOutput(proguard_cmd, print_stdout=True)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--proguard-path',
+                    help='Path to the proguard executable.')
+  parser.add_option('--input-path',
+                    help='Path to the .jar file proguard should run on.')
+  parser.add_option('--output-path', help='Path to the generated .jar file.')
+  parser.add_option('--proguard-config',
+                    help='Path to the proguard configuration file.')
+  parser.add_option('--classpath', action='append',
+                    help="Classpath for proguard.")
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args(args)
+
+  DoProguard(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/push_libraries.py b/build/android/gyp/push_libraries.py
new file mode 100755
index 0000000..63421e9
--- /dev/null
+++ b/build/android/gyp/push_libraries.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pushes native libraries to a device.
+
+"""
+
+import optparse
+import os
+import sys
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import constants
+
+from util import build_device
+from util import build_utils
+from util import md5_check
+
+def DoPush(options):
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  device = build_device.GetBuildDeviceFromPath(
+      options.build_device_configuration)
+  if not device:
+    return
+
+  serial_number = device.GetSerialNumber()
+  # A list so that it is modifiable in Push below.
+  needs_directory = [True]
+  for lib in libraries:
+    device_path = os.path.join(options.device_dir, lib)
+    host_path = os.path.join(options.libraries_dir, lib)
+
+    def Push():
+      if needs_directory:
+        device.RunShellCommand('mkdir -p ' + options.device_dir)
+        needs_directory[:] = [] # = False
+      device.PushChangedFiles(host_path, device_path)
+
+    record_path = '%s.%s.push.md5.stamp' % (host_path, serial_number)
+    md5_check.CallAndRecordIfStale(
+        Push,
+        record_path=record_path,
+        input_paths=[host_path],
+        input_strings=[device_path])
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  parser.add_option('--libraries-dir',
+      help='Directory that contains stripped libraries.')
+  parser.add_option('--device-dir',
+      help='Device directory to push the libraries to.')
+  parser.add_option('--libraries',
+      help='List of native libraries.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--build-device-configuration',
+      help='Path to build device configuration.')
+  parser.add_option('--configuration-name',
+      help='The build CONFIGURATION_NAME')
+  options, _ = parser.parse_args(args)
+
+  required_options = ['libraries_dir', 'device_dir', 'libraries']
+  build_utils.CheckOptions(options, parser, required=required_options)
+  constants.SetBuildType(options.configuration_name)
+
+  DoPush(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/strip_library_for_device.py b/build/android/gyp/strip_library_for_device.py
new file mode 100755
index 0000000..9e2daae
--- /dev/null
+++ b/build/android/gyp/strip_library_for_device.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+
+def StripLibrary(android_strip, android_strip_args, library_path, output_path):
+  if build_utils.IsTimeStale(output_path, [library_path]):
+    strip_cmd = ([android_strip] +
+                 android_strip_args +
+                 ['-o', output_path, library_path])
+    build_utils.CheckOutput(strip_cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--android-strip',
+      help='Path to the toolchain\'s strip binary')
+  parser.add_option('--android-strip-arg', action='append',
+      help='Argument to be passed to strip')
+  parser.add_option('--libraries-dir',
+      help='Directory for un-stripped libraries')
+  parser.add_option('--stripped-libraries-dir',
+      help='Directory for stripped libraries')
+  parser.add_option('--libraries',
+      help='List of libraries to strip')
+  parser.add_option('--stamp', help='Path to touch on success')
+
+  options, _ = parser.parse_args(args)
+
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  build_utils.MakeDirectory(options.stripped_libraries_dir)
+
+  for library in libraries:
+    for base_path in options.libraries_dir.split(','):
+      library_path = os.path.join(base_path, library)
+      if os.path.exists(library_path):
+        break
+    stripped_library_path = os.path.join(
+        options.stripped_libraries_dir, library)
+    StripLibrary(options.android_strip, options.android_strip_arg, library_path,
+        stripped_library_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/touch.py b/build/android/gyp/touch.py
new file mode 100755
index 0000000..7b4375e
--- /dev/null
+++ b/build/android/gyp/touch.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+from util import build_utils
+
+def main(argv):
+  for f in argv[1:]:
+    build_utils.Touch(f)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/util/__init__.py b/build/android/gyp/util/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/gyp/util/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/gyp/util/build_device.py b/build/android/gyp/util/build_device.py
new file mode 100644
index 0000000..b153a15
--- /dev/null
+++ b/build/android/gyp/util/build_device.py
@@ -0,0 +1,105 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" A simple device interface for build steps.
+
+"""
+
+import logging
+import os
+import re
+import sys
+
+from util import build_utils
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..', '..')
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import android_commands
+from pylib.device import device_errors
+from pylib.device import device_utils
+
+GetAttachedDevices = android_commands.GetAttachedDevices
+
+
+class BuildDevice(object):
+  def __init__(self, configuration):
+    self.id = configuration['id']
+    self.description = configuration['description']
+    self.install_metadata = configuration['install_metadata']
+    self.device = device_utils.DeviceUtils(self.id)
+
+  def RunShellCommand(self, *args, **kwargs):
+    return self.device.RunShellCommand(*args, **kwargs)
+
+  def PushChangedFiles(self, *args, **kwargs):
+    return self.device.PushChangedFiles(*args, **kwargs)
+
+  def GetSerialNumber(self):
+    return self.id
+
+  def Install(self, *args, **kwargs):
+    return self.device.old_interface.Install(*args, **kwargs)
+
+  def GetInstallMetadata(self, apk_package):
+    """Gets the metadata on the device for the apk_package apk."""
+    # Matches lines like:
+    # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+    #   org.chromium.chrome.shell.apk
+    # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+    #   org.chromium.chrome.shell-1.apk
+    apk_matcher = lambda s: re.match(r'.*%s(-[0-9]*)?\.apk$' % apk_package, s)
+    matches = filter(apk_matcher, self.install_metadata)
+    return matches[0] if matches else None
+
+
+def GetConfigurationForDevice(device_id):
+  device = device_utils.DeviceUtils(device_id)
+  configuration = None
+  has_root = False
+  is_online = device.IsOnline()
+  if is_online:
+    cmd = 'ls -l /data/app; getprop ro.build.description'
+    cmd_output = device.RunShellCommand(cmd)
+    has_root = 'Permission denied' not in cmd_output[0]
+    if not has_root:
+      # Disable warning log messages from EnableRoot()
+      logging.getLogger().disabled = True
+      try:
+        device.EnableRoot()
+        has_root = True
+      except device_errors.CommandFailedError:
+        has_root = False
+      finally:
+        logging.getLogger().disabled = False
+      cmd_output = device.RunShellCommand(cmd)
+
+    configuration = {
+        'id': device_id,
+        'description': cmd_output[-1],
+        'install_metadata': cmd_output[:-1],
+      }
+  return configuration, is_online, has_root
+
+
+def WriteConfigurations(configurations, path):
+  # Currently we only support installing to the first device.
+  build_utils.WriteJson(configurations[:1], path, only_if_changed=True)
+
+
+def ReadConfigurations(path):
+  return build_utils.ReadJson(path)
+
+
+def GetBuildDevice(configurations):
+  assert len(configurations) == 1
+  return BuildDevice(configurations[0])
+
+
+def GetBuildDeviceFromPath(path):
+  configurations = ReadConfigurations(path)
+  if len(configurations) > 0:
+    return GetBuildDevice(configurations)
+  return None
+
diff --git a/build/android/gyp/util/build_utils.py b/build/android/gyp/util/build_utils.py
new file mode 100644
index 0000000..e3a3525
--- /dev/null
+++ b/build/android/gyp/util/build_utils.py
@@ -0,0 +1,371 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import ast
+import contextlib
+import fnmatch
+import json
+import os
+import pipes
+import re
+import shlex
+import shutil
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+
+CHROMIUM_SRC = os.path.normpath(
+    os.path.join(os.path.dirname(__file__),
+                 os.pardir, os.pardir, os.pardir, os.pardir))
+COLORAMA_ROOT = os.path.join(CHROMIUM_SRC,
+                             'third_party', 'colorama', 'src')
+
+
+@contextlib.contextmanager
+def TempDir():
+  dirname = tempfile.mkdtemp()
+  try:
+    yield dirname
+  finally:
+    shutil.rmtree(dirname)
+
+
+def MakeDirectory(dir_path):
+  try:
+    os.makedirs(dir_path)
+  except OSError:
+    pass
+
+
+def DeleteDirectory(dir_path):
+  if os.path.exists(dir_path):
+    shutil.rmtree(dir_path)
+
+
+def Touch(path, fail_if_missing=False):
+  if fail_if_missing and not os.path.exists(path):
+    raise Exception(path + ' doesn\'t exist.')
+
+  MakeDirectory(os.path.dirname(path))
+  with open(path, 'a'):
+    os.utime(path, None)
+
+
+def FindInDirectory(directory, filename_filter):
+  files = []
+  for root, _dirnames, filenames in os.walk(directory):
+    matched_files = fnmatch.filter(filenames, filename_filter)
+    files.extend((os.path.join(root, f) for f in matched_files))
+  return files
+
+
+def FindInDirectories(directories, filename_filter):
+  all_files = []
+  for directory in directories:
+    all_files.extend(FindInDirectory(directory, filename_filter))
+  return all_files
+
+
+def ParseGnList(gn_string):
+  return ast.literal_eval(gn_string)
+
+
+def ParseGypList(gyp_string):
+  # The ninja generator doesn't support $ in strings, so use ## to
+  # represent $.
+  # TODO(cjhopman): Remove when
+  # https://code.google.com/p/gyp/issues/detail?id=327
+  # is addressed.
+  gyp_string = gyp_string.replace('##', '$')
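+  # For example, 'a.jar ##{LIB}/b.jar' becomes 'a.jar ${LIB}/b.jar' and is
+  # then split into ['a.jar', '${LIB}/b.jar'].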
+
+  if gyp_string.startswith('['):
+    return ParseGnList(gyp_string)
+  return shlex.split(gyp_string)
+
+
+def CheckOptions(options, parser, required=None):
+  if not required:
+    return
+  for option_name in required:
+    if getattr(options, option_name) is None:
+      parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
+def WriteJson(obj, path, only_if_changed=False):
+  old_dump = None
+  if os.path.exists(path):
+    with open(path, 'r') as oldfile:
+      old_dump = oldfile.read()
+
+  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))
+
+  if not only_if_changed or old_dump != new_dump:
+    with open(path, 'w') as outfile:
+      outfile.write(new_dump)
+
+
+def ReadJson(path):
+  with open(path, 'r') as jsonfile:
+    return json.load(jsonfile)
+
+
+class CalledProcessError(Exception):
+  """This exception is raised when the process run by CheckOutput
+  exits with a non-zero exit code."""
+
+  def __init__(self, cwd, args, output):
+    super(CalledProcessError, self).__init__()
+    self.cwd = cwd
+    self.args = args
+    self.output = output
+
+  def __str__(self):
+    # A user should be able to simply copy and paste the command that failed
+    # into their shell.
+    copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
+        ' '.join(map(pipes.quote, self.args)))
+    return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
+# This can be used in most cases like subprocess.check_output(). The output
+# produced on failure makes it easier to see which command failed and why.
+# If the command fails, raises a build_utils.CalledProcessError.
+def CheckOutput(args, cwd=None,
+                print_stdout=False, print_stderr=True,
+                stdout_filter=None,
+                stderr_filter=None,
+                fail_func=lambda returncode, stderr: returncode != 0):
+  if not cwd:
+    cwd = os.getcwd()
+
+  child = subprocess.Popen(args,
+      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
+  stdout, stderr = child.communicate()
+
+  if stdout_filter is not None:
+    stdout = stdout_filter(stdout)
+
+  if stderr_filter is not None:
+    stderr = stderr_filter(stderr)
+
+  if fail_func(child.returncode, stderr):
+    raise CalledProcessError(cwd, args, stdout + stderr)
+
+  if print_stdout:
+    sys.stdout.write(stdout)
+  if print_stderr:
+    sys.stderr.write(stderr)
+
+  return stdout
+
+
+def GetModifiedTime(path):
+  # For a symlink, the modified time should be the greater of the link's
+  # modified time and the modified time of the target.
+  return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
+
+
+def IsTimeStale(output, inputs):
+  if not os.path.exists(output):
+    return True
+
+  output_time = GetModifiedTime(output)
+  for i in inputs:
+    if GetModifiedTime(i) > output_time:
+      return True
+  return False
+
+
+def IsDeviceReady():
+  device_state = CheckOutput(['adb', 'get-state'])
+  return device_state.strip() == 'device'
+
+
+def CheckZipPath(name):
+  if os.path.normpath(name) != name:
+    raise Exception('Non-canonical zip path: %s' % name)
+  if os.path.isabs(name):
+    raise Exception('Absolute zip path: %s' % name)
+
+
+def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None):
+  if path is None:
+    path = os.getcwd()
+  elif not os.path.exists(path):
+    MakeDirectory(path)
+
+  with zipfile.ZipFile(zip_path) as z:
+    for name in z.namelist():
+      if name.endswith('/'):
+        continue
+      if pattern is not None:
+        if not fnmatch.fnmatch(name, pattern):
+          continue
+      CheckZipPath(name)
+      if no_clobber:
+        output_path = os.path.join(path, name)
+        if os.path.exists(output_path):
+          raise Exception(
+              'Path already exists from zip: %s %s %s'
+              % (zip_path, name, output_path))
+      # Extract each member individually; extractall() would ignore the
+      # pattern filter applied above.
+      z.extract(name, path)
+
+
+def DoZip(inputs, output, base_dir):
+  with zipfile.ZipFile(output, 'w') as outfile:
+    for f in inputs:
+      CheckZipPath(os.path.relpath(f, base_dir))
+      outfile.write(f, os.path.relpath(f, base_dir))
+
+
+def ZipDir(output, base_dir):
+  with zipfile.ZipFile(output, 'w') as outfile:
+    for root, _, files in os.walk(base_dir):
+      for f in files:
+        path = os.path.join(root, f)
+        archive_path = os.path.relpath(path, base_dir)
+        CheckZipPath(archive_path)
+        outfile.write(path, archive_path)
+
+
+def MergeZips(output, inputs, exclude_patterns=None):
+  def Allow(name):
+    if exclude_patterns is not None:
+      for p in exclude_patterns:
+        if fnmatch.fnmatch(name, p):
+          return False
+    return True
+
+  with zipfile.ZipFile(output, 'w') as out_zip:
+    for in_file in inputs:
+      with zipfile.ZipFile(in_file, 'r') as in_zip:
+        for name in in_zip.namelist():
+          if Allow(name):
+            out_zip.writestr(name, in_zip.read(name))
+
+
+def PrintWarning(message):
+  print 'WARNING: ' + message
+
+
+def PrintBigWarning(message):
+  print '*****     ' * 8
+  PrintWarning(message)
+  print '*****     ' * 8
+
+
+def GetSortedTransitiveDependencies(top, deps_func):
+  """Gets the list of all transitive dependencies in sorted order.
+
+  There should be no cycles in the dependency graph.
+
+  Args:
+    top: a list of the top level nodes
+    deps_func: A function that takes a node and returns its direct dependencies.
+  Returns:
+    A list of all transitive dependencies of nodes in top, in order (a node will
+    appear in the list at a higher index than all of its dependencies).
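+
+  For example, with top=['a'] and deps_func={'a': set(['b']), 'b': set()}.get,
+  the result is ['b', 'a'].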
+  """
+  def Node(dep):
+    return (dep, deps_func(dep))
+
+  # First: find all deps
+  unchecked_deps = list(top)
+  all_deps = set(top)
+  while unchecked_deps:
+    dep = unchecked_deps.pop()
+    new_deps = deps_func(dep).difference(all_deps)
+    unchecked_deps.extend(new_deps)
+    all_deps = all_deps.union(new_deps)
+
+  # Then: simple, slow topological sort.
+  sorted_deps = []
+  unsorted_deps = dict(map(Node, all_deps))
+  while unsorted_deps:
+    for library, dependencies in unsorted_deps.items():
+      if not dependencies.intersection(unsorted_deps.keys()):
+        sorted_deps.append(library)
+        del unsorted_deps[library]
+
+  return sorted_deps
+
+
+def GetPythonDependencies():
+  """Gets the paths of imported non-system python modules.
+
+  A path is assumed to be a "system" import if it is outside of chromium's
+  src/. The paths will be relative to the current directory.
+  """
+  module_paths = (m.__file__ for m in sys.modules.itervalues()
+                  if m is not None and hasattr(m, '__file__'))
+
+  abs_module_paths = map(os.path.abspath, module_paths)
+
+  non_system_module_paths = [
+      p for p in abs_module_paths if p.startswith(CHROMIUM_SRC)]
+  def ConvertPycToPy(s):
+    if s.endswith('.pyc'):
+      return s[:-1]
+    return s
+
+  non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
+  non_system_module_paths = map(os.path.relpath, non_system_module_paths)
+  return sorted(set(non_system_module_paths))
+
+
+def AddDepfileOption(parser):
+  parser.add_option('--depfile',
+                    help='Path to depfile. This must be specified as the '
+                    'action\'s first output.')
+
+
+def WriteDepfile(path, dependencies):
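+  # Writes a Makefile-style depfile. Since the depfile must be the action's
+  # first output (see AddDepfileOption), it doubles as the rule target, e.g.
+  # "out/foo.d: in/a.py in/b.py" (illustrative paths).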
+  with open(path, 'w') as depfile:
+    depfile.write(path)
+    depfile.write(': ')
+    depfile.write(' '.join(dependencies))
+    depfile.write('\n')
+
+
+def ExpandFileArgs(args):
+  """Replaces file-arg placeholders in args.
+
+  These placeholders have the form:
+    @FileArg(filename:key1:key2:...:keyn)
+
+  The value of such a placeholder is calculated by reading 'filename' as json
+  and then extracting the value at [key1][key2]...[keyn].
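+
+  For example (hypothetical file), if build.json contains
+    {"javac": {"classpath": ["a.jar", "b.jar"]}}
+  then '--cp=@FileArg(build.json:javac:classpath)' expands to
+  "--cp=['a.jar', 'b.jar']" (the str() of the looked-up value).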
+
+  Note: This intentionally does not return the list of files that appear in such
+  placeholders. An action that uses file-args *must* know the paths of those
+  files prior to the parsing of the arguments (typically by explicitly listing
+  them in the action's inputs in build files).
+  """
+  new_args = list(args)
+  file_jsons = dict()
+  r = re.compile(r'@FileArg\((.*?)\)')
+  for i, arg in enumerate(args):
+    match = r.search(arg)
+    if not match:
+      continue
+
+    if match.end() != len(arg):
+      raise Exception('Unexpected characters after FileArg: ' + arg)
+
+    lookup_path = match.group(1).split(':')
+    file_path = lookup_path[0]
+    if file_path not in file_jsons:
+      file_jsons[file_path] = ReadJson(file_path)
+
+    expansion = file_jsons[file_path]
+    for k in lookup_path[1:]:
+      expansion = expansion[k]
+
+    new_args[i] = arg[:match.start()] + str(expansion)
+
+  return new_args
+
diff --git a/build/android/gyp/util/md5_check.py b/build/android/gyp/util/md5_check.py
new file mode 100644
index 0000000..9f365aa
--- /dev/null
+++ b/build/android/gyp/util/md5_check.py
@@ -0,0 +1,86 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import hashlib
+import os
+
+
+def CallAndRecordIfStale(
+    function, record_path=None, input_paths=None, input_strings=None,
+    force=False):
+  """Calls function if the md5sum of the input paths/strings has changed.
+
+  The md5sum of the inputs is compared with the one stored in record_path. If
+  this has changed (or the record doesn't exist), function will be called and
+  the new md5sum will be recorded.
+
+  If force is True, the function will be called regardless of whether the
+  md5sum is out of date.
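+
+  Example (illustrative paths):
+    CallAndRecordIfStale(
+        lambda: shutil.copyfile('in.txt', 'out.txt'),
+        record_path='out.txt.copy.md5.stamp',
+        input_paths=['in.txt'])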
+  """
+  if not input_paths:
+    input_paths = []
+  if not input_strings:
+    input_strings = []
+  md5_checker = _Md5Checker(
+      record_path=record_path,
+      input_paths=input_paths,
+      input_strings=input_strings)
+  if force or md5_checker.IsStale():
+    function()
+    md5_checker.Write()
+
+
+def _UpdateMd5ForFile(md5, path, block_size=2**16):
+  with open(path, 'rb') as infile:
+    while True:
+      data = infile.read(block_size)
+      if not data:
+        break
+      md5.update(data)
+
+
+def _UpdateMd5ForDirectory(md5, dir_path):
+  for root, _, files in os.walk(dir_path):
+    for f in files:
+      _UpdateMd5ForFile(md5, os.path.join(root, f))
+
+
+def _UpdateMd5ForPath(md5, path):
+  if os.path.isdir(path):
+    _UpdateMd5ForDirectory(md5, path)
+  else:
+    _UpdateMd5ForFile(md5, path)
+
+
+class _Md5Checker(object):
+  def __init__(self, record_path=None, input_paths=None, input_strings=None):
+    if not input_paths:
+      input_paths = []
+    if not input_strings:
+      input_strings = []
+
+    assert record_path.endswith('.stamp'), (
+        'record paths must end in \'.stamp\' so that they are easy to find '
+        'and delete')
+
+    self.record_path = record_path
+
+    md5 = hashlib.md5()
+    for i in sorted(input_paths):
+      _UpdateMd5ForPath(md5, i)
+    for s in input_strings:
+      md5.update(s)
+    self.new_digest = md5.hexdigest()
+
+    self.old_digest = ''
+    if os.path.exists(self.record_path):
+      with open(self.record_path, 'r') as old_record:
+        self.old_digest = old_record.read()
+
+  def IsStale(self):
+    return self.old_digest != self.new_digest
+
+  def Write(self):
+    with open(self.record_path, 'w') as new_record:
+      new_record.write(self.new_digest)
diff --git a/build/android/gyp/util/md5_check_test.py b/build/android/gyp/util/md5_check_test.py
new file mode 100644
index 0000000..4f89fc2
--- /dev/null
+++ b/build/android/gyp/util/md5_check_test.py
@@ -0,0 +1,72 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import tempfile
+import unittest
+
+import md5_check # pylint: disable=W0403
+
+
+class TestMd5Check(unittest.TestCase):
+  def setUp(self):
+    self.called = False
+
+  def testCallAndRecordIfStale(self):
+    input_strings = ['string1', 'string2']
+    input_file1 = tempfile.NamedTemporaryFile()
+    input_file2 = tempfile.NamedTemporaryFile()
+    file1_contents = 'input file 1'
+    file2_contents = 'input file 2'
+    input_file1.write(file1_contents)
+    input_file1.flush()
+    input_file2.write(file2_contents)
+    input_file2.flush()
+    input_files = [input_file1.name, input_file2.name]
+
+    record_path = tempfile.NamedTemporaryFile(suffix='.stamp')
+
+    def CheckCallAndRecord(should_call, message, force=False):
+      self.called = False
+      def MarkCalled():
+        self.called = True
+      md5_check.CallAndRecordIfStale(
+          MarkCalled,
+          record_path=record_path.name,
+          input_paths=input_files,
+          input_strings=input_strings,
+          force=force)
+      self.assertEqual(should_call, self.called, message)
+
+    CheckCallAndRecord(True, 'should call when record doesn\'t exist')
+    CheckCallAndRecord(False, 'should not call when nothing changed')
+    CheckCallAndRecord(True, force=True, message='should call when forced')
+
+    input_file1.write('some more input')
+    input_file1.flush()
+    CheckCallAndRecord(True, 'changed input file should trigger call')
+
+    input_files = input_files[::-1]
+    CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
+
+    input_files = input_files[:1]
+    CheckCallAndRecord(True, 'removing file should trigger call')
+
+    input_files.append(input_file2.name)
+    CheckCallAndRecord(True, 'added input file should trigger call')
+
+    input_strings[0] = input_strings[0] + ' a bit longer'
+    CheckCallAndRecord(True, 'changed input string should trigger call')
+
+    input_strings = input_strings[::-1]
+    CheckCallAndRecord(True, 'reordering of string inputs should trigger call')
+
+    input_strings = input_strings[:1]
+    CheckCallAndRecord(True, 'removing a string should trigger call')
+
+    input_strings.append('a brand new string')
+    CheckCallAndRecord(True, 'added input string should trigger call')
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/gyp/write_build_config.py b/build/android/gyp/write_build_config.py
new file mode 100755
index 0000000..ab70a79
--- /dev/null
+++ b/build/android/gyp/write_build_config.py
@@ -0,0 +1,223 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a build_config file.
+
+The build_config file for a target is a json file containing information about
+how to build that target based on the target's dependencies. This includes
+things like: the javac classpath, the list of android resources dependencies,
+etc. It also includes the information needed to create the build_config for
+other targets that depend on that one.
+
+There are several different types of build_configs:
+  android_library: An android library containing java code.
+  android_resources: A target containing android resources.
+
+Android build scripts should not refer to the build_config directly; the
+build specification should instead pass information in using the special
+file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
+values from a json dict stored in a file and looks like this:
+  --python-arg=@FileArg(build_config_path:javac:classpath)
+
+Note: If paths to input files are passed in this way, it is important that:
+  1. inputs/deps of the action ensure that the files are available the first
+  time the action runs.
+  2. Either (a) or (b)
+    a. inputs/deps ensure that the action runs whenever one of the files changes
+    b. the files are added to the action's depfile
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+import write_ordered_libraries
+
+
+dep_config_cache = {}
+def GetDepConfig(path):
+  if path not in dep_config_cache:
+    dep_config_cache[path] = build_utils.ReadJson(path)['deps_info']
+  return dep_config_cache[path]
+
+
+def DepsOfType(wanted_type, configs):
+  return [c for c in configs if c['type'] == wanted_type]
+
+
+def GetAllDepsConfigsInOrder(deps_config_paths):
+  def Deps(path):
+    return set(GetDepConfig(path)['deps_configs'])
+  return build_utils.GetSortedTransitiveDependencies(deps_config_paths, Deps)
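+
+# build_utils.GetSortedTransitiveDependencies is defined elsewhere; one way to
+# produce such a dependency-first ordering is a post-order depth-first
+# traversal (illustrative sketch only):
+#
+#   def _SortedTransitiveDepsSketch(roots, get_deps):
+#     ordered, visited = [], set()
+#     def Visit(node):
+#       if node in visited:
+#         return
+#       visited.add(node)
+#       for dep in get_deps(node):
+#         Visit(dep)
+#       ordered.append(node)   # deps come before their dependents
+#     for root in roots:
+#       Visit(root)
+#     return ordered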
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--build-config', help='Path to build_config output.')
+  parser.add_option(
+      '--type',
+      help='Type of this target (e.g. android_library).')
+  parser.add_option(
+      '--possible-deps-configs',
+      help='List of paths to dependencies\' build_config files. Some '
+      'dependencies may not write build_config files. Missing build_config '
+      'files are handled differently based on the type of this target.')
+
+  # android_resources options
+  parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
+  parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
+  parser.add_option('--package-name',
+      help='Java package name for these resources.')
+  parser.add_option('--android-manifest', help='Path to android manifest.')
+
+  # android_library/apk options
+  parser.add_option('--jar-path', help='Path to target\'s jar output.')
+  parser.add_option('--dex-path', help='Path to target\'s dex output.')
+
+  # apk native library options
+  parser.add_option('--native-libs', help='List of top-level native libs.')
+  parser.add_option('--readelf-path', help='Path to toolchain\'s readelf.')
+
+  options, args = parser.parse_args(argv)
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+
+  if options.type not in [
+      'android_library', 'android_resources', 'android_apk']:
+    raise Exception('Unknown type: <%s>' % options.type)
+
+
+  required_options = ['build_config'] + {
+      'android_library': ['jar_path', 'dex_path'],
+      'android_resources': ['resources_zip'],
+      'android_apk': ['jar_path', 'dex_path', 'resources_zip']
+    }[options.type]
+
+  if options.native_libs:
+    required_options += ['readelf_path']
+
+  build_utils.CheckOptions(options, parser, required_options)
+
+  possible_deps_config_paths = build_utils.ParseGypList(
+      options.possible_deps_configs)
+
+
+  allow_unknown_deps = options.type == 'android_apk'
+  unknown_deps = [
+      c for c in possible_deps_config_paths if not os.path.exists(c)]
+  if unknown_deps and not allow_unknown_deps:
+    raise Exception('Unknown deps: ' + str(unknown_deps))
+
+  direct_deps_config_paths = [
+      c for c in possible_deps_config_paths if c not in unknown_deps]
+  all_deps_config_paths = GetAllDepsConfigsInOrder(direct_deps_config_paths)
+
+  direct_deps_configs = [GetDepConfig(p) for p in direct_deps_config_paths]
+  all_deps_configs = [GetDepConfig(p) for p in all_deps_config_paths]
+
+  direct_library_deps = DepsOfType('android_library', direct_deps_configs)
+  all_library_deps = DepsOfType('android_library', all_deps_configs)
+
+  direct_resources_deps = DepsOfType('android_resources', direct_deps_configs)
+  all_resources_deps = DepsOfType('android_resources', all_deps_configs)
+
+  # Initialize some common config.
+  config = {
+    'deps_info': {
+      'path': options.build_config,
+      'type': options.type,
+      'deps_configs': direct_deps_config_paths,
+    }
+  }
+  deps_info = config['deps_info']
+
+  if options.type in ['android_library', 'android_apk']:
+    javac_classpath = [c['jar_path'] for c in direct_library_deps]
+    deps_info['resources_deps'] = [c['path'] for c in all_resources_deps]
+    deps_info['jar_path'] = options.jar_path
+    deps_info['dex_path'] = options.dex_path
+    config['javac'] = {
+      'classpath': javac_classpath,
+    }
+
+  if options.type == 'android_library':
+    # Only resources might have srcjars (normal srcjar targets are listed in
+    # srcjar_deps). A resource's srcjar contains the R.java file for those
+    # resources, and (like Android's default build system) we allow a library
+    # to refer to the resources in any of its dependencies.
+    config['javac']['srcjars'] = [
+        c['srcjar'] for c in direct_resources_deps if 'srcjar' in c]
+
+  if options.type == 'android_apk':
+    config['javac']['srcjars'] = []
+
+
+  if options.type == 'android_resources':
+    deps_info['resources_zip'] = options.resources_zip
+    if options.srcjar:
+      deps_info['srcjar'] = options.srcjar
+    if options.package_name:
+      deps_info['package_name'] = options.package_name
+
+  if options.type == 'android_resources' or options.type == 'android_apk':
+    config['resources'] = {}
+    config['resources']['dependency_zips'] = [
+        c['resources_zip'] for c in all_resources_deps]
+    config['resources']['extra_package_names'] = [
+        c['package_name'] for c in all_resources_deps if 'package_name' in c]
+
+
+  if options.type == 'android_apk':
+    config['apk_dex'] = {}
+    dex_config = config['apk_dex']
+    # TODO(cjhopman): proguard version
+    dex_deps_files = [c['dex_path'] for c in all_library_deps]
+    dex_config['dependency_dex_files'] = dex_deps_files
+
+    config['dist_jar'] = {
+      'dependency_jars': [
+        c['jar_path'] for c in all_library_deps
+      ]
+    }
+
+    library_paths = []
+    java_libraries_list = []
+    if options.native_libs:
+      libraries = build_utils.ParseGypList(options.native_libs)
+      if libraries:
+        libraries_dir = os.path.dirname(libraries[0])
+        write_ordered_libraries.SetReadelfPath(options.readelf_path)
+        write_ordered_libraries.SetLibraryDirs([libraries_dir])
+        all_native_library_deps = (
+            write_ordered_libraries.GetSortedTransitiveDependenciesForBinaries(
+                libraries))
+        # Create a java literal array with the "base" library names:
+        # e.g. libfoo.so -> foo
+        java_libraries_list = '{%s}' % ','.join(
+            ['"%s"' % s[3:-3] for s in all_native_library_deps])
+        library_paths = map(
+            write_ordered_libraries.FullLibraryPath, all_native_library_deps)
+
+      config['native'] = {
+        'libraries': library_paths,
+        'java_libraries_list': java_libraries_list
+      }
+
+  build_utils.WriteJson(config, options.build_config, only_if_changed=True)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        all_deps_config_paths + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/write_ordered_libraries.py b/build/android/gyp/write_ordered_libraries.py
new file mode 100755
index 0000000..b361bc3
--- /dev/null
+++ b/build/android/gyp/write_ordered_libraries.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes dependency ordered list of native libraries.
+
+The list excludes any Android system libraries, as those are not bundled with
+the APK.
+
+This list of libraries is used for several steps of building an APK.
+In the component build, --input-libraries need only list the top-level
+library (e.g. libcontent_shell_content_view). The script then uses readelf to
+inspect the shared libraries and determine the full list of (non-system)
+libraries that should be included in the APK.
+"""
+
+# TODO(cjhopman): See if we can expose the list of library dependencies from
+# gyp, rather than calculating it ourselves.
+# http://crbug.com/225558
+
+import optparse
+import os
+import re
+import sys
+
+from util import build_utils
+
+_readelf = None
+_library_dirs = None
+
+_library_re = re.compile(
+    '.*NEEDED.*Shared library: \[(?P<library_name>[\w/.]+)\]')
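+# Matches 'readelf -d' output lines such as:
+#   0x00000001 (NEEDED)  Shared library: [libm.so]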
+
+
+def SetReadelfPath(path):
+  global _readelf
+  _readelf = path
+
+
+def SetLibraryDirs(dirs):
+  global _library_dirs
+  _library_dirs = dirs
+
+
+def FullLibraryPath(library_name):
+  assert _library_dirs is not None
+  for directory in _library_dirs:
+    path = '%s/%s' % (directory, library_name)
+    if os.path.exists(path):
+      return path
+  return library_name
+
+
+def IsSystemLibrary(library_name):
+  # If the library doesn't exist in the libraries directory, assume that it is
+  # an Android system library.
+  return not os.path.exists(FullLibraryPath(library_name))
+
+
+def CallReadElf(library_or_executable):
+  assert _readelf is not None
+  readelf_cmd = [_readelf,
+                 '-d',
+                 FullLibraryPath(library_or_executable)]
+  return build_utils.CheckOutput(readelf_cmd)
+
+
+def GetDependencies(library_or_executable):
+  elf = CallReadElf(library_or_executable)
+  return set(_library_re.findall(elf))
+
+
+def GetNonSystemDependencies(library_name):
+  all_deps = GetDependencies(FullLibraryPath(library_name))
+  return set((lib for lib in all_deps if not IsSystemLibrary(lib)))
+
+
+def GetSortedTransitiveDependencies(libraries):
+  """Returns all transitive library dependencies in dependency order."""
+  return build_utils.GetSortedTransitiveDependencies(
+      libraries, GetNonSystemDependencies)
+
+
+def GetSortedTransitiveDependenciesForBinaries(binaries):
+  if binaries[0].endswith('.so'):
+    libraries = [os.path.basename(lib) for lib in binaries]
+  else:
+    assert len(binaries) == 1
+    all_deps = GetDependencies(binaries[0])
+    libraries = [lib for lib in all_deps if not IsSystemLibrary(lib)]
+
+  return GetSortedTransitiveDependencies(libraries)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--input-libraries',
+      help='A list of top-level input libraries.')
+  parser.add_option('--libraries-dir',
+      help='The directory which contains shared libraries.')
+  parser.add_option('--readelf', help='Path to the readelf binary.')
+  parser.add_option('--output', help='Path to the generated .json file.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  SetReadelfPath(options.readelf)
+  SetLibraryDirs(options.libraries_dir.split(','))
+
+  libraries = build_utils.ParseGypList(options.input_libraries)
+  if libraries:
+    libraries = GetSortedTransitiveDependenciesForBinaries(libraries)
+
+  # Convert to "base" library names: e.g. libfoo.so -> foo
+  java_libraries_list = (
+      '{%s}' % ','.join(['"%s"' % s[3:-3] for s in libraries]))
+
+  build_utils.WriteJson(
+      {'libraries': libraries, 'java_libraries_list': java_libraries_list},
+      options.output,
+      only_if_changed=True)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        libraries + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/zip.py b/build/android/gyp/zip.py
new file mode 100755
index 0000000..51322df
--- /dev/null
+++ b/build/android/gyp/zip.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Archives a set of files.
+"""
+
+import optparse
+import sys
+
+from util import build_utils
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--input-dir', help='Directory of files to archive.')
+  parser.add_option('--output', help='Path to output archive.')
+  options, _ = parser.parse_args()
+
+  inputs = build_utils.FindInDirectory(options.input_dir, '*')
+  build_utils.DoZip(inputs, options.output, options.input_dir)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/host_heartbeat.py b/build/android/host_heartbeat.py
new file mode 100755
index 0000000..429fca9
--- /dev/null
+++ b/build/android/host_heartbeat.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Sends a heart beat pulse to the currently online Android devices.
+This heart beat lets the devices know that they are connected to a host.
+"""
+# pylint: disable=W0702
+
+import sys
+import time
+
+from pylib import android_commands
+from pylib.device import device_utils
+
+PULSE_PERIOD = 20
+
+def main():
+  while True:
+    try:
+      devices = android_commands.GetAttachedDevices()
+      for device_serial in devices:
+        device_utils.DeviceUtils(device_serial).RunShellCommand(
+            'touch /sdcard/host_heartbeat')
+    except:
+      # Keep the heartbeat running, ignoring all errors.
+      pass
+    time.sleep(PULSE_PERIOD)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/increase_size_for_speed.gypi b/build/android/increase_size_for_speed.gypi
new file mode 100644
index 0000000..f5f2d62
--- /dev/null
+++ b/build/android/increase_size_for_speed.gypi
@@ -0,0 +1,26 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included to optimize a target for speed
+# rather than for size on Android.
+# This is used in some carefully tailored targets and is not meant
+# to be included everywhere. Before adding the template to another target,
+# please ask in chromium-dev@. See crbug.com/411909
+
+{
+  'configurations': {
+    'Release': {
+      'target_conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['OS=="android"', {
+              'cflags!': ['-Os'],
+              'cflags': ['-O2'],
+            }],
+          ],
+        }],
+      ],
+    },
+  },
+}
diff --git a/build/android/insert_chromium_version.gypi b/build/android/insert_chromium_version.gypi
new file mode 100644
index 0000000..158a227
--- /dev/null
+++ b/build/android/insert_chromium_version.gypi
@@ -0,0 +1,53 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# inserts a chromium version string into native libraries.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'action_name': 'insert_chromium_version',
+#    'actions': [
+#      'variables': {
+#        'ordered_libraries_file': 'file generated by write_ordered_libraries'
+#        'libraries_source_dir': 'the directory that contains native libraries'
+#        'input_paths': 'files to be added to the list of inputs'
+#        'stamp': 'file to touch when the action is complete'
+#        'version_string': 'chromium version string to be inserted'
+#      'includes': [ '../../build/android/insert_chromium_version.gypi' ],
+#    ],
+#  },
+#
+
+{
+  'message': 'Inserting chromium version string into native libraries',
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/insert_chromium_version.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/insert_chromium_version.py',
+    '--android-objcopy=<(android_objcopy)',
+    '--libraries-source-dir=<(libraries_source_dir)',
+    '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+    '--version-string=<(version_string)',
+    '--stamp=<(stamp)',
+  ],
+  'conditions': [
+    ['component == "shared_library"', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+}
diff --git a/build/android/install_emulator_deps.py b/build/android/install_emulator_deps.py
new file mode 100755
index 0000000..d980c2c
--- /dev/null
+++ b/build/android/install_emulator_deps.py
@@ -0,0 +1,277 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Installs deps for using SDK emulator for testing.
+
+The script will download the SDK and system images, if they are not present, and
+install and enable KVM, if virtualization has been enabled in the BIOS.
+"""
+
+
+import logging
+import optparse
+import os
+import re
+import shutil
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import pexpect
+from pylib.utils import run_tests_helper
+
+# Android API level
+DEFAULT_ANDROID_API_LEVEL = constants.ANDROID_SDK_VERSION
+
+# From the Android Developer's website.
+# Keep this up to date; the user can install older API levels as necessary.
+SDK_BASE_URL = 'http://dl.google.com/android/adt'
+SDK_ZIP = 'adt-bundle-linux-x86_64-20131030.zip'
+
+# pylint: disable=C0301
+# Android x86 system image from the Intel website:
+# http://software.intel.com/en-us/articles/intel-eula-x86-android-4-2-jelly-bean-bin
+# These don't exist prior to Android-15.
+X86_IMG_URLS = {
+  15: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-15_r01.zip',
+  16: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-16_r01.zip',
+  17: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-17_r01.zip',
+  18: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-18_r01.zip',
+  19: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-19_r01.zip'}
+#pylint: enable=C0301
+
+def CheckSDK():
+  """Check if SDK is already installed.
+
+  Returns:
+    True if the emulator SDK directory (src/android_emulator_sdk/) exists.
+  """
+  return os.path.exists(constants.EMULATOR_SDK_ROOT)
+
+
+def CheckSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Check if the "SDK Platform" for the specified API level is installed.
+     This is necessary in order for the emulator to run when the target
+     is specified.
+
+  Args:
+    api_level: the Android API level to check; defaults to the latest API.
+
+  Returns:
+    True if the platform is already installed.
+  """
+  android_binary = os.path.join(constants.EMULATOR_SDK_ROOT,
+                                'sdk', 'tools', 'android')
+  pattern = re.compile('id: [0-9]+ or "android-%d"' % api_level)
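+  # Matches 'android list' output lines such as (illustrative):
+  #   id: 1 or "android-19"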
+  try:
+    exit_code, stdout = cmd_helper.GetCmdStatusAndOutput(
+        [android_binary, 'list'])
+    if exit_code != 0:
+      raise Exception('\'android list\' command failed')
+    for line in stdout.split('\n'):
+      if pattern.match(line):
+        return True
+    return False
+  except OSError:
+    logging.exception('Unable to execute \'android list\'')
+    return False
+
+
+def CheckX86Image(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Check if Android system images have been installed.
+
+  Args:
+    api_level: the Android API level to check for; defaults to the latest API.
+
+  Returns:
+    True if sdk/system-images/android-<api_level>/x86 exists inside
+    EMULATOR_SDK_ROOT.
+  """
+  api_target = 'android-%d' % api_level
+  return os.path.exists(os.path.join(constants.EMULATOR_SDK_ROOT,
+                                     'sdk', 'system-images',
+                                     api_target, 'x86'))
+
+
+def CheckKVM():
+  """Quickly check whether KVM is enabled.
+
+  Returns:
+    True iff /dev/kvm exists (Linux only).
+  """
+  return os.path.exists('/dev/kvm')
+
+
+def RunKvmOk():
+  """Run kvm-ok as root to check that KVM is properly enabled after installation
+     of the required packages.
+
+  Returns:
+    True iff KVM is enabled (/dev/kvm exists). On failure, returns False
+    but also prints detailed information explaining why KVM isn't enabled
+    (e.g. CPU doesn't support it, or BIOS disabled it).
+  """
+  try:
+    # Note: kvm-ok is in /usr/sbin, so always use 'sudo' to run it.
+    return not cmd_helper.RunCmd(['sudo', 'kvm-ok'])
+  except OSError:
+    logging.info('kvm-ok not installed')
+    return False
+
+
+def GetSDK():
+  """Download the SDK and unzip it into EMULATOR_SDK_ROOT."""
+  logging.info('Downloading the Android SDK.')
+  sdk_url = '%s/%s' % (SDK_BASE_URL, SDK_ZIP)
+  try:
+    cmd_helper.RunCmd(['curl', '-o', '/tmp/sdk.zip', sdk_url])
+    print 'Download finished; unzipping...'
+    rc = cmd_helper.RunCmd(['unzip', '-o', '/tmp/sdk.zip', '-d', '/tmp/'])
+    if rc:
+      raise Exception('ERROR: could not download/unzip Android SDK.')
+    # Get the name of the sub-directory that everything will be extracted to.
+    dirname, _ = os.path.splitext(SDK_ZIP)
+    zip_dir = '/tmp/%s' % dirname
+    # Move the extracted directory to EMULATOR_SDK_ROOT
+    shutil.move(zip_dir, constants.EMULATOR_SDK_ROOT)
+  finally:
+    os.unlink('/tmp/sdk.zip')
+
+
+def InstallKVM():
+  """Installs KVM packages."""
+  rc = cmd_helper.RunCmd(['sudo', 'apt-get', 'install', 'kvm'])
+  if rc:
+    logging.critical('ERROR: Did not install KVM. Make sure hardware '
+                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
+                     'AMD SVM).')
+  # TODO(navabi): Use modprobe kvm-amd on AMD processors.
+  rc = cmd_helper.RunCmd(['sudo', 'modprobe', 'kvm-intel'])
+  if rc:
+    logging.critical('ERROR: Did not add KVM module to Linux Kernel. Make sure '
+                     'hardware virtualization is enabled in BIOS.')
+  # Now check to ensure KVM acceleration can be used.
+  if not RunKvmOk():
+    logging.critical('ERROR: Cannot use KVM acceleration. Make sure hardware '
+                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
+                     'AMD SVM).')
+
+
+def GetX86Image(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Download x86 system image from Intel's website.
+
+  Args:
+    api_level: the Android API level to download for.
+  """
+  logging.info('Downloading x86 system image into the SDK directory.')
+  # TODO(andrewhayden): Use python tempfile lib instead
+  temp_file = '/tmp/x86_img_android-%d.zip' % api_level
+  if api_level not in X86_IMG_URLS:
+    raise Exception('ERROR: no URL known for x86 image for android-%s' %
+                    api_level)
+  try:
+    cmd_helper.RunCmd(['curl', '-o', temp_file, X86_IMG_URLS[api_level]])
+    rc = cmd_helper.RunCmd(['unzip', '-o', temp_file, '-d', '/tmp/'])
+    if rc:
+      raise Exception('ERROR: Could not download/unzip image zip.')
+    api_target = 'android-%d' % api_level
+    sys_imgs = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk',
+                            'system-images', api_target, 'x86')
+    logging.info('Deploying system image to %s' % sys_imgs)
+    shutil.move('/tmp/x86', sys_imgs)
+  finally:
+    os.unlink(temp_file)
+
+
+def GetSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Update the SDK to include the platform specified.
+
+  Args:
+    api_level: the Android API level to download
+  """
+  android_binary = os.path.join(constants.EMULATOR_SDK_ROOT,
+                                'sdk', 'tools', 'android')
+  pattern = re.compile('\s*([0-9]+)- SDK Platform Android [\.,0-9]+, API %d.*' %
+                       api_level)
+  # Example:
+  #   2- SDK Platform Android 4.3, API 18, revision 2
+  exit_code, stdout = cmd_helper.GetCmdStatusAndOutput(
+      [android_binary, 'list', 'sdk'])
+  if exit_code != 0:
+    raise Exception('\'android list sdk\' command returned %d' % exit_code)
+  for line in stdout.split('\n'):
+    match = pattern.match(line)
+    if match:
+      index = match.group(1)
+      print('package %s corresponds to platform level %d' % (index, api_level))
+      # update sdk --no-ui --filter $INDEX
+      update_command = [android_binary,
+                        'update', 'sdk', '--no-ui', '--filter', index]
+      update_command_str = ' '.join(update_command)
+      logging.info('running update command: %s' % update_command_str)
+      update_process = pexpect.spawn(update_command_str)
+      # TODO(andrewhayden): Do we need to bug the user about this?
+      if update_process.expect('Do you accept the license') != 0:
+        raise Exception('License agreement check failed')
+      update_process.sendline('y')
+      if update_process.expect('Done. 1 package installed.') == 0:
+        print('Successfully installed platform for API level %d' % api_level)
+        return
+      else:
+        raise Exception('Failed to install platform update')
+  raise Exception('Could not find android-%d update for the SDK!' % api_level)
+
+
+def main(argv):
+  opt_parser = optparse.OptionParser(
+      description='Install dependencies for running the Android emulator')
+  opt_parser.add_option('--api-level', dest='api_level',
+      help='The API level (e.g., 19 for Android 4.4) to ensure is available',
+      type='int', default=DEFAULT_ANDROID_API_LEVEL)
+  opt_parser.add_option('-v', dest='verbose', action='store_true',
+      help='enable verbose logging')
+  options, _ = opt_parser.parse_args(argv[1:])
+
+  # run_tests_helper sets the log level from verbose_count: 1 maps to INFO
+  # and 2 maps to DEBUG, so verbose output is requested by passing 2.
+  verbosity = 1
+  if options.verbose:
+    verbosity = 2
+  logging.basicConfig(level=logging.INFO,
+                      format='# %(asctime)-15s: %(message)s')
+  run_tests_helper.SetLogLevel(verbose_count=verbosity)
+
+  # Calls below will download emulator SDK and/or system images only if needed.
+  if CheckSDK():
+    logging.info('android_emulator_sdk/ already exists, skipping download.')
+  else:
+    GetSDK()
+
+  # Check target. The target has to be installed in order to run the emulator.
+  if CheckSDKPlatform(options.api_level):
+    logging.info('SDK platform android-%d already present, skipping.' %
+                 options.api_level)
+  else:
+    logging.info('SDK platform android-%d not present, installing.' %
+                 options.api_level)
+    GetSDKPlatform(options.api_level)
+
+  # Download the x86 system image only if needed.
+  if CheckX86Image(options.api_level):
+    logging.info('x86 image for android-%d already present, skipping.' %
+                 options.api_level)
+  else:
+    GetX86Image(options.api_level)
+
+  # Make sure KVM packages are installed and enabled.
+  if CheckKVM():
+    logging.info('KVM already installed and enabled.')
+  else:
+    InstallKVM()
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/instr_action.gypi b/build/android/instr_action.gypi
new file mode 100644
index 0000000..fa6d062
--- /dev/null
+++ b/build/android/instr_action.gypi
@@ -0,0 +1,53 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# instruments either java class files, or jars.
+
+{
+  'variables': {
+    'instr_type%': 'jar',
+    'input_path%': '',
+    'output_path%': '',
+    'stamp_path%': '',
+    'extra_instr_args': [
+      '--coverage-file=<(_target_name).em',
+      '--sources-file=<(_target_name)_sources.txt',
+    ],
+    'emma_jar': '<(android_sdk_root)/tools/lib/emma.jar',
+    'conditions': [
+      ['emma_instrument != 0', {
+        'extra_instr_args': [
+          '--sources=<(java_in_dir)/src >(additional_src_dirs) >(generated_src_dirs)',
+          '--src-root=<(DEPTH)',
+          '--emma-jar=<(emma_jar)',
+          '--filter-string=<(emma_filter)',
+        ],
+        'conditions': [
+          ['instr_type == "jar"', {
+            'instr_action': 'instrument_jar',
+          }, {
+            'instr_action': 'instrument_classes',
+          }]
+        ],
+      }, {
+        'instr_action': 'copy',
+        'extra_instr_args': [],
+      }]
+    ]
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/emma_instr.py',
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/pylib/utils/command_option_parser.py',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/emma_instr.py',
+    '<(instr_action)',
+    '--input-path=<(input_path)',
+    '--output-path=<(output_path)',
+    '--stamp=<(stamp_path)',
+    '<@(extra_instr_args)',
+  ]
+}
diff --git a/build/android/java_cpp_enum.gypi b/build/android/java_cpp_enum.gypi
new file mode 100644
index 0000000..3d6b326
--- /dev/null
+++ b/build/android/java_cpp_enum.gypi
@@ -0,0 +1,59 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide an action
+# to generate Java source files from a C++ header file containing annotated
+# enum definitions using a Python script.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'bitmap_format_java',
+#    'type': 'none',
+#    'variables': {
+#      'source_file': 'ui/android/bitmap_format.h',
+#    },
+#    'includes': [ '../build/android/java_cpp_enum.gypi' ],
+#  },
+#
+# Then have the gyp target which compiles the java code depend on the newly
+# created target.
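+#
+# An annotated enum in the header might look like this (illustrative; see
+# build/android/gyp/java_cpp_enum.py for the exact annotation syntax):
+#
+#   // A Java counterpart will be generated for this enum.
+#   // GENERATED_JAVA_ENUM_PACKAGE: org.chromium.ui
+#   enum BitmapFormat {
+#     BITMAP_FORMAT_NO_CONFIG,
+#     BITMAP_FORMAT_ARGB_8888,
+#   };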
+
+{
+  'variables': {
+    # Location where all generated Java sources will be placed.
+    'output_dir': '<(SHARED_INTERMEDIATE_DIR)/enums/<(_target_name)',
+    'generator_path': '<(DEPTH)/build/android/gyp/java_cpp_enum.py',
+    'generator_args': '--output_dir=<(output_dir) <(source_file)',
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      # Ensure that the output directory is used in the class path
+      # when building targets that depend on this one.
+      'generated_src_dirs': [
+        '<(output_dir)/',
+      ],
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'generate_java_constants',
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(generator_path)',
+        '<(source_file)',
+      ],
+      'outputs': [
+        # This is the main reason this is an action and not a rule. Gyp doesn't
+        # properly expand RULE_INPUT_PATH here and so it's impossible to
+        # calculate the list of outputs.
+        '<!@pymod_do_main(java_cpp_enum --print_output_only '
+            '<@(generator_args))',
+      ],
+      'action': [
+        'python', '<(generator_path)', '<@(generator_args)'
+      ],
+      'message': 'Generating Java from cpp header <(source_file)',
+    },
+  ],
+}
diff --git a/build/android/java_cpp_template.gypi b/build/android/java_cpp_template.gypi
new file mode 100644
index 0000000..fe4238a
--- /dev/null
+++ b/build/android/java_cpp_template.gypi
@@ -0,0 +1,78 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate Java source files from templates that are processed
+# through the host C pre-processor.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'android_net_java_constants',
+#    'type': 'none',
+#    'sources': [
+#      'net/android/NetError.template',
+#    ],
+#    'variables': {
+#      'package_name': 'org/chromium/net',
+#      'template_deps': ['net/base/certificate_mime_type_list.h'],
+#    },
+#    'includes': [ '../build/android/java_cpp_template.gypi' ],
+#  },
+#
+# The 'sources' entry should list only the template file. The template file
+# itself should use the 'ClassName.template' naming format, and will generate
+# 'gen/templates/<package-name>/ClassName.java'. The files which the template
+# depends on (and which it typically includes) should be listed in the
+# template_deps variable. Any change to them will force a rebuild of
+# the template, and hence of any source that depends on it.
+#
+
+{
+  # Location where all generated Java sources will be placed.
+  'variables': {
+    'include_path%': '<(DEPTH)',
+    'output_dir': '<(SHARED_INTERMEDIATE_DIR)/templates/<(package_name)',
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      # Ensure that the output directory is used in the class path
+      # when building targets that depend on this one.
+      'generated_src_dirs': [
+        '<(output_dir)/',
+      ],
+      # Ensure dependents are rebuilt when sources for this rule change.
+      'additional_input_paths': [
+        '<@(_sources)',
+        '<@(template_deps)',
+      ],
+    },
+  },
+  # Define a single rule that will be applied to each .template file
+  # listed in 'sources'.
+  'rules': [
+    {
+      'rule_name': 'generate_java_constants',
+      'extension': 'template',
+      # Set template_deps as additional dependencies.
+      'variables': {
+        'output_path': '<(output_dir)/<(RULE_INPUT_ROOT).java',
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+        '<@(template_deps)'
+      ],
+      'outputs': [
+        '<(output_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+        '--include-path=<(include_path)',
+        '--output=<(output_path)',
+        '--template=<(RULE_INPUT_PATH)',
+      ],
+      'message': 'Generating Java from cpp template <(RULE_INPUT_PATH)',
+    }
+  ],
+}
diff --git a/build/android/lighttpd_server.py b/build/android/lighttpd_server.py
new file mode 100755
index 0000000..a5195ac
--- /dev/null
+++ b/build/android/lighttpd_server.py
@@ -0,0 +1,256 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a convenient wrapper for spawning a test lighttpd instance.
+
+Usage:
+  lighttpd_server PATH_TO_DOC_ROOT
+"""
+
+import codecs
+import contextlib
+import httplib
+import os
+import random
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pylib import constants
+from pylib import pexpect
+
+class LighttpdServer(object):
+  """Wraps lighttpd server, providing robust startup.
+
+  Args:
+    document_root: Path to root of this server's hosted files.
+    port: TCP port on the _host_ machine that the server will listen on. If
+        omitted it will attempt to use 9000, or if unavailable it will find
+        a free port from 8001 - 8999.
+    lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+    base_config_path: If supplied this file will replace the built-in default
+        lighttpd config file.
+    extra_config_contents: If specified, this string will be appended to the
+        base config (default built-in, or from base_config_path).
+    config_path, error_log, access_log: Optional paths where the class should
+        place temporary files for this session.
+  """
+
+  def __init__(self, document_root, port=None,
+               lighttpd_path=None, lighttpd_module_path=None,
+               base_config_path=None, extra_config_contents=None,
+               config_path=None, error_log=None, access_log=None):
+    self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+    self.document_root = os.path.abspath(document_root)
+    self.fixed_port = port
+    self.port = port or constants.LIGHTTPD_DEFAULT_PORT
+    self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+    self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+    self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+    self.base_config_path = base_config_path
+    self.extra_config_contents = extra_config_contents
+    self.config_path = config_path or self._Mktmp('config')
+    self.error_log = error_log or self._Mktmp('error_log')
+    self.access_log = access_log or self._Mktmp('access_log')
+    self.pid_file = self._Mktmp('pid_file')
+    self.process = None
+
+  def _Mktmp(self, name):
+    return os.path.join(self.temp_dir, name)
+
+  @staticmethod
+  def _GetRandomPort():
+    # The test server port range is defined in constants.py.
+    return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST,
+                          constants.LIGHTTPD_RANDOM_PORT_LAST)
+
+  def StartupHttpServer(self):
+    """Starts up a http server with specified document root and port."""
+    # If we want a specific port, make sure no one else is listening on it.
+    if self.fixed_port:
+      self._KillProcessListeningOnPort(self.fixed_port)
+    while True:
+      if self.base_config_path:
+        # Read the config
+        with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+          config_contents = f.read()
+      else:
+        config_contents = self._GetDefaultBaseConfig()
+      if self.extra_config_contents:
+        config_contents += self.extra_config_contents
+      # Write out the config, filling in placeholders from the members of |self|
+      with codecs.open(self.config_path, 'w', 'utf-8') as f:
+        f.write(config_contents % self.__dict__)
+      if (not os.path.exists(self.lighttpd_path) or
+          not os.access(self.lighttpd_path, os.X_OK)):
+        raise EnvironmentError(
+            'Could not find lighttpd at %s.\n'
+            'It may need to be installed (e.g. sudo apt-get install lighttpd)'
+            % self.lighttpd_path)
+      self.process = pexpect.spawn(self.lighttpd_path,
+                                   ['-D', '-f', self.config_path,
+                                    '-m', self.lighttpd_module_path],
+                                   cwd=self.temp_dir)
+      client_error, server_error = self._TestServerConnection()
+      if not client_error:
+        assert int(open(self.pid_file, 'r').read()) == self.process.pid
+        break
+      self.process.close()
+
+      if self.fixed_port or 'in use' not in server_error:
+        print 'Client error:', client_error
+        print 'Server error:', server_error
+        return False
+      self.port = self._GetRandomPort()
+    return True
+
+  def ShutdownHttpServer(self):
+    """Shuts down our lighttpd processes."""
+    if self.process:
+      self.process.terminate()
+    shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+  def _TestServerConnection(self):
+    # Wait for server to start
+    server_msg = ''
+    for timeout in xrange(1, 5):
+      client_error = None
+      try:
+        with contextlib.closing(httplib.HTTPConnection(
+            '127.0.0.1', self.port, timeout=timeout)) as http:
+          http.set_debuglevel(timeout > 3)
+          http.request('HEAD', '/')
+          r = http.getresponse()
+          r.read()
+          if (r.status == 200 and r.reason == 'OK' and
+              r.getheader('Server') == self.server_tag):
+            return (None, server_msg)
+          client_error = ('Bad response: %s %s version %s\n  ' %
+                          (r.status, r.reason, r.version) +
+                          '\n  '.join([': '.join(h) for h in r.getheaders()]))
+      except (httplib.HTTPException, socket.error) as client_error:
+        pass  # Probably connected too quickly: try again
+      # Check for server startup error messages
+      ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'],
+                               timeout=timeout)
+      if ix == 2:  # stdout spew from the server
+        server_msg += self.process.match.group(0)
+      elif ix == 1:  # EOF -- server has quit so give up.
+        client_error = client_error or 'Server exited'
+        break
+    return (client_error or 'Timeout', server_msg)
+
+  @staticmethod
+  def _KillProcessListeningOnPort(port):
+    """Checks if there is a process listening on port number |port| and
+    terminates it if found.
+
+    Args:
+      port: Port number to check.
+    """
+    if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0:
+      # Give the process some time to terminate and check that it is gone.
+      time.sleep(2)
+      assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \
+          'Unable to kill process listening on port %d.' % port
+
+  @staticmethod
+  def _GetDefaultBaseConfig():
+    return """server.tag                  = "%(server_tag)s"
+server.modules              = ( "mod_access",
+                                "mod_accesslog",
+                                "mod_alias",
+                                "mod_cgi",
+                                "mod_rewrite" )
+
+# default document root required
+#server.document-root = "."
+
+# files to check for if .../ is requested
+index-file.names            = ( "index.php", "index.pl", "index.cgi",
+                                "index.html", "index.htm", "default.htm" )
+# mimetype mapping
+mimetype.assign             = (
+  ".gif"          =>      "image/gif",
+  ".jpg"          =>      "image/jpeg",
+  ".jpeg"         =>      "image/jpeg",
+  ".png"          =>      "image/png",
+  ".svg"          =>      "image/svg+xml",
+  ".css"          =>      "text/css",
+  ".html"         =>      "text/html",
+  ".htm"          =>      "text/html",
+  ".xhtml"        =>      "application/xhtml+xml",
+  ".xhtmlmp"      =>      "application/vnd.wap.xhtml+xml",
+  ".js"           =>      "application/x-javascript",
+  ".log"          =>      "text/plain",
+  ".conf"         =>      "text/plain",
+  ".text"         =>      "text/plain",
+  ".txt"          =>      "text/plain",
+  ".dtd"          =>      "text/xml",
+  ".xml"          =>      "text/xml",
+  ".manifest"     =>      "text/cache-manifest",
+ )
+
+# Use the "Content-Type" extended attribute to obtain mime type if possible
+mimetype.use-xattr          = "enable"
+
+##
+# which extensions should not be handle via static-file transfer
+#
+# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi
+static-file.exclude-extensions = ( ".php", ".pl", ".cgi" )
+
+server.bind = "127.0.0.1"
+server.port = %(port)s
+
+## virtual directory listings
+dir-listing.activate        = "enable"
+#dir-listing.encoding       = "iso-8859-2"
+#dir-listing.external-css   = "style/oldstyle.css"
+
+## enable debugging
+#debug.log-request-header   = "enable"
+#debug.log-response-header  = "enable"
+#debug.log-request-handling = "enable"
+#debug.log-file-not-found   = "enable"
+
+#### SSL engine
+#ssl.engine                 = "enable"
+#ssl.pemfile                = "server.pem"
+
+# Autogenerated test-specific config follows.
+
+cgi.assign = ( ".cgi"  => "/usr/bin/env",
+               ".pl"   => "/usr/bin/env",
+               ".asis" => "/bin/cat",
+               ".php"  => "/usr/bin/php-cgi" )
+
+server.errorlog = "%(error_log)s"
+accesslog.filename = "%(access_log)s"
+server.upload-dirs = ( "/tmp" )
+server.pid-file = "%(pid_file)s"
+server.document-root = "%(document_root)s"
+
+"""
+
+
+def main(argv):
+  server = LighttpdServer(*argv[1:])
+  try:
+    if server.StartupHttpServer():
+      raw_input('Server running at http://127.0.0.1:%s -'
+                ' press Enter to exit it.' % server.port)
+    else:
+      print 'Server exit code:', server.process.exitstatus
+  finally:
+    server.ShutdownHttpServer()
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/lint/suppress.py b/build/android/lint/suppress.py
new file mode 100755
index 0000000..52d7579
--- /dev/null
+++ b/build/android/lint/suppress.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Add all generated lint_result.xml files to suppressions.xml"""
+
+
+import collections
+import optparse
+import os
+import sys
+from xml.dom import minidom
+
+_BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(_BUILD_ANDROID_DIR)
+
+from pylib import constants
+
+
+_THIS_FILE = os.path.abspath(__file__)
+_CONFIG_PATH = os.path.join(os.path.dirname(_THIS_FILE), 'suppressions.xml')
+_DOC = (
+    '\nSTOP! It looks like you want to suppress some lint errors:\n'
+    '- Have you tried identifying the offending patch?\n'
+    '  Ask the author for a fix and/or revert the patch.\n'
+    '- It is preferred to add suppressions in the code instead of\n'
+    '  sweeping it under the rug here. See:\n\n'
+    '    http://developer.android.com/tools/debugging/improving-w-lint.html\n'
+    '\n'
+    'Still reading?\n'
+    '- You can edit this file manually to suppress an issue\n'
+    '  globally if it is not applicable to the project.\n'
+    '- You can also automatically add issues found so far in the\n'
+    '  build process by running:\n\n'
+    '    ' + os.path.relpath(_THIS_FILE, constants.DIR_SOURCE_ROOT) + '\n\n'
+    '  which will generate this file (Comments are not preserved).\n'
+    '  Note: PRODUCT_DIR will be substituted at run-time with actual\n'
+    '  directory path (e.g. out/Debug)\n'
+)
+
+
+_Issue = collections.namedtuple('Issue', ['severity', 'paths'])
+
+
+def _ParseConfigFile(config_path):
+  print 'Parsing %s' % config_path
+  issues_dict = {}
+  dom = minidom.parse(config_path)
+  for issue in dom.getElementsByTagName('issue'):
+    issue_id = issue.attributes['id'].value
+    severity = issue.getAttribute('severity')
+    paths = set(
+        [p.attributes['path'].value for p in
+         issue.getElementsByTagName('ignore')])
+    issues_dict[issue_id] = _Issue(severity, paths)
+  return issues_dict
+
+
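+# A lint result file consumed below contains entries of roughly this shape
+# (illustrative; only the attributes read by the parser are shown):
+#
+#   <issue id="NewApi" severity="Error">
+#     <location file="path/to/File.java"/>
+#   </issue>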
+def _ParseAndMergeResultFile(result_path, issues_dict):
+  print 'Parsing and merging %s' % result_path
+  dom = minidom.parse(result_path)
+  for issue in dom.getElementsByTagName('issue'):
+    issue_id = issue.attributes['id'].value
+    severity = issue.attributes['severity'].value
+    path = issue.getElementsByTagName('location')[0].attributes['file'].value
+    if issue_id not in issues_dict:
+      issues_dict[issue_id] = _Issue(severity, set())
+    issues_dict[issue_id].paths.add(path)
+
+
+def _WriteConfigFile(config_path, issues_dict):
+  new_dom = minidom.getDOMImplementation().createDocument(None, 'lint', None)
+  top_element = new_dom.documentElement
+  top_element.appendChild(new_dom.createComment(_DOC))
+  for issue_id in sorted(issues_dict.keys()):
+    severity = issues_dict[issue_id].severity
+    paths = issues_dict[issue_id].paths
+    issue = new_dom.createElement('issue')
+    issue.attributes['id'] = issue_id
+    if severity:
+      issue.attributes['severity'] = severity
+    if severity == 'ignore':
+      print 'Warning: [%s] is suppressed globally.' % issue_id
+    else:
+      for path in sorted(paths):
+        ignore = new_dom.createElement('ignore')
+        ignore.attributes['path'] = path
+        issue.appendChild(ignore)
+    top_element.appendChild(issue)
+
+  with open(config_path, 'w') as f:
+    f.write(new_dom.toprettyxml(indent='  ', encoding='utf-8'))
+  print 'Updated %s' % config_path
+
+
+def _Suppress(config_path, result_path):
+  issues_dict = _ParseConfigFile(config_path)
+  _ParseAndMergeResultFile(result_path, issues_dict)
+  _WriteConfigFile(config_path, issues_dict)
+
+
+def main():
+  parser = optparse.OptionParser(usage='%prog RESULT-FILE')
+  _, args = parser.parse_args()
+
+  if len(args) != 1 or not os.path.exists(args[0]):
+    parser.error('Must provide RESULT-FILE')
+
+  _Suppress(_CONFIG_PATH, args[0])
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/lint/suppressions.xml b/build/android/lint/suppressions.xml
new file mode 100644
index 0000000..d8422a7
--- /dev/null
+++ b/build/android/lint/suppressions.xml
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="utf-8"?>
+<lint>
+  <!--
+STOP! It looks like you want to suppress some lint errors:
+- Have you tried identifying the offending patch?
+  Ask the author for a fix and/or revert the patch.
+- It is preferred to add suppressions in the code instead of
+  sweeping it under the rug here. See:
+
+    http://developer.android.com/tools/debugging/improving-w-lint.html
+
+Still reading?
+- You can edit this file manually to suppress an issue
+  globally if it is not applicable to the project.
+- You can also automatically add issues found so far in the
+  build process by running:
+
+    build/android/lint/suppress.py
+
+  which will generate this file (Comments are not preserved).
+  Note: PRODUCT_DIR will be substituted at run-time with actual
+  directory path (e.g. out/Debug)
+-->
+  <issue id="AllowBackup">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="Assert" severity="ignore"/>
+  <issue id="CommitPrefEdits">
+    <ignore path="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/ticl/android2/channel/AndroidChannelPreferences.java"/>
+  </issue>
+  <issue id="DefaultLocale">
+    <ignore path="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/external/client/contrib/AndroidListenerState.java"/>
+  </issue>
+  <issue id="DrawAllocation">
+    <ignore path="content/public/android/java/src/org/chromium/content/browser/ContentViewRenderView.java"/>
+    <ignore path="content/public/android/java/src/org/chromium/content/browser/PopupZoomer.java"/>
+  </issue>
+  <issue id="ExportedContentProvider">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="HandlerLeak">
+    <ignore path="remoting/android/java/src/org/chromium/chromoting/TapGestureDetector.java"/>
+  </issue>
+  <issue id="MissingApplicationIcon" severity="ignore"/>
+  <issue id="MissingRegistered" severity="ignore"/>
+  <issue id="MissingVersion">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <!-- Disabling InlinedApi and NewApi is bad, but we have too many of these errors and nobody is fixing them. crbug.com/411461 -->
+  <issue id="InlinedApi" severity="ignore"/>
+  <issue id="NewApi" severity="ignore"/>
+  <issue id="OldTargetApi">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="Recycle" severity="ignore"/>
+  <issue id="Registered" severity="ignore"/>
+  <issue id="SdCardPath">
+    <ignore path="content/public/android/java/src/org/chromium/content/browser/MediaResourceGetter.java"/>
+  </issue>
+  <issue id="SetJavaScriptEnabled" severity="ignore"/>
+  <issue id="ViewConstructor" severity="ignore"/>
+  <issue id="WrongCall" severity="ignore"/>
+</lint>
diff --git a/build/android/lint_action.gypi b/build/android/lint_action.gypi
new file mode 100644
index 0000000..186d64c
--- /dev/null
+++ b/build/android/lint_action.gypi
@@ -0,0 +1,40 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule to
+# run lint on java/class files.
+
+{
+  'action_name': 'lint_<(_target_name)',
+  'message': 'Linting <(_target_name)',
+  'variables': {
+    'conditions': [
+      ['chromium_code != 0 and android_lint != 0 and never_lint == 0', {
+        'is_enabled': '--enable',
+      }, {
+        'is_enabled': '',
+      }]
+    ]
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/lint.py',
+    '<(DEPTH)/build/android/lint/suppressions.xml',
+    '<(DEPTH)/build/android/AndroidManifest.xml',
+    '<(lint_jar_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/lint.py',
+    '--lint-path=<(android_sdk_root)/tools/lint',
+    '--config-path=<(DEPTH)/build/android/lint/suppressions.xml',
+    '--processed-config-path=<(config_path)',
+    '--manifest-path=<(DEPTH)/build/android/AndroidManifest.xml',
+    '--result-path=<(result_path)',
+    '--product-dir=<(PRODUCT_DIR)',
+    '--src-dirs=>(src_dirs)',
+    '--jar-path=<(lint_jar_path)',
+    '--stamp=<(stamp_path)',
+    '<(is_enabled)',
+  ],
+}
diff --git a/build/android/native_app_dependencies.gypi b/build/android/native_app_dependencies.gypi
new file mode 100644
index 0000000..d9241cc
--- /dev/null
+++ b/build/android/native_app_dependencies.gypi
@@ -0,0 +1,62 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to strip and place dependent shared libraries required by a native binary in a
+# single folder that can later be pushed to the device.
+#
+# NOTE: consider packaging your binary as an apk instead of running it as a
+# raw native executable.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'target_that_depends_on_my_binary',
+#    'type': 'none',
+#    'dependencies': [
+#      'my_binary',
+#    ],
+#    'variables': {
+#      'native_binary': '<(PRODUCT_DIR)/my_binary',
+#      'output_dir': 'location to place binary and dependent libraries'
+#    },
+#    'includes': [ '../../build/android/native_app_dependencies.gypi' ],
+#  },
+#
+
+{
+  'copies': [
+    {
+      'destination': '<(output_dir)',
+      'files': [ '<(native_binary)' ],
+    }
+  ],
+  'conditions': [
+      ['component == "shared_library"', {
+        'dependencies': [
+          '<(DEPTH)/build/android/setup.gyp:copy_system_libraries',
+        ],
+        'variables': {
+          'intermediate_dir': '<(PRODUCT_DIR)/<(_target_name)',
+          'ordered_libraries_file': '<(intermediate_dir)/native_libraries.json',
+        },
+        'actions': [
+          {
+            'variables': {
+              'input_libraries': ['<(native_binary)'],
+            },
+            'includes': ['../../build/android/write_ordered_libraries.gypi'],
+          },
+          {
+            'action_name': 'stripping native libraries',
+            'variables': {
+              'stripped_libraries_dir%': '<(output_dir)',
+              'input_paths': ['<(native_binary)'],
+              'stamp': '<(intermediate_dir)/strip.stamp',
+            },
+            'includes': ['../../build/android/strip_native_libraries.gypi'],
+          },
+        ],
+      }],
+  ],
+}
diff --git a/build/android/pack_arm_relocations.gypi b/build/android/pack_arm_relocations.gypi
new file mode 100644
index 0000000..e982527
--- /dev/null
+++ b/build/android/pack_arm_relocations.gypi
@@ -0,0 +1,78 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# packs ARM relative relocations in Release builds of native libraries.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'actions': [
+#      {
+#        'action_name': 'pack_arm_relocations',
+#        'variables': {
+#          'enable_packing': 'pack relocations if 1, plain file copy if 0',
+#          'exclude_packing_list': 'names of libraries explicitly not packed',
+#          'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#          'input_paths': 'files to be added to the list of inputs',
+#          'stamp': 'file to touch when the action is complete',
+#          'stripped_libraries_dir': 'directory holding stripped libraries',
+#          'packed_libraries_dir': 'directory holding packed libraries',
+#        },
+#        'includes': [ '../../build/android/pack_arm_relocations.gypi' ],
+#      },
+#    ],
+#  },
+#
+
+{
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/pack_arm_relocations.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'conditions': [
+    ['enable_packing == 1', {
+      'message': 'Packing ARM relative relocations for <(_target_name)',
+      'dependencies': [
+        '<(DEPTH)/tools/relocation_packer/relocation_packer.gyp:relocation_packer#host',
+      ],
+      'inputs': [
+        '<(PRODUCT_DIR)/relocation_packer',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/pack_arm_relocations.py',
+        '--configuration-name=<(CONFIGURATION_NAME)',
+        '--enable-packing=1',
+        '--exclude-packing-list=<@(exclude_packing_list)',
+        '--android-pack-relocations=<(PRODUCT_DIR)/relocation_packer',
+        '--android-objcopy=<(android_objcopy)',
+        '--stripped-libraries-dir=<(stripped_libraries_dir)',
+        '--packed-libraries-dir=<(packed_libraries_dir)',
+        '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+        '--stamp=<(stamp)',
+      ],
+    }, {
+      'message': 'Copying libraries (no relocation packing) for <(_target_name)',
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/pack_arm_relocations.py',
+        '--configuration-name=<(CONFIGURATION_NAME)',
+        '--enable-packing=0',
+        '--stripped-libraries-dir=<(stripped_libraries_dir)',
+        '--packed-libraries-dir=<(packed_libraries_dir)',
+        '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+        '--stamp=<(stamp)',
+      ],
+    }],
+    ['component == "shared_library"', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+}
diff --git a/build/android/provision_devices.py b/build/android/provision_devices.py
new file mode 100755
index 0000000..54c90c3
--- /dev/null
+++ b/build/android/provision_devices.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provisions Android devices with settings required for bots.
+
+Usage:
+  ./provision_devices.py [-d <device serial number>]
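+
+  Example (illustrative; the serial below is a placeholder, the flags are
+  the options this script defines):
+  ./provision_devices.py -d 0123456789abcdef -t Release --skip-wipe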
+"""
+
+import logging
+import optparse
+import os
+import re
+import subprocess
+import sys
+import time
+
+from pylib import android_commands
+from pylib import constants
+from pylib import device_settings
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import run_tests_helper
+
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT,
+                             'third_party', 'android_testrunner'))
+import errors
+
+def KillHostHeartbeat():
+  ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
+  stdout, _ = ps.communicate()
+  matches = re.findall('\\n.*host_heartbeat.*', stdout)
+  for match in matches:
+    logging.info('An instance of host_heartbeat is running; killing it.')
+    pid = re.findall(r'(\S+)', match)[1]
+    subprocess.call(['kill', str(pid)])
+
+
+def LaunchHostHeartbeat():
+  # Kill if existing host_heartbeat
+  KillHostHeartbeat()
+  # Launch a new host_heartbeat
+  logging.info('Spawning host heartbeat...')
+  subprocess.Popen([os.path.join(constants.DIR_SOURCE_ROOT,
+                                 'build/android/host_heartbeat.py')])
+
+
+def PushAndLaunchAdbReboot(device, target):
+  """Pushes and launches the adb_reboot binary on the device.
+
+  Arguments:
+    device: The DeviceUtils instance for the device to which the adb_reboot
+            binary should be pushed.
+    target: The build target (e.g. Debug or Release) used to locate the
+            adb_reboot binary.
+  """
+  logging.info('Will push and launch adb_reboot on %s', str(device))
+  # Kill if adb_reboot is already running.
+  try:
+    # Don't try to kill adb_reboot more than once. We don't expect it to be
+    # running at all.
+    device.KillAll('adb_reboot', blocking=True, timeout=2, retries=0)
+  except device_errors.CommandFailedError:
+    # We can safely ignore the exception because we don't expect adb_reboot
+    # to be running.
+    pass
+  # Push adb_reboot
+  logging.info('  Pushing adb_reboot ...')
+  adb_reboot = os.path.join(constants.DIR_SOURCE_ROOT,
+                            'out/%s/adb_reboot' % target)
+  device.PushChangedFiles(adb_reboot, '/data/local/tmp/')
+  # Launch adb_reboot
+  logging.info('  Launching adb_reboot ...')
+  device.old_interface.GetAndroidToolStatusAndOutput(
+      '/data/local/tmp/adb_reboot')
+
+
+def _ConfigureLocalProperties(device, is_perf):
+  """Set standard readonly testing device properties prior to reboot."""
+  local_props = [
+      'persist.sys.usb.config=adb',
+      'ro.monkey=1',
+      'ro.test_harness=1',
+      'ro.audio.silent=1',
+      'ro.setupwizard.mode=DISABLED',
+      ]
+  if not is_perf:
+    local_props.append('%s=all' % android_commands.JAVA_ASSERT_PROPERTY)
+    local_props.append('debug.checkjni=1')
+  try:
+    device.WriteFile(
+        constants.DEVICE_LOCAL_PROPERTIES_PATH,
+        '\n'.join(local_props), as_root=True)
+    # Android will not respect the local props file if it is world writable.
+    device.RunShellCommand(
+        'chmod 644 %s' % constants.DEVICE_LOCAL_PROPERTIES_PATH,
+        as_root=True)
+  except device_errors.CommandFailedError as e:
+    logging.warning(str(e))
+
+
+def WipeDeviceData(device):
+  """Wipes data from device, keeping only the adb_keys for authorization.
+
+  After wiping data on a device that has been authorized, adb can still
+  communicate with the device, but after reboot the device will need to be
+  re-authorized because the adb keys file is stored in /data/misc/adb/.
+  Thus, adb_keys file is rewritten so the device does not need to be
+  re-authorized.
+
+  Arguments:
+    device: the device to wipe
+  """
+  device_authorized = device.FileExists(constants.ADB_KEYS_FILE)
+  if device_authorized:
+    adb_keys = device.RunShellCommand('cat %s' % constants.ADB_KEYS_FILE,
+                                      as_root=True)
+  device.RunShellCommand('wipe data', as_root=True)
+  if device_authorized:
+    path_list = constants.ADB_KEYS_FILE.split('/')
+    dir_path = '/'.join(path_list[:-1])
+    device.RunShellCommand('mkdir -p %s' % dir_path, as_root=True)
+    device.RunShellCommand('restorecon %s' % dir_path, as_root=True)
+    device.RunShellCommand('echo %s > %s' %
+                           (adb_keys[0], constants.ADB_KEYS_FILE), as_root=True)
+    for adb_key in adb_keys[1:]:
+      device.RunShellCommand(
+        'echo %s >> %s' % (adb_key, constants.ADB_KEYS_FILE), as_root=True)
+    device.RunShellCommand('restorecon %s' % constants.ADB_KEYS_FILE,
+                           as_root=True)
+
+
+def WipeDeviceIfPossible(device):
+  try:
+    device.EnableRoot()
+    WipeDeviceData(device)
+    # TODO(jbudorick): Tune the timeout per OS version.
+    device.Reboot(True, timeout=600, retries=0)
+  except (errors.DeviceUnresponsiveError, device_errors.CommandFailedError):
+    pass
+
+
+def ProvisionDevice(device, options, is_perf):
+  try:
+    if not options.skip_wipe:
+      WipeDeviceIfPossible(device)
+    try:
+      device.EnableRoot()
+    except device_errors.CommandFailedError as e:
+      logging.warning(str(e))
+    _ConfigureLocalProperties(device, is_perf)
+    device_settings.ConfigureContentSettings(
+        device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)
+    if options.disable_location:
+      device_settings.ConfigureContentSettings(
+          device, device_settings.DISABLE_LOCATION_SETTINGS)
+    else:
+      device_settings.ConfigureContentSettings(
+          device, device_settings.ENABLE_LOCATION_SETTINGS)
+    device_settings.SetLockScreenSettings(device)
+    if is_perf:
+      # TODO(tonyg): We eventually want network on. However, currently radios
+      # can cause perfbots to drain faster than they charge.
+      device_settings.ConfigureContentSettings(
+          device, device_settings.NETWORK_DISABLED_SETTINGS)
+      # Some perf bots run benchmarks with USB charging disabled which leads
+      # to gradual draining of the battery. We must wait for a full charge
+      # before starting a run in order to keep the devices online.
+      try:
+        battery_info = device.old_interface.GetBatteryInfo()
+      except Exception as e:
+        battery_info = {}
+        logging.error('Unable to obtain battery info for %s, %s',
+                      str(device), e)
+
+      while int(battery_info.get('level', 100)) < 95:
+        if not device.old_interface.IsDeviceCharging():
+          if device.old_interface.CanControlUsbCharging():
+            device.old_interface.EnableUsbCharging()
+          else:
+            logging.error('Device is not charging')
+            break
+        logging.info('Waiting for device to charge. Current level=%s',
+                     battery_info.get('level', 0))
+        time.sleep(60)
+        battery_info = device.old_interface.GetBatteryInfo()
+    device.RunShellCommand('date -u %f' % time.time(), as_root=True)
+    # TODO(jbudorick): Tune the timeout per OS version.
+    device.Reboot(True, timeout=600, retries=0)
+    props = device.RunShellCommand('getprop')
+    for prop in props:
+      logging.info('  %s' % prop)
+    if options.auto_reconnect:
+      PushAndLaunchAdbReboot(device, options.target)
+  except (errors.WaitForResponseTimedOutError,
+          device_errors.CommandTimeoutError):
+    logging.info('Timed out waiting for device %s. Adding to blacklist.',
+                 str(device))
+    # Device black list is reset by bb_device_status_check.py per build.
+    device_blacklist.ExtendBlacklist([str(device)])
+  except device_errors.CommandFailedError:
+    logging.info('Failed to provision device %s. Adding to blacklist.',
+                 str(device))
+    device_blacklist.ExtendBlacklist([str(device)])
+
+
+def ProvisionDevices(options):
+  is_perf = 'perf' in os.environ.get('BUILDBOT_BUILDERNAME', '').lower()
+  if options.device is not None:
+    devices = [options.device]
+  else:
+    devices = android_commands.GetAttachedDevices()
+
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  parallel_devices.pMap(ProvisionDevice, options, is_perf)
+  if options.auto_reconnect:
+    LaunchHostHeartbeat()
+  blacklist = device_blacklist.ReadBlacklist()
+  if all(d in blacklist for d in devices):
+    raise device_errors.NoDevicesError
+  return 0
+
+
+def main(argv):
+  custom_handler = logging.StreamHandler(sys.stdout)
+  custom_handler.setFormatter(run_tests_helper.CustomFormatter())
+  logging.getLogger().addHandler(custom_handler)
+  logging.getLogger().setLevel(logging.INFO)
+
+  parser = optparse.OptionParser()
+  parser.add_option('--skip-wipe', action='store_true', default=False,
+                    help="Don't wipe device data during provisioning.")
+  parser.add_option('--disable-location', action='store_true', default=False,
+                    help="Disallow Google location services on devices.")
+  parser.add_option('-d', '--device',
+                    help='The serial number of the device to be provisioned')
+  parser.add_option('-t', '--target', default='Debug', help='The build target')
+  parser.add_option(
+      '-r', '--auto-reconnect', action='store_true',
+      help='Push binary which will reboot the device on adb disconnections.')
+  options, args = parser.parse_args(argv[1:])
+  constants.SetBuildType(options.target)
+
+  if args:
+    print >> sys.stderr, 'Unused args %s' % args
+    return 1
+
+  return ProvisionDevices(options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/push_libraries.gypi b/build/android/push_libraries.gypi
new file mode 100644
index 0000000..d74fb21
--- /dev/null
+++ b/build/android/push_libraries.gypi
@@ -0,0 +1,48 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# pushes stripped shared libraries to the attached Android device. This should
+# only be used with the gyp_managed_install flag set.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'actions': [
+#      {
+#        'variables': {
+#          'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#          'strip_stamp': 'stamp from strip action to block on',
+#          'libraries_source_dir': 'location where stripped libraries are stored',
+#          'device_library_dir': 'location on the device to push libraries to',
+#          'push_stamp': 'file to touch when the action is complete',
+#          'configuration_name': 'the build CONFIGURATION_NAME',
+#        },
+#        'includes': [ '../../build/android/push_libraries.gypi' ],
+#      },
+#    ],
+#  },
+#
+
+{
+  'action_name': 'push_libraries_<(_target_name)',
+  'message': 'Pushing libraries to device for <(_target_name)',
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/util/md5_check.py',
+    '<(DEPTH)/build/android/gyp/push_libraries.py',
+    '<(strip_stamp)',
+    '<(strip_additional_stamp)',
+    '<(build_device_config_path)',
+  ],
+  'outputs': [
+    '<(push_stamp)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/push_libraries.py',
+    '--build-device-configuration=<(build_device_config_path)',
+    '--libraries-dir=<(libraries_source_dir)',
+    '--device-dir=<(device_library_dir)',
+    '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+    '--stamp=<(push_stamp)',
+    '--configuration-name=<(configuration_name)',
+  ],
+}
diff --git a/build/android/pylib/OWNERS b/build/android/pylib/OWNERS
new file mode 100644
index 0000000..3899fa3
--- /dev/null
+++ b/build/android/pylib/OWNERS
@@ -0,0 +1,4 @@
+frankf@chromium.org
+jbudorick@chromium.org
+navabi@chromium.org
+skyostil@chromium.org
diff --git a/build/android/pylib/__init__.py b/build/android/pylib/__init__.py
new file mode 100644
index 0000000..96196cf
--- /dev/null
+++ b/build/android/pylib/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/android_commands.py b/build/android/pylib/android_commands.py
new file mode 100644
index 0000000..ea86e6d
--- /dev/null
+++ b/build/android/pylib/android_commands.py
@@ -0,0 +1,1974 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to communicate with the device via the adb command.
+
+Assumes adb binary is currently on system path.
+"""
+# pylint: disable-all
+
+import collections
+import datetime
+import inspect
+import logging
+import os
+import random
+import re
+import shlex
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+
+import cmd_helper
+import constants
+import system_properties
+from utils import host_utils
+
+try:
+  from pylib import pexpect
+except ImportError:
+  pexpect = None
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'android_testrunner'))
+import adb_interface
+import am_instrument_parser
+import errors
+
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+
+# Pattern to search for the next whole line of pexpect output and capture it
+# into a match group. We can't use ^ and $ for line start and end with
+# pexpect; see http://www.noah.org/python/pexpect/#doc for the explanation.
+PEXPECT_LINE_RE = re.compile('\n([^\r]*)\r')
+
+# Set the adb shell prompt to be a unique marker that will [hopefully] not
+# appear at the start of any line of a command's output.
+SHELL_PROMPT = '~+~PQ\x17RS~+~'
+
+# Java properties file
+LOCAL_PROPERTIES_PATH = constants.DEVICE_LOCAL_PROPERTIES_PATH
+
+# Property in /data/local.prop that controls Java assertions.
+JAVA_ASSERT_PROPERTY = 'dalvik.vm.enableassertions'
+
+# Keycode "enum" suitable for passing to AndroidCommands.SendKey().
+KEYCODE_HOME = 3
+KEYCODE_BACK = 4
+KEYCODE_DPAD_UP = 19
+KEYCODE_DPAD_DOWN = 20
+KEYCODE_DPAD_RIGHT = 22
+KEYCODE_ENTER = 66
+KEYCODE_MENU = 82
+
+MD5SUM_DEVICE_FOLDER = constants.TEST_EXECUTABLE_DIR + '/md5sum/'
+MD5SUM_DEVICE_PATH = MD5SUM_DEVICE_FOLDER + 'md5sum_bin'
+
+PIE_WRAPPER_PATH = constants.TEST_EXECUTABLE_DIR + '/run_pie'
+
+CONTROL_USB_CHARGING_COMMANDS = [
+  {
+    # Nexus 4
+    'witness_file': '/sys/module/pm8921_charger/parameters/disabled',
+    'enable_command': 'echo 0 > /sys/module/pm8921_charger/parameters/disabled',
+    'disable_command':
+        'echo 1 > /sys/module/pm8921_charger/parameters/disabled',
+  },
+  {
+    # Nexus 5
+    # Setting the HIZ bit of the bq24192 causes the charger to actually ignore
+    # energy coming from USB. Setting the power_supply offline just updates the
+    # Android system to reflect that.
+    'witness_file': '/sys/kernel/debug/bq24192/INPUT_SRC_CONT',
+    'enable_command': (
+        'echo 0x4A > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'echo 1 > /sys/class/power_supply/usb/online'),
+    'disable_command': (
+        'echo 0xCA > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'chmod 644 /sys/class/power_supply/usb/online && '
+        'echo 0 > /sys/class/power_supply/usb/online'),
+  },
+]
+
+class DeviceTempFile(object):
+  def __init__(self, android_commands, prefix='temp_file', suffix=''):
+    """Find an unused temporary file path in the devices external directory.
+
+    When this object is closed, the file will be deleted on the device.
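+
+    Illustrative usage (|script_body| is a hypothetical string variable):
+      with DeviceTempFile(android_commands, suffix='.sh') as f:
+        android_commands.SetFileContents(f.name, script_body)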
+    """
+    self.android_commands = android_commands
+    while True:
+      # TODO(cjhopman): This could actually return the same file in multiple
+      # calls if the caller doesn't write to the files immediately. This is
+      # expected to never happen.
+      i = random.randint(0, 1000000)
+      self.name = '%s/%s-%d-%010d%s' % (
+          android_commands.GetExternalStorage(),
+          prefix, int(time.time()), i, suffix)
+      if not android_commands.FileExistsOnDevice(self.name):
+        break
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, type, value, traceback):
+    self.close()
+
+  def close(self):
+    self.android_commands.RunShellCommand('rm ' + self.name)
+
+
+def GetAVDs():
+  """Returns a list of AVDs."""
+  re_avd = re.compile('^[ ]+Name: ([a-zA-Z0-9_:.-]+)', re.MULTILINE)
+  avds = re_avd.findall(cmd_helper.GetCmdOutput(['android', 'list', 'avd']))
+  return avds
+
+def ResetBadDevices():
+  """Removes the blacklist that keeps track of bad devices for a current
+     build.
+  """
+  device_blacklist.ResetBlacklist()
+
+def ExtendBadDevices(devices):
+  """Adds devices to the blacklist that keeps track of bad devices for a
+     current build.
+
+  The devices listed in the bad devices file will not be returned by
+  GetAttachedDevices.
+
+  Args:
+    devices: list of bad devices to be added to the bad devices file.
+  """
+  device_blacklist.ExtendBlacklist(devices)
+
+
+def GetAttachedDevices(hardware=True, emulator=True, offline=False):
+  """Returns a list of attached, android devices and emulators.
+
+  If a preferred device has been set with ANDROID_SERIAL, it will be first in
+  the returned list. The arguments specify what devices to include in the list.
+
+  Example output:
+
+    * daemon not running. starting it now on port 5037 *
+    * daemon started successfully *
+    List of devices attached
+    027c10494100b4d7        device
+    emulator-5554   offline
+
+  Args:
+    hardware: Include attached actual devices that are online.
+    emulator: Include emulators (i.e. AVDs) currently on host.
+    offline: Include devices and emulators that are offline.
+
+  Returns: List of devices.
+  """
+  adb_devices_output = cmd_helper.GetCmdOutput([constants.GetAdbPath(),
+                                                'devices'])
+
+  re_device = re.compile('^([a-zA-Z0-9_:.-]+)\tdevice$', re.MULTILINE)
+  online_devices = re_device.findall(adb_devices_output)
+
+  re_device = re.compile('^(emulator-[0-9]+)\tdevice', re.MULTILINE)
+  emulator_devices = re_device.findall(adb_devices_output)
+
+  re_device = re.compile('^([a-zA-Z0-9_:.-]+)\toffline$', re.MULTILINE)
+  offline_devices = re_device.findall(adb_devices_output)
+
+  devices = []
+  # First determine list of online devices (e.g. hardware and/or emulator).
+  if hardware and emulator:
+    devices = online_devices
+  elif hardware:
+    devices = [device for device in online_devices
+               if device not in emulator_devices]
+  elif emulator:
+    devices = emulator_devices
+
+  # Now add offline devices if offline is true
+  if offline:
+    devices = devices + offline_devices
+
+  # Remove any devices in the blacklist.
+  blacklist = device_blacklist.ReadBlacklist()
+  if blacklist:
+    logging.info('Avoiding bad devices %s', ' '.join(blacklist))
+    devices = [device for device in devices if device not in blacklist]
+
+  preferred_device = os.environ.get('ANDROID_SERIAL')
+  if preferred_device in devices:
+    devices.remove(preferred_device)
+    devices.insert(0, preferred_device)
+  return devices
+
+
+def IsDeviceAttached(device):
+  """Return true if the device is attached and online."""
+  return device in GetAttachedDevices()
+
+
+def _GetFilesFromRecursiveLsOutput(path, ls_output, re_file, utc_offset=None):
+  """Gets a list of files from `ls` command output.
+
+  Python's os.walk isn't used because it doesn't work over adb shell.
+
+  Args:
+    path: The path to list.
+    ls_output: A list of lines returned by an `ls -lR` command.
+    re_file: A compiled regular expression which parses a line into named groups
+        consisting of at minimum "filename", "date", "time", "size" and
+        optionally "timezone".
+    utc_offset: A 5-character string of the form +HHMM or -HHMM, where HH is a
+        2-digit string giving the number of UTC offset hours, and MM is a
+        2-digit string giving the number of UTC offset minutes. If the input
+        utc_offset is None, will try to look for the value of "timezone" if it
+        is specified in re_file.
+
+  Returns:
+    A dict of {"name": (size, lastmod), ...} where:
+      name: The file name relative to |path|'s directory.
+      size: The file size in bytes (0 for directories).
+      lastmod: The file last modification date in UTC.
+  """
+  re_directory = re.compile('^%s/(?P<dir>[^:]+):$' % re.escape(path))
+  path_dir = os.path.dirname(path)
+
+  current_dir = ''
+  files = {}
+  for line in ls_output:
+    directory_match = re_directory.match(line)
+    if directory_match:
+      current_dir = directory_match.group('dir')
+      continue
+    file_match = re_file.match(line)
+    if file_match:
+      filename = os.path.join(current_dir, file_match.group('filename'))
+      if filename.startswith(path_dir):
+        filename = filename[len(path_dir) + 1:]
+      lastmod = datetime.datetime.strptime(
+          file_match.group('date') + ' ' + file_match.group('time')[:5],
+          '%Y-%m-%d %H:%M')
+      if not utc_offset and 'timezone' in re_file.groupindex:
+        utc_offset = file_match.group('timezone')
+      if isinstance(utc_offset, str) and len(utc_offset) == 5:
+        utc_delta = datetime.timedelta(hours=int(utc_offset[1:3]),
+                                       minutes=int(utc_offset[3:5]))
+        if utc_offset[0:1] == '-':
+          utc_delta = -utc_delta
+        lastmod -= utc_delta
+      files[filename] = (int(file_match.group('size')), lastmod)
+  return files
+
+
+def _ParseMd5SumOutput(md5sum_output):
+  """Returns a list of tuples from the provided md5sum output.
+
+  Args:
+    md5sum_output: output directly from md5sum binary.
+
+  Returns:
+    List of namedtuples with attributes |hash| and |path|, where |path| is the
+    absolute path to the file with an Md5Sum of |hash|.
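+
+    Example (illustrative; fields are separated by two spaces):
+      'd41d8cd98f00b204e9800998ecf8427e  /sdcard/foo'
+          -> HashAndPath(hash='d41d8cd98f00b204e9800998ecf8427e',
+                         path='/sdcard/foo')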
+  """
+  HashAndPath = collections.namedtuple('HashAndPath', ['hash', 'path'])
+  split_lines = [line.split('  ') for line in md5sum_output]
+  return [HashAndPath._make(s) for s in split_lines if len(s) == 2]
+
+
+def _HasAdbPushSucceeded(command_output):
+  """Returns whether adb push has succeeded from the provided output."""
+  # TODO(frankf): We should look at the return code instead of the command
+  # output for many of the commands in this file.
+  if not command_output:
+    return True
+  # Success looks like this: "3035 KB/s (12512056 bytes in 4.025s)"
+  # Errors look like this: "failed to copy  ... "
+  if not re.search('^[0-9]', command_output.splitlines()[-1]):
+    logging.critical('PUSH FAILED: ' + command_output)
+    return False
+  return True
+
+
+def GetLogTimestamp(log_line, year):
+  """Returns the timestamp of the given |log_line| in the given year."""
+  try:
+    return datetime.datetime.strptime('%s-%s' % (year, log_line[:18]),
+                                      '%Y-%m-%d %H:%M:%S.%f')
+  except (ValueError, IndexError):
+    logging.critical('Error reading timestamp from ' + log_line)
+    return None
+
+
+class AndroidCommands(object):
+  """Helper class for communicating with Android device via adb."""
+
+  def __init__(self, device=None):
+    """Constructor.
+
+    Args:
+      device: If given, adb commands are only sent to the device with this ID.
+          Otherwise commands are sent to all attached devices.
+    """
+    adb_dir = os.path.dirname(constants.GetAdbPath())
+    if adb_dir and adb_dir not in os.environ['PATH'].split(os.pathsep):
+      # Required by third_party/android_testrunner to call directly 'adb'.
+      os.environ['PATH'] += os.pathsep + adb_dir
+    self._adb = adb_interface.AdbInterface()
+    if device:
+      self._adb.SetTargetSerial(device)
+    self._device = device
+    self._logcat = None
+    self.logcat_process = None
+    self._logcat_tmpoutfile = None
+    self._pushed_files = []
+    self._device_utc_offset = None
+    self._potential_push_size = 0
+    self._actual_push_size = 0
+    self._external_storage = ''
+    self._util_wrapper = ''
+    self._system_properties = system_properties.SystemProperties(self.Adb())
+    self._push_if_needed_cache = {}
+    self._control_usb_charging_command = {
+        'command': None,
+        'cached': False,
+    }
+    self._protected_file_access_method_initialized = None
+    self._privileged_command_runner = None
+    self._pie_wrapper = None
+
+  @property
+  def system_properties(self):
+    return self._system_properties
+
+  def _LogShell(self, cmd):
+    """Logs the adb shell command."""
+    if self._device:
+      device_repr = self._device[-4:]
+    else:
+      device_repr = '????'
+    logging.info('[%s]> %s', device_repr, cmd)
+
+  def Adb(self):
+    """Returns our AdbInterface to avoid us wrapping all its methods."""
+    # TODO(tonyg): Goal should be to get rid of this method by making this API
+    # complete and alleviating the need.
+    return self._adb
+
+  def GetDevice(self):
+    """Returns the device serial."""
+    return self._device
+
+  def IsOnline(self):
+    """Checks whether the device is online.
+
+    Returns:
+      True if device is in 'device' mode, False otherwise.
+    """
+    # TODO(aurimas): revert to using adb get-state when android L adb is fixed.
+    #out = self._adb.SendCommand('get-state')
+    #return out.strip() == 'device'
+
+    out = self._adb.SendCommand('devices')
+    for line in out.split('\n'):
+      if self._device in line and 'device' in line:
+        return True
+    return False
+
+  def IsRootEnabled(self):
+    """Checks if root is enabled on the device."""
+    root_test_output = self.RunShellCommand('ls /root') or ['']
+    return 'Permission denied' not in root_test_output[0]
+
+  def EnableAdbRoot(self):
+    """Enables adb root on the device.
+
+    Returns:
+      True: if output from executing adb root was as expected.
+      False: otherwise.
+    """
+    if self.GetBuildType() == 'user':
+      logging.warning("Can't enable root in production builds with type user")
+      return False
+    else:
+      return_value = self._adb.EnableAdbRoot()
+      # EnableAdbRoot inserts a call for wait-for-device only when adb logcat
+      # output matches what is expected. Just to be safe add a call to
+      # wait-for-device.
+      self._adb.SendCommand('wait-for-device')
+      return return_value
+
+  def GetDeviceYear(self):
+    """Returns the year information of the date on device."""
+    return self.RunShellCommand('date +%Y')[0]
+
+  def GetExternalStorage(self):
+    if not self._external_storage:
+      self._external_storage = self.RunShellCommand('echo $EXTERNAL_STORAGE')[0]
+      if not self._external_storage:
+        raise device_errors.CommandFailedError(
+            ['shell', "'echo $EXTERNAL_STORAGE'"],
+            'Unable to find $EXTERNAL_STORAGE')
+    return self._external_storage
+
+  def WaitForDevicePm(self, timeout=120):
+    """Blocks until the device's package manager is available.
+
+    To work around http://b/5201039, we restart the shell and retry if the
+    package manager isn't back after 120 seconds.
+
+    Raises:
+      errors.WaitForResponseTimedOutError after max retries reached.
+    """
+    last_err = None
+    retries = 3
+    while retries:
+      try:
+        self._adb.WaitForDevicePm(wait_time=timeout)
+        return  # Success
+      except errors.WaitForResponseTimedOutError as e:
+        last_err = e
+        logging.warning('Restarting and retrying after timeout: %s', e)
+        retries -= 1
+        self.RestartShell()
+    raise last_err  # Only reached after max retries; re-raise the last error.
+
+  def RestartShell(self):
+    """Restarts the shell on the device. Does not block for it to return."""
+    self.RunShellCommand('stop')
+    self.RunShellCommand('start')
+
+  def Reboot(self, full_reboot=True):
+    """Reboots the device and waits for the package manager to return.
+
+    Args:
+      full_reboot: Whether to fully reboot the device or just restart the shell.
+    """
+    # TODO(torne): hive can't reboot the device either way without breaking the
+    # connection; work out if we can handle this better
+    if os.environ.get('USING_HIVE'):
+      logging.warning('Ignoring reboot request as we are on hive')
+      return
+    if full_reboot or not self.IsRootEnabled():
+      self._adb.SendCommand('reboot')
+      self._system_properties = system_properties.SystemProperties(self.Adb())
+      timeout = 300
+      retries = 1
+      # Wait for the device to disappear.
+      while retries < 10 and self.IsOnline():
+        time.sleep(1)
+        retries += 1
+    else:
+      self.RestartShell()
+      timeout = 120
+    # To run tests we need at least the package manager and the sd card (or
+    # other external storage) to be ready.
+    self.WaitForDevicePm(timeout)
+    self.WaitForSdCardReady(timeout)
+
+  def Shutdown(self):
+    """Shuts down the device."""
+    self._adb.SendCommand('reboot -p')
+    self._system_properties = system_properties.SystemProperties(self.Adb())
+
+  def Uninstall(self, package):
+    """Uninstalls the specified package from the device.
+
+    Args:
+      package: Name of the package to remove.
+
+    Returns:
+      A status string returned by adb uninstall
+    """
+    uninstall_command = 'uninstall %s' % package
+
+    self._LogShell(uninstall_command)
+    return self._adb.SendCommand(uninstall_command, timeout_time=60)
+
+  def Install(self, package_file_path, reinstall=False):
+    """Installs the specified package to the device.
+
+    Args:
+      package_file_path: Path to .apk file to install.
+      reinstall: Reinstall an existing apk, keeping the data.
+
+    Returns:
+      A status string returned by adb install
+    """
+    assert os.path.isfile(package_file_path), ('<%s> is not a file' %
+                                               package_file_path)
+
+    install_cmd = ['install']
+
+    if reinstall:
+      install_cmd.append('-r')
+
+    install_cmd.append(package_file_path)
+    install_cmd = ' '.join(install_cmd)
+
+    self._LogShell(install_cmd)
+    return self._adb.SendCommand(install_cmd,
+                                 timeout_time=2 * 60,
+                                 retry_count=0)
+
+  def ManagedInstall(self, apk_path, keep_data=False, package_name=None,
+                     reboots_on_timeout=2):
+    """Installs specified package and reboots device on timeouts.
+
+    If package_name is supplied, checks if the package is already installed and
+    doesn't reinstall if the apk md5sums match.
+
+    Args:
+      apk_path: Path to .apk file to install.
+      keep_data: Reinstalls instead of uninstalling first, preserving the
+        application data.
+      package_name: Package name (only needed if keep_data=False).
+      reboots_on_timeout: number of times to reboot if package manager is
+        frozen.
+    """
+    # Check if package is already installed and up to date.
+    if package_name:
+      installed_apk_path = self.GetApplicationPath(package_name)
+      if (installed_apk_path and
+          not self.GetFilesChanged(apk_path, installed_apk_path,
+                                   ignore_filenames=True)):
+        logging.info('Skipped install: identical %s APK already installed',
+                     package_name)
+        return
+    # Install.
+    reboots_left = reboots_on_timeout
+    while True:
+      try:
+        if not keep_data:
+          assert package_name
+          self.Uninstall(package_name)
+        install_status = self.Install(apk_path, reinstall=keep_data)
+        if 'Success' in install_status:
+          return
+        else:
+          raise Exception('Install failure: %s' % install_status)
+      except errors.WaitForResponseTimedOutError:
+        print '@@@STEP_WARNINGS@@@'
+        logging.info('Timeout on installing %s on device %s', apk_path,
+                     self._device)
+
+        if reboots_left <= 0:
+          raise Exception('Install timed out')
+
+        # Force a hard reboot on last attempt
+        self.Reboot(full_reboot=(reboots_left == 1))
+        reboots_left -= 1
+
+  def MakeSystemFolderWritable(self):
+    """Remounts the /system folder rw."""
+    out = self._adb.SendCommand('remount')
+    if out.strip() != 'remount succeeded':
+      raise errors.MsgException('Remount failed: %s' % out)
+
+  def RestartAdbdOnDevice(self):
+    logging.info('Restarting adbd on the device...')
+    with DeviceTempFile(self, suffix=".sh") as temp_script_file:
+      host_script_path = os.path.join(constants.DIR_SOURCE_ROOT,
+                                      'build',
+                                      'android',
+                                      'pylib',
+                                      'restart_adbd.sh')
+      self._adb.Push(host_script_path, temp_script_file.name)
+      self.RunShellCommand('. %s' % temp_script_file.name)
+      self._adb.SendCommand('wait-for-device')
+
+  def RestartAdbServer(self):
+    """Restart the adb server."""
+    ret = self.KillAdbServer()
+    if ret != 0:
+      raise errors.MsgException('KillAdbServer: %d' % ret)
+
+    ret = self.StartAdbServer()
+    if ret != 0:
+      raise errors.MsgException('StartAdbServer: %d' % ret)
+
+  @staticmethod
+  def KillAdbServer():
+    """Kill adb server."""
+    adb_cmd = [constants.GetAdbPath(), 'kill-server']
+    ret = cmd_helper.RunCmd(adb_cmd)
+    retry = 0
+    while retry < 3:
+      ret, _ = cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb'])
+      if ret != 0:
+        # pgrep didn't find adb, kill-server succeeded.
+        return 0
+      retry += 1
+      time.sleep(retry)
+    return ret
+
+  def StartAdbServer(self):
+    """Start adb server."""
+    adb_cmd = ['taskset', '-c', '0', constants.GetAdbPath(), 'start-server']
+    ret, _ = cmd_helper.GetCmdStatusAndOutput(adb_cmd)
+    retry = 0
+    while retry < 3:
+      ret, _ = cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb'])
+      if ret == 0:
+        # pgrep found adb, start-server succeeded.
+        # Waiting for device to reconnect before returning success.
+        self._adb.SendCommand('wait-for-device')
+        return 0
+      retry += 1
+      time.sleep(retry)
+    return ret
+
+  def WaitForSystemBootCompleted(self, wait_time):
+    """Waits for targeted system's boot_completed flag to be set.
+
+    Args:
+      wait_time: time in seconds to wait
+
+    Raises:
+      WaitForResponseTimedOutError if wait_time elapses and flag still not
+      set.
+    """
+    logging.info('Waiting for system boot completed...')
+    self._adb.SendCommand('wait-for-device')
+    # Now the device is there, but system not boot completed.
+    # Query the sys.boot_completed flag with a basic command
+    boot_completed = False
+    attempts = 0
+    wait_period = 5
+    while not boot_completed and (attempts * wait_period) < wait_time:
+      output = self.system_properties['sys.boot_completed']
+      output = output.strip()
+      if output == '1':
+        boot_completed = True
+      else:
+        # If 'error: xxx' returned when querying the flag, it means
+        # adb server lost the connection to the emulator, so restart the adb
+        # server.
+        if 'error:' in output:
+          self.RestartAdbServer()
+        time.sleep(wait_period)
+        attempts += 1
+    if not boot_completed:
+      raise errors.WaitForResponseTimedOutError(
+          'sys.boot_completed flag was not set after %s seconds' % wait_time)
+
+  def WaitForSdCardReady(self, timeout_time):
+    """Wait for the SD card ready before pushing data into it."""
+    logging.info('Waiting for SD card ready...')
+    sdcard_ready = False
+    attempts = 0
+    wait_period = 5
+    external_storage = self.GetExternalStorage()
+    while not sdcard_ready and attempts * wait_period < timeout_time:
+      output = self.RunShellCommand('ls ' + external_storage)
+      if output:
+        sdcard_ready = True
+      else:
+        time.sleep(wait_period)
+        attempts += 1
+    if not sdcard_ready:
+      raise errors.WaitForResponseTimedOutError(
+          'SD card not ready after %s seconds' % timeout_time)
+
+  def GetAndroidToolStatusAndOutput(self, command, lib_path=None, *args, **kw):
+    """Runs a native Android binary, wrapping the command as necessary.
+
+    This is a specialization of GetShellCommandStatusAndOutput, meant for
+    running tools/android/ binaries. It properly handles: (1) setting the
+    lib path (for component=shared_library), (2) using the PIE wrapper on ICS.
+    See crbug.com/373219 for more context.
+
+    Args:
+      command: String containing the command to send.
+      lib_path: (optional) path to the folder containing the dependent libs.
+      Other arguments are forwarded to GetShellCommandStatusAndOutput.
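+
+    Illustrative call (mirrors the md5sum usage elsewhere in this file):
+      status, output = android_commands.GetAndroidToolStatusAndOutput(
+          MD5SUM_DEVICE_PATH + ' /sdcard/foo', lib_path=MD5SUM_DEVICE_FOLDER)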
+    """
+    # The first time this command is run the device is inspected to check
+    # whether a wrapper for running PIE executable is needed (only Android ICS)
+    # or not. The result is cached, so the wrapper is pushed only once.
+    if self._pie_wrapper is None:
+      # None: did not check; '': did check and not needed; '/path': use /path.
+      self._pie_wrapper = ''
+      if self.GetBuildId().startswith('I'):  # Ixxxx = Android ICS.
+        run_pie_dist_path = os.path.join(constants.GetOutDirectory(), 'run_pie')
+        assert os.path.exists(run_pie_dist_path), 'Please build run_pie'
+        # The PIE loader must be pushed manually (i.e. no PushIfNeeded) because
+        # PushIfNeeded requires md5sum and md5sum requires the wrapper as well.
+        command = 'push %s %s' % (run_pie_dist_path, PIE_WRAPPER_PATH)
+        assert _HasAdbPushSucceeded(self._adb.SendCommand(command))
+        self._pie_wrapper = PIE_WRAPPER_PATH
+
+    if self._pie_wrapper:
+      command = '%s %s' % (self._pie_wrapper, command)
+    if lib_path:
+      command = 'LD_LIBRARY_PATH=%s %s' % (lib_path, command)
+    return self.GetShellCommandStatusAndOutput(command, *args, **kw)
+
+  # It is tempting to turn this function into a generator, however this is not
+  # possible without using a private (local) adb_shell instance (to ensure no
+  # other command interleaves usage of it), which would defeat the main aim of
+  # being able to reuse the adb shell instance across commands.
+  def RunShellCommand(self, command, timeout_time=20, log_result=False):
+    """Send a command to the adb shell and return the result.
+
+    Args:
+      command: String containing the shell command to send.
+      timeout_time: Number of seconds to wait for command to respond before
+        retrying, used by AdbInterface.SendShellCommand.
+      log_result: Boolean to indicate whether we should log the result of the
+                  shell command.
+
+    Returns:
+      list containing the lines of output received from running the command
+    """
+    self._LogShell(command)
+    if "'" in command:
+      command = command.replace('\'', '\'\\\'\'')
+    result = self._adb.SendShellCommand(
+        "'%s'" % command, timeout_time).splitlines()
+    # TODO(b.kelemen): we should really be able to drop the stderr of the
+    # command or raise an exception based on what the caller wants.
+    result = [l for l in result if not l.startswith('WARNING')]
+    if ['error: device not found'] == result:
+      raise errors.DeviceUnresponsiveError('device not found')
+    if log_result:
+      self._LogShell('\n'.join(result))
+    return result
+
+  def GetShellCommandStatusAndOutput(self, command, timeout_time=20,
+                                     log_result=False):
+    """See RunShellCommand() above.
+
+    Returns:
+      The tuple (exit code, list of output lines).
+    """
+    lines = self.RunShellCommand(
+        command + '; echo %$?', timeout_time, log_result)
+    last_line = lines[-1]
+    status_pos = last_line.rfind('%')
+    assert status_pos >= 0
+    status = int(last_line[status_pos + 1:])
+    if status_pos == 0:
+      lines = lines[:-1]
+    else:
+      lines = lines[:-1] + [last_line[:status_pos]]
+    return (status, lines)
+
+  def KillAll(self, process, signum=9, with_su=False):
+    """Android version of killall, connected via adb.
+
+    Args:
+      process: name of the process to kill off.
+      signum: signal to use, 9 (SIGKILL) by default.
+      with_su: whether or not to use su to kill the processes.
+
+    Returns:
+      the number of processes killed
+    """
+    pids = self.ExtractPid(process)
+    if pids:
+      cmd = 'kill -%d %s' % (signum, ' '.join(pids))
+      if with_su:
+        self.RunShellCommandWithSU(cmd)
+      else:
+        self.RunShellCommand(cmd)
+    return len(pids)
+
+  def KillAllBlocking(self, process, timeout_sec, signum=9, with_su=False):
+    """Blocking version of killall, connected via adb.
+
+    This waits until no process matching the corresponding name appears in ps'
+    output anymore.
+
+    Args:
+      process: name of the process to kill off
+      timeout_sec: the timeout in seconds
+      signum: same as |KillAll|
+      with_su: same as |KillAll|
+    Returns:
+      the number of processes killed
+    """
+    processes_killed = self.KillAll(process, signum=signum, with_su=with_su)
+    if processes_killed:
+      elapsed = 0
+      wait_period = 0.1
+      # Note that this doesn't take into account the time spent in ExtractPid().
+      while self.ExtractPid(process) and elapsed < timeout_sec:
+        time.sleep(wait_period)
+        elapsed += wait_period
+      if elapsed >= timeout_sec:
+        return processes_killed - len(self.ExtractPid(process))
+    return processes_killed
+
+  @staticmethod
+  def _GetActivityCommand(package, activity, wait_for_completion, action,
+                          category, data, extras, trace_file_name, force_stop,
+                          flags):
+    """Creates command to start |package|'s activity on the device.
+
+    Args - as for StartActivity
+
+    Returns:
+      the command to run on the target to start the activity
+    """
+    cmd = 'am start -a %s' % action
+    if force_stop:
+      cmd += ' -S'
+    if wait_for_completion:
+      cmd += ' -W'
+    if category:
+      cmd += ' -c %s' % category
+    if package and activity:
+      cmd += ' -n %s/%s' % (package, activity)
+    if data:
+      cmd += ' -d "%s"' % data
+    if extras:
+      for key in extras:
+        value = extras[key]
+        if isinstance(value, str):
+          cmd += ' --es'
+        elif isinstance(value, bool):
+          cmd += ' --ez'
+        elif isinstance(value, int):
+          cmd += ' --ei'
+        else:
+          raise NotImplementedError(
+              'Need to teach StartActivity how to pass %s extras' % type(value))
+        cmd += ' %s %s' % (key, value)
+    if trace_file_name:
+      cmd += ' --start-profiler ' + trace_file_name
+    if flags:
+      cmd += ' -f %s' % flags
+    return cmd
+
+  def StartActivity(self, package, activity, wait_for_completion=False,
+                    action='android.intent.action.VIEW',
+                    category=None, data=None,
+                    extras=None, trace_file_name=None,
+                    force_stop=False, flags=None):
+    """Starts |package|'s activity on the device.
+
+    Args:
+      package: Name of package to start (e.g. 'com.google.android.apps.chrome').
+      activity: Name of activity (e.g. '.Main' or
+        'com.google.android.apps.chrome.Main').
+      wait_for_completion: wait for the activity to finish launching (-W flag).
+      action: string (e.g. "android.intent.action.MAIN"). Default is VIEW.
+      category: string (e.g. "android.intent.category.HOME")
+      data: Data string to pass to activity (e.g. 'http://www.example.com/').
+      extras: Dict of extras to pass to activity. Each value's Python type
+        (str, bool or int) determines the intent extra type used (--es, --ez
+        or --ei).
+      trace_file_name: If used, turns on and saves the trace to this file name.
+      force_stop: force stop the target app before starting the activity (-S
+        flag).
+      flags: Intent launch flags to pass to the activity (-f flag).
+    Returns:
+      The output of the underlying command as a list of lines.
+    """
+    cmd = self._GetActivityCommand(package, activity, wait_for_completion,
+                                   action, category, data, extras,
+                                   trace_file_name, force_stop, flags)
+    return self.RunShellCommand(cmd)
+
+  def StartActivityTimed(self, package, activity, wait_for_completion=False,
+                         action='android.intent.action.VIEW',
+                         category=None, data=None,
+                         extras=None, trace_file_name=None,
+                         force_stop=False, flags=None):
+    """Starts |package|'s activity on the device, returning the start time
+
+    Args - as for StartActivity
+
+    Returns:
+      A tuple containing:
+        - the output of the underlying command as a list of lines, and
+        - a timestamp string for the time at which the activity started
+    """
+    cmd = self._GetActivityCommand(package, activity, wait_for_completion,
+                                   action, category, data, extras,
+                                   trace_file_name, force_stop, flags)
+    self.StartMonitoringLogcat()
+    out = self.RunShellCommand('log starting activity; ' + cmd)
+    activity_started_re = re.compile('.*starting activity.*')
+    m = self.WaitForLogMatch(activity_started_re, None)
+    assert m
+    start_line = m.group(0)
+    return (out, GetLogTimestamp(start_line, self.GetDeviceYear()))
+
+  def StartCrashUploadService(self, package):
+    # TODO(frankf): We really need a python wrapper around Intent
+    # to be shared with StartActivity/BroadcastIntent.
+    cmd = (
+      'am startservice -a %s.crash.ACTION_FIND_ALL -n '
+      '%s/%s.crash.MinidumpUploadService' %
+      (constants.PACKAGE_INFO['chrome'].package,
+       package,
+       constants.PACKAGE_INFO['chrome'].package))
+    am_output = self.RunShellCommandWithSU(cmd)
+    assert am_output and 'Starting' in am_output[-1], (
+        'Service failed to start: %s' % am_output)
+    time.sleep(15)
+
+  def BroadcastIntent(self, package, intent, *args):
+    """Send a broadcast intent.
+
+    Args:
+      package: Name of package containing the intent.
+      intent: Name of the intent.
+      args: Optional extra arguments for the intent.
+    """
+    cmd = 'am broadcast -a %s.%s %s' % (package, intent, ' '.join(args))
+    self.RunShellCommand(cmd)
+
+  def GoHome(self):
+    """Tell the device to return to the home screen. Blocks until completion."""
+    self.RunShellCommand('am start -W '
+        '-a android.intent.action.MAIN -c android.intent.category.HOME')
+
+  def CloseApplication(self, package):
+    """Attempt to close down the application, using increasing violence.
+
+    Args:
+      package: Name of the process to kill off, e.g.
+      com.google.android.apps.chrome
+    """
+    self.RunShellCommand('am force-stop ' + package)
+
+  def GetApplicationPath(self, package):
+    """Get the installed apk path on the device for the given package.
+
+    Args:
+      package: Name of the package.
+
+    Returns:
+      Path to the apk on the device if it exists, None otherwise.
+    """
+    pm_path_output = self.RunShellCommand('pm path ' + package)
+    # The path output contains anything if and only if the package
+    # exists.
+    if pm_path_output:
+      # pm_path_output is of the form: "package:/path/to/foo.apk"
+      return pm_path_output[0].split(':')[1]
+    else:
+      return None
+
+  def ClearApplicationState(self, package):
+    """Closes and clears all state for the given |package|."""
+    # Check that the package exists before clearing it. Necessary because
+    # calling pm clear on a package that doesn't exist may never return.
+    pm_path_output = self.RunShellCommand('pm path ' + package)
+    # The path output contains anything if and only if the package exists.
+    if pm_path_output:
+      self.RunShellCommand('pm clear ' + package)
+
+  def SendKeyEvent(self, keycode):
+    """Sends keycode to the device.
+
+    Args:
+      keycode: Numeric keycode to send (see "enum" at top of file).
+    """
+    self.RunShellCommand('input keyevent %d' % keycode)
+
+  def _RunMd5Sum(self, host_path, device_path):
+    """Gets the md5sum of a host path and device path.
+
+    Args:
+      host_path: Path (file or directory) on the host.
+      device_path: Path on the device.
+
+    Returns:
+      A tuple containing lists of the host and device md5sum results as
+      created by _ParseMd5SumOutput().
+    """
+    md5sum_dist_path = os.path.join(constants.GetOutDirectory(),
+                                    'md5sum_dist')
+    assert os.path.exists(md5sum_dist_path), 'Please build md5sum.'
+    md5sum_dist_mtime = os.stat(md5sum_dist_path).st_mtime
+    if (md5sum_dist_path not in self._push_if_needed_cache or
+        self._push_if_needed_cache[md5sum_dist_path] != md5sum_dist_mtime):
+      command = 'push %s %s' % (md5sum_dist_path, MD5SUM_DEVICE_FOLDER)
+      assert _HasAdbPushSucceeded(self._adb.SendCommand(command))
+      self._push_if_needed_cache[md5sum_dist_path] = md5sum_dist_mtime
+
+    (_, md5_device_output) = self.GetAndroidToolStatusAndOutput(
+        self._util_wrapper + ' ' + MD5SUM_DEVICE_PATH + ' ' + device_path,
+        lib_path=MD5SUM_DEVICE_FOLDER,
+        timeout_time=2 * 60)
+    device_hash_tuples = _ParseMd5SumOutput(md5_device_output)
+    assert os.path.exists(host_path), 'Local path not found %s' % host_path
+    md5sum_output = cmd_helper.GetCmdOutput(
+        [os.path.join(constants.GetOutDirectory(), 'md5sum_bin_host'),
+         host_path])
+    host_hash_tuples = _ParseMd5SumOutput(md5sum_output.splitlines())
+    return (host_hash_tuples, device_hash_tuples)
+
+  def GetFilesChanged(self, host_path, device_path, ignore_filenames=False):
+    """Compares the md5sum of a host path against a device path.
+
+    Note: Ignores extra files on the device.
+
+    Args:
+      host_path: Path (file or directory) on the host.
+      device_path: Path on the device.
+      ignore_filenames: If True only the file contents are considered when
+          checking whether a file has changed, otherwise the relative path
+          must also match.
+
+    Returns:
+      A list of tuples of the form (host_path, device_path) for files whose
+      md5sums do not match.
+    """
+
+    # Md5Sum resolves symbolic links in path names so the calculation of
+    # relative path names from its output will need the real path names of the
+    # base directories. Having calculated these they are used throughout the
+    # function since this makes us less subject to any future changes to Md5Sum.
+    real_host_path = os.path.realpath(host_path)
+    real_device_path = self.RunShellCommand('realpath "%s"' % device_path)[0]
+
+    host_hash_tuples, device_hash_tuples = self._RunMd5Sum(
+        real_host_path, real_device_path)
+
+    if len(host_hash_tuples) > len(device_hash_tuples):
+      logging.info('%s files do not exist on the device' %
+                   (len(host_hash_tuples) - len(device_hash_tuples)))
+
+    host_rel = [(os.path.relpath(os.path.normpath(t.path), real_host_path),
+                 t.hash)
+                for t in host_hash_tuples]
+
+    if os.path.isdir(real_host_path):
+      def RelToRealPaths(rel_path):
+        return (os.path.join(real_host_path, rel_path),
+                os.path.join(real_device_path, rel_path))
+    else:
+      assert len(host_rel) == 1
+      def RelToRealPaths(_):
+        return (real_host_path, real_device_path)
+
+    if ignore_filenames:
+      # If we are ignoring file names, then we want to push any file for which
+      # a file with an equivalent MD5 sum does not exist on the device.
+      device_hashes = set([h.hash for h in device_hash_tuples])
+      ShouldPush = lambda p, h: h not in device_hashes
+    else:
+      # Otherwise, we want to push any file on the host for which a file with
+      # an equivalent MD5 sum does not exist at the same relative path on the
+      # device.
+      device_rel = dict([(os.path.relpath(os.path.normpath(t.path),
+                                          real_device_path),
+                          t.hash)
+                         for t in device_hash_tuples])
+      ShouldPush = lambda p, h: p not in device_rel or h != device_rel[p]
+
+    return [RelToRealPaths(path) for path, host_hash in host_rel
+            if ShouldPush(path, host_hash)]
+
+  def PushIfNeeded(self, host_path, device_path):
+    """Pushes |host_path| to |device_path|.
+
+    Works for files and directories. This method skips copying any files that
+    already exist on the device with the same hash.
+
+    All pushed files can be removed by calling RemovePushedFiles().
+    """
+    MAX_INDIVIDUAL_PUSHES = 50
+    if not os.path.exists(host_path):
+      raise device_errors.CommandFailedError(
+          'Local path not found %s' % host_path, device=str(self))
+
+    # See if the file on the host changed since the last push (if any) and
+    # return early if it didn't. Note that this shortcut assumes that the tests
+    # on the device don't modify the files.
+    if not os.path.isdir(host_path):
+      if host_path in self._push_if_needed_cache:
+        host_path_mtime = self._push_if_needed_cache[host_path]
+        if host_path_mtime == os.stat(host_path).st_mtime:
+          return
+
+    size = host_utils.GetRecursiveDiskUsage(host_path)
+    self._pushed_files.append(device_path)
+    self._potential_push_size += size
+
+    if os.path.isdir(host_path):
+      self.RunShellCommand('mkdir -p "%s"' % device_path)
+
+    changed_files = self.GetFilesChanged(host_path, device_path)
+    logging.info('Found %d files that need to be pushed to %s',
+        len(changed_files), device_path)
+    if not changed_files:
+      return
+
+    def Push(host, device):
+      # NOTE: We can't use adb_interface.Push() because it hardcodes a timeout
+      # of 60 seconds which isn't sufficient for a lot of users of this method.
+      push_command = 'push %s %s' % (host, device)
+      self._LogShell(push_command)
+
+      # Retry push with increasing backoff if the device is busy.
+      retry = 0
+      while True:
+        output = self._adb.SendCommand(push_command, timeout_time=30 * 60)
+        if _HasAdbPushSucceeded(output):
+          if not os.path.isdir(host_path):
+            self._push_if_needed_cache[host] = os.stat(host).st_mtime
+          return
+        if retry < 3:
+          retry += 1
+          wait_time = 5 * retry
+          logging.error('Push failed, retrying in %d seconds: %s' %
+                        (wait_time, output))
+          time.sleep(wait_time)
+        else:
+          raise Exception('Push failed: %s' % output)
+
+    diff_size = 0
+    if len(changed_files) <= MAX_INDIVIDUAL_PUSHES:
+      diff_size = sum(host_utils.GetRecursiveDiskUsage(f[0])
+                      for f in changed_files)
+
+    # TODO(craigdh): Replace this educated guess with a heuristic that
+    # approximates the push time for each method.
+    if len(changed_files) > MAX_INDIVIDUAL_PUSHES or diff_size > 0.5 * size:
+      self._actual_push_size += size
+      Push(host_path, device_path)
+    else:
+      for f in changed_files:
+        Push(f[0], f[1])
+      self._actual_push_size += diff_size
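+
+    # Worked example of the heuristic above (hypothetical numbers): for a
+    # 40 MB host directory with 3 changed files totalling 1 MB, the files
+    # are pushed individually (3 <= MAX_INDIVIDUAL_PUSHES and 1 MB is under
+    # half of 40 MB); with 100 changed files, or with more than 20 MB
+    # changed, the whole directory is pushed in a single adb push instead.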
+
+  def GetPushSizeInfo(self):
+    """Get total size of pushes to the device done via PushIfNeeded()
+
+    Returns:
+      A tuple:
+        1. Total size of push requests to PushIfNeeded (MB)
+        2. Total size that was actually pushed (MB)
+    """
+    return (self._potential_push_size, self._actual_push_size)
+
+  def GetFileContents(self, filename, log_result=False):
+    """Gets contents from the file specified by |filename|."""
+    return self.RunShellCommand('cat "%s" 2>/dev/null' % filename,
+                                log_result=log_result)
+
+  def SetFileContents(self, filename, contents):
+    """Writes |contents| to the file specified by |filename|."""
+    with tempfile.NamedTemporaryFile() as f:
+      f.write(contents)
+      f.flush()
+      self._adb.Push(f.name, filename)
+
+  def RunShellCommandWithSU(self, command, timeout_time=20, log_result=False):
+    """Runs |command| in a shell on the device via 'su -c'."""
+    return self.RunShellCommand('su -c %s' % command, timeout_time, log_result)
+
+  def CanAccessProtectedFileContents(self):
+    """Returns True if Get/SetProtectedFileContents would work via "su" or adb
+    shell running as root.
+
+    Devices running user builds don't have adb root, but may provide "su" which
+    can be used for accessing protected files.
+    """
+    return self._GetProtectedFileCommandRunner() is not None
+
+  def _GetProtectedFileCommandRunner(self):
+    """Finds the best method to access protected files on the device.
+
+    Returns:
+      1. None when privileged files cannot be accessed on the device.
+      2. Otherwise: A function taking a single parameter: a string with command
+         line arguments. Running that function executes the command with
+         the appropriate method.
+    """
+    if self._protected_file_access_method_initialized:
+      return self._privileged_command_runner
+
+    self._privileged_command_runner = None
+    self._protected_file_access_method_initialized = True
+
+    for cmd in [self.RunShellCommand, self.RunShellCommandWithSU]:
+      # Get the contents of the auxv vector for the init(8) process from a
+      # small binary file that always exists on Linux and is always
+      # read-protected.
+      contents = cmd('cat /proc/1/auxv')
+      # The leading 4 or 8 bytes of each auxv entry hold a_type. Since only a
+      # few a_type values are defined, byte 2 must always be '\0' in a
+      # realistic auxv. See /usr/include/elf.h.
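+      # Illustration (hypothetical 32-bit device): a first auxv entry of
+      # a_type AT_PAGESZ (6) is stored little-endian as '\x06\x00\x00\x00',
+      # so contents[0][2] == '\0' when the read succeeded; a failed or
+      # permission-denied read produces no such line.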
+      if len(contents) > 0 and (contents[0][2] == '\0'):
+        self._privileged_command_runner = cmd
+        break
+    return self._privileged_command_runner
+
+  def GetProtectedFileContents(self, filename):
+    """Gets contents from the protected file specified by |filename|.
+
+    This is potentially less efficient than GetFileContents.
+    """
+    command = 'cat "%s" 2> /dev/null' % filename
+    command_runner = self._GetProtectedFileCommandRunner()
+    if command_runner:
+      return command_runner(command)
+    else:
+      logging.warning('Could not access protected file: %s' % filename)
+      return []
+
+  def SetProtectedFileContents(self, filename, contents):
+    """Writes |contents| to the protected file specified by |filename|.
+
+    This is less efficient than SetFileContents.
+    """
+    with DeviceTempFile(self) as temp_file:
+      with DeviceTempFile(self, suffix=".sh") as temp_script:
+        # Put the contents in a temporary file
+        self.SetFileContents(temp_file.name, contents)
+        # Create a script to copy the file contents to its final destination
+        self.SetFileContents(temp_script.name,
+                             'cat %s > %s' % (temp_file.name, filename))
+
+        command = 'sh %s' % temp_script.name
+        command_runner = self._GetProtectedFileCommandRunner()
+        if command_runner:
+          return command_runner(command)
+        else:
+          logging.warning(
+              'Could not set contents of protected file: %s' % filename)
+
+
+  def RemovePushedFiles(self):
+    """Removes all files pushed with PushIfNeeded() from the device."""
+    for p in self._pushed_files:
+      self.RunShellCommand('rm -r %s' % p, timeout_time=2 * 60)
+
+  def ListPathContents(self, path):
+    """Lists files in all subdirectories of |path|.
+
+    Args:
+      path: The path to list.
+
+    Returns:
+      A dict of {"name": (size, lastmod), ...}.
+    """
+    # Example output:
+    # /foo/bar:
+    # -rw-r----- user group   102 2011-05-12 12:29:54.131623387 +0100 baz.txt
+    re_file = re.compile(r'^-(?P<perms>[^\s]+)\s+'
+                         r'(?P<user>[^\s]+)\s+'
+                         r'(?P<group>[^\s]+)\s+'
+                         r'(?P<size>[^\s]+)\s+'
+                         r'(?P<date>[^\s]+)\s+'
+                         r'(?P<time>[^\s]+)\s+'
+                         r'(?P<filename>[^\s]+)$')
+    return _GetFilesFromRecursiveLsOutput(
+        path, self.RunShellCommand('ls -lR %s' % path), re_file,
+        self.GetUtcOffset())
+
+  def GetUtcOffset(self):
+    if not self._device_utc_offset:
+      self._device_utc_offset = self.RunShellCommand('date +%z')[0]
+    return self._device_utc_offset
+
+  def SetJavaAssertsEnabled(self, enable):
+    """Sets or removes the device java assertions property.
+
+    Args:
+      enable: If True the property will be set.
+
+    Returns:
+      True if the file was modified (reboot is required for it to take effect).
+    """
+    # First ensure the desired property is persisted.
+    temp_props_file = tempfile.NamedTemporaryFile()
+    properties = ''
+    if self._adb.Pull(LOCAL_PROPERTIES_PATH, temp_props_file.name):
+      with open(temp_props_file.name) as f:
+        properties = f.read()
+    re_search = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+                           r'\s*=\s*all\s*$', re.MULTILINE)
+    if enable != bool(re.search(re_search, properties)):
+      re_replace = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+                              r'\s*=\s*\w+\s*$', re.MULTILINE)
+      properties = re.sub(re_replace, '', properties)
+      if enable:
+        properties += '\n%s=all\n' % JAVA_ASSERT_PROPERTY
+
+      with open(temp_props_file.name, 'w') as f:
+        f.write(properties)
+      self._adb.Push(temp_props_file.name, LOCAL_PROPERTIES_PATH)
+
+    # Next, check the current runtime value is what we need, and
+    # if not, set it and report that a reboot is required.
+    was_set = 'all' in self.system_properties[JAVA_ASSERT_PROPERTY]
+    if was_set == enable:
+      return False
+    self.system_properties[JAVA_ASSERT_PROPERTY] = 'all' if enable else ''
+    return True
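+
+  # Example of the property-file edit above (assuming JAVA_ASSERT_PROPERTY
+  # is a dalvik property such as 'dalvik.vm.enableassertions'): enabling
+  # when the property is absent appends 'dalvik.vm.enableassertions=all';
+  # disabling strips any existing '<property>=<value>' line and leaves the
+  # rest of the file untouched.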
+
+  def GetBuildId(self):
+    """Returns the build ID of the system (e.g. JRM79C)."""
+    build_id = self.system_properties['ro.build.id']
+    assert build_id
+    return build_id
+
+  def GetBuildType(self):
+    """Returns the build type of the system (e.g. eng)."""
+    build_type = self.system_properties['ro.build.type']
+    assert build_type
+    return build_type
+
+  def GetBuildProduct(self):
+    """Returns the build product of the device (e.g. maguro)."""
+    build_product = self.system_properties['ro.build.product']
+    assert build_product
+    return build_product
+
+  def GetProductName(self):
+    """Returns the product name of the device (e.g. takju)."""
+    name = self.system_properties['ro.product.name']
+    assert name
+    return name
+
+  def GetBuildFingerprint(self):
+    """Returns the build fingerprint of the device."""
+    build_fingerprint = self.system_properties['ro.build.fingerprint']
+    assert build_fingerprint
+    return build_fingerprint
+
+  def GetDescription(self):
+    """Returns the description of the system.
+
+    For example, "yakju-userdebug 4.1 JRN54F 364167 dev-keys".
+    """
+    description = self.system_properties['ro.build.description']
+    assert description
+    return description
+
+  def GetProductModel(self):
+    """Returns the name of the product model (e.g. "Galaxy Nexus") """
+    model = self.system_properties['ro.product.model']
+    assert model
+    return model
+
+  def GetWifiIP(self):
+    """Returns the wifi IP on the device."""
+    wifi_ip = self.system_properties['dhcp.wlan0.ipaddress']
+    # Do not assert here. Devices (e.g. emulators) may not have a WifiIP.
+    return wifi_ip
+
+  def GetSubscriberInfo(self):
+    """Returns the device subscriber info (e.g. GSM and device ID) as string."""
+    iphone_sub = self.RunShellCommand('dumpsys iphonesubinfo')
+    # Do not assert here. Devices (e.g. Nakasi on K) may not have iphonesubinfo.
+    return '\n'.join(iphone_sub)
+
+  def GetBatteryInfo(self):
+    """Returns a {str: str} dict of battery info (e.g. status, level, etc)."""
+    battery = self.RunShellCommand('dumpsys battery')
+    assert battery
+    battery_info = {}
+    for line in battery[1:]:
+      k, _, v = line.partition(': ')
+      battery_info[k.strip()] = v.strip()
+    return battery_info
+
+  def GetSetupWizardStatus(self):
+    """Returns the status of the device setup wizard (e.g. DISABLED)."""
+    status = self.system_properties['ro.setupwizard.mode']
+    # On some devices, the status is empty if not otherwise set. In such cases
+    # the caller should expect an empty string to be returned.
+    return status
+
+  def StartMonitoringLogcat(self, clear=True, logfile=None, filters=None):
+    """Starts monitoring the output of logcat, for use with WaitForLogMatch.
+
+    Args:
+      clear: If True the existing logcat output will be cleared, to avoid
+             matching historical output lurking in the log.
+      logfile: Optional file object to which the logcat output is also
+               written.
+      filters: A list of logcat filters to be used.
+    """
+    if clear:
+      self.RunShellCommand('logcat -c')
+    args = []
+    if self._adb._target_arg:
+      args += shlex.split(self._adb._target_arg)
+    args += ['logcat', '-v', 'threadtime']
+    if filters:
+      args.extend(filters)
+    else:
+      args.append('*:v')
+
+    if logfile:
+      logfile = NewLineNormalizer(logfile)
+
+    # Spawn logcat and synchronize with it.
+    for _ in range(4):
+      self._logcat = pexpect.spawn(constants.GetAdbPath(), args, timeout=10,
+                                   logfile=logfile)
+      if not clear or self.SyncLogCat():
+        break
+      self._logcat.close(force=True)
+    else:
+      logging.critical('Error reading from logcat: ' + str(self._logcat.match))
+      sys.exit(1)
+
+  def SyncLogCat(self):
+    """Synchronize with logcat.
+
+    Synchronize with the monitored logcat so that WaitForLogMatch will only
+    consider new messages that are received after this point in time.
+
+    Returns:
+      True if the synchronization succeeded.
+    """
+    assert self._logcat
+    tag = 'logcat_sync_%s' % time.time()
+    self.RunShellCommand('log ' + tag)
+    return self._logcat.expect([tag, pexpect.EOF, pexpect.TIMEOUT]) == 0
+
+  def GetMonitoredLogCat(self):
+    """Returns an "adb logcat" command as created by pexpected.spawn."""
+    if not self._logcat:
+      self.StartMonitoringLogcat(clear=False)
+    return self._logcat
+
+  def WaitForLogMatch(self, success_re, error_re, clear=False, timeout=10):
+    """Blocks until a matching line is logged or a timeout occurs.
+
+    Args:
+      success_re: A compiled re to search each line for.
+      error_re: A compiled re which, if found, terminates the search for
+          |success_re|. If None is given, no error condition will be detected.
+      clear: If True the existing logcat output will be cleared. Defaults to
+          False.
+      timeout: Timeout in seconds to wait for a log match.
+
+    Raises:
+      pexpect.TIMEOUT after |timeout| seconds without a match for |success_re|
+      or |error_re|.
+
+    Returns:
+      The re match object if |success_re| is matched first or None if |error_re|
+      is matched first.
+    """
+    logging.info('<<< Waiting for logcat:' + str(success_re.pattern))
+    t0 = time.time()
+    while True:
+      if not self._logcat:
+        self.StartMonitoringLogcat(clear)
+      try:
+        while True:
+          # Note this will block for up to the timeout _per log line_, so we
+          # need to calculate the overall timeout remaining since t0.
+          time_remaining = t0 + timeout - time.time()
+          if time_remaining < 0:
+            raise pexpect.TIMEOUT(self._logcat)
+          self._logcat.expect(PEXPECT_LINE_RE, timeout=time_remaining)
+          line = self._logcat.match.group(1)
+          if error_re:
+            error_match = error_re.search(line)
+            if error_match:
+              return None
+          success_match = success_re.search(line)
+          if success_match:
+            return success_match
+          logging.info('<<< Skipped Logcat Line:' + str(line))
+      except pexpect.TIMEOUT:
+        raise pexpect.TIMEOUT(
+            'Timeout (%ds) exceeded waiting for pattern "%s" (tip: use -vv '
+            'to debug)' %
+            (timeout, success_re.pattern))
+      except pexpect.EOF:
+        # It seems that sometimes logcat can end unexpectedly. This seems
+        # to happen during Chrome startup after a reboot followed by a cache
+        # clean. I don't understand why this happens, but this code deals with
+        # getting EOF in logcat.
+        logging.critical('Found EOF in adb logcat. Restarting...')
+        # Rerun spawn with original arguments. Note that self._logcat.args[0] is
+        # the path of adb, so we don't want it in the arguments.
+        self._logcat = pexpect.spawn(constants.GetAdbPath(),
+                                     self._logcat.args[1:],
+                                     timeout=self._logcat.timeout,
+                                     logfile=self._logcat.logfile)
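+
+  # Timing note for the loop above (hypothetical numbers): with timeout=10,
+  # each expect() call blocks for at most the time remaining since t0, so
+  # after 4 seconds spent skipping lines the next read gets a 6-second
+  # budget rather than a fresh 10 seconds.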
+
+  def StartRecordingLogcat(self, clear=True, filters=None):
+    """Starts recording logcat output to eventually be saved as a string.
+
+    This call should come before some series of tests are run, with either
+    StopRecordingLogcat or SearchLogcatRecord following the tests.
+
+    Args:
+      clear: True if existing log output should be cleared.
+      filters: A list of logcat filters to be used.
+    """
+    if not filters:
+      filters = ['*:v']
+    if clear:
+      self._adb.SendCommand('logcat -c')
+    logcat_command = 'adb %s logcat -v threadtime %s' % (self._adb._target_arg,
+                                                         ' '.join(filters))
+    self._logcat_tmpoutfile = tempfile.NamedTemporaryFile(bufsize=0)
+    self.logcat_process = subprocess.Popen(logcat_command, shell=True,
+                                           stdout=self._logcat_tmpoutfile)
+
+  def GetCurrentRecordedLogcat(self):
+    """Return the current content of the logcat being recorded.
+       Call this after StartRecordingLogcat() and before StopRecordingLogcat().
+       This can be useful to perform timed polling/parsing.
+    Returns:
+       Current logcat output as a single string, or None if
+       StopRecordingLogcat() was already called.
+    """
+    if not self._logcat_tmpoutfile:
+      return None
+
+    with open(self._logcat_tmpoutfile.name) as f:
+      return f.read()
+
+  def StopRecordingLogcat(self):
+    """Stops an existing logcat recording subprocess and returns output.
+
+    Returns:
+      The logcat output as a string or an empty string if logcat was not
+      being recorded at the time.
+    """
+    if not self.logcat_process:
+      return ''
+    # Cannot test poll() for truthiness directly, as 0 is a possible value.
+    # Read self.logcat_process.stdout before killing the process; otherwise
+    # communicate() may return incomplete output due to a broken pipe.
+    if self.logcat_process.poll() is None:
+      self.logcat_process.kill()
+    self.logcat_process.wait()
+    self.logcat_process = None
+    self._logcat_tmpoutfile.seek(0)
+    output = self._logcat_tmpoutfile.read()
+    self._logcat_tmpoutfile.close()
+    self._logcat_tmpoutfile = None
+    return output
+
+  @staticmethod
+  def SearchLogcatRecord(record, message, thread_id=None, proc_id=None,
+                         log_level=None, component=None):
+    """Searches the specified logcat output and returns results.
+
+    This method searches through the logcat output specified by record for a
+    certain message, narrowing results by matching them against any other
+    specified criteria.  It returns all matching lines as described below.
+
+    Args:
+      record: A string generated by Start/StopRecordingLogcat to search.
+      message: An output string to search for.
+      thread_id: The thread id that is the origin of the message.
+      proc_id: The process that is the origin of the message.
+      log_level: The log level of the message.
+      component: The name of the component that would create the message.
+
+    Returns:
+      A list of dictionaries representing matching entries, each containing
+      keys thread_id, proc_id, log_level, component, and message.
+    """
+    if thread_id:
+      thread_id = str(thread_id)
+    if proc_id:
+      proc_id = str(proc_id)
+    results = []
+    reg = re.compile(r'(\d+)\s+(\d+)\s+([A-Z])\s+([A-Za-z]+)\s*:(.*)$',
+                     re.MULTILINE)
+    log_list = reg.findall(record)
+    for (tid, pid, log_lev, comp, msg) in log_list:
+      if ((not thread_id or thread_id == tid) and
+          (not proc_id or proc_id == pid) and
+          (not log_level or log_level == log_lev) and
+          (not component or component == comp) and msg.find(message) > -1):
+        match = dict({'thread_id': tid, 'proc_id': pid,
+                      'log_level': log_lev, 'component': comp,
+                      'message': msg})
+        results.append(match)
+    return results
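+
+  # Example of the parse above, on a hypothetical threadtime line:
+  #   '05-12 12:29:54.131  1234  1300 I chromium: hello'
+  # findall yields ('1234', '1300', 'I', 'chromium', ' hello'), recorded as
+  # {'thread_id': '1234', 'proc_id': '1300', 'log_level': 'I',
+  #  'component': 'chromium', 'message': ' hello'}.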
+
+  def ExtractPid(self, process_name):
+    """Extracts Process Ids for a given process name from Android Shell.
+
+    Args:
+      process_name: name of the process on the device.
+
+    Returns:
+      List of all the process ids (as strings) that match the given name.
+      If the name of a process exactly matches the given name, the pid of
+      that process will be inserted at the front of the pid list.
+    """
+    pids = []
+    for line in self.RunShellCommand('ps', log_result=False):
+      data = line.split()
+      try:
+        if process_name in data[-1]:  # name is in the last column
+          if process_name == data[-1]:
+            pids.insert(0, data[1])  # PID is in the second column
+          else:
+            pids.append(data[1])
+      except IndexError:
+        pass
+    return pids
+
+  def GetIoStats(self):
+    """Gets cumulative disk IO stats since boot (for all processes).
+
+    Returns:
+      Dict of {num_reads, num_writes, read_ms, write_ms} or None if there
+      was an error.
+    """
+    IoStats = collections.namedtuple(
+        'IoStats',
+        ['device',
+         'num_reads_issued',
+         'num_reads_merged',
+         'num_sectors_read',
+         'ms_spent_reading',
+         'num_writes_completed',
+         'num_writes_merged',
+         'num_sectors_written',
+         'ms_spent_writing',
+         'num_ios_in_progress',
+         'ms_spent_doing_io',
+         'ms_spent_doing_io_weighted',
+        ])
+
+    for line in self.GetFileContents('/proc/diskstats', log_result=False):
+      fields = line.split()
+      stats = IoStats._make([fields[2]] + [int(f) for f in fields[3:]])
+      if stats.device == 'mmcblk0':
+        return {
+            'num_reads': stats.num_reads_issued,
+            'num_writes': stats.num_writes_completed,
+            'read_ms': stats.ms_spent_reading,
+            'write_ms': stats.ms_spent_writing,
+        }
+    logging.warning('Could not find disk IO stats.')
+    return None
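+
+  # Example /proc/diskstats line for the parse above (hypothetical values):
+  #   '179 0 mmcblk0 1754 870 49060 1860 90 15 1430 220 0 1080 2080'
+  # fields[2] is the device name and fields[3:] fill the IoStats tuple in
+  # order, so this line yields num_reads=1754, read_ms=1860, num_writes=90
+  # and write_ms=220.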
+
+  def GetMemoryUsageForPid(self, pid):
+    """Returns the memory usage for given pid.
+
+    Args:
+      pid: The pid number of the specific process running on device.
+
+    Returns:
+      Dict of {metric:usage_kb}, for the process which has specified pid.
+      The metric keys which may be included are: Size, Rss, Pss, Shared_Clean,
+      Shared_Dirty, Private_Clean, Private_Dirty, VmHWM.
+    """
+    showmap = self.RunShellCommand('showmap %d' % pid)
+    if not showmap or not showmap[-1].endswith('TOTAL'):
+      logging.warning('Invalid output for showmap %s', str(showmap))
+      return {}
+    items = showmap[-1].split()
+    if len(items) != 9:
+      logging.warning('Invalid TOTAL for showmap %s', str(items))
+      return {}
+    usage_dict = collections.defaultdict(int)
+    usage_dict.update({
+        'Size': int(items[0].strip()),
+        'Rss': int(items[1].strip()),
+        'Pss': int(items[2].strip()),
+        'Shared_Clean': int(items[3].strip()),
+        'Shared_Dirty': int(items[4].strip()),
+        'Private_Clean': int(items[5].strip()),
+        'Private_Dirty': int(items[6].strip()),
+    })
+    peak_value_kb = 0
+    for line in self.GetProtectedFileContents('/proc/%s/status' % pid):
+      if not line.startswith('VmHWM:'):  # Format: 'VmHWM: +[0-9]+ kB'
+        continue
+      peak_value_kb = int(line.split(':')[1].strip().split(' ')[0])
+      break
+    usage_dict['VmHWM'] = peak_value_kb
+    if not peak_value_kb:
+      logging.warning('Could not find memory peak value for pid ' + str(pid))
+
+    return usage_dict
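+
+  # Example showmap TOTAL line for the parse above (hypothetical kB values):
+  #   '10000 9000 5000 3000 1000 2000 3000 42 TOTAL'
+  # yields Size=10000, Rss=9000, Pss=5000, Shared_Clean=3000,
+  # Shared_Dirty=1000, Private_Clean=2000 and Private_Dirty=3000; the eighth
+  # column is ignored and the ninth must be the literal 'TOTAL'.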
+
+  def ProcessesUsingDevicePort(self, device_port):
+    """Lists processes using the specified device port on loopback interface.
+
+    Args:
+      device_port: Port on device we want to check.
+
+    Returns:
+      A list of (pid, process_name) tuples using the specified port.
+    """
+    tcp_results = self.RunShellCommand('cat /proc/net/tcp', log_result=False)
+    tcp_address = '0100007F:%04X' % device_port
+    pids = []
+    for single_connect in tcp_results:
+      connect_results = single_connect.split()
+      # Column 1 is the TCP port, and column 9 is the inode of the socket.
+      if connect_results[1] == tcp_address:
+        socket_inode = connect_results[9]
+        socket_name = 'socket:[%s]' % socket_inode
+        lsof_results = self.RunShellCommand('lsof', log_result=False)
+        for single_process in lsof_results:
+          process_results = single_process.split()
+          # Ignore the line if it has less than nine columns in it, which may
+          # be the case when a process stops while lsof is executing.
+          if len(process_results) <= 8:
+            continue
+          # Column 0 is the executable name
+          # Column 1 is the pid
+          # Column 8 is the Inode in use
+          if process_results[8] == socket_name:
+            pids.append((int(process_results[1]), process_results[0]))
+        break
+    logging.info('PidsUsingDevicePort: %s', pids)
+    return pids
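+
+  # Worked example of the address match above: for device_port=8000 the
+  # loopback entry in /proc/net/tcp reads '0100007F:1F40', i.e. 127.0.0.1
+  # as little-endian hex (0100007F) followed by the port in hex (0x1F40).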
+
+  def FileExistsOnDevice(self, file_name):
+    """Checks whether the given file exists on the device.
+
+    Args:
+      file_name: Full path of file to check.
+
+    Returns:
+      True if the file exists, False otherwise.
+    """
+    assert '"' not in file_name, 'file_name cannot contain double quotes'
+    try:
+      status = self._adb.SendShellCommand(
+          '\'test -e "%s"; echo $?\'' % (file_name))
+      if 'test: not found' not in status:
+        return int(status) == 0
+
+      status = self._adb.SendShellCommand(
+          '\'ls "%s" >/dev/null 2>&1; echo $?\'' % (file_name))
+      return int(status) == 0
+    except ValueError:
+      if IsDeviceAttached(self._device):
+        raise errors.DeviceUnresponsiveError('Device may be offline.')
+
+      return False
+
+  def IsFileWritableOnDevice(self, file_name):
+    """Checks whether the given file (or directory) is writable on the device.
+
+    Args:
+      file_name: Full path of file/directory to check.
+
+    Returns:
+      True if writable, False otherwise.
+    """
+    assert '"' not in file_name, 'file_name cannot contain double quotes'
+    try:
+      status = self._adb.SendShellCommand(
+          '\'test -w "%s"; echo $?\'' % (file_name))
+      if 'test: not found' not in status:
+        return int(status) == 0
+      raise errors.AbortError('"test" binary not found. OS too old.')
+
+    except ValueError:
+      if IsDeviceAttached(self._device):
+        raise errors.DeviceUnresponsiveError('Device may be offline.')
+
+      return False
+
+  @staticmethod
+  def GetTimestamp():
+    """Returns a timestamp string (e.g. 2014-05-12-122954) for local time."""
+    return time.strftime('%Y-%m-%d-%H%M%S', time.localtime())
+
+  @staticmethod
+  def EnsureHostDirectory(host_file):
+    """Creates the directory containing |host_file| on the host if needed."""
+    host_dir = os.path.dirname(os.path.abspath(host_file))
+    if not os.path.exists(host_dir):
+      os.makedirs(host_dir)
+
+  def TakeScreenshot(self, host_file=None):
+    """Saves a screenshot image to |host_file| on the host.
+
+    Args:
+      host_file: Absolute path to the image file to store on the host or None to
+                 use an autogenerated file name.
+
+    Returns:
+      Resulting host file name of the screenshot.
+    """
+    host_file = os.path.abspath(host_file or
+                                'screenshot-%s.png' % self.GetTimestamp())
+    self.EnsureHostDirectory(host_file)
+    device_file = '%s/screenshot.png' % self.GetExternalStorage()
+    self.RunShellCommand(
+        '/system/bin/screencap -p %s' % device_file)
+    self.PullFileFromDevice(device_file, host_file)
+    self.RunShellCommand('rm -f "%s"' % device_file)
+    return host_file
+
+  def PullFileFromDevice(self, device_file, host_file):
+    """Download |device_file| on the device from to |host_file| on the host.
+
+    Args:
+      device_file: Absolute path to the file to retrieve from the device.
+      host_file: Absolute path to the file to store on the host.
+    """
+    if not self._adb.Pull(device_file, host_file):
+      raise device_errors.AdbCommandFailedError(
+          ['pull', device_file, host_file], 'Failed to pull file from device.')
+    assert os.path.exists(host_file)
+
+  def SetUtilWrapper(self, util_wrapper):
+    """Sets a wrapper prefix to be used when running a locally-built
+    binary on the device (ex.: md5sum_bin).
+    """
+    self._util_wrapper = util_wrapper
+
+  def RunUIAutomatorTest(self, test, test_package, timeout):
+    """Runs a single uiautomator test.
+
+    Args:
+      test: Test class/method.
+      test_package: Name of the test jar.
+      timeout: Timeout time in seconds.
+
+    Returns:
+      An instance of am_instrument_parser.TestResult object.
+    """
+    cmd = 'uiautomator runtest %s -e class %s' % (test_package, test)
+    self._LogShell(cmd)
+    output = self._adb.SendShellCommand(cmd, timeout_time=timeout)
+    # uiautomator doesn't fully conform to the instrumentation test runner
+    # convention and doesn't terminate with INSTRUMENTATION_CODE.
+    # Just assume the first result is valid.
+    (test_results, _) = am_instrument_parser.ParseAmInstrumentOutput(output)
+    if not test_results:
+      raise errors.InstrumentationError(
+          'no test results... device setup correctly?')
+    return test_results[0]
+
+  def DismissCrashDialogIfNeeded(self):
+    """Dismiss the error/ANR dialog if present.
+
+    Returns:
+      Name of the crashed package if a dialog is focused, None otherwise.
+    """
+    re_focus = re.compile(
+        r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
+
+    def _FindFocusedWindow():
+      match = None
+      for line in self.RunShellCommand('dumpsys window windows'):
+        match = re.match(re_focus, line)
+        if match:
+          break
+      return match
+
+    match = _FindFocusedWindow()
+    if not match:
+      return
+    package = match.group(2)
+    logging.warning('Trying to dismiss %s dialog for %s' % match.groups())
+    self.SendKeyEvent(KEYCODE_DPAD_RIGHT)
+    self.SendKeyEvent(KEYCODE_DPAD_RIGHT)
+    self.SendKeyEvent(KEYCODE_ENTER)
+    match = _FindFocusedWindow()
+    if match:
+      logging.error('Still showing a %s dialog for %s' % match.groups())
+    return package
+
+  def EfficientDeviceDirectoryCopy(self, source, dest):
+    """ Copy a directory efficiently on the device
+
+    Uses a shell script running on the target to copy new and changed files the
+    source directory to the destination directory and remove added files. This
+    is in some cases much faster than cp -r.
+
+    Args:
+      source: absolute path of source directory
+      dest: absolute path of destination directory
+    """
+    logging.info('In EfficientDeviceDirectoryCopy %s %s', source, dest)
+    with DeviceTempFile(self, suffix=".sh") as temp_script_file:
+      host_script_path = os.path.join(constants.DIR_SOURCE_ROOT,
+                                      'build',
+                                      'android',
+                                      'pylib',
+                                      'efficient_android_directory_copy.sh')
+      self._adb.Push(host_script_path, temp_script_file.name)
+      out = self.RunShellCommand(
+          'sh %s %s %s' % (temp_script_file.name, source, dest),
+          timeout_time=120)
+      if self._device:
+        device_repr = self._device[-4:]
+      else:
+        device_repr = '????'
+      for line in out:
+        logging.info('[%s]> %s', device_repr, line)
+
+  def _GetControlUsbChargingCommand(self):
+    if self._control_usb_charging_command['cached']:
+      return self._control_usb_charging_command['command']
+    self._control_usb_charging_command['cached'] = True
+    if not self.IsRootEnabled():
+      return None
+    for command in CONTROL_USB_CHARGING_COMMANDS:
+      # Assert command is valid.
+      assert 'disable_command' in command
+      assert 'enable_command' in command
+      assert 'witness_file' in command
+      witness_file = command['witness_file']
+      if self.FileExistsOnDevice(witness_file):
+        self._control_usb_charging_command['command'] = command
+        return command
+    return None
+
+  def CanControlUsbCharging(self):
+    return self._GetControlUsbChargingCommand() is not None
+
+  def DisableUsbCharging(self, timeout=10):
+    command = self._GetControlUsbChargingCommand()
+    if not command:
+      raise Exception('Unable to act on usb charging.')
+    disable_command = command['disable_command']
+    t0 = time.time()
+    # Do not loop directly on self.IsDeviceCharging to cut the number of calls
+    # to the device.
+    while True:
+      if t0 + timeout - time.time() < 0:
+        raise pexpect.TIMEOUT('Unable to disable USB charging in time.')
+      self.RunShellCommand(disable_command)
+      if not self.IsDeviceCharging():
+        break
+
+  def EnableUsbCharging(self, timeout=10):
+    command = self._GetControlUsbChargingCommand()
+    if not command:
+      raise Exception('Unable to act on usb charging.')
+    enable_command = command['enable_command']
+    t0 = time.time()
+    # Do not loop directly on self.IsDeviceCharging to cut the number of calls
+    # to the device.
+    while True:
+      if t0 + timeout - time.time() < 0:
+        raise pexpect.TIMEOUT('Unable to enable USB charging in time.')
+      self.RunShellCommand(enable_command)
+      if self.IsDeviceCharging():
+        break
+
+  def IsDeviceCharging(self):
+    for line in self.RunShellCommand('dumpsys battery'):
+      if 'powered: ' in line:
+        if line.split('powered: ')[1] == 'true':
+          return True
+    return False
+
+
+class NewLineNormalizer(object):
+  """A file-like object to normalize EOLs to '\n'.
+
+  Pexpect runs adb within a pseudo-tty device (see
+  http://www.noah.org/wiki/pexpect), so any '\n' printed by adb is written
+  as '\r\n' to the logfile. Since adb already uses '\r\n' to terminate
+  lines, the log ends up having '\r\r\n' at the end of each line. This
+  filter replaces the above with a single '\n' in the data stream.
+  """
+  def __init__(self, output):
+    self._output = output
+
+  def write(self, data):
+    data = data.replace('\r\r\n', '\n')
+    self._output.write(data)
+
+  def flush(self):
+    self._output.flush()
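+
+# Example of the normalization above: writing 'line one\r\r\nline two\r\r\n'
+# through a NewLineNormalizer delivers 'line one\nline two\n' to the wrapped
+# output file.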
diff --git a/build/android/pylib/android_commands_unittest.py b/build/android/pylib/android_commands_unittest.py
new file mode 100644
index 0000000..21c34f9
--- /dev/null
+++ b/build/android/pylib/android_commands_unittest.py
@@ -0,0 +1,191 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+import sys
+import unittest
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+
+from pylib import android_commands
+
+# pylint: disable=W0212,W0702
+
+class TestDeviceTempFile(unittest.TestCase):
+  def setUp(self):
+    if not os.getenv('BUILDTYPE'):
+      os.environ['BUILDTYPE'] = 'Debug'
+
+    devices = android_commands.GetAttachedDevices()
+    self.assertGreater(len(devices), 0, 'No device attached!')
+    self.ac = android_commands.AndroidCommands(device=devices[0])
+
+  def testTempFileDeleted(self):
+    """Tests that DeviceTempFile deletes files when closed."""
+    temp_file = android_commands.DeviceTempFile(self.ac)
+    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
+    self.ac.SetFileContents(temp_file.name, "contents")
+    self.assertTrue(self.ac.FileExistsOnDevice(temp_file.name))
+    temp_file.close()
+    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
+
+    with android_commands.DeviceTempFile(self.ac) as with_temp_file:
+      self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
+      self.ac.SetFileContents(with_temp_file.name, "contents")
+      self.assertTrue(self.ac.FileExistsOnDevice(with_temp_file.name))
+
+    self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
+
+  def testTempFileNotWritten(self):
+    """Tests that device temp files work successfully even if not written to."""
+    temp_file = android_commands.DeviceTempFile(self.ac)
+    temp_file.close()
+    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
+
+    with android_commands.DeviceTempFile(self.ac) as with_temp_file:
+      pass
+    self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
+
+  def testNaming(self):
+    """Tests that returned filenames are as requested."""
+    temp_file = android_commands.DeviceTempFile(self.ac, prefix="cat")
+    self.assertTrue(os.path.basename(temp_file.name).startswith("cat"))
+
+    temp_file = android_commands.DeviceTempFile(self.ac, suffix="dog")
+    self.assertTrue(temp_file.name.endswith("dog"))
+
+    temp_file = android_commands.DeviceTempFile(
+        self.ac, prefix="cat", suffix="dog")
+    self.assertTrue(os.path.basename(temp_file.name).startswith("cat"))
+    self.assertTrue(temp_file.name.endswith("dog"))
+
+
+class TestGetFilesChanged(unittest.TestCase):
+
+  def setUp(self):
+    if not os.getenv('BUILDTYPE'):
+      os.environ['BUILDTYPE'] = 'Debug'
+
+    devices = android_commands.GetAttachedDevices()
+    self.assertGreater(len(devices), 0, 'No device attached!')
+    self.ac = android_commands.AndroidCommands(device=devices[0])
+    self.host_data_dir = os.path.realpath('test_push_data')
+    self.device_data_dir = '%s/test_push_data' % (
+        self.ac.RunShellCommand('realpath %s' %
+            self.ac.GetExternalStorage())[0])
+
+    os.mkdir(self.host_data_dir)
+    for i in xrange(1, 10):
+      with open('%s/%d.txt' % (self.host_data_dir, i), 'w') as f:
+        f.write('file #%d' % i)
+
+    self.ac.RunShellCommand('mkdir %s' % self.device_data_dir)
+
+  def testGetFilesChangedAllNeeded(self):
+    """ Tests GetFilesChanged when none of the files are on the device.
+    """
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(1, 10)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedSomeIdentical(self):
+    """ Tests GetFilesChanged when some of the files are on the device.
+    """
+    for i in xrange(1, 5):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(5, 10)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedAllIdentical(self):
+    """ Tests GetFilesChanged when all of the files are on the device.
+    """
+    for i in xrange(1, 10):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    expected = []
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedRename(self):
+    """ Tests GetFilesChanged when one of the files has been renamed.
+
+        This tests both with and without the ignore_filenames flag set.
+    """
+    for i in xrange(5, 10):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    os.rename('%s/5.txt' % (self.host_data_dir),
+              '%s/99.txt' % (self.host_data_dir))
+
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(1, 5)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir,
+                                     ignore_filenames=True)
+    self.assertSequenceEqual(expected, actual)
+
+    expected.append(('%s/99.txt' % self.host_data_dir,
+                     '%s/99.txt' % self.device_data_dir))
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedCopy(self):
+    """ Tests GetFilesChanged when one of the files has been copied.
+
+        This tests both with and without the ignore_filenames flag set.
+    """
+    for i in xrange(5, 10):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    shutil.copy('%s/5.txt' % self.host_data_dir,
+                '%s/99.txt' % self.host_data_dir)
+
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(1, 5)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir,
+                                     ignore_filenames=True)
+    self.assertSequenceEqual(expected, actual)
+
+    expected.append(('%s/99.txt' % self.host_data_dir,
+                     '%s/99.txt' % self.device_data_dir))
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedIndividual(self):
+    """ Tests GetFilesChanged when provided one file.
+    """
+    expected = [('%s/1.txt' % self.host_data_dir,
+                 '%s/1.txt' % self.device_data_dir)]
+    actual = self.ac.GetFilesChanged('%s/1.txt' % self.host_data_dir,
+                                     '%s/1.txt' % self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedFileToDirectory(self):
+    """ Tests GetFilesChanged when provided a file from the host and a
+        directory on the device.
+    """
+    expected = [('%s/1.txt' % self.host_data_dir,
+                 '%s' % self.device_data_dir)]
+    actual = self.ac.GetFilesChanged('%s/1.txt' % self.host_data_dir,
+                                     '%s' % self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def tearDown(self):
+    try:
+      shutil.rmtree(self.host_data_dir)
+      self.ac.RunShellCommand('rm -rf %s' % self.device_data_dir)
+    except:
+      pass
+
+if __name__ == '__main__':
+  unittest.main()
+
diff --git a/build/android/pylib/base/__init__.py b/build/android/pylib/base/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/base/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/base/base_test_result.py b/build/android/pylib/base/base_test_result.py
new file mode 100644
index 0000000..1f45214
--- /dev/null
+++ b/build/android/pylib/base/base_test_result.py
@@ -0,0 +1,201 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing base test results classes."""
+
+class ResultType(object):
+  """Class enumerating test types."""
+  PASS = 'PASS'
+  SKIP = 'SKIP'
+  FAIL = 'FAIL'
+  CRASH = 'CRASH'
+  TIMEOUT = 'TIMEOUT'
+  UNKNOWN = 'UNKNOWN'
+
+  @staticmethod
+  def GetTypes():
+    """Get a list of all test types."""
+    return [ResultType.PASS, ResultType.SKIP, ResultType.FAIL,
+            ResultType.CRASH, ResultType.TIMEOUT, ResultType.UNKNOWN]
+
+
+class BaseTestResult(object):
+  """Base class for a single test result."""
+
+  def __init__(self, name, test_type, log=''):
+    """Construct a BaseTestResult.
+
+    Args:
+      name: Name of the test which defines uniqueness.
+      test_type: Type of the test result as defined in ResultType.
+      log: An optional string listing any errors.
+    """
+    assert name
+    assert test_type in ResultType.GetTypes()
+    self._name = name
+    self._test_type = test_type
+    self._log = log
+
+  def __str__(self):
+    return self._name
+
+  def __repr__(self):
+    return self._name
+
+  def __cmp__(self, other):
+    # pylint: disable=W0212
+    return cmp(self._name, other._name)
+
+  def __hash__(self):
+    return hash(self._name)
+
+  def SetName(self, name):
+    """Set the test name.
+
+    Because we're putting this into a set, this should only be used if moving
+    this test result into another set.
+    """
+    self._name = name
+
+  def GetName(self):
+    """Get the test name."""
+    return self._name
+
+  def GetType(self):
+    """Get the test result type."""
+    return self._test_type
+
+  def GetLog(self):
+    """Get the test log."""
+    return self._log
+
+
+class TestRunResults(object):
+  """Set of results for a test run."""
+
+  def __init__(self):
+    self._results = set()
+
+  def GetLogs(self):
+    """Get the string representation of all test logs."""
+    s = []
+    for test_type in ResultType.GetTypes():
+      if test_type != ResultType.PASS:
+        for t in sorted(self._GetType(test_type)):
+          log = t.GetLog()
+          if log:
+            s.append('[%s] %s:' % (test_type, t))
+            s.append(log)
+    return '\n'.join(s)
+
+  def GetGtestForm(self):
+    """Get the gtest string representation of this object."""
+    s = []
+    plural = lambda n, s, p: '%d %s' % (n, p if n != 1 else s)
+    tests = lambda n: plural(n, 'test', 'tests')
+
+    s.append('[==========] %s ran.' % (tests(len(self.GetAll()))))
+    s.append('[  PASSED  ] %s.' % (tests(len(self.GetPass()))))
+
+    skipped = self.GetSkip()
+    if skipped:
+      s.append('[  SKIPPED ] Skipped %s, listed below:' % tests(len(skipped)))
+      for t in sorted(skipped):
+        s.append('[  SKIPPED ] %s' % str(t))
+
+    all_failures = self.GetFail().union(self.GetCrash(), self.GetTimeout(),
+        self.GetUnknown())
+    if all_failures:
+      s.append('[  FAILED  ] %s, listed below:' % tests(len(all_failures)))
+      for t in sorted(self.GetFail()):
+        s.append('[  FAILED  ] %s' % str(t))
+      for t in sorted(self.GetCrash()):
+        s.append('[  FAILED  ] %s (CRASHED)' % str(t))
+      for t in sorted(self.GetTimeout()):
+        s.append('[  FAILED  ] %s (TIMEOUT)' % str(t))
+      for t in sorted(self.GetUnknown()):
+        s.append('[  FAILED  ] %s (UNKNOWN)' % str(t))
+      s.append('')
+      s.append(plural(len(all_failures), 'FAILED TEST', 'FAILED TESTS'))
+    return '\n'.join(s)
+
+  def GetShortForm(self):
+    """Get the short string representation of this object."""
+    s = []
+    s.append('ALL: %d' % len(self._results))
+    for test_type in ResultType.GetTypes():
+      s.append('%s: %d' % (test_type, len(self._GetType(test_type))))
+    return ''.join([x.ljust(15) for x in s])
+
+  def __str__(self):
+    return self.GetGtestForm()
+
+  def AddResult(self, result):
+    """Add |result| to the set.
+
+    Args:
+      result: An instance of BaseTestResult.
+    """
+    assert isinstance(result, BaseTestResult)
+    self._results.add(result)
+
+  def AddResults(self, results):
+    """Add |results| to the set.
+
+    Args:
+      results: An iterable of BaseTestResult objects.
+    """
+    for t in results:
+      self.AddResult(t)
+
+  def AddTestRunResults(self, results):
+    """Add the set of test results from |results|.
+
+    Args:
+      results: An instance of TestRunResults.
+    """
+    assert isinstance(results, TestRunResults)
+    # pylint: disable=W0212
+    self._results.update(results._results)
+
+  def GetAll(self):
+    """Get the set of all test results."""
+    return self._results.copy()
+
+  def _GetType(self, test_type):
+    """Get the set of test results with the given test type."""
+    return set(t for t in self._results if t.GetType() == test_type)
+
+  def GetPass(self):
+    """Get the set of all passed test results."""
+    return self._GetType(ResultType.PASS)
+
+  def GetSkip(self):
+    """Get the set of all skipped test results."""
+    return self._GetType(ResultType.SKIP)
+
+  def GetFail(self):
+    """Get the set of all failed test results."""
+    return self._GetType(ResultType.FAIL)
+
+  def GetCrash(self):
+    """Get the set of all crashed test results."""
+    return self._GetType(ResultType.CRASH)
+
+  def GetTimeout(self):
+    """Get the set of all timed out test results."""
+    return self._GetType(ResultType.TIMEOUT)
+
+  def GetUnknown(self):
+    """Get the set of all unknown test results."""
+    return self._GetType(ResultType.UNKNOWN)
+
+  def GetNotPass(self):
+    """Get the set of all non-passed test results."""
+    return self.GetAll() - self.GetPass()
+
+  def DidRunPass(self):
+    """Return whether the test run was successful."""
+    return not (self.GetNotPass() - self.GetSkip())
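+
+  # Example of the pass criterion above: a run containing only PASS and SKIP
+  # results passes, since GetNotPass() - GetSkip() is empty; any FAIL, CRASH,
+  # TIMEOUT or UNKNOWN result makes DidRunPass() return False.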
+
diff --git a/build/android/pylib/base/base_test_result_unittest.py b/build/android/pylib/base/base_test_result_unittest.py
new file mode 100644
index 0000000..6f0cba7
--- /dev/null
+++ b/build/android/pylib/base/base_test_result_unittest.py
@@ -0,0 +1,82 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for TestRunResults."""
+
+import unittest
+
+from pylib.base.base_test_result import BaseTestResult
+from pylib.base.base_test_result import TestRunResults
+from pylib.base.base_test_result import ResultType
+
+
+class TestTestRunResults(unittest.TestCase):
+  def setUp(self):
+    self.p1 = BaseTestResult('p1', ResultType.PASS, log='pass1')
+    other_p1 = BaseTestResult('p1', ResultType.PASS)
+    self.p2 = BaseTestResult('p2', ResultType.PASS)
+    self.f1 = BaseTestResult('f1', ResultType.FAIL, log='failure1')
+    self.c1 = BaseTestResult('c1', ResultType.CRASH, log='crash1')
+    self.u1 = BaseTestResult('u1', ResultType.UNKNOWN)
+    self.tr = TestRunResults()
+    self.tr.AddResult(self.p1)
+    self.tr.AddResult(other_p1)
+    self.tr.AddResult(self.p2)
+    self.tr.AddResults(set([self.f1, self.c1, self.u1]))
+
+  def testGetAll(self):
+    self.assertFalse(
+        self.tr.GetAll().symmetric_difference(
+            [self.p1, self.p2, self.f1, self.c1, self.u1]))
+
+  def testGetPass(self):
+    self.assertFalse(self.tr.GetPass().symmetric_difference(
+        [self.p1, self.p2]))
+
+  def testGetNotPass(self):
+    self.assertFalse(self.tr.GetNotPass().symmetric_difference(
+        [self.f1, self.c1, self.u1]))
+
+  def testGetAddTestRunResults(self):
+    tr2 = TestRunResults()
+    other_p1 = BaseTestResult('p1', ResultType.PASS)
+    f2 = BaseTestResult('f2', ResultType.FAIL)
+    tr2.AddResult(other_p1)
+    tr2.AddResult(f2)
+    tr2.AddTestRunResults(self.tr)
+    self.assertFalse(
+        tr2.GetAll().symmetric_difference(
+            [self.p1, self.p2, self.f1, self.c1, self.u1, f2]))
+
+  def testGetLogs(self):
+    log_print = ('[FAIL] f1:\n'
+                 'failure1\n'
+                 '[CRASH] c1:\n'
+                 'crash1')
+    self.assertEqual(self.tr.GetLogs(), log_print)
+
+  def testGetShortForm(self):
+    short_print = ('ALL: 5         PASS: 2        FAIL: 1        '
+                   'CRASH: 1       TIMEOUT: 0     UNKNOWN: 1     ')
+    self.assertEqual(self.tr.GetShortForm(), short_print)
+
+  def testGetGtestForm(self):
+    gtest_print = ('[==========] 5 tests ran.\n'
+                   '[  PASSED  ] 2 tests.\n'
+                   '[  FAILED  ] 3 tests, listed below:\n'
+                   '[  FAILED  ] f1\n'
+                   '[  FAILED  ] c1 (CRASHED)\n'
+                   '[  FAILED  ] u1 (UNKNOWN)\n'
+                   '\n'
+                   '3 FAILED TESTS')
+    self.assertEqual(gtest_print, self.tr.GetGtestForm())
+
+  def testRunPassed(self):
+    self.assertFalse(self.tr.DidRunPass())
+    tr2 = TestRunResults()
+    self.assertTrue(tr2.DidRunPass())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/base/base_test_runner.py b/build/android/pylib/base/base_test_runner.py
new file mode 100644
index 0000000..cfd0c6c
--- /dev/null
+++ b/build/android/pylib/base/base_test_runner.py
@@ -0,0 +1,201 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for running tests on a single device."""
+
+import logging
+import time
+
+from pylib import ports
+from pylib.chrome_test_server_spawner import SpawningServer
+from pylib.device import device_utils
+from pylib.forwarder import Forwarder
+from pylib.valgrind_tools import CreateTool
+# TODO(frankf): Move this to pylib/utils
+import lighttpd_server
+
+
+# A file on device to store ports of net test server. The format of the file is
+# test-spawner-server-port:test-server-port
+NET_TEST_SERVER_PORT_INFO_FILE = 'net-test-server-ports'
+
+
+class BaseTestRunner(object):
+  """Base class for running tests on a single device."""
+
+  def __init__(self, device_serial, tool, push_deps=True,
+               cleanup_test_files=False):
+    """
+      Args:
+        device_serial: Serial number of the device the tests will run on.
+        tool: Name of the Valgrind tool.
+        push_deps: If True, push all dependencies to the device.
+        cleanup_test_files: Whether or not to cleanup test files on device.
+    """
+    self.device_serial = device_serial
+    self.device = device_utils.DeviceUtils(device_serial)
+    self.tool = CreateTool(tool, self.device)
+    self._http_server = None
+    self._forwarder_device_port = 8000
+    self.forwarder_base_url = ('http://localhost:%d' %
+        self._forwarder_device_port)
+    self._spawning_server = None
+    # We will allocate port for test server spawner when calling method
+    # LaunchChromeTestServerSpawner and allocate port for test server when
+    # starting it in TestServerThread.
+    self.test_server_spawner_port = 0
+    self.test_server_port = 0
+    self._push_deps = push_deps
+    self._cleanup_test_files = cleanup_test_files
+
+  def _PushTestServerPortInfoToDevice(self):
+    """Pushes the latest port information to device."""
+    self.device.WriteFile(
+        self.device.GetExternalStoragePath() + '/' +
+            NET_TEST_SERVER_PORT_INFO_FILE,
+        '%d:%d' % (self.test_server_spawner_port, self.test_server_port))
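+
+  # Example of the port file format above (hypothetical ports): a spawner on
+  # 8001 and a test server on 8002 are recorded in the device file
+  # net-test-server-ports as the single line '8001:8002'.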
+
+  def RunTest(self, test):
+    """Runs a test. Needs to be overridden.
+
+    Args:
+      test: A test to run.
+
+    Returns:
+      Tuple containing:
+        (base_test_result.TestRunResults, tests to rerun or None)
+    """
+    raise NotImplementedError
+
+  def InstallTestPackage(self):
+    """Installs the test package once before all tests are run."""
+    pass
+
+  def PushDataDeps(self):
+    """Push all data deps to device once before all tests are run."""
+    pass
+
+  def SetUp(self):
+    """Run once before all tests are run."""
+    self.InstallTestPackage()
+    push_size_before = self.device.old_interface.GetPushSizeInfo()
+    if self._push_deps:
+      logging.warning('Pushing data files to device.')
+      self.PushDataDeps()
+      push_size_after = self.device.old_interface.GetPushSizeInfo()
+      logging.warning(
+          'Total data: %0.3fMB' %
+          ((push_size_after[0] - push_size_before[0]) / float(2 ** 20)))
+      logging.warning(
+          'Total data transferred: %0.3fMB' %
+          ((push_size_after[1] - push_size_before[1]) / float(2 ** 20)))
+    else:
+      logging.warning('Skipping pushing data to device.')
+
+  def TearDown(self):
+    """Run once after all tests are run."""
+    self.ShutdownHelperToolsForTestSuite()
+    if self._cleanup_test_files:
+      self.device.old_interface.RemovePushedFiles()
+
+  def LaunchTestHttpServer(self, document_root, port=None,
+                           extra_config_contents=None):
+    """Launches an HTTP server to serve HTTP tests.
+
+    Args:
+      document_root: Document root of the HTTP server.
+      port: Port to which the http server should bind.
+      extra_config_contents: Extra config contents for the HTTP server.
+    """
+    self._http_server = lighttpd_server.LighttpdServer(
+        document_root, port=port, extra_config_contents=extra_config_contents)
+    if self._http_server.StartupHttpServer():
+      logging.info('http server started: http://localhost:%s',
+                   self._http_server.port)
+    else:
+      logging.critical('Failed to start http server')
+    self._ForwardPortsForHttpServer()
+    return (self._forwarder_device_port, self._http_server.port)
+
+  def _ForwardPorts(self, port_pairs):
+    """Forwards a port."""
+    Forwarder.Map(port_pairs, self.device, self.tool)
+
+  def _UnmapPorts(self, port_pairs):
+    """Unmap previously forwarded ports."""
+    for (device_port, _) in port_pairs:
+      Forwarder.UnmapDevicePort(device_port, self.device)
+
+  # Deprecated: Use _ForwardPorts instead.
+  def StartForwarder(self, port_pairs):
+    """Starts TCP traffic forwarding for the given |port_pairs|.
+
+    Args:
+      port_pairs: A list of (device_port, local_port) tuples to forward.
+    """
+    self._ForwardPorts(port_pairs)
+
+  def _ForwardPortsForHttpServer(self):
+    """Starts a forwarder for the HTTP server.
+
+    The forwarder forwards HTTP requests and responses between host and device.
+    """
+    self._ForwardPorts([(self._forwarder_device_port, self._http_server.port)])
+
+  def _RestartHttpServerForwarderIfNecessary(self):
+    """Restarts the forwarder if it's not open."""
+    # Checks whether the HTTP server port is still forwarded on the device;
+    # if not, forwards it again.
+    # TODO(dtrainor): This is not always reliable because sometimes the port
+    # will be left open even after the forwarder has been killed.
+    if not ports.IsDevicePortUsed(self.device, self._forwarder_device_port):
+      self._ForwardPortsForHttpServer()
+
+  def ShutdownHelperToolsForTestSuite(self):
+    """Shuts down the server and the forwarder."""
+    if self._http_server:
+      self._UnmapPorts([(self._forwarder_device_port, self._http_server.port)])
+      self._http_server.ShutdownHttpServer()
+    if self._spawning_server:
+      self._spawning_server.Stop()
+
+  def CleanupSpawningServerState(self):
+    """Tells the spawning server to clean up any state.
+
+    If the spawning server is reused for multiple tests, this should be called
+    after each test to prevent tests from affecting each other.
+    """
+    if self._spawning_server:
+      self._spawning_server.CleanupState()
+
+  def LaunchChromeTestServerSpawner(self):
+    """Launches test server spawner."""
+    server_ready = False
+    error_msgs = []
+    # TODO(pliard): deflake this function. The for loop should be removed as
+    # well as IsHttpServerConnectable(). spawning_server.Start() should also
+    # block until the server is ready.
+    # Try 3 times to launch test spawner server.
+    for _ in xrange(0, 3):
+      self.test_server_spawner_port = ports.AllocateTestServerPort()
+      self._ForwardPorts(
+          [(self.test_server_spawner_port, self.test_server_spawner_port)])
+      self._spawning_server = SpawningServer(self.test_server_spawner_port,
+                                             self.device,
+                                             self.tool)
+      self._spawning_server.Start()
+      server_ready, error_msg = ports.IsHttpServerConnectable(
+          '127.0.0.1', self.test_server_spawner_port, path='/ping',
+          expected_read='ready')
+      if server_ready:
+        break
+      else:
+        error_msgs.append(error_msg)
+      self._spawning_server.Stop()
+      # Wait for 2 seconds then restart.
+      time.sleep(2)
+    if not server_ready:
+      logging.error(';'.join(error_msgs))
+      raise Exception('Cannot start the test spawner server.')
+    self._PushTestServerPortInfoToDevice()
diff --git a/build/android/pylib/base/test_dispatcher.py b/build/android/pylib/base/test_dispatcher.py
new file mode 100644
index 0000000..cb789de
--- /dev/null
+++ b/build/android/pylib/base/test_dispatcher.py
@@ -0,0 +1,414 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dispatches tests, either sharding or replicating them.
+
+Performs the following steps:
+* Create a test collection factory, using the given tests
+  - If sharding: test collection factory returns the same shared test collection
+    to all test runners
+  - If replicating: test collection factory returns a unique test collection to
+    each test runner, with the same set of tests in each.
+* Create a test runner for each device.
+* Run each test runner in its own thread, grabbing tests from the test
+  collection until there are no tests left.
+"""
+
+import logging
+import threading
+
+from pylib import android_commands
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.device import device_errors
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+
+DEFAULT_TIMEOUT = 7 * 60  # seven minutes
+
+
+class _ThreadSafeCounter(object):
+  """A threadsafe counter."""
+
+  def __init__(self):
+    self._lock = threading.Lock()
+    self._value = 0
+
+  def GetAndIncrement(self):
+    """Get the current value and increment it atomically.
+
+    Returns:
+      The value before incrementing.
+    """
+    with self._lock:
+      pre_increment = self._value
+      self._value += 1
+      return pre_increment
+
+
+class _Test(object):
+  """Holds a test with additional metadata."""
+
+  def __init__(self, test, tries=0):
+    """Initializes the _Test object.
+
+    Args:
+      test: The test.
+      tries: Number of tries so far.
+    """
+    self.test = test
+    self.tries = tries
+
+
+class _TestCollection(object):
+  """A threadsafe collection of tests.
+
+  Args:
+    tests: List of tests to put in the collection.
+  """
+
+  def __init__(self, tests=None):
+    if not tests:
+      tests = []
+    self._lock = threading.Lock()
+    self._tests = []
+    self._tests_in_progress = 0
+    # Used to signal that an item is available or all items have been handled.
+    self._item_available_or_all_done = threading.Event()
+    for t in tests:
+      self.add(t)
+
+  def _pop(self):
+    """Pop a test from the collection.
+
+    Waits until a test is available or all tests have been handled.
+
+    Returns:
+      A test or None if all tests have been handled.
+    """
+    while True:
+      # Wait for a test to be available or all tests to have been handled.
+      self._item_available_or_all_done.wait()
+      with self._lock:
+        # Check which of the two conditions triggered the signal.
+        if self._tests_in_progress == 0:
+          return None
+        try:
+          return self._tests.pop(0)
+        except IndexError:
+          # Another thread beat us to the available test, wait again.
+          self._item_available_or_all_done.clear()
+
+  def add(self, test):
+    """Add an test to the collection.
+
+    Args:
+      test: A test to add.
+    """
+    with self._lock:
+      self._tests.append(test)
+      self._item_available_or_all_done.set()
+      self._tests_in_progress += 1
+
+  def test_completed(self):
+    """Indicate that a test has been fully handled."""
+    with self._lock:
+      self._tests_in_progress -= 1
+      if self._tests_in_progress == 0:
+        # All tests have been handled, signal all waiting threads.
+        self._item_available_or_all_done.set()
+
+  def __iter__(self):
+    """Iterate through tests in the collection until all have been handled."""
+    while True:
+      r = self._pop()
+      if r is None:
+        break
+      yield r
+
+  def __len__(self):
+    """Return the number of tests currently in the collection."""
+    return len(self._tests)
+
+  def test_names(self):
+    """Return a list of the names of the tests currently in the collection."""
+    with self._lock:
+      return list(t.test for t in self._tests)
+
+
+def _RunTestsFromQueue(runner, test_collection, out_results, watcher,
+                       num_retries, tag_results_with_device=False):
+  """Runs tests from the test_collection until empty using the given runner.
+
+  Adds TestRunResults objects to the out_results list and may re-add tests to
+  the test_collection for retry.
+
+  Args:
+    runner: A TestRunner object used to run the tests.
+    test_collection: A _TestCollection from which to get _Test objects to run.
+    out_results: A list to add TestRunResults to.
+    watcher: A watchdog_timer.WatchdogTimer object, used as a shared timeout.
+    num_retries: Number of retries for a test.
+    tag_results_with_device: If True, appends the name of the device on which
+        the test was run to the test name. Used when replicating to identify
+        which device ran each copy of the test, and to ensure each copy of the
+        test is recorded separately.
+  """
+
+  def TagTestRunResults(test_run_results):
+    """Tags all results with the last 4 digits of the device id.
+
+    Used when replicating tests to distinguish the same tests run on different
+    devices. Test results are stored in a set, so the hash (generated from
+    name and tag) must be unique for results to be counted as distinct.
+    """
+    new_test_run_results = base_test_result.TestRunResults()
+    for test_result in test_run_results.GetAll():
+      test_result.SetName('%s_%s' % (runner.device_serial[-4:],
+                                     test_result.GetName()))
+      new_test_run_results.AddResult(test_result)
+    return new_test_run_results
+
+  for test in test_collection:
+    watcher.Reset()
+    try:
+      if runner.device_serial not in android_commands.GetAttachedDevices():
+        # Device is unresponsive, stop handling tests on this device.
+        msg = 'Device %s is unresponsive.' % runner.device_serial
+        logging.warning(msg)
+        raise device_errors.DeviceUnreachableError(msg)
+      result, retry = runner.RunTest(test.test)
+      if tag_results_with_device:
+        result = TagTestRunResults(result)
+      test.tries += 1
+      if retry and test.tries <= num_retries:
+        # Retry non-passing results, only record passing results.
+        pass_results = base_test_result.TestRunResults()
+        pass_results.AddResults(result.GetPass())
+        out_results.append(pass_results)
+        logging.warning('Will retry test, try #%s.' % test.tries)
+        test_collection.add(_Test(test=retry, tries=test.tries))
+      else:
+        # All tests passed or retry limit reached. Either way, record results.
+        out_results.append(result)
+    except:
+      # An unhandled exception occurred: return the test to the collection so
+      # another device can run it, then reraise the exception on the main
+      # thread.
+      test_collection.add(test)
+      raise
+    finally:
+      # Retries count as separate tasks so always mark the popped test as done.
+      test_collection.test_completed()
+
+
+def _SetUp(runner_factory, device, out_runners, threadsafe_counter):
+  """Creates a test runner for each device and calls SetUp() in parallel.
+
+  Note: if a device is unresponsive the corresponding TestRunner will not be
+    added to out_runners.
+
+  Args:
+    runner_factory: Callable that takes a device and index and returns a
+      TestRunner object.
+    device: The device serial number to set up.
+    out_runners: List to which the successfully set-up TestRunner is added.
+    threadsafe_counter: A _ThreadSafeCounter object used to get shard indices.
+  """
+  try:
+    index = threadsafe_counter.GetAndIncrement()
+    logging.warning('Creating shard %s for device %s.', index, device)
+    runner = runner_factory(device, index)
+    runner.SetUp()
+    out_runners.append(runner)
+  except (device_errors.DeviceUnreachableError,
+          # TODO(jbudorick) Remove this once the underlying implementations
+          #                 for the above are switched or wrapped.
+          android_commands.errors.DeviceUnresponsiveError) as e:
+    logging.warning('Failed to create shard for %s: [%s]', device, e)
+
+
+def _RunAllTests(runners, test_collection_factory, num_retries, timeout=None,
+                 tag_results_with_device=False):
+  """Run all tests using the given TestRunners.
+
+  Args:
+    runners: A list of TestRunner objects.
+    test_collection_factory: A callable to generate a _TestCollection object for
+        each test runner.
+    num_retries: Number of retries for a test.
+    timeout: Watchdog timeout in seconds.
+    tag_results_with_device: If True, appends the name of the device on which
+        the test was run to the test name. Used when replicating to identify
+        which device ran each copy of the test, and to ensure each copy of the
+        test is recorded separately.
+
+  Returns:
+    A tuple of (TestRunResults object, exit code)
+  """
+  logging.warning('Running tests with %s test runners.' % (len(runners)))
+  results = []
+  exit_code = 0
+  run_results = base_test_result.TestRunResults()
+  watcher = watchdog_timer.WatchdogTimer(timeout)
+  test_collections = [test_collection_factory() for _ in runners]
+
+  threads = [
+      reraiser_thread.ReraiserThread(
+          _RunTestsFromQueue,
+          [r, tc, results, watcher, num_retries, tag_results_with_device],
+          name=r.device_serial[-4:])
+      for r, tc in zip(runners, test_collections)]
+
+  workers = reraiser_thread.ReraiserThreadGroup(threads)
+  workers.StartAll()
+
+  # Catch DeviceUnreachableErrors and set a warning exit code.
+  try:
+    workers.JoinAll(watcher)
+  except (device_errors.DeviceUnreachableError,
+          # TODO(jbudorick) Remove this once the underlying implementations
+          #                 for the above are switched or wrapped.
+          android_commands.errors.DeviceUnresponsiveError) as e:
+    logging.error(e)
+    exit_code = constants.WARNING_EXIT_CODE
+
+  if not all((len(tc) == 0 for tc in test_collections)):
+    logging.error('Only ran %d tests (all devices are likely offline).' %
+                  len(results))
+    for tc in test_collections:
+      run_results.AddResults(base_test_result.BaseTestResult(
+          t, base_test_result.ResultType.UNKNOWN) for t in tc.test_names())
+
+  for r in results:
+    run_results.AddTestRunResults(r)
+  if not run_results.DidRunPass():
+    exit_code = constants.ERROR_EXIT_CODE
+  return (run_results, exit_code)
+
+
+def _CreateRunners(runner_factory, devices, timeout=None):
+  """Creates a test runner for each device and calls SetUp() in parallel.
+
+  Note: if a device is unresponsive the corresponding TestRunner will not be
+    included in the returned list.
+
+  Args:
+    runner_factory: Callable that takes a device and index and returns a
+      TestRunner object.
+    devices: List of device serial numbers as strings.
+    timeout: Watchdog timeout in seconds, defaults to the default timeout.
+
+  Returns:
+    A list of TestRunner objects.
+  """
+  logging.warning('Creating %s test runners.' % len(devices))
+  runners = []
+  counter = _ThreadSafeCounter()
+  threads = reraiser_thread.ReraiserThreadGroup(
+      [reraiser_thread.ReraiserThread(_SetUp,
+                                      [runner_factory, d, runners, counter],
+                                      name=d[-4:])
+       for d in devices])
+  threads.StartAll()
+  threads.JoinAll(watchdog_timer.WatchdogTimer(timeout))
+  return runners
+
+
+def _TearDownRunners(runners, timeout=None):
+  """Calls TearDown() for each test runner in parallel.
+
+  Args:
+    runners: A list of TestRunner objects.
+    timeout: Watchdog timeout in seconds, defaults to the default timeout.
+  """
+  threads = reraiser_thread.ReraiserThreadGroup(
+      [reraiser_thread.ReraiserThread(r.TearDown, name=r.device_serial[-4:])
+       for r in runners])
+  threads.StartAll()
+  threads.JoinAll(watchdog_timer.WatchdogTimer(timeout))
+
+
+def ApplyMaxPerRun(tests, max_per_run):
+  """Rearrange the tests so that no group contains more than max_per_run tests.
+
+  Args:
+    tests: A list of tests, where each test is either a single test name or a
+      colon-separated group of test names.
+    max_per_run: The maximum number of tests per group.
+
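+  Example:
+    ApplyMaxPerRun(['A:B', 'C:D:E', 'F:G:H:I'], 2)
+        => ['A:B', 'C:D', 'E', 'F:G', 'H:I']
+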
+  Returns:
+    A list of test groups, each containing at most max_per_run tests.
+  """
+  tests_expanded = []
+  for test_group in tests:
+    if not isinstance(test_group, str):
+      # Do not split test objects which are not strings.
+      tests_expanded.append(test_group)
+    else:
+      test_split = test_group.split(':')
+      for i in range(0, len(test_split), max_per_run):
+        tests_expanded.append(':'.join(test_split[i:i+max_per_run]))
+  return tests_expanded
+
+
+def RunTests(tests, runner_factory, devices, shard=True,
+             test_timeout=DEFAULT_TIMEOUT, setup_timeout=DEFAULT_TIMEOUT,
+             num_retries=2, max_per_run=256):
+  """Run all tests on attached devices, retrying tests that don't pass.
+
+  Args:
+    tests: List of tests to run.
+    runner_factory: Callable that takes a device and index and returns a
+        TestRunner object.
+    devices: List of attached devices.
+    shard: True if we should shard, False if we should replicate tests.
+      - Sharding tests will distribute tests across all test runners through a
+        shared test collection.
+      - Replicating tests will copy all tests to each test runner through a
+        unique test collection for each test runner.
+    test_timeout: Watchdog timeout in seconds for running tests.
+    setup_timeout: Watchdog timeout in seconds for creating and cleaning up
+        test runners.
+    num_retries: Number of retries for a test.
+    max_per_run: Maximum number of tests to run in any group.
+
+  Returns:
+    A tuple of (base_test_result.TestRunResults object, exit code).
+  """
+  if not tests:
+    logging.critical('No tests to run.')
+    return (base_test_result.TestRunResults(), constants.ERROR_EXIT_CODE)
+
+  tests_expanded = ApplyMaxPerRun(tests, max_per_run)
+  if shard:
+    # Generate a shared _TestCollection object for all test runners, so they
+    # draw from a common pool of tests.
+    shared_test_collection = _TestCollection([_Test(t) for t in tests_expanded])
+    test_collection_factory = lambda: shared_test_collection
+    tag_results_with_device = False
+    log_string = 'sharded across devices'
+  else:
+    # Generate a unique _TestCollection object for each test runner, but use
+    # the same set of tests.
+    test_collection_factory = lambda: _TestCollection(
+        [_Test(t) for t in tests_expanded])
+    tag_results_with_device = True
+    log_string = 'replicated on each device'
+
+  logging.info('Will run %d tests (%s): %s',
+               len(tests_expanded), log_string, str(tests_expanded))
+  runners = _CreateRunners(runner_factory, devices, setup_timeout)
+  try:
+    return _RunAllTests(runners, test_collection_factory,
+                        num_retries, test_timeout, tag_results_with_device)
+  finally:
+    try:
+      _TearDownRunners(runners, setup_timeout)
+    except (device_errors.DeviceUnreachableError,
+            # TODO(jbudorick) Remove this once the underlying implementations
+            #                 for the above are switched or wrapped.
+            android_commands.errors.DeviceUnresponsiveError) as e:
+      logging.warning('Device unresponsive during TearDown: [%s]', e)
+    except Exception as e:
+      logging.error('Unexpected exception caught during TearDown: %s' % str(e))
diff --git a/build/android/pylib/base/test_dispatcher_unittest.py b/build/android/pylib/base/test_dispatcher_unittest.py
new file mode 100644
index 0000000..d349f32
--- /dev/null
+++ b/build/android/pylib/base/test_dispatcher_unittest.py
@@ -0,0 +1,223 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for test_dispatcher.py."""
+# pylint: disable=R0201
+# pylint: disable=W0212
+
+import os
+import sys
+import unittest
+
+sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                os.pardir, os.pardir))
+
+# Mock out android_commands.GetAttachedDevices().
+from pylib import android_commands
+android_commands.GetAttachedDevices = lambda: ['0', '1']
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_dispatcher
+from pylib.utils import watchdog_timer
+
+
+class TestException(Exception):
+  pass
+
+
+class MockRunner(object):
+  """A mock TestRunner."""
+  def __init__(self, device='0', shard_index=0):
+    self.device_serial = device
+    self.shard_index = shard_index
+    self.setups = 0
+    self.teardowns = 0
+
+  def RunTest(self, test):
+    results = base_test_result.TestRunResults()
+    results.AddResult(
+        base_test_result.BaseTestResult(test, base_test_result.ResultType.PASS))
+    return (results, None)
+
+  def SetUp(self):
+    self.setups += 1
+
+  def TearDown(self):
+    self.teardowns += 1
+
+
+class MockRunnerFail(MockRunner):
+  def RunTest(self, test):
+    results = base_test_result.TestRunResults()
+    results.AddResult(
+        base_test_result.BaseTestResult(test, base_test_result.ResultType.FAIL))
+    return (results, test)
+
+
+class MockRunnerFailTwice(MockRunner):
+  def __init__(self, device='0', shard_index=0):
+    super(MockRunnerFailTwice, self).__init__(device, shard_index)
+    self._fails = 0
+
+  def RunTest(self, test):
+    self._fails += 1
+    results = base_test_result.TestRunResults()
+    if self._fails <= 2:
+      results.AddResult(base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.FAIL))
+      return (results, test)
+    else:
+      results.AddResult(base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.PASS))
+      return (results, None)
+
+
+class MockRunnerException(MockRunner):
+  def RunTest(self, test):
+    raise TestException
+
+
+class TestFunctions(unittest.TestCase):
+  """Tests test_dispatcher._RunTestsFromQueue."""
+  @staticmethod
+  def _RunTests(mock_runner, tests):
+    results = []
+    tests = test_dispatcher._TestCollection(
+        [test_dispatcher._Test(t) for t in tests])
+    test_dispatcher._RunTestsFromQueue(mock_runner, tests, results,
+                                       watchdog_timer.WatchdogTimer(None), 2)
+    run_results = base_test_result.TestRunResults()
+    for r in results:
+      run_results.AddTestRunResults(r)
+    return run_results
+
+  def testRunTestsFromQueue(self):
+    results = TestFunctions._RunTests(MockRunner(), ['a', 'b'])
+    self.assertEqual(len(results.GetPass()), 2)
+    self.assertEqual(len(results.GetNotPass()), 0)
+
+  def testRunTestsFromQueueRetry(self):
+    results = TestFunctions._RunTests(MockRunnerFail(), ['a', 'b'])
+    self.assertEqual(len(results.GetPass()), 0)
+    self.assertEqual(len(results.GetFail()), 2)
+
+  def testRunTestsFromQueueFailTwice(self):
+    results = TestFunctions._RunTests(MockRunnerFailTwice(), ['a', 'b'])
+    self.assertEqual(len(results.GetPass()), 2)
+    self.assertEqual(len(results.GetNotPass()), 0)
+
+  def testSetUp(self):
+    runners = []
+    counter = test_dispatcher._ThreadSafeCounter()
+    test_dispatcher._SetUp(MockRunner, '0', runners, counter)
+    self.assertEqual(len(runners), 1)
+    self.assertEqual(runners[0].setups, 1)
+
+  def testThreadSafeCounter(self):
+    counter = test_dispatcher._ThreadSafeCounter()
+    for i in xrange(5):
+      self.assertEqual(counter.GetAndIncrement(), i)
+
+  def testApplyMaxPerRun(self):
+    self.assertEqual(
+        ['A:B', 'C:D', 'E', 'F:G', 'H:I'],
+        test_dispatcher.ApplyMaxPerRun(['A:B', 'C:D:E', 'F:G:H:I'], 2))
+
+
+class TestThreadGroupFunctions(unittest.TestCase):
+  """Tests test_dispatcher._RunAllTests and test_dispatcher._CreateRunners."""
+  def setUp(self):
+    self.tests = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+    shared_test_collection = test_dispatcher._TestCollection(
+        [test_dispatcher._Test(t) for t in self.tests])
+    self.test_collection_factory = lambda: shared_test_collection
+
+  def testCreate(self):
+    runners = test_dispatcher._CreateRunners(MockRunner, ['0', '1'])
+    for runner in runners:
+      self.assertEqual(runner.setups, 1)
+    self.assertEqual(set([r.device_serial for r in runners]),
+                     set(['0', '1']))
+    self.assertEqual(set([r.shard_index for r in runners]),
+                     set([0, 1]))
+
+  def testRun(self):
+    runners = [MockRunner('0'), MockRunner('1')]
+    results, exit_code = test_dispatcher._RunAllTests(
+        runners, self.test_collection_factory, 0)
+    self.assertEqual(len(results.GetPass()), len(self.tests))
+    self.assertEqual(exit_code, 0)
+
+  def testTearDown(self):
+    runners = [MockRunner('0'), MockRunner('1')]
+    test_dispatcher._TearDownRunners(runners)
+    for runner in runners:
+      self.assertEqual(runner.teardowns, 1)
+
+  def testRetry(self):
+    runners = test_dispatcher._CreateRunners(MockRunnerFail, ['0', '1'])
+    results, exit_code = test_dispatcher._RunAllTests(
+        runners, self.test_collection_factory, 0)
+    self.assertEqual(len(results.GetFail()), len(self.tests))
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+  def testReraise(self):
+    runners = test_dispatcher._CreateRunners(MockRunnerException, ['0', '1'])
+    with self.assertRaises(TestException):
+      test_dispatcher._RunAllTests(runners, self.test_collection_factory, 0)
+
+
+class TestShard(unittest.TestCase):
+  """Tests test_dispatcher.RunTests with sharding."""
+  @staticmethod
+  def _RunShard(runner_factory):
+    return test_dispatcher.RunTests(
+        ['a', 'b', 'c'], runner_factory, ['0', '1'], shard=True)
+
+  def testShard(self):
+    results, exit_code = TestShard._RunShard(MockRunner)
+    self.assertEqual(len(results.GetPass()), 3)
+    self.assertEqual(exit_code, 0)
+
+  def testFailing(self):
+    results, exit_code = TestShard._RunShard(MockRunnerFail)
+    self.assertEqual(len(results.GetPass()), 0)
+    self.assertEqual(len(results.GetFail()), 3)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+  def testNoTests(self):
+    results, exit_code = test_dispatcher.RunTests(
+        [], MockRunner, ['0', '1'], shard=True)
+    self.assertEqual(len(results.GetAll()), 0)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+
+class TestReplicate(unittest.TestCase):
+  """Tests test_dispatcher.RunTests with replication."""
+  @staticmethod
+  def _RunReplicate(runner_factory):
+    return test_dispatcher.RunTests(
+        ['a', 'b', 'c'], runner_factory, ['0', '1'], shard=False)
+
+  def testReplicate(self):
+    results, exit_code = TestReplicate._RunReplicate(MockRunner)
+    # Expect 6 results since each test should have run on every device.
+    self.assertEqual(len(results.GetPass()), 6)
+    self.assertEqual(exit_code, 0)
+
+  def testFailing(self):
+    results, exit_code = TestReplicate._RunReplicate(MockRunnerFail)
+    self.assertEqual(len(results.GetPass()), 0)
+    self.assertEqual(len(results.GetFail()), 6)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+  def testNoTests(self):
+    results, exit_code = test_dispatcher.RunTests(
+        [], MockRunner, ['0', '1'], shard=False)
+    self.assertEqual(len(results.GetAll()), 0)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/chrome_test_server_spawner.py b/build/android/pylib/chrome_test_server_spawner.py
new file mode 100644
index 0000000..e1fe7b1
--- /dev/null
+++ b/build/android/pylib/chrome_test_server_spawner.py
@@ -0,0 +1,425 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
+
+It's used to accept requests from the device to spawn and kill instances of the
+chrome test server on the host.
+"""
+# pylint: disable=W0702
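+#
+# Request protocol handled by SpawningServerRequestHandler (sketch):
+#   POST /start  - body is a JSON dict of test server arguments; on success,
+#                  responds with {"port": <device port>, "message": "started"}.
+#   GET /kill    - kills the currently running test server.
+#   GET /ping    - responds with 'ready' once the spawner is serving.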
+
+import BaseHTTPServer
+import json
+import logging
+import os
+import select
+import struct
+import subprocess
+import sys
+import threading
+import time
+import urlparse
+
+from pylib import constants
+from pylib import ports
+
+from pylib.forwarder import Forwarder
+
+
+# Paths needed to import the necessary modules when launching a test server.
+os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s:%s:%s'
+    % (os.path.join(constants.DIR_SOURCE_ROOT, 'third_party'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'tlslite'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'pyftpdlib',
+                    'src'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'net', 'tools', 'testserver'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'sync', 'tools', 'testserver')))
+
+
+SERVER_TYPES = {
+    'http': '',
+    'ftp': '-f',
+    'sync': '',  # Sync uses its own script, and doesn't take a server type arg.
+    'tcpecho': '--tcp-echo',
+    'udpecho': '--udp-echo',
+}
+
+
+# The timeout (in seconds) of starting up the Python test server.
+TEST_SERVER_STARTUP_TIMEOUT = 10
+
+def _WaitUntil(predicate, max_attempts=5):
+  """Blocks until the provided predicate (function) is true.
+
+  Returns:
+    Whether the provided predicate was satisfied within max_attempts tries.
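+
+  Example (sketch): _WaitUntil(lambda: ports.IsHostPortUsed(port)) polls with
+  exponential backoff, from 25 ms up to a 1 second cap between attempts.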
+  """
+  sleep_time_sec = 0.025
+  for _ in xrange(max_attempts):
+    if predicate():
+      return True
+    time.sleep(sleep_time_sec)
+    sleep_time_sec = min(1, sleep_time_sec * 2)  # Don't wait more than 1 sec.
+  return False
+
+
+def _CheckPortStatus(port, expected_status):
+  """Returns True if port has expected_status.
+
+  Args:
+    port: the port number.
+    expected_status: boolean of expected status.
+
+  Returns:
+    Returns True if the status is expected. Otherwise returns False.
+  """
+  return _WaitUntil(lambda: ports.IsHostPortUsed(port) == expected_status)
+
+
+def _CheckDevicePortStatus(device, port):
+  """Returns whether the provided port is used."""
+  return _WaitUntil(lambda: ports.IsDevicePortUsed(device, port))
+
+
+def _GetServerTypeCommandLine(server_type):
+  """Returns the command-line by the given server type.
+
+  Args:
+    server_type: the server type to be used (e.g. 'http').
+
+  Returns:
+    A string containing the command-line argument.
+  """
+  if server_type not in SERVER_TYPES:
+    raise NotImplementedError('Unknown server type: %s' % server_type)
+  if server_type == 'udpecho':
+    raise Exception('Please do not run UDP echo tests because we do not have '
+                    'a UDP forwarder tool.')
+  return SERVER_TYPES[server_type]
+
+
+class TestServerThread(threading.Thread):
+  """A thread to run the test server in a separate process."""
+
+  def __init__(self, ready_event, arguments, device, tool):
+    """Initialize TestServerThread with the following argument.
+
+    Args:
+      ready_event: event which will be set when the test server is ready.
+      arguments: dictionary of arguments to run the test server.
+      device: An instance of DeviceUtils.
+      tool: instance of runtime error detection tool.
+    """
+    threading.Thread.__init__(self)
+    self.wait_event = threading.Event()
+    self.stop_flag = False
+    self.ready_event = ready_event
+    self.ready_event.clear()
+    self.arguments = arguments
+    self.device = device
+    self.tool = tool
+    self.test_server_process = None
+    self.is_ready = False
+    self.host_port = self.arguments['port']
+    assert isinstance(self.host_port, int)
+    # The forwarder device port now is dynamically allocated.
+    self.forwarder_device_port = 0
+    # Anonymous pipe in order to get port info from test server.
+    self.pipe_in = None
+    self.pipe_out = None
+    self.process = None
+    self.command_line = []
+
+  def _WaitToStartAndGetPortFromTestServer(self):
+    """Waits for the Python test server to start and gets the port it is using.
+
+    The port information is passed by the Python test server with a pipe given
+    by self.pipe_out. It is written as a result to |self.host_port|.
+
+    Returns:
+      Whether the port used by the test server was successfully fetched.
+    """
+    assert self.host_port == 0 and self.pipe_out and self.pipe_in
+    (in_fds, _, _) = select.select([self.pipe_in, ], [], [],
+                                   TEST_SERVER_STARTUP_TIMEOUT)
+    if not in_fds:
+      logging.error('Timed out waiting for the Python test server to start.')
+      return False
+    # First read the data length as an unsigned 4-byte value.  This
+    # is _not_ using network byte ordering since the Python test server packs
+    # size as native byte order and all Chromium platforms so far are
+    # configured to use little-endian.
+    # TODO(jnd): Change the Python test server and local_test_server_*.cc to
+    # use a unified byte order (either big-endian or little-endian).
+    data_length = os.read(self.pipe_in, struct.calcsize('=L'))
+    if data_length:
+      (data_length,) = struct.unpack('=L', data_length)
+    if not data_length:
+      logging.error('Failed to get length of server data.')
+      return False
+    port_json = os.read(self.pipe_in, data_length)
+    if not port_json:
+      logging.error('Failed to get server data.')
+      return False
+    logging.info('Got port json data: %s', port_json)
+    port_json = json.loads(port_json)
+    if 'port' in port_json and isinstance(port_json['port'], int):
+      self.host_port = port_json['port']
+      return _CheckPortStatus(self.host_port, True)
+    logging.error('Failed to get port information from the server data.')
+    return False
+
+  def _GenerateCommandLineArguments(self):
+    """Generates the command line to run the test server.
+
+    Note that all options are processed by following the definitions in
+    testserver.py.
+    """
+    if self.command_line:
+      return
+
+    args_copy = dict(self.arguments)
+
+    # Translate the server type.
+    type_cmd = _GetServerTypeCommandLine(args_copy.pop('server-type'))
+    if type_cmd:
+      self.command_line.append(type_cmd)
+
+    # Use a pipe to get the port given by the instance of Python test server
+    # if the test does not specify the port.
+    assert self.host_port == args_copy['port']
+    if self.host_port == 0:
+      (self.pipe_in, self.pipe_out) = os.pipe()
+      self.command_line.append('--startup-pipe=%d' % self.pipe_out)
+
+    # Pass the remaining arguments as-is.
+    for key, values in args_copy.iteritems():
+      if not isinstance(values, list):
+        values = [values]
+      for value in values:
+        if value is None:
+          self.command_line.append('--%s' % key)
+        else:
+          self.command_line.append('--%s=%s' % (key, value))
+
+  def _CloseUnnecessaryFDsForTestServerProcess(self):
+    # This is required to avoid subtle deadlocks that could be caused by the
+    # test server child process inheriting undesirable file descriptors such as
+    # file lock file descriptors.
+    for fd in xrange(0, 1024):
+      if fd != self.pipe_out:
+        try:
+          os.close(fd)
+        except:
+          pass
+
+  def run(self):
+    logging.info('Start running the thread!')
+    self.wait_event.clear()
+    self._GenerateCommandLineArguments()
+    command = constants.DIR_SOURCE_ROOT
+    if self.arguments['server-type'] == 'sync':
+      command = [os.path.join(command, 'sync', 'tools', 'testserver',
+                              'sync_testserver.py')] + self.command_line
+    else:
+      command = [os.path.join(command, 'net', 'tools', 'testserver',
+                              'testserver.py')] + self.command_line
+    logging.info('Running: %s', command)
+    # Pass DIR_SOURCE_ROOT as the child's working directory so that relative
+    # paths in the arguments are resolved correctly.
+    self.process = subprocess.Popen(
+        command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess,
+        cwd=constants.DIR_SOURCE_ROOT)
+    if self.process:
+      if self.pipe_out:
+        self.is_ready = self._WaitToStartAndGetPortFromTestServer()
+      else:
+        self.is_ready = _CheckPortStatus(self.host_port, True)
+    if self.is_ready:
+      Forwarder.Map([(0, self.host_port)], self.device, self.tool)
+      # Check whether the forwarder is ready on the device.
+      self.is_ready = False
+      device_port = Forwarder.DevicePortForHostPort(self.host_port)
+      if device_port and _CheckDevicePortStatus(self.device, device_port):
+        self.is_ready = True
+        self.forwarder_device_port = device_port
+    # Wake up the request handler thread.
+    self.ready_event.set()
+    # Keep thread running until Stop() gets called.
+    _WaitUntil(lambda: self.stop_flag, max_attempts=sys.maxint)
+    if self.process.poll() is None:
+      self.process.kill()
+    Forwarder.UnmapDevicePort(self.forwarder_device_port, self.device)
+    self.process = None
+    self.is_ready = False
+    if self.pipe_out:
+      os.close(self.pipe_in)
+      os.close(self.pipe_out)
+      self.pipe_in = None
+      self.pipe_out = None
+    logging.info('Test server has died.')
+    self.wait_event.set()
+
+  def Stop(self):
+    """Blocks until the loop has finished.
+
+    Note that this must be called in another thread.
+    """
+    if not self.process:
+      return
+    self.stop_flag = True
+    self.wait_event.wait()
+
+
+class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """A handler used to process http GET/POST request."""
+
+  def _SendResponse(self, response_code, response_reason, additional_headers,
+                    contents):
+    """Generates a response sent to the client from the provided parameters.
+
+    Args:
+      response_code: HTTP status code of the response.
+      response_reason: Reason phrase describing the response status.
+      additional_headers: Dict of additional headers. Each key is the name of
+                          a header; each value is its content.
+      contents: String contents to send to the client.
+    """
+    self.send_response(response_code, response_reason)
+    self.send_header('Content-Type', 'text/html')
+    # Specify the content-length; without it, the HTTP(S) response will not
+    # be completed properly (and the browser keeps expecting data).
+    self.send_header('Content-Length', len(contents))
+    for header_name in additional_headers:
+      self.send_header(header_name, additional_headers[header_name])
+    self.end_headers()
+    self.wfile.write(contents)
+    self.wfile.flush()
+
+  def _StartTestServer(self):
+    """Starts the test server thread."""
+    logging.info('Handling request to spawn a test server.')
+    content_type = self.headers.getheader('content-type')
+    if content_type != 'application/json':
+      raise Exception('Bad content-type for start request.')
+    content_length = self.headers.getheader('content-length')
+    if not content_length:
+      content_length = 0
+    try:
+      content_length = int(content_length)
+    except:
+      raise Exception('Bad content-length for start request.')
+    logging.info(content_length)
+    test_server_argument_json = self.rfile.read(content_length)
+    logging.info(test_server_argument_json)
+    assert not self.server.test_server_instance
+    ready_event = threading.Event()
+    self.server.test_server_instance = TestServerThread(
+        ready_event,
+        json.loads(test_server_argument_json),
+        self.server.device,
+        self.server.tool)
+    self.server.test_server_instance.setDaemon(True)
+    self.server.test_server_instance.start()
+    ready_event.wait()
+    if self.server.test_server_instance.is_ready:
+      self._SendResponse(200, 'OK', {}, json.dumps(
+          {'port': self.server.test_server_instance.forwarder_device_port,
+           'message': 'started'}))
+      logging.info('Test server is running on port: %d.',
+                   self.server.test_server_instance.host_port)
+    else:
+      self.server.test_server_instance.Stop()
+      self.server.test_server_instance = None
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      logging.info('Encountered a problem while starting the test server.')
+
+  def _KillTestServer(self):
+    """Stops the test server instance."""
+    # There should only ever be one test server at a time. This may do the
+    # wrong thing if we try to start multiple test servers.
+    if not self.server.test_server_instance:
+      return
+    port = self.server.test_server_instance.host_port
+    logging.info('Handling request to kill a test server on port: %d.', port)
+    self.server.test_server_instance.Stop()
+    # Make sure the test server has actually stopped before sending the
+    # response.
+    if _CheckPortStatus(port, False):
+      self._SendResponse(200, 'OK', {}, 'killed')
+      logging.info('Test server on port %d was killed.', port)
+    else:
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      logging.info('Encountered a problem while killing the test server.')
+    self.server.test_server_instance = None
+
+  def do_POST(self):
+    parsed_path = urlparse.urlparse(self.path)
+    action = parsed_path.path
+    logging.info('Action for POST method is: %s.', action)
+    if action == '/start':
+      self._StartTestServer()
+    else:
+      self._SendResponse(400, 'Unknown request.', {}, '')
+      logging.info('Encountered unknown request: %s.', action)
+
+  def do_GET(self):
+    parsed_path = urlparse.urlparse(self.path)
+    action = parsed_path.path
+    params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
+    logging.info('Action for GET method is: %s.', action)
+    for param in params:
+      logging.info('%s=%s', param, params[param][0])
+    if action == '/kill':
+      self._KillTestServer()
+    elif action == '/ping':
+      # The ping handler is used to check whether the spawner server is ready
+      # to serve the requests. We don't need to test the status of the test
+      # server when handling ping request.
+      self._SendResponse(200, 'OK', {}, 'ready')
+      logging.info('Handled ping request and sent response.')
+    else:
+      self._SendResponse(400, 'Unknown request.', {}, '')
+      logging.info('Encountered unknown request: %s.', action)
+
+
+class SpawningServer(object):
+  """The class used to start/stop a http server."""
+
+  def __init__(self, test_server_spawner_port, device, tool):
+    logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
+    self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
+                                            SpawningServerRequestHandler)
+    self.server.device = device
+    self.server.tool = tool
+    self.server.test_server_instance = None
+    self.server.build_type = constants.GetBuildType()
+
+  def _Listen(self):
+    logging.info('Starting test server spawner')
+    self.server.serve_forever()
+
+  def Start(self):
+    """Starts the test server spawner."""
+    listener_thread = threading.Thread(target=self._Listen)
+    listener_thread.setDaemon(True)
+    listener_thread.start()
+
+  def Stop(self):
+    """Stops the test server spawner.
+
+    Also cleans the server state.
+    """
+    self.CleanupState()
+    self.server.shutdown()
+
+  def CleanupState(self):
+    """Cleans up the spawning server state.
+
+    This should be called if the test server spawner is reused,
+    to avoid sharing the test server instance.
+    """
+    if self.server.test_server_instance:
+      self.server.test_server_instance.Stop()
+      self.server.test_server_instance = None
diff --git a/build/android/pylib/cmd_helper.py b/build/android/pylib/cmd_helper.py
new file mode 100644
index 0000000..aba00be
--- /dev/null
+++ b/build/android/pylib/cmd_helper.py
@@ -0,0 +1,167 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A wrapper for subprocess to make calling shell commands easier."""
+
+import logging
+import os
+import pipes
+import select
+import signal
+import StringIO
+import subprocess
+import time
+
+# fcntl is not available on Windows.
+try:
+  import fcntl
+except ImportError:
+  fcntl = None
+
+
+def Popen(args, stdout=None, stderr=None, shell=None, cwd=None, env=None):
+  return subprocess.Popen(
+      args=args, cwd=cwd, stdout=stdout, stderr=stderr,
+      shell=shell, close_fds=True, env=env,
+      preexec_fn=lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL))
+
+
+def Call(args, stdout=None, stderr=None, shell=None, cwd=None, env=None):
+  pipe = Popen(args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd,
+               env=env)
+  pipe.communicate()
+  return pipe.wait()
+
+
+def RunCmd(args, cwd=None):
+  """Opens a subprocess to execute a program and returns its return value.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+
+  Returns:
+    Return code from the command execution.
+  """
+  logging.info(str(args) + ' ' + (cwd or ''))
+  return Call(args, cwd=cwd)
+
+
+def GetCmdOutput(args, cwd=None, shell=False):
+  """Open a subprocess to execute a program and returns its output.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command.
+
+  Returns:
+    Captures and returns the command's stdout.
+    Prints the command's stderr to the logger (which defaults to stdout).
+  """
+  (_, output) = GetCmdStatusAndOutput(args, cwd, shell)
+  return output
+
+
+def GetCmdStatusAndOutput(args, cwd=None, shell=False):
+  """Executes a subprocess and returns its exit code and output.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command.
+
+  Returns:
+    The 2-tuple (exit code, output).
+  """
+  if isinstance(args, basestring):
+    args_repr = args
+    if not shell:
+      raise Exception('string args must be run with shell=True')
+  elif shell:
+    raise Exception('array args must be run with shell=False')
+  else:
+    args_repr = ' '.join(map(pipes.quote, args))
+
+  s = '[host]'
+  if cwd:
+    s += ':' + cwd
+  s += '> ' + args_repr
+  logging.info(s)
+  pipe = Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+               shell=shell, cwd=cwd)
+  stdout, stderr = pipe.communicate()
+
+  if stderr:
+    logging.critical(stderr)
+  if len(stdout) > 4096:
+    logging.debug('Truncated output:')
+  logging.debug(stdout[:4096])
+  return (pipe.returncode, stdout)
+
+
+class TimeoutError(Exception):
+  """Module-specific timeout exception."""
+  pass
+
+
+def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False,
+                                     logfile=None):
+  """Executes a subprocess with a timeout.
+
+  Args:
+    args: List of arguments to the program, the program to execute is the first
+      element.
+    timeout: the timeout in seconds or None to wait forever.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command.
+    logfile: Optional file-like object that will receive output from the
+      command as it is running.
+
+  Returns:
+    The 2-tuple (exit code, output).
+  """
+  assert fcntl, 'fcntl module is required'
+  process = Popen(args, cwd=cwd, shell=shell, stdout=subprocess.PIPE,
+                  stderr=subprocess.STDOUT)
+  try:
+    end_time = (time.time() + timeout) if timeout else None
+    poll_interval = 1
+    buffer_size = 4096
+    child_fd = process.stdout.fileno()
+    output = StringIO.StringIO()
+
+    # Enable non-blocking reads from the child's stdout.
+    fl = fcntl.fcntl(child_fd, fcntl.F_GETFL)
+    fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
+
+    while True:
+      if end_time and time.time() > end_time:
+        raise TimeoutError
+      read_fds, _, _ = select.select([child_fd], [], [], poll_interval)
+      if child_fd in read_fds:
+        data = os.read(child_fd, buffer_size)
+        if not data:
+          break
+        if logfile:
+          logfile.write(data)
+        output.write(data)
+      if process.poll() is not None:
+        break
+  finally:
+    try:
+      # Make sure the process doesn't stick around if we fail with an
+      # exception.
+      process.kill()
+    except OSError:
+      pass
+    process.wait()
+  return process.returncode, output.getvalue()
diff --git a/build/android/pylib/constants.py b/build/android/pylib/constants.py
new file mode 100644
index 0000000..8b800ab
--- /dev/null
+++ b/build/android/pylib/constants.py
@@ -0,0 +1,214 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants shared by test runners and other scripts."""
+# pylint: disable=W0212
+
+import collections
+import logging
+import os
+import subprocess
+
+
+DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
+    os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                 os.pardir, os.pardir, os.pardir)))
+ISOLATE_DEPS_DIR = os.path.join(DIR_SOURCE_ROOT, 'isolate_deps_dir')
+
+CHROME_SHELL_HOST_DRIVEN_DIR = os.path.join(
+    DIR_SOURCE_ROOT, 'chrome', 'android')
+
+
+PackageInfo = collections.namedtuple('PackageInfo',
+    ['package', 'activity', 'cmdline_file', 'devtools_socket',
+     'test_package'])
+
+PACKAGE_INFO = {
+    'chrome_document': PackageInfo(
+        'com.google.android.apps.chrome.document',
+        'com.google.android.apps.chrome.document.ChromeLauncherActivity',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome': PackageInfo(
+        'com.google.android.apps.chrome',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        'com.google.android.apps.chrome.tests'),
+    'chrome_beta': PackageInfo(
+        'com.chrome.beta',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome_stable': PackageInfo(
+        'com.android.chrome',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome_dev': PackageInfo(
+        'com.google.android.apps.chrome_dev',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome_canary': PackageInfo(
+        'com.chrome.canary',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'legacy_browser': PackageInfo(
+        'com.google.android.browser',
+        'com.android.browser.BrowserActivity',
+        None,
+        None,
+        None),
+    'content_shell': PackageInfo(
+        'org.chromium.content_shell_apk',
+        'org.chromium.content_shell_apk.ContentShellActivity',
+        '/data/local/tmp/content-shell-command-line',
+        None,
+        'org.chromium.content_shell_apk.tests'),
+    'chrome_shell': PackageInfo(
+        'org.chromium.chrome.shell',
+        'org.chromium.chrome.shell.ChromeShellActivity',
+        '/data/local/tmp/chrome-shell-command-line',
+        'chrome_shell_devtools_remote',
+        'org.chromium.chrome.shell.tests'),
+    'android_webview_shell': PackageInfo(
+        'org.chromium.android_webview.shell',
+        'org.chromium.android_webview.shell.AwShellActivity',
+        None,
+        None,
+        'org.chromium.android_webview.test'),
+    'gtest': PackageInfo(
+        'org.chromium.native_test',
+        'org.chromium.native_test.ChromeNativeTestActivity',
+        '/data/local/tmp/chrome-native-tests-command-line',
+        None,
+        None),
+    'content_browsertests': PackageInfo(
+        'org.chromium.content_browsertests_apk',
+        'org.chromium.content_browsertests_apk.ContentBrowserTestsActivity',
+        '/data/local/tmp/content-browser-tests-command-line',
+        None,
+        None),
+    'chromedriver_webview_shell': PackageInfo(
+        'org.chromium.chromedriver_webview_shell',
+        'org.chromium.chromedriver_webview_shell.Main',
+        None,
+        None,
+        None),
+}
+
+
+# Ports arrangement for various test servers used in Chrome for Android.
+# Lighttpd server will attempt to use 9000 as default port, if unavailable it
+# will find a free port from 8001 - 8999.
+LIGHTTPD_DEFAULT_PORT = 9000
+LIGHTTPD_RANDOM_PORT_FIRST = 8001
+LIGHTTPD_RANDOM_PORT_LAST = 8999
+TEST_SYNC_SERVER_PORT = 9031
+TEST_SEARCH_BY_IMAGE_SERVER_PORT = 9041
+
+# The net test server is started from port 10201.
+# TODO(pliard): http://crbug.com/239014. Remove this dirty workaround once
+# http://crbug.com/239014 is fixed properly.
+TEST_SERVER_PORT_FIRST = 10201
+TEST_SERVER_PORT_LAST = 30000
+# A file to record next valid port of test server.
+TEST_SERVER_PORT_FILE = '/tmp/test_server_port'
+TEST_SERVER_PORT_LOCKFILE = '/tmp/test_server_port.lock'
+
+TEST_EXECUTABLE_DIR = '/data/local/tmp'
+# Directories for common java libraries for SDK build.
+# These constants are defined in build/android/ant/common.xml
+SDK_BUILD_JAVALIB_DIR = 'lib.java'
+SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
+SDK_BUILD_APKS_DIR = 'apks'
+
+ADB_KEYS_FILE = '/data/misc/adb/adb_keys'
+
+PERF_OUTPUT_DIR = os.path.join(DIR_SOURCE_ROOT, 'out', 'step_results')
+# The directory on the device where perf test output gets saved to.
+DEVICE_PERF_OUTPUT_DIR = (
+    '/data/data/' + PACKAGE_INFO['chrome'].package + '/files')
+
+SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots')
+
+ANDROID_SDK_VERSION = 20
+ANDROID_SDK_BUILD_TOOLS_VERSION = '20.0.0'
+ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
+                                'third_party/android_tools/sdk')
+ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT,
+                                 'build-tools', ANDROID_SDK_BUILD_TOOLS_VERSION)
+ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
+                                'third_party/android_tools/ndk')
+
+EMULATOR_SDK_ROOT = os.environ.get('ANDROID_EMULATOR_SDK_ROOT',
+                                   os.path.join(DIR_SOURCE_ROOT,
+                                                'android_emulator_sdk'))
+
+BAD_DEVICES_JSON = os.path.join(DIR_SOURCE_ROOT,
+                                os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+                                'bad_devices.json')
+
+UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'
+
+DEVICE_LOCAL_PROPERTIES_PATH = '/data/local.prop'
+
+def GetBuildType():
+  try:
+    return os.environ['BUILDTYPE']
+  except KeyError:
+    raise Exception('The BUILDTYPE environment variable has not been set')
+
+
+def SetBuildType(build_type):
+  os.environ['BUILDTYPE'] = build_type
+
+
+def GetOutDirectory(build_type=None):
+  """Returns the out directory where the output binaries are built.
+
+  Args:
+    build_type: Build type, generally 'Debug' or 'Release'. Defaults to the
+      globally set build type environment variable BUILDTYPE.
+  """
+  return os.path.abspath(os.path.join(
+      DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+      GetBuildType() if build_type is None else build_type))
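+
+# For example, with BUILDTYPE=Debug and CHROMIUM_OUT_DIR unset,
+# GetOutDirectory() returns <DIR_SOURCE_ROOT>/out/Debug.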
+
+
+def _Memoize(func):
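+  """Caches the result of a parameterless function after its first call."""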
+  def Wrapper():
+    try:
+      return func._result
+    except AttributeError:
+      func._result = func()
+      return func._result
+  return Wrapper
+
+
+@_Memoize
+def GetAdbPath():
+  if os.environ.get('ANDROID_SDK_ROOT'):
+    return 'adb'
+  # If envsetup.sh hasn't been sourced and there's no adb in the path,
+  # set it here.
+  try:
+    with open(os.devnull, 'w') as devnull:
+      subprocess.call(['adb', 'version'], stdout=devnull, stderr=devnull)
+    return 'adb'
+  except OSError:
+    logging.debug('No adb found in $PATH; falling back to checked-in binary.')
+    return os.path.join(ANDROID_SDK_ROOT, 'platform-tools', 'adb')
+
+
+# Exit codes
+ERROR_EXIT_CODE = 1
+WARNING_EXIT_CODE = 88
diff --git a/build/android/pylib/content_settings.py b/build/android/pylib/content_settings.py
new file mode 100644
index 0000000..d222053
--- /dev/null
+++ b/build/android/pylib/content_settings.py
@@ -0,0 +1,87 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class ContentSettings(dict):
+
+  """A dict interface to interact with device content settings.
+
+  Content settings are key/value pairs as exposed by adb shell content.
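+
+  Example (sketch; 'settings/system' is one possible table name):
+    settings = ContentSettings('settings/system', device)
+    settings['volume_music'] = 11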
+  """
+
+  def __init__(self, table, device):
+    super(ContentSettings, self).__init__()
+    sdk_version_string = device.GetProp('ro.build.version.sdk')
+    try:
+      sdk_version = int(sdk_version_string)
+      assert sdk_version >= 16, (
+          'ContentSettings supported only on SDK 16 and later')
+    except ValueError:
+      assert False, ('Unknown SDK version %s' % sdk_version_string)
+    self._table = table
+    self._device = device
+
+  @staticmethod
+  def _GetTypeBinding(value):
+    if isinstance(value, bool):
+      return 'b'
+    if isinstance(value, float):
+      return 'f'
+    if isinstance(value, int):
+      return 'i'
+    if isinstance(value, long):
+      return 'l'
+    if isinstance(value, str):
+      return 's'
+    raise ValueError('Unsupported type %s' % type(value))
+
+  def iteritems(self):
+    # Example row:
+    # 'Row: 0 _id=13, name=logging_id2, value=-1fccbaa546705b05'
+    for row in self._device.RunShellCommand(
+        'content query --uri content://%s' % self._table, as_root=True):
+      fields = row.split(', ')
+      key = None
+      value = None
+      for field in fields:
+        k, _, v = field.partition('=')
+        if k == 'name':
+          key = v
+        elif k == 'value':
+          value = v
+      if not key:
+        continue
+      if not value:
+        value = ''
+      yield key, value
+
+  def __getitem__(self, key):
+    return '\n'.join(self._device.RunShellCommand(
+        'content query --uri content://%s --where "name=\'%s\'" '
+        '--projection value' % (self._table, key), as_root=True)).strip()
+
+  def __setitem__(self, key, value):
+    if key in self:
+      self._device.RunShellCommand(
+          'content update --uri content://%s '
+          '--bind value:%s:%s --where "name=\'%s\'"' % (
+              self._table,
+              self._GetTypeBinding(value), value, key),
+          as_root=True)
+    else:
+      self._device.RunShellCommand(
+          'content insert --uri content://%s '
+          '--bind name:%s:%s --bind value:%s:%s' % (
+              self._table,
+              self._GetTypeBinding(key), key,
+              self._GetTypeBinding(value), value),
+          as_root=True)
+
+  def __delitem__(self, key):
+    self._device.RunShellCommand(
+        'content delete --uri content://%s '
+        '--bind name:%s:%s' % (
+            self._table,
+            self._GetTypeBinding(key), key),
+        as_root=True)
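+
+
+# A usage sketch (hypothetical; assumes |device| is a DeviceUtils-like object
+# exposing GetProp and RunShellCommand, and the 'secure' table is an example):
+#   settings = ContentSettings('secure', device)
+#   settings['accessibility_enabled'] = 1     # insert (or update) a setting
+#   for name, value in settings.iteritems():
+#     print name, value
+#   del settings['accessibility_enabled']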
diff --git a/build/android/pylib/device/OWNERS b/build/android/pylib/device/OWNERS
new file mode 100644
index 0000000..6a9eac5
--- /dev/null
+++ b/build/android/pylib/device/OWNERS
@@ -0,0 +1,3 @@
+set noparent
+craigdh@chromium.org
+jbudorick@chromium.org
diff --git a/build/android/pylib/device/__init__.py b/build/android/pylib/device/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/device/__init__.py
diff --git a/build/android/pylib/device/adb_wrapper.py b/build/android/pylib/device/adb_wrapper.py
new file mode 100644
index 0000000..f4a5931
--- /dev/null
+++ b/build/android/pylib/device/adb_wrapper.py
@@ -0,0 +1,387 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module wraps Android's adb tool.
+
+This is a thin wrapper around the adb interface. Any additional complexity
+should be delegated to a higher level (e.g. DeviceUtils).
+"""
+
+import errno
+import os
+
+from pylib import cmd_helper
+from pylib.device import decorators
+from pylib.device import device_errors
+
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 2
+
+
+def _VerifyLocalFileExists(path):
+  """Verifies a local file exists.
+
+  Args:
+    path: Path to the local file.
+
+  Raises:
+    IOError: If the file doesn't exist.
+  """
+  if not os.path.exists(path):
+    raise IOError(errno.ENOENT, os.strerror(errno.ENOENT), path)
+
+
+class AdbWrapper(object):
+  """A wrapper around a local Android Debug Bridge executable."""
+
+  def __init__(self, device_serial):
+    """Initializes the AdbWrapper.
+
+    Args:
+      device_serial: The device serial number as a string.
+    """
+    self._device_serial = str(device_serial)
+
+  # pylint: disable=W0613
+  @classmethod
+  @decorators.WithTimeoutAndRetries
+  def _RunAdbCmd(cls, arg_list, timeout=None, retries=None, check_error=True):
+    cmd = ['adb'] + arg_list
+    exit_code, output = cmd_helper.GetCmdStatusAndOutput(cmd)
+    if exit_code != 0:
+      raise device_errors.AdbCommandFailedError(
+          cmd, 'returned non-zero exit code %s, output: %s' %
+          (exit_code, output))
+    # This catches some errors, including when the device drops offline;
+    # unfortunately adb is very inconsistent with error reporting so many
+    # command failures present differently.
+    if check_error and output.startswith('error:'):
+      raise device_errors.AdbCommandFailedError(arg_list, output)
+    return output
+  # pylint: enable=W0613
+
+  def _DeviceAdbCmd(self, arg_list, timeout, retries, check_error=True):
+    """Runs an adb command on the device associated with this object.
+
+    Args:
+      arg_list: A list of arguments to adb.
+      timeout: Timeout in seconds.
+      retries: Number of retries.
+      check_error: Check that the command doesn't return an error message. This
+        does NOT check the return code of shell commands.
+
+    Returns:
+      The output of the command.
+    """
+    return self._RunAdbCmd(
+        ['-s', self._device_serial] + arg_list, timeout=timeout,
+        retries=retries, check_error=check_error)
+
+  def __eq__(self, other):
+    """Consider instances equal if they refer to the same device.
+
+    Args:
+      other: The instance to compare equality with.
+
+    Returns:
+      True if the instances are considered equal, false otherwise.
+    """
+    return self._device_serial == str(other)
+
+  def __str__(self):
+    """The string representation of an instance.
+
+    Returns:
+      The device serial number as a string.
+    """
+    return self._device_serial
+
+  def __repr__(self):
+    return '%s(\'%s\')' % (self.__class__.__name__, self)
+
+  # TODO(craigdh): Determine the filter criteria that should be supported.
+  @classmethod
+  def GetDevices(cls, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Get the list of active attached devices.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      A list of AdbWrapper instances, one for each active attached device.
+    """
+    output = cls._RunAdbCmd(['devices'], timeout=timeout, retries=retries)
+    lines = [line.split() for line in output.split('\n')]
+    return [AdbWrapper(line[0]) for line in lines
+            if len(line) == 2 and line[1] == 'device']
+
+  def GetDeviceSerial(self):
+    """Gets the device serial number associated with this object.
+
+    Returns:
+      Device serial number as a string.
+    """
+    return self._device_serial
+
+  def Push(self, local, remote, timeout=60*5, retries=_DEFAULT_RETRIES):
+    """Pushes a file from the host to the device.
+
+    Args:
+      local: Path on the host filesystem.
+      remote: Path on the device filesystem.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    _VerifyLocalFileExists(local)
+    self._DeviceAdbCmd(['push', local, remote], timeout, retries)
+
+  def Pull(self, remote, local, timeout=60*5, retries=_DEFAULT_RETRIES):
+    """Pulls a file from the device to the host.
+
+    Args:
+      remote: Path on the device filesystem.
+      local: Path on the host filesystem.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    self._DeviceAdbCmd(['pull', remote, local], timeout, retries)
+    _VerifyLocalFileExists(local)
+
+  def Shell(self, command, expect_rc=None, timeout=_DEFAULT_TIMEOUT,
+            retries=_DEFAULT_RETRIES):
+    """Runs a shell command on the device.
+
+    Args:
+      command: The shell command to run.
+      expect_rc: (optional) If set, checks that the command's return code
+        matches this value.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      The output of the shell command as a string.
+
+    Raises:
+      device_errors.AdbCommandFailedError: If the return code doesn't match
+        |expect_rc|.
+    """
+    if expect_rc is None:
+      actual_command = command
+    else:
+      actual_command = '%s; echo $?;' % command
+    output = self._DeviceAdbCmd(
+        ['shell', actual_command], timeout, retries, check_error=False)
+    if expect_rc is not None:
+      output_end = output.rstrip().rfind('\n') + 1
+      rc = output[output_end:].strip()
+      output = output[:output_end]
+      if int(rc) != expect_rc:
+        raise device_errors.AdbCommandFailedError(
+            ['shell', command],
+            'shell command exited with code: %s' % rc,
+            self._device_serial)
+    return output
+
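+  # Note on expect_rc: the command actually sent is "<command>; echo $?;",
+  # so a hypothetical Shell('ls /missing', expect_rc=0) might receive
+  # "ls: /missing: No such file or directory\n1\n"; the trailing "1" is
+  # parsed off as the return code and compared against |expect_rc|.
+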
+  def Logcat(self, filter_spec=None, timeout=_DEFAULT_TIMEOUT,
+             retries=_DEFAULT_RETRIES):
+    """Get the logcat output.
+
+    Args:
+      filter_spec: (optional) Spec to filter the logcat.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      logcat output as a string.
+    """
+    cmd = ['logcat']
+    if filter_spec is not None:
+      cmd.append(filter_spec)
+    return self._DeviceAdbCmd(cmd, timeout, retries, check_error=False)
+
+  def Forward(self, local, remote, timeout=_DEFAULT_TIMEOUT,
+              retries=_DEFAULT_RETRIES):
+    """Forward socket connections from the local socket to the remote socket.
+
+    Sockets are specified by one of:
+      tcp:<port>
+      localabstract:<unix domain socket name>
+      localreserved:<unix domain socket name>
+      localfilesystem:<unix domain socket name>
+      dev:<character device name>
+      jdwp:<process pid> (remote only)
+
+    Args:
+      local: The host socket.
+      remote: The device socket.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    self._DeviceAdbCmd(['forward', str(local), str(remote)], timeout, retries)
+
+  def JDWP(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """List of PIDs of processes hosting a JDWP transport.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      A list of PIDs as strings.
+    """
+    return [a.strip() for a in
+            self._DeviceAdbCmd(['jdwp'], timeout, retries).split('\n')]
+
+  def Install(self, apk_path, forward_lock=False, reinstall=False,
+              sd_card=False, timeout=60*2, retries=_DEFAULT_RETRIES):
+    """Install an apk on the device.
+
+    Args:
+      apk_path: Host path to the APK file.
+      forward_lock: (optional) If set, forward-locks the app.
+      reinstall: (optional) If set, reinstalls the app, keeping its data.
+      sd_card: (optional) If set, installs on the SD card.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    _VerifyLocalFileExists(apk_path)
+    cmd = ['install']
+    if forward_lock:
+      cmd.append('-l')
+    if reinstall:
+      cmd.append('-r')
+    if sd_card:
+      cmd.append('-s')
+    cmd.append(apk_path)
+    output = self._DeviceAdbCmd(cmd, timeout, retries)
+    if 'Success' not in output:
+      raise device_errors.AdbCommandFailedError(cmd, output)
+
+  def Uninstall(self, package, keep_data=False, timeout=_DEFAULT_TIMEOUT,
+                retries=_DEFAULT_RETRIES):
+    """Remove the app |package| from the device.
+
+    Args:
+      package: The package to uninstall.
+      keep_data: (optional) If set, keeps the data and cache directories.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    cmd = ['uninstall']
+    if keep_data:
+      cmd.append('-k')
+    cmd.append(package)
+    output = self._DeviceAdbCmd(cmd, timeout, retries)
+    if 'Failure' in output:
+      raise device_errors.AdbCommandFailedError(cmd, output)
+
+  def Backup(self, path, packages=None, apk=False, shared=False,
+             nosystem=True, include_all=False, timeout=_DEFAULT_TIMEOUT,
+             retries=_DEFAULT_RETRIES):
+    """Write an archive of the device's data to |path|.
+
+    Args:
+      path: Local path to store the backup file.
+      packages: List of packages to be backed up.
+      apk: (optional) If set, include the .apk files in the archive.
+      shared: (optional) If set, back up the device's SD card.
+      nosystem: (optional) If set, exclude system applications.
+      include_all: (optional) If set, back up all installed applications;
+        |packages| must then be omitted.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    assert bool(packages) ^ bool(include_all), (
+        'Provide \'packages\' or set \'include_all\' but not both.')
+    cmd = ['backup', path]
+    if apk:
+      cmd.append('-apk')
+    if shared:
+      cmd.append('-shared')
+    if nosystem:
+      cmd.append('-nosystem')
+    if include_all:
+      cmd.append('-all')
+    if packages:
+      cmd.extend(packages)
+    ret = self._DeviceAdbCmd(cmd, timeout, retries)
+    _VerifyLocalFileExists(path)
+    return ret
+
+  def Restore(self, path, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Restore device contents from the backup archive.
+
+    Args:
+      path: Host path to the backup archive.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    _VerifyLocalFileExists(path)
+    self._DeviceAdbCmd(['restore', path], timeout, retries)
+
+  def WaitForDevice(self, timeout=60*5, retries=_DEFAULT_RETRIES):
+    """Block until the device is online.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    self._DeviceAdbCmd(['wait-for-device'], timeout, retries)
+
+  def GetState(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Get device state.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      One of 'offline', 'bootloader', or 'device'.
+    """
+    return self._DeviceAdbCmd(['get-state'], timeout, retries).strip()
+
+  def GetDevPath(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Gets the device path.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      The device path (e.g. usb:3-4)
+    """
+    return self._DeviceAdbCmd(['get-devpath'], timeout, retries)
+
+  def Remount(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Remounts the /system partition on the device read-write."""
+    self._DeviceAdbCmd(['remount'], timeout, retries)
+
+  def Reboot(self, to_bootloader=False, timeout=60*5,
+             retries=_DEFAULT_RETRIES):
+    """Reboots the device.
+
+    Args:
+      to_bootloader: (optional) If set, reboots to the bootloader.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    if to_bootloader:
+      cmd = ['reboot-bootloader']
+    else:
+      cmd = ['reboot']
+    self._DeviceAdbCmd(cmd, timeout, retries)
+
+  def Root(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Restarts the adbd daemon with root permissions, if possible.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    output = self._DeviceAdbCmd(['root'], timeout, retries)
+    if 'cannot' in output:
+      raise device_errors.AdbCommandFailedError(['root'], output)
+
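+# A usage sketch (the serial and paths below are hypothetical):
+#   adb = AdbWrapper('0123456789abcdef')
+#   adb.WaitForDevice()
+#   adb.Push('/tmp/local.txt', '/data/local/tmp/remote.txt')
+#   print adb.Shell('echo ready', expect_rc=0)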
diff --git a/build/android/pylib/device/adb_wrapper_test.py b/build/android/pylib/device/adb_wrapper_test.py
new file mode 100644
index 0000000..08b7fcc
--- /dev/null
+++ b/build/android/pylib/device/adb_wrapper_test.py
@@ -0,0 +1,93 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the AdbWrapper class."""
+
+import os
+import tempfile
+import time
+import unittest
+
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+
+
+class TestAdbWrapper(unittest.TestCase):
+
+  def setUp(self):
+    devices = adb_wrapper.AdbWrapper.GetDevices()
+    assert devices, 'A device must be attached'
+    self._adb = devices[0]
+    self._adb.WaitForDevice()
+
+  @staticmethod
+  def _MakeTempFile(contents):
+    """Make a temporary file with the given contents.
+
+    Args:
+      contents: string to write to the temporary file.
+
+    Returns:
+      The absolute path to the file.
+    """
+    fi, path = tempfile.mkstemp()
+    with os.fdopen(fi, 'wb') as f:
+      f.write(contents)
+    return path
+
+  def testShell(self):
+    output = self._adb.Shell('echo test', expect_rc=0)
+    self.assertEqual(output.strip(), 'test')
+    output = self._adb.Shell('echo test')
+    self.assertEqual(output.strip(), 'test')
+    self.assertRaises(device_errors.AdbCommandFailedError, self._adb.Shell,
+        'echo test', expect_rc=1)
+
+  def testPushPull(self):
+    path = self._MakeTempFile('foo')
+    device_path = '/data/local/tmp/testfile.txt'
+    local_tmpdir = os.path.dirname(path)
+    self._adb.Push(path, device_path)
+    self.assertEqual(self._adb.Shell('cat %s' % device_path), 'foo')
+    self._adb.Pull(device_path, local_tmpdir)
+    with open(os.path.join(local_tmpdir, 'testfile.txt'), 'r') as f:
+      self.assertEqual(f.read(), 'foo')
+
+  def testInstall(self):
+    path = self._MakeTempFile('foo')
+    self.assertRaises(device_errors.AdbCommandFailedError, self._adb.Install,
+                      path)
+
+  def testForward(self):
+    self.assertRaises(device_errors.AdbCommandFailedError, self._adb.Forward,
+                      0, 0)
+
+  def testUninstall(self):
+    self.assertRaises(device_errors.AdbCommandFailedError, self._adb.Uninstall,
+        'some.nonexistent.package')
+
+  def testRebootWaitForDevice(self):
+    self._adb.Reboot()
+    print 'waiting for device to reboot...'
+    while self._adb.GetState() == 'device':
+      time.sleep(1)
+    self._adb.WaitForDevice()
+    self.assertEqual(self._adb.GetState(), 'device')
+    print 'waiting for package manager...'
+    while 'package:' not in self._adb.Shell('pm path android'):
+      time.sleep(1)
+
+  def testRootRemount(self):
+    self._adb.Root()
+    while True:
+      try:
+        self._adb.Shell('start')
+        break
+      except device_errors.AdbCommandFailedError:
+        time.sleep(1)
+    self._adb.Remount()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/device/decorators.py b/build/android/pylib/device/decorators.py
new file mode 100644
index 0000000..9362f95
--- /dev/null
+++ b/build/android/pylib/device/decorators.py
@@ -0,0 +1,148 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Function/method decorators that provide timeout and retry logic.
+"""
+
+import functools
+import os
+import sys
+
+from pylib import constants
+from pylib.device import device_errors
+from pylib.utils import reraiser_thread
+from pylib.utils import timeout_retry
+
+# TODO(jbudorick) Remove once the DeviceUtils implementations are no longer
+#                 backed by AndroidCommands / android_testrunner.
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'third_party',
+                             'android_testrunner'))
+import errors as old_errors
+
+DEFAULT_TIMEOUT_ATTR = '_default_timeout'
+DEFAULT_RETRIES_ATTR = '_default_retries'
+
+
+def _TimeoutRetryWrapper(f, timeout_func, retries_func, pass_values=False):
+  """ Wraps a funcion with timeout and retry handling logic.
+
+  Args:
+    f: The function to wrap.
+    timeout_func: A callable that returns the timeout value.
+    retries_func: A callable that returns the retries value.
+    pass_values: If True, passes the values returned by |timeout_func| and
+                 |retries_func| to the wrapped function as 'timeout' and
+                 'retries' kwargs, respectively.
+  Returns:
+    The wrapped function.
+  """
+  @functools.wraps(f)
+  def TimeoutRetryWrapper(*args, **kwargs):
+    timeout = timeout_func(*args, **kwargs)
+    retries = retries_func(*args, **kwargs)
+    if pass_values:
+      kwargs['timeout'] = timeout
+      kwargs['retries'] = retries
+    def impl():
+      return f(*args, **kwargs)
+    try:
+      return timeout_retry.Run(impl, timeout, retries)
+    except old_errors.WaitForResponseTimedOutError as e:
+      raise device_errors.CommandTimeoutError(str(e)), None, (
+             sys.exc_info()[2])
+    except old_errors.DeviceUnresponsiveError as e:
+      raise device_errors.DeviceUnreachableError(str(e)), None, (
+             sys.exc_info()[2])
+    except reraiser_thread.TimeoutError as e:
+      raise device_errors.CommandTimeoutError(str(e)), None, (
+             sys.exc_info()[2])
+  return TimeoutRetryWrapper
+
+
+def WithTimeoutAndRetries(f):
+  """A decorator that handles timeouts and retries.
+
+  'timeout' and 'retries' kwargs must be passed to the function.
+
+  Args:
+    f: The function to decorate.
+  Returns:
+    The decorated function.
+  """
+  get_timeout = lambda *a, **kw: kw['timeout']
+  get_retries = lambda *a, **kw: kw['retries']
+  return _TimeoutRetryWrapper(f, get_timeout, get_retries)
+
+
+def WithExplicitTimeoutAndRetries(timeout, retries):
+  """Returns a decorator that handles timeouts and retries.
+
+  The provided |timeout| and |retries| values are always used.
+
+  Args:
+    timeout: The number of seconds to wait for the decorated function to
+             return. Always used.
+    retries: The number of times the decorated function should be retried on
+             failure. Always used.
+  Returns:
+    The actual decorator.
+  """
+  def decorator(f):
+    get_timeout = lambda *a, **kw: timeout
+    get_retries = lambda *a, **kw: retries
+    return _TimeoutRetryWrapper(f, get_timeout, get_retries)
+  return decorator
+
+
+def WithTimeoutAndRetriesDefaults(default_timeout, default_retries):
+  """Returns a decorator that handles timeouts and retries.
+
+  The provided |default_timeout| and |default_retries| values are used only
+  if timeout and retries values are not provided.
+
+  Args:
+    default_timeout: The number of seconds to wait for the decorated function
+                     to return. Only used if a 'timeout' kwarg is not passed
+                     to the decorated function.
+    default_retries: The number of times the decorated function should be
+                     retried on failure. Only used if a 'retries' kwarg is not
+                     passed to the decorated function.
+  Returns:
+    The actual decorator.
+  """
+  def decorator(f):
+    get_timeout = lambda *a, **kw: kw.get('timeout', default_timeout)
+    get_retries = lambda *a, **kw: kw.get('retries', default_retries)
+    return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
+  return decorator
+
+
+def WithTimeoutAndRetriesFromInstance(
+    default_timeout_name=DEFAULT_TIMEOUT_ATTR,
+    default_retries_name=DEFAULT_RETRIES_ATTR):
+  """Returns a decorator that handles timeouts and retries.
+
+  The provided |default_timeout_name| and |default_retries_name| are used to
+  get the default timeout value and the default retries value from the object
+  instance if timeout and retries values are not provided.
+
+  Note that this should only be used to decorate methods, not functions.
+
+  Args:
+    default_timeout_name: The name of the default timeout attribute of the
+                          instance.
+    default_retries_name: The name of the default retries attribute of the
+                          instance.
+  Returns:
+    The actual decorator.
+  """
+  def decorator(f):
+    def get_timeout(inst, *_args, **kwargs):
+      return kwargs.get('timeout', getattr(inst, default_timeout_name))
+    def get_retries(inst, *_args, **kwargs):
+      return kwargs.get('retries', getattr(inst, default_retries_name))
+    return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
+  return decorator
+
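+# A sketch of how the three public variants differ (the decorated functions
+# below are hypothetical):
+#
+#   @WithTimeoutAndRetries           # caller must pass timeout= and retries=
+#   def Explicit(timeout=None, retries=None): ...
+#
+#   @WithTimeoutAndRetriesDefaults(30, 3)    # kwargs optional; defaults used
+#   def Defaulted(timeout=None, retries=None): ...
+#
+#   class Device(object):
+#     _default_timeout = 30
+#     _default_retries = 3
+#     @WithTimeoutAndRetriesFromInstance()   # defaults read off the instance
+#     def Method(self, timeout=None, retries=None): ...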
diff --git a/build/android/pylib/device/decorators_test.py b/build/android/pylib/device/decorators_test.py
new file mode 100644
index 0000000..1ae8cb9
--- /dev/null
+++ b/build/android/pylib/device/decorators_test.py
@@ -0,0 +1,365 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for decorators.py.
+"""
+
+# pylint: disable=W0613
+
+import os
+import sys
+import time
+import traceback
+import unittest
+
+from pylib import constants
+from pylib.device import decorators
+from pylib.device import device_errors
+from pylib.utils import reraiser_thread
+
+# TODO(jbudorick) Remove once the DeviceUtils implementations are no longer
+#                 backed by AndroidCommands / android_testrunner.
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'third_party',
+                             'android_testrunner'))
+import errors as old_errors
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 3
+
+class DecoratorsTest(unittest.TestCase):
+  _decorated_function_called_count = 0
+
+  def testFunctionDecoratorDoesTimeouts(self):
+    """Tests that the base decorator handles the timeout logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetries
+    def alwaysTimesOut(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      time.sleep(100)
+
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut(timeout=1, retries=0)
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+  def testFunctionDecoratorDoesRetries(self):
+    """Tests that the base decorator handles the retries logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetries
+    def alwaysRaisesCommandFailedError(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError(timeout=30, retries=10)
+    self.assertEquals(11, DecoratorsTest._decorated_function_called_count)
+
+  def testFunctionDecoratorRequiresParams(self):
+    """Tests that the base decorator requires timeout and retries params."""
+    @decorators.WithTimeoutAndRetries
+    def requiresExplicitTimeoutAndRetries(timeout=None, retries=None):
+      return (timeout, retries)
+
+    with self.assertRaises(KeyError):
+      requiresExplicitTimeoutAndRetries()
+    with self.assertRaises(KeyError):
+      requiresExplicitTimeoutAndRetries(timeout=10)
+    with self.assertRaises(KeyError):
+      requiresExplicitTimeoutAndRetries(retries=0)
+    expected_timeout = 10
+    expected_retries = 1
+    (actual_timeout, actual_retries) = (
+        requiresExplicitTimeoutAndRetries(timeout=expected_timeout,
+                                          retries=expected_retries))
+    self.assertEquals(expected_timeout, actual_timeout)
+    self.assertEquals(expected_retries, actual_retries)
+
+  def testFunctionDecoratorTranslatesOldExceptions(self):
+    """Tests that the explicit decorator translates old exceptions."""
+    @decorators.WithTimeoutAndRetries
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc),
+          timeout=10, retries=1)
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc),
+          timeout=10, retries=1)
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testFunctionDecoratorTranslatesReraiserExceptions(self):
+    """Tests that the explicit decorator translates reraiser exceptions."""
+    @decorators.WithTimeoutAndRetries
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc),
+          timeout=10, retries=1)
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testDefaultsFunctionDecoratorDoesTimeouts(self):
+    """Tests that the defaults decorator handles timeout logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetriesDefaults(1, 0)
+    def alwaysTimesOut(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      time.sleep(100)
+
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut()
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+    DecoratorsTest._decorated_function_called_count = 0
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut(timeout=2)
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 2)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+  def testDefaultsFunctionDecoratorDoesRetries(self):
+    """Tests that the defaults decorator handles retries logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysRaisesCommandFailedError(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError()
+    self.assertEquals(11, DecoratorsTest._decorated_function_called_count)
+
+    DecoratorsTest._decorated_function_called_count = 0
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError(retries=5)
+    self.assertEquals(6, DecoratorsTest._decorated_function_called_count)
+
+  def testDefaultsFunctionDecoratorPassesValues(self):
+    """Tests that the defaults decorator passes timeout and retries kwargs."""
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysReturnsTimeouts(timeout=None, retries=None):
+      return timeout
+
+    self.assertEquals(30, alwaysReturnsTimeouts())
+    self.assertEquals(120, alwaysReturnsTimeouts(timeout=120))
+
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysReturnsRetries(timeout=None, retries=None):
+      return retries
+
+    self.assertEquals(10, alwaysReturnsRetries())
+    self.assertEquals(1, alwaysReturnsRetries(retries=1))
+
+  def testDefaultsFunctionDecoratorTranslatesOldExceptions(self):
+    """Tests that the explicit decorator translates old exceptions."""
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testDefaultsFunctionDecoratorTranslatesReraiserExceptions(self):
+    """Tests that the explicit decorator translates reraiser exceptions."""
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testExplicitFunctionDecoratorDoesTimeouts(self):
+    """Tests that the explicit decorator handles timeout logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithExplicitTimeoutAndRetries(1, 0)
+    def alwaysTimesOut():
+      DecoratorsTest._decorated_function_called_count += 1
+      time.sleep(100)
+
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut()
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+  def testExplicitFunctionDecoratorDoesRetries(self):
+    """Tests that the explicit decorator handles retries logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithExplicitTimeoutAndRetries(30, 10)
+    def alwaysRaisesCommandFailedError():
+      DecoratorsTest._decorated_function_called_count += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError()
+    self.assertEquals(11, DecoratorsTest._decorated_function_called_count)
+
+  def testExplicitDecoratorTranslatesOldExceptions(self):
+    """Tests that the explicit decorator translates old exceptions."""
+    @decorators.WithExplicitTimeoutAndRetries(30, 10)
+    def alwaysRaisesProvidedException(exception):
+      raise exception
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testExplicitDecoratorTranslatesReraiserExceptions(self):
+    """Tests that the explicit decorator translates reraiser exceptions."""
+    @decorators.WithExplicitTimeoutAndRetries(30, 10)
+    def alwaysRaisesProvidedException(exception):
+      raise exception
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  class _MethodDecoratorTestObject(object):
+    """An object suitable for testing the method decorator."""
+
+    def __init__(self, test_case, default_timeout=_DEFAULT_TIMEOUT,
+                 default_retries=_DEFAULT_RETRIES):
+      self._test_case = test_case
+      self.default_timeout = default_timeout
+      self.default_retries = default_retries
+      self.function_call_counters = {
+          'alwaysRaisesCommandFailedError': 0,
+          'alwaysTimesOut': 0,
+          'requiresExplicitTimeoutAndRetries': 0,
+      }
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysTimesOut(self, timeout=None, retries=None):
+      self.function_call_counters['alwaysTimesOut'] += 1
+      time.sleep(100)
+      self._test_case.assertFalse(True, msg='Failed to time out?')
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysRaisesCommandFailedError(self, timeout=None, retries=None):
+      self.function_call_counters['alwaysRaisesCommandFailedError'] += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    # pylint: disable=R0201
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysReturnsTimeout(self, timeout=None, retries=None):
+      return timeout
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysReturnsRetries(self, timeout=None, retries=None):
+      return retries
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysRaisesProvidedException(self, exception, timeout=None,
+                                      retries=None):
+      raise exception
+
+    # pylint: enable=R0201
+
+
+  def testMethodDecoratorDoesTimeout(self):
+    """Tests that the method decorator handles timeout logic."""
+    test_obj = self._MethodDecoratorTestObject(self)
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      try:
+        test_obj.alwaysTimesOut(timeout=1, retries=0)
+      except:
+        traceback.print_exc()
+        raise
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, test_obj.function_call_counters['alwaysTimesOut'])
+
+  def testMethodDecoratorDoesRetries(self):
+    """Tests that the method decorator handles retries logic."""
+    test_obj = self._MethodDecoratorTestObject(self)
+    with self.assertRaises(device_errors.CommandFailedError):
+      try:
+        test_obj.alwaysRaisesCommandFailedError(retries=10)
+      except:
+        traceback.print_exc()
+        raise
+    self.assertEquals(
+        11, test_obj.function_call_counters['alwaysRaisesCommandFailedError'])
+
+  def testMethodDecoratorPassesValues(self):
+    """Tests that the method decorator passes timeout and retries kwargs."""
+    test_obj = self._MethodDecoratorTestObject(
+        self, default_timeout=42, default_retries=31)
+    self.assertEquals(42, test_obj.alwaysReturnsTimeout())
+    self.assertEquals(41, test_obj.alwaysReturnsTimeout(timeout=41))
+    self.assertEquals(31, test_obj.alwaysReturnsRetries())
+    self.assertEquals(32, test_obj.alwaysReturnsRetries(retries=32))
+
+  def testMethodDecoratorTranslatesOldExceptions(self):
+    test_obj = self._MethodDecoratorTestObject(self)
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      test_obj.alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      test_obj.alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testMethodDecoratorTranslatesReraiserExceptions(self):
+    test_obj = self._MethodDecoratorTestObject(self)
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      test_obj.alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/device/device_blacklist.py b/build/android/pylib/device/device_blacklist.py
new file mode 100644
index 0000000..a141d62
--- /dev/null
+++ b/build/android/pylib/device/device_blacklist.py
@@ -0,0 +1,61 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import threading
+
+from pylib import constants
+
+_BLACKLIST_JSON = os.path.join(
+    constants.DIR_SOURCE_ROOT,
+    os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+    'bad_devices.json')
+
+# Note that this only protects against concurrent accesses to the blacklist
+# within a process.
+_blacklist_lock = threading.RLock()
+
+def ReadBlacklist():
+  """Reads the blacklist from the _BLACKLIST_JSON file.
+
+  Returns:
+    A list containing bad devices.
+  """
+  with _blacklist_lock:
+    if not os.path.exists(_BLACKLIST_JSON):
+      return []
+
+    with open(_BLACKLIST_JSON, 'r') as f:
+      return json.load(f)
+
+
+def WriteBlacklist(blacklist):
+  """Writes the provided blacklist to the _BLACKLIST_JSON file.
+
+  Args:
+    blacklist: list of bad devices to write to the _BLACKLIST_JSON file.
+  """
+  with _blacklist_lock:
+    with open(_BLACKLIST_JSON, 'w') as f:
+      json.dump(list(set(blacklist)), f)
+
+
+def ExtendBlacklist(devices):
+  """Adds devices to _BLACKLIST_JSON file.
+
+  Args:
+    devices: list of bad devices to be added to the _BLACKLIST_JSON file.
+  """
+  with _blacklist_lock:
+    blacklist = ReadBlacklist()
+    blacklist.extend(devices)
+    WriteBlacklist(blacklist)
+
+
+def ResetBlacklist():
+  """Erases the _BLACKLIST_JSON file if it exists."""
+  with _blacklist_lock:
+    if os.path.exists(_BLACKLIST_JSON):
+      os.remove(_BLACKLIST_JSON)
+
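+# A usage sketch (the serial below is hypothetical):
+#   ExtendBlacklist(['0123456789abcdef'])
+#   if '0123456789abcdef' in ReadBlacklist():
+#     pass  # skip this device for the rest of the run
+#   ResetBlacklist()  # wipe the list, e.g. at the start of a new build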
diff --git a/build/android/pylib/device/device_errors.py b/build/android/pylib/device/device_errors.py
new file mode 100644
index 0000000..acc7603
--- /dev/null
+++ b/build/android/pylib/device/device_errors.py
@@ -0,0 +1,46 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Exception classes raised by AdbWrapper and DeviceUtils.
+"""
+
+class BaseError(Exception):
+  """Base exception for all device and command errors."""
+  pass
+
+
+class CommandFailedError(BaseError):
+  """Exception for command failures."""
+
+  def __init__(self, msg, device=None):
+    super(CommandFailedError, self).__init__(
+        '%s%s' % ('(device: %s) ' % device if device else '', msg))
+
+
+class AdbCommandFailedError(CommandFailedError):
+  """Exception for adb command failures."""
+
+  def __init__(self, cmd, msg, device=None):
+    super(AdbCommandFailedError, self).__init__(
+        'adb command \'%s\' failed with message: \'%s\'' % (' '.join(cmd), msg),
+        device=device)
+
+
+class CommandTimeoutError(BaseError):
+  """Exception for command timeouts."""
+  pass
+
+
+class DeviceUnreachableError(BaseError):
+  """Exception for device unreachable failures."""
+  pass
+
+
+class NoDevicesError(BaseError):
+  """Exception for having no devices attached."""
+
+  def __init__(self):
+    super(NoDevicesError, self).__init__('No devices attached.')
+
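+# A sketch: everything above derives from BaseError, so callers can catch
+# narrowly or broadly (|device| and the command are hypothetical):
+#   try:
+#     device.RunShellCommand('ls /data')
+#   except CommandTimeoutError:
+#     pass  # e.g. retry, or blacklist the device
+#   except BaseError as e:
+#     logging.error('Device operation failed: %s', e)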
diff --git a/build/android/pylib/device/device_list.py b/build/android/pylib/device/device_list.py
new file mode 100644
index 0000000..0eb6acb
--- /dev/null
+++ b/build/android/pylib/device/device_list.py
@@ -0,0 +1,30 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module to keep track of devices across builds."""
+
+import os
+
+LAST_DEVICES_FILENAME = '.last_devices'
+LAST_MISSING_DEVICES_FILENAME = '.last_missing'
+
+
+def GetPersistentDeviceList(file_name):
+  """Returns a list of devices.
+
+  Args:
+    file_name: the file name containing a list of devices.
+
+  Returns:
+    List of device serial numbers that were on the bot.
+  """
+  with open(file_name) as f:
+    return f.read().splitlines()
+
+
+def WritePersistentDeviceList(file_name, device_list):
+  path = os.path.dirname(file_name)
+  if not os.path.exists(path):
+    os.makedirs(path)
+  with open(file_name, 'w') as f:
+    f.write('\n'.join(set(device_list)))
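+
+
+# A usage sketch (the serials below are hypothetical):
+#   WritePersistentDeviceList(LAST_DEVICES_FILENAME,
+#                             ['0123456789abcdef', 'fedcba9876543210'])
+#   previous = GetPersistentDeviceList(LAST_DEVICES_FILENAME)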
diff --git a/build/android/pylib/device/device_utils.py b/build/android/pylib/device/device_utils.py
new file mode 100644
index 0000000..fccdd61
--- /dev/null
+++ b/build/android/pylib/device/device_utils.py
@@ -0,0 +1,872 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a variety of device interactions based on adb.
+
+Eventually, this will be based on adb_wrapper.
+"""
+# pylint: disable=W0613
+
+import pipes
+import sys
+import time
+
+import pylib.android_commands
+from pylib.device import adb_wrapper
+from pylib.device import decorators
+from pylib.device import device_errors
+from pylib.utils import apk_helper
+from pylib.utils import parallelizer
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 3
+
+
+@decorators.WithExplicitTimeoutAndRetries(
+    _DEFAULT_TIMEOUT, _DEFAULT_RETRIES)
+def GetAVDs():
+  """Returns a list of Android Virtual Devices.
+
+  Returns:
+    A list containing the configured AVDs.
+  """
+  return pylib.android_commands.GetAVDs()
+
+
+@decorators.WithExplicitTimeoutAndRetries(
+    _DEFAULT_TIMEOUT, _DEFAULT_RETRIES)
+def RestartServer():
+  """Restarts the adb server.
+
+  Raises:
+    CommandFailedError if we fail to kill or restart the server.
+  """
+  pylib.android_commands.AndroidCommands().RestartAdbServer()
+
+
+class DeviceUtils(object):
+
+  def __init__(self, device, default_timeout=_DEFAULT_TIMEOUT,
+               default_retries=_DEFAULT_RETRIES):
+    """DeviceUtils constructor.
+
+    Args:
+      device: Either a device serial, an existing AdbWrapper instance, an
+              existing AndroidCommands instance, or nothing.
+      default_timeout: An integer containing the default number of seconds to
+                       wait for an operation to complete if no explicit value
+                       is provided.
+      default_retries: An integer containing the default number of times an
+                       operation should be retried on failure if no explicit
+                       value is provided.
+    """
+    self.old_interface = None
+    if isinstance(device, basestring):
+      self.old_interface = pylib.android_commands.AndroidCommands(device)
+    elif isinstance(device, adb_wrapper.AdbWrapper):
+      self.old_interface = pylib.android_commands.AndroidCommands(str(device))
+    elif isinstance(device, pylib.android_commands.AndroidCommands):
+      self.old_interface = device
+    elif not device:
+      self.old_interface = pylib.android_commands.AndroidCommands()
+    else:
+      raise ValueError('Unsupported type passed for argument "device"')
+    self._default_timeout = default_timeout
+    self._default_retries = default_retries
+    assert hasattr(self, decorators.DEFAULT_TIMEOUT_ATTR)
+    assert hasattr(self, decorators.DEFAULT_RETRIES_ATTR)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def IsOnline(self, timeout=None, retries=None):
+    """Checks whether the device is online.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if the device is online, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    return self.old_interface.IsOnline()
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def HasRoot(self, timeout=None, retries=None):
+    """Checks whether or not adbd has root privileges.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if adbd has root privileges, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    return self._HasRootImpl()
+
+  def _HasRootImpl(self):
+    """Implementation of HasRoot.
+
+    This is split from HasRoot to allow other DeviceUtils methods to call
+    HasRoot without spawning a new timeout thread.
+
+    Returns:
+      Same as for |HasRoot|.
+
+    Raises:
+      Same as for |HasRoot|.
+    """
+    return self.old_interface.IsRootEnabled()
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def EnableRoot(self, timeout=None, retries=None):
+    """Restarts adbd with root privileges.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if root could not be enabled.
+      CommandTimeoutError on timeout.
+    """
+    if not self.old_interface.EnableAdbRoot():
+      raise device_errors.CommandFailedError(
+          'Could not enable root.', device=str(self))
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetExternalStoragePath(self, timeout=None, retries=None):
+    """Get the device's path to its SD card.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The device's path to its SD card.
+
+    Raises:
+      CommandFailedError if the external storage path could not be determined.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    try:
+      return self.old_interface.GetExternalStorage()
+    except AssertionError as e:
+      raise device_errors.CommandFailedError(
+          str(e), device=str(self)), None, sys.exc_info()[2]
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def WaitUntilFullyBooted(self, wifi=False, timeout=None, retries=None):
+    """Wait for the device to fully boot.
+
+    This means waiting for the device to boot, the package manager to be
+    available, and the SD card to be ready. It can optionally mean waiting
+    for wifi to come up, too.
+
+    Args:
+      wifi: A boolean indicating if we should wait for wifi to come up or not.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError if one of the component waits times out.
+      DeviceUnreachableError if the device becomes unresponsive.
+    """
+    self._WaitUntilFullyBootedImpl(wifi=wifi, timeout=timeout)
+
+  def _WaitUntilFullyBootedImpl(self, wifi=False, timeout=None):
+    """Implementation of WaitUntilFullyBooted.
+
+    This is split from WaitUntilFullyBooted to allow other DeviceUtils methods
+    to call WaitUntilFullyBooted without spawning a new timeout thread.
+
+    TODO(jbudorick) Remove the timeout parameter once this is no longer
+    implemented via AndroidCommands.
+
+    Args:
+      wifi: Same as for |WaitUntilFullyBooted|.
+      timeout: timeout in seconds
+
+    Raises:
+      Same as for |WaitUntilFullyBooted|.
+    """
+    if timeout is None:
+      timeout = self._default_timeout
+    self.old_interface.WaitForSystemBootCompleted(timeout)
+    self.old_interface.WaitForDevicePm()
+    self.old_interface.WaitForSdCardReady(timeout)
+    if wifi:
+      while 'Wi-Fi is enabled' not in (
+          self._RunShellCommandImpl('dumpsys wifi')):
+        time.sleep(1)
+
+  REBOOT_DEFAULT_TIMEOUT = 10 * _DEFAULT_TIMEOUT
+  REBOOT_DEFAULT_RETRIES = _DEFAULT_RETRIES
+
+  @decorators.WithTimeoutAndRetriesDefaults(
+      REBOOT_DEFAULT_TIMEOUT,
+      REBOOT_DEFAULT_RETRIES)
+  def Reboot(self, block=True, timeout=None, retries=None):
+    """Reboot the device.
+
+    Args:
+      block: A boolean indicating if we should wait for the reboot to complete.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    self.old_interface.Reboot()
+    if block:
+      self._WaitUntilFullyBootedImpl(timeout=timeout)
+
+  INSTALL_DEFAULT_TIMEOUT = 4 * _DEFAULT_TIMEOUT
+  INSTALL_DEFAULT_RETRIES = _DEFAULT_RETRIES
+
+  @decorators.WithTimeoutAndRetriesDefaults(
+      INSTALL_DEFAULT_TIMEOUT,
+      INSTALL_DEFAULT_RETRIES)
+  def Install(self, apk_path, reinstall=False, timeout=None, retries=None):
+    """Install an APK.
+
+    Noop if an identical APK is already installed.
+
+    Args:
+      apk_path: A string containing the path to the APK to install.
+      reinstall: A boolean indicating if we should keep any existing app data.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the installation fails.
+      CommandTimeoutError if the installation times out.
+      DeviceUnreachableError on missing device.
+    """
+    package_name = apk_helper.GetPackageName(apk_path)
+    device_path = self.old_interface.GetApplicationPath(package_name)
+    if device_path is not None:
+      files_changed = self.old_interface.GetFilesChanged(
+          apk_path, device_path, ignore_filenames=True)
+      if len(files_changed) > 0:
+        should_install = True
+        if not reinstall:
+          out = self.old_interface.Uninstall(package_name)
+          for line in out.splitlines():
+            if 'Failure' in line:
+              raise device_errors.CommandFailedError(
+                  line.strip(), device=str(self))
+      else:
+        should_install = False
+    else:
+      should_install = True
+    if should_install:
+      try:
+        out = self.old_interface.Install(apk_path, reinstall=reinstall)
+        for line in out.splitlines():
+          if 'Failure' in line:
+            raise device_errors.CommandFailedError(
+                line.strip(), device=str(self))
+      except AssertionError as e:
+        raise device_errors.CommandFailedError(
+            str(e), device=str(self)), None, sys.exc_info()[2]
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def RunShellCommand(self, cmd, check_return=False, as_root=False,
+                      timeout=None, retries=None):
+    """Run an ADB shell command.
+
+    TODO(jbudorick) Switch the default value of check_return to True after
+    AndroidCommands is gone.
+
+    Args:
+      cmd: A list containing the command to run on the device and any arguments.
+      check_return: A boolean indicating whether or not the return code should
+                    be checked.
+      as_root: A boolean indicating whether the shell command should be run
+               with root privileges.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The output of the command.
+
+    Raises:
+      CommandFailedError if check_return is True and the return code is
+        nonzero.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    return self._RunShellCommandImpl(cmd, check_return=check_return,
+                                     as_root=as_root, timeout=timeout)
+
+  def _RunShellCommandImpl(self, cmd, check_return=False, as_root=False,
+                           timeout=None):
+    """Implementation of RunShellCommand.
+
+    This is split from RunShellCommand to allow other DeviceUtils methods to
+    call RunShellCommand without spawning a new timeout thread.
+
+    TODO(jbudorick) Remove the timeout parameter once this is no longer
+    implemented via AndroidCommands.
+
+    Args:
+      cmd: Same as for |RunShellCommand|.
+      check_return: Same as for |RunShellCommand|.
+      as_root: Same as for |RunShellCommand|.
+      timeout: timeout in seconds
+
+    Raises:
+      Same as for |RunShellCommand|.
+
+    Returns:
+      Same as for |RunShellCommand|.
+    """
+    if isinstance(cmd, list):
+      cmd = ' '.join(cmd)
+    if as_root and not self._HasRootImpl():
+      cmd = 'su -c %s' % cmd
+    if check_return:
+      code, output = self.old_interface.GetShellCommandStatusAndOutput(
+          cmd, timeout_time=timeout)
+      if int(code) != 0:
+        raise device_errors.AdbCommandFailedError(
+            cmd.split(), 'Nonzero exit code (%d)' % code, device=str(self))
+    else:
+      output = self.old_interface.RunShellCommand(cmd, timeout_time=timeout)
+    return output
+
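+  # Note: on builds where adbd is not running as root, a command requested
+  # with as_root=True is rewritten as "su -c <cmd>"; e.g. a hypothetical
+  #   device.RunShellCommand(['ls', '/data'], as_root=True)
+  # runs "su -c ls /data" on the device.
+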
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def KillAll(self, process_name, signum=9, as_root=False, blocking=False,
+              timeout=None, retries=None):
+    """Kill all processes with the given name on the device.
+
+    Args:
+      process_name: A string containing the name of the process to kill.
+      signum: An integer containing the signal number to send to kill. Defaults
+              to 9 (SIGKILL).
+      as_root: A boolean indicating whether the kill should be executed with
+               root privileges.
+      blocking: A boolean indicating whether we should wait until all processes
+                with the given |process_name| are dead.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The number of processes that were signaled.
+
+    Raises:
+      CommandFailedError if no process was killed.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    pids = self._GetPidsImpl(process_name)
+    if not pids:
+      raise device_errors.CommandFailedError(
+          'No process "%s"' % process_name, device=str(self))
+
+    cmd = 'kill -%d %s' % (signum, ' '.join(pids.values()))
+    self._RunShellCommandImpl(cmd, as_root=as_root)
+
+    if blocking:
+      wait_period = 0.1
+      while self._GetPidsImpl(process_name):
+        time.sleep(wait_period)
+
+    return len(pids)
+
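+  # Example (editor's sketch; the process name is hypothetical): send SIGTERM
+  # and wait for every matching process to die:
+  #   import signal
+  #   killed = device.KillAll('org.chromium.some_process',
+  #                           signum=signal.SIGTERM, blocking=True)
+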
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def StartActivity(self, intent, blocking=False, trace_file_name=None,
+                    force_stop=False, timeout=None, retries=None):
+    """Start package's activity on the device.
+
+    Args:
+      intent: An Intent to send.
+      blocking: A boolean indicating whether we should wait for the activity to
+                finish launching.
+      trace_file_name: If present, a string that both indicates that we want to
+                       profile the activity and contains the path to which the
+                       trace should be saved.
+      force_stop: A boolean indicating whether we should stop the activity
+                  before starting it.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the activity could not be started.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    single_category = (intent.category[0] if isinstance(intent.category, list)
+                                          else intent.category)
+    output = self.old_interface.StartActivity(
+        intent.package, intent.activity, wait_for_completion=blocking,
+        action=intent.action, category=single_category, data=intent.data,
+        extras=intent.extras, trace_file_name=trace_file_name,
+        force_stop=force_stop, flags=intent.flags)
+    for l in output:
+      if l.startswith('Error:'):
+        raise device_errors.CommandFailedError(l, device=str(self))
+
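+  # Example (editor's sketch; package and activity names are hypothetical),
+  # mirroring the unit tests for this method:
+  #   i = intent.Intent(action='android.intent.action.VIEW',
+  #                     package='this.is.a.test.package', activity='.Main')
+  #   device.StartActivity(i, blocking=True, force_stop=True)
+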
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def BroadcastIntent(self, intent, timeout=None, retries=None):
+    """Send a broadcast intent.
+
+    Args:
+      intent: An Intent to broadcast.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    package, old_intent = intent.action.rsplit('.', 1)
+    if intent.extras is None:
+      args = []
+    else:
+      args = ['-e %s%s' % (k, ' "%s"' % v if v else '')
+              for k, v in intent.extras.items() if len(k) > 0]
+    self.old_interface.BroadcastIntent(package, old_intent, *args)
+
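+  # Example (editor's sketch): broadcasting with a single extra; note that the
+  # action is split on its last '.' into a package prefix and an intent name:
+  #   device.BroadcastIntent(intent.Intent(
+  #       action='test.package.with.an.INTENT', extras={'foo': 'bar'}))
+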
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GoHome(self, timeout=None, retries=None):
+    """Return to the home screen.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    self.old_interface.GoHome()
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def ForceStop(self, package, timeout=None, retries=None):
+    """Close the application.
+
+    Args:
+      package: A string containing the name of the package to stop.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    self.old_interface.CloseApplication(package)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def ClearApplicationState(self, package, timeout=None, retries=None):
+    """Clear all state for the given package.
+
+    Args:
+      package: A string containing the name of the package to stop.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    self.old_interface.ClearApplicationState(package)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SendKeyEvent(self, keycode, timeout=None, retries=None):
+    """Sends a keycode to the device.
+
+    See: http://developer.android.com/reference/android/view/KeyEvent.html
+
+    Args:
+      keycode: An integer keycode to send to the device.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    self.old_interface.SendKeyEvent(keycode)
+
+  PUSH_CHANGED_FILES_DEFAULT_TIMEOUT = 10 * _DEFAULT_TIMEOUT
+  PUSH_CHANGED_FILES_DEFAULT_RETRIES = _DEFAULT_RETRIES
+
+  @decorators.WithTimeoutAndRetriesDefaults(
+      PUSH_CHANGED_FILES_DEFAULT_TIMEOUT,
+      PUSH_CHANGED_FILES_DEFAULT_RETRIES)
+  def PushChangedFiles(self, host_path, device_path, timeout=None,
+                       retries=None):
+    """Push files to the device, skipping files that don't need updating.
+
+    Args:
+      host_path: A string containing the absolute path to the file or directory
+                 on the host that should be minimally pushed to the device.
+      device_path: A string containing the absolute path of the destination on
+                   the device.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    self.old_interface.PushIfNeeded(host_path, device_path)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def FileExists(self, device_path, timeout=None, retries=None):
+    """Checks whether the given file exists on the device.
+
+    Args:
+      device_path: A string containing the absolute path to the file on the
+                   device.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if the file exists on the device, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    return self._FileExistsImpl(device_path)
+
+  def _FileExistsImpl(self, device_path):
+    """Implementation of FileExists.
+
+    This is split from FileExists to allow other DeviceUtils methods to call
+    FileExists without spawning a new timeout thread.
+
+    Args:
+      device_path: Same as for |FileExists|.
+
+    Returns:
+      True if the file exists on the device, False otherwise.
+
+    Raises:
+      Same as for |FileExists|.
+    """
+    return self.old_interface.FileExistsOnDevice(device_path)
+
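+  # Example (editor's sketch; the path is hypothetical):
+  #   if device.FileExists('/data/local/tmp/ready'):
+  #     ...  # proceed once the flag file appears on the device
+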
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def PullFile(self, device_path, host_path, timeout=None, retries=None):
+    """Pull a file from the device.
+
+    Args:
+      device_path: A string containing the absolute path of the file to pull
+                   from the device.
+      host_path: A string containing the absolute path of the destination on
+                 the host.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError on timeout.
+    """
+    try:
+      self.old_interface.PullFileFromDevice(device_path, host_path)
+    except AssertionError as e:
+      raise device_errors.CommandFailedError(
+          str(e), device=str(self)), None, sys.exc_info()[2]
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def ReadFile(self, device_path, as_root=False, timeout=None, retries=None):
+    """Reads the contents of a file from the device.
+
+    Args:
+      device_path: A string containing the absolute path of the file to read
+                   from the device.
+      as_root: A boolean indicating whether the read should be executed with
+               root privileges.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The contents of the file at |device_path| as a list of lines.
+
+    Raises:
+      CommandFailedError if the file can't be read.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    # TODO(jbudorick) Evaluate whether we want to return a list of lines after
+    # the implementation switch, and whether a missing file should raise an
+    # exception.
+    if as_root:
+      if not self.old_interface.CanAccessProtectedFileContents():
+        raise device_errors.CommandFailedError(
+            'Cannot read from %s with root privileges.' % device_path)
+      return self.old_interface.GetProtectedFileContents(device_path)
+    else:
+      return self.old_interface.GetFileContents(device_path)
+
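+  # Example (editor's sketch; the path is hypothetical): under the current
+  # implementation the result is a list of lines, and protected paths require
+  # as_root=True:
+  #   lines = device.ReadFile('/data/misc/wifi/wpa_supplicant.conf',
+  #                           as_root=True)
+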
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def WriteFile(self, device_path, contents, as_root=False, timeout=None,
+                retries=None):
+    """Writes |contents| to a file on the device.
+
+    Args:
+      device_path: A string containing the absolute path to the file to write
+                   on the device.
+      contents: A string containing the data to write to the device.
+      as_root: A boolean indicating whether the write should be executed with
+               root privileges.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the file could not be written on the device.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    if as_root:
+      if not self.old_interface.CanAccessProtectedFileContents():
+        raise device_errors.CommandFailedError(
+            'Cannot write to %s with root privileges.' % device_path)
+      self.old_interface.SetProtectedFileContents(device_path, contents)
+    else:
+      self.old_interface.SetFileContents(device_path, contents)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def WriteTextFile(self, device_path, text, as_root=False, timeout=None,
+                    retries=None):
+    """Writes |text| to a file on the device.
+
+    Assuming that |text| is a small string, this is typically more efficient
+    than |WriteFile|, as no files are pushed to the device.
+
+    Args:
+      device_path: A string containing the absolute path to the file to write
+                   on the device.
+      text: A short string of text to write to the file on the device.
+      as_root: A boolean indicating whether the write should be executed with
+               root privileges.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the file could not be written on the device.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    self._RunShellCommandImpl('echo {1} > {0}'.format(device_path,
+        pipes.quote(text)), check_return=True, as_root=as_root)
+
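+  # Example (editor's sketch; the path and flags are hypothetical): the text
+  # is shell-quoted and echoed into the file, so this is best suited to short
+  # flag files:
+  #   device.WriteTextFile('/data/local/tmp/flags', '--foo --bar=1',
+  #                        as_root=True)
+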
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def Ls(self, device_path, timeout=None, retries=None):
+    """Lists the contents of a directory on the device.
+
+    Args:
+      device_path: A string containing the path of the directory on the device
+                   to list.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The contents of the directory specified by |device_path|.
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    return self.old_interface.ListPathContents(device_path)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SetJavaAsserts(self, enabled, timeout=None, retries=None):
+    """Enables or disables Java asserts.
+
+    Args:
+      enabled: A boolean indicating whether Java asserts should be enabled
+               or disabled.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if the device-side property changed and a restart is required as a
+      result, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    return self.old_interface.SetJavaAssertsEnabled(enabled)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetProp(self, property_name, timeout=None, retries=None):
+    """Gets a property from the device.
+
+    Args:
+      property_name: A string containing the name of the property to get from
+                     the device.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The value of the device's |property_name| property.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    return self.old_interface.system_properties[property_name]
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SetProp(self, property_name, value, timeout=None, retries=None):
+    """Sets a property on the device.
+
+    Args:
+      property_name: A string containing the name of the property to set on
+                     the device.
+      value: A string containing the value to set to the property on the
+             device.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    self.old_interface.system_properties[property_name] = value
+
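+  # Example (editor's sketch): properties round-trip through
+  # old_interface.system_properties:
+  #   device.SetProp('service.adb.root', '0')
+  #   assert device.GetProp('service.adb.root') == '0'
+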
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetPids(self, process_name, timeout=None, retries=None):
+    """Returns the PIDs of processes with the given name.
+
+    Note that the |process_name| is often the package name.
+
+    Args:
+      process_name: A string containing the process name to get the PIDs for.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A dict mapping process name to PID for each process whose name contains
+      the provided |process_name|.
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    return self._GetPidsImpl(process_name)
+
+  def _GetPidsImpl(self, process_name):
+    """Implementation of GetPids.
+
+    This is split from GetPids to allow other DeviceUtils methods to call
+    GetPids without spawning a new timeout thread.
+
+    Args:
+      process_name: A string containing the process name to get the PIDs for.
+
+    Returns:
+      A dict mapping process name to PID for each process whose name contains
+      the provided |process_name|.
+
+    Raises:
+      DeviceUnreachableError on missing device.
+    """
+    procs_pids = {}
+    for line in self._RunShellCommandImpl('ps'):
+      try:
+        ps_data = line.split()
+        if process_name in ps_data[-1]:
+          procs_pids[ps_data[-1]] = ps_data[1]
+      except IndexError:
+        pass
+    return procs_pids
+
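+  # Example (editor's sketch): the result maps full process names (the last
+  # 'ps' column) to PID strings, e.g.
+  #   device.GetPids('test.process')
+  #   # -> {'this.is.a.test.process': '1234'}
+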
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def TakeScreenshot(self, host_path=None, timeout=None, retries=None):
+    """Takes a screenshot of the device.
+
+    Args:
+      host_path: A string containing the path on the host to save the
+                 screenshot to. If None, a file name will be generated.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The name of the file on the host to which the screenshot was saved.
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    return self.old_interface.TakeScreenshot(host_path)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetIOStats(self, timeout=None, retries=None):
+    """Gets cumulative disk IO stats since boot for all processes.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A dict containing |num_reads|, |num_writes|, |read_ms|, and |write_ms|.
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    return self.old_interface.GetIoStats()
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetMemoryUsageForPid(self, pid, timeout=None, retries=None):
+    """Gets the memory usage for the given PID.
+
+    Args:
+      pid: PID of the process.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A 2-tuple containing:
+        - A dict containing the overall memory usage statistics for the PID.
+        - A dict containing memory usage statistics broken down by mapping.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    return self.old_interface.GetMemoryUsageForPid(pid)
+
+  def __str__(self):
+    """Returns the device serial."""
+    s = self.old_interface.GetDevice()
+    if not s:
+      s = self.old_interface.Adb().GetSerialNumber()
+      if s == 'unknown':
+        raise device_errors.NoDevicesError()
+    return s
+
+  @staticmethod
+  def parallel(devices=None, async=False):
+    """Creates a Parallelizer to operate over the provided list of devices.
+
+    If |devices| is either |None| or an empty list, the Parallelizer will
+    operate over all attached devices.
+
+    Args:
+      devices: A list of either DeviceUtils instances or objects from
+               which DeviceUtils instances can be constructed. If None,
+               all attached devices will be used.
+      async: If true, returns a Parallelizer that runs operations
+             asynchronously.
+
+    Returns:
+      A Parallelizer operating over |devices|.
+    """
+    if not devices:
+      devices = pylib.android_commands.GetAttachedDevices()
+    parallelizer_type = (parallelizer.Parallelizer if async
+                         else parallelizer.SyncParallelizer)
+    return parallelizer_type([
+        d if isinstance(d, DeviceUtils) else DeviceUtils(d)
+        for d in devices])
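+
+  # Example (editor's sketch): run a command on every attached device; the
+  # exact join semantics depend on parallelizer.Parallelizer:
+  #   DeviceUtils.parallel().RunShellCommand('input keyevent 3')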
+
diff --git a/build/android/pylib/device/device_utils_test.py b/build/android/pylib/device/device_utils_test.py
new file mode 100755
index 0000000..42dc5b2
--- /dev/null
+++ b/build/android/pylib/device/device_utils_test.py
@@ -0,0 +1,1529 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of device_utils.py (mostly DeviceUtils).
+"""
+
+# pylint: disable=C0321
+# pylint: disable=W0212
+# pylint: disable=W0613
+
+import collections
+import datetime
+import logging
+import os
+import re
+import signal
+import sys
+import unittest
+
+from pylib import android_commands
+from pylib import constants
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.device import intent
+
+# RunCommand from third_party/android_testrunner/run_command.py is mocked
+# below, so its path needs to be in sys.path.
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'android_testrunner'))
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+
+class DeviceUtilsTest(unittest.TestCase):
+
+  def testInitWithStr(self):
+    serial_as_str = str('0123456789abcdef')
+    d = device_utils.DeviceUtils('0123456789abcdef')
+    self.assertEqual(serial_as_str, d.old_interface.GetDevice())
+
+  def testInitWithUnicode(self):
+    serial_as_unicode = unicode('fedcba9876543210')
+    d = device_utils.DeviceUtils(serial_as_unicode)
+    self.assertEqual(serial_as_unicode, d.old_interface.GetDevice())
+
+  def testInitWithAdbWrapper(self):
+    serial = '123456789abcdef0'
+    a = adb_wrapper.AdbWrapper(serial)
+    d = device_utils.DeviceUtils(a)
+    self.assertEqual(serial, d.old_interface.GetDevice())
+
+  def testInitWithAndroidCommands(self):
+    serial = '0fedcba987654321'
+    a = android_commands.AndroidCommands(device=serial)
+    d = device_utils.DeviceUtils(a)
+    self.assertEqual(serial, d.old_interface.GetDevice())
+
+  def testInitWithNone(self):
+    d = device_utils.DeviceUtils(None)
+    self.assertIsNone(d.old_interface.GetDevice())
+
+
+class _PatchedFunction(object):
+  def __init__(self, patched=None, mocked=None):
+    self.patched = patched
+    self.mocked = mocked
+
+
+class MockFileSystem(object):
+
+  @staticmethod
+  def osStatResult(
+      st_mode=None, st_ino=None, st_dev=None, st_nlink=None, st_uid=None,
+      st_gid=None, st_size=None, st_atime=None, st_mtime=None, st_ctime=None):
+    MockOSStatResult = collections.namedtuple('MockOSStatResult', [
+        'st_mode', 'st_ino', 'st_dev', 'st_nlink', 'st_uid', 'st_gid',
+        'st_size', 'st_atime', 'st_mtime', 'st_ctime'])
+    return MockOSStatResult(st_mode, st_ino, st_dev, st_nlink, st_uid, st_gid,
+                            st_size, st_atime, st_mtime, st_ctime)
+
+  MOCKED_FUNCTIONS = [
+    ('os.path.abspath', ''),
+    ('os.path.dirname', ''),
+    ('os.path.exists', False),
+    ('os.path.getsize', 0),
+    ('os.path.isdir', False),
+    ('os.stat', osStatResult.__func__()),
+    ('os.walk', []),
+  ]
+
+  def _get(self, mocked, path, default_val):
+    if self._verbose:
+      logging.debug('%s(%s)' % (mocked, path))
+    return (self.mock_file_info[path][mocked]
+            if path in self.mock_file_info
+            else default_val)
+
+  def _patched(self, target, default_val=None):
+    r = lambda f: self._get(target, f, default_val)
+    return _PatchedFunction(patched=mock.patch(target, side_effect=r))
+
+  def __init__(self, verbose=False):
+    self.mock_file_info = {}
+    self._patched_functions = [
+        self._patched(m, d) for m, d in type(self).MOCKED_FUNCTIONS]
+    self._verbose = verbose
+
+  def addMockFile(self, path, **kw):
+    self._addMockThing(path, False, **kw)
+
+  def addMockDirectory(self, path, **kw):
+    self._addMockThing(path, True, **kw)
+
+  def _addMockThing(self, path, is_dir, size=0, stat=None, walk=None):
+    if stat is None:
+      stat = self.osStatResult()
+    if walk is None:
+      walk = []
+    self.mock_file_info[path] = {
+      'os.path.abspath': path,
+      'os.path.dirname': '/' + '/'.join(path.strip('/').split('/')[:-1]),
+      'os.path.exists': True,
+      'os.path.isdir': is_dir,
+      'os.path.getsize': size,
+      'os.stat': stat,
+      'os.walk': walk,
+    }
+
+  def __enter__(self):
+    for p in self._patched_functions:
+      p.mocked = p.patched.__enter__()
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    for p in self._patched_functions:
+      p.patched.__exit__()
+
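+# Example (editor's sketch): MockFileSystem is used as a context manager; the
+# os.path.* and os.stat mocks then answer from mock_file_info:
+#   mock_fs = MockFileSystem()
+#   mock_fs.addMockFile('/test/host/path', size=100)
+#   with mock_fs:
+#     ...  # code under test sees the mocked filesystem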
+
+class DeviceUtilsOldImplTest(unittest.TestCase):
+
+  class AndroidCommandsCalls(object):
+
+    def __init__(self, test_case, cmd_ret, comp):
+      self._cmds = cmd_ret
+      self._comp = comp
+      self._run_command = _PatchedFunction()
+      self._test_case = test_case
+      self._total_received = 0
+
+    def __enter__(self):
+      self._run_command.patched = mock.patch(
+          'run_command.RunCommand',
+          side_effect=lambda c, **kw: self._ret(c))
+      self._run_command.mocked = self._run_command.patched.__enter__()
+
+    def _ret(self, actual_cmd):
+      if sys.exc_info()[0] is None:
+        on_failure_fmt = ('\n'
+                          '  received command: %s\n'
+                          '  expected command: %s')
+        self._test_case.assertGreater(
+            len(self._cmds), self._total_received,
+            msg=on_failure_fmt % (actual_cmd, None))
+        expected_cmd, ret = self._cmds[self._total_received]
+        self._total_received += 1
+        self._test_case.assertTrue(
+            self._comp(expected_cmd, actual_cmd),
+            msg=on_failure_fmt % (actual_cmd, expected_cmd))
+        return ret
+      return ''
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+      self._run_command.patched.__exit__(exc_type, exc_val, exc_tb)
+      if exc_type is None:
+        on_failure = "adb commands don't match.\nExpected:%s\nActual:%s" % (
+            ''.join('\n  %s' % c for c, _ in self._cmds),
+            ''.join('\n  %s' % a[0]
+                    for _, a, kw in self._run_command.mocked.mock_calls))
+        self._test_case.assertEqual(
+          len(self._cmds), len(self._run_command.mocked.mock_calls),
+          msg=on_failure)
+        for (expected_cmd, _r), (_n, actual_args, actual_kwargs) in zip(
+            self._cmds, self._run_command.mocked.mock_calls):
+          self._test_case.assertEqual(1, len(actual_args), msg=on_failure)
+          self._test_case.assertTrue(self._comp(expected_cmd, actual_args[0]),
+                                     msg=on_failure)
+          self._test_case.assertTrue('timeout_time' in actual_kwargs,
+                                     msg=on_failure)
+          self._test_case.assertTrue('retry_count' in actual_kwargs,
+                                     msg=on_failure)
+
+  def assertNoAdbCalls(self):
+    return type(self).AndroidCommandsCalls(self, [], str.__eq__)
+
+  def assertCalls(self, cmd, ret, comp=str.__eq__):
+    return type(self).AndroidCommandsCalls(self, [(cmd, ret)], comp)
+
+  def assertCallsSequence(self, cmd_ret, comp=str.__eq__):
+    return type(self).AndroidCommandsCalls(self, cmd_ret, comp)
+
+  def setUp(self):
+    self.device = device_utils.DeviceUtils(
+        '0123456789abcdef', default_timeout=1, default_retries=0)
+
+
+class DeviceUtilsIsOnlineTest(DeviceUtilsOldImplTest):
+
+  def testIsOnline_true(self):
+    with self.assertCalls('adb -s 0123456789abcdef devices',
+                                 '00123456789abcdef  device\r\n'):
+      self.assertTrue(self.device.IsOnline())
+
+  def testIsOnline_false(self):
+    with self.assertCalls('adb -s 0123456789abcdef devices', '\r\n'):
+      self.assertFalse(self.device.IsOnline())
+
+
+class DeviceUtilsHasRootTest(DeviceUtilsOldImplTest):
+
+  def testHasRoot_true(self):
+    with self.assertCalls("adb -s 0123456789abcdef shell 'ls /root'",
+                                 'foo\r\n'):
+      self.assertTrue(self.device.HasRoot())
+
+  def testHasRoot_false(self):
+    with self.assertCalls("adb -s 0123456789abcdef shell 'ls /root'",
+                                 'Permission denied\r\n'):
+      self.assertFalse(self.device.HasRoot())
+
+
+class DeviceUtilsEnableRootTest(DeviceUtilsOldImplTest):
+
+  def testEnableRoot_succeeds(self):
+    with self.assertCallsSequence([
+        ('adb -s 0123456789abcdef shell getprop ro.build.type',
+         'userdebug\r\n'),
+        ('adb -s 0123456789abcdef root', 'restarting adbd as root\r\n'),
+        ('adb -s 0123456789abcdef wait-for-device', ''),
+        ('adb -s 0123456789abcdef wait-for-device', '')]):
+      self.device.EnableRoot()
+
+  def testEnableRoot_userBuild(self):
+    with self.assertCallsSequence([
+        ('adb -s 0123456789abcdef shell getprop ro.build.type', 'user\r\n')]):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.EnableRoot()
+
+  def testEnableRoot_rootFails(self):
+    with self.assertCallsSequence([
+        ('adb -s 0123456789abcdef shell getprop ro.build.type',
+         'userdebug\r\n'),
+        ('adb -s 0123456789abcdef root', 'no\r\n'),
+        ('adb -s 0123456789abcdef wait-for-device', '')]):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.EnableRoot()
+
+
+class DeviceUtilsGetExternalStoragePathTest(DeviceUtilsOldImplTest):
+
+  def testGetExternalStoragePath_succeeds(self):
+    fakeStoragePath = '/fake/storage/path'
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'echo $EXTERNAL_STORAGE'",
+        '%s\r\n' % fakeStoragePath):
+      self.assertEquals(fakeStoragePath,
+                        self.device.GetExternalStoragePath())
+
+  def testGetExternalStoragePath_fails(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'echo $EXTERNAL_STORAGE'", '\r\n'):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.GetExternalStoragePath()
+
+
+class DeviceUtilsWaitUntilFullyBootedTest(DeviceUtilsOldImplTest):
+
+  def testWaitUntilFullyBooted_succeedsNoWifi(self):
+    with self.assertCallsSequence([
+        # AndroidCommands.WaitForSystemBootCompleted
+        ('adb -s 0123456789abcdef wait-for-device', ''),
+        ('adb -s 0123456789abcdef shell getprop sys.boot_completed', '1\r\n'),
+        # AndroidCommands.WaitForDevicePm
+        ('adb -s 0123456789abcdef wait-for-device', ''),
+        ('adb -s 0123456789abcdef shell pm path android',
+         'package:this.is.a.test.package'),
+        # AndroidCommands.WaitForSdCardReady
+        ("adb -s 0123456789abcdef shell 'echo $EXTERNAL_STORAGE'",
+         '/fake/storage/path'),
+        ("adb -s 0123456789abcdef shell 'ls /fake/storage/path'",
+         'nothing\r\n')
+        ]):
+      self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_succeedsWithWifi(self):
+    with self.assertCallsSequence([
+        # AndroidCommands.WaitForSystemBootCompleted
+        ('adb -s 0123456789abcdef wait-for-device', ''),
+        ('adb -s 0123456789abcdef shell getprop sys.boot_completed', '1\r\n'),
+        # AndroidCommands.WaitForDevicePm
+        ('adb -s 0123456789abcdef wait-for-device', ''),
+        ('adb -s 0123456789abcdef shell pm path android',
+         'package:this.is.a.test.package'),
+        # AndroidCommands.WaitForSdCardReady
+        ("adb -s 0123456789abcdef shell 'echo $EXTERNAL_STORAGE'",
+         '/fake/storage/path'),
+        ("adb -s 0123456789abcdef shell 'ls /fake/storage/path'",
+         'nothing\r\n'),
+        # wait for wifi
+        ("adb -s 0123456789abcdef shell 'dumpsys wifi'", 'Wi-Fi is enabled')]):
+      self.device.WaitUntilFullyBooted(wifi=True)
+
+  def testWaitUntilFullyBooted_bootFails(self):
+    with mock.patch('time.sleep'):
+      with self.assertCallsSequence([
+          # AndroidCommands.WaitForSystemBootCompleted
+          ('adb -s 0123456789abcdef wait-for-device', ''),
+          ('adb -s 0123456789abcdef shell getprop sys.boot_completed',
+           '0\r\n')]):
+        with self.assertRaises(device_errors.CommandTimeoutError):
+          self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_devicePmFails(self):
+    with mock.patch('time.sleep'):
+      with self.assertCallsSequence([
+          # AndroidCommands.WaitForSystemBootCompleted
+          ('adb -s 0123456789abcdef wait-for-device', ''),
+          ('adb -s 0123456789abcdef shell getprop sys.boot_completed',
+           '1\r\n')]
+          # AndroidCommands.WaitForDevicePm
+        + 3 * ([('adb -s 0123456789abcdef wait-for-device', '')]
+             + 24 * [('adb -s 0123456789abcdef shell pm path android', '\r\n')]
+             + [("adb -s 0123456789abcdef shell 'stop'", '\r\n'),
+                ("adb -s 0123456789abcdef shell 'start'", '\r\n')])):
+        with self.assertRaises(device_errors.CommandTimeoutError):
+          self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_sdCardReadyFails_noPath(self):
+    with mock.patch('time.sleep'):
+      with self.assertCallsSequence([
+          # AndroidCommands.WaitForSystemBootCompleted
+          ('adb -s 0123456789abcdef wait-for-device', ''),
+          ('adb -s 0123456789abcdef shell getprop sys.boot_completed',
+           '1\r\n'),
+          # AndroidCommands.WaitForDevicePm
+          ('adb -s 0123456789abcdef wait-for-device', ''),
+          ('adb -s 0123456789abcdef shell pm path android',
+           'package:this.is.a.test.package'),
+          ("adb -s 0123456789abcdef shell 'echo $EXTERNAL_STORAGE'", '\r\n')]):
+        with self.assertRaises(device_errors.CommandFailedError):
+          self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_sdCardReadyFails_emptyPath(self):
+    with mock.patch('time.sleep'):
+      with self.assertCallsSequence([
+          # AndroidCommands.WaitForSystemBootCompleted
+          ('adb -s 0123456789abcdef wait-for-device', ''),
+          ('adb -s 0123456789abcdef shell getprop sys.boot_completed',
+           '1\r\n'),
+          # AndroidCommands.WaitForDevicePm
+          ('adb -s 0123456789abcdef wait-for-device', ''),
+          ('adb -s 0123456789abcdef shell pm path android',
+           'package:this.is.a.test.package'),
+          ("adb -s 0123456789abcdef shell 'echo $EXTERNAL_STORAGE'",
+           '/fake/storage/path\r\n'),
+          ("adb -s 0123456789abcdef shell 'ls /fake/storage/path'", '')]):
+        with self.assertRaises(device_errors.CommandTimeoutError):
+          self.device.WaitUntilFullyBooted(wifi=False)
+
+
+class DeviceUtilsRebootTest(DeviceUtilsOldImplTest):
+
+  def testReboot_nonBlocking(self):
+    with mock.patch('time.sleep'):
+      with self.assertCallsSequence([
+            ('adb -s 0123456789abcdef reboot', ''),
+            ('adb -s 0123456789abcdef devices', 'unknown\r\n'),
+            ('adb -s 0123456789abcdef wait-for-device', ''),
+            ('adb -s 0123456789abcdef shell pm path android',
+             'package:this.is.a.test.package'),
+            ("adb -s 0123456789abcdef shell 'echo $EXTERNAL_STORAGE'",
+             '/fake/storage/path\r\n'),
+            ("adb -s 0123456789abcdef shell 'ls /fake/storage/path'",
+             'nothing\r\n')]):
+        self.device.Reboot(block=False)
+
+  def testReboot_blocking(self):
+    with mock.patch('time.sleep'):
+      with self.assertCallsSequence([
+            ('adb -s 0123456789abcdef reboot', ''),
+            ('adb -s 0123456789abcdef devices', 'unknown\r\n'),
+            ('adb -s 0123456789abcdef wait-for-device', ''),
+            ('adb -s 0123456789abcdef shell pm path android',
+             'package:this.is.a.test.package'),
+            ("adb -s 0123456789abcdef shell 'echo $EXTERNAL_STORAGE'",
+             '/fake/storage/path\r\n'),
+            ("adb -s 0123456789abcdef shell 'ls /fake/storage/path'",
+             'nothing\r\n'),
+            ('adb -s 0123456789abcdef wait-for-device', ''),
+            ('adb -s 0123456789abcdef shell getprop sys.boot_completed',
+             '1\r\n'),
+            ('adb -s 0123456789abcdef wait-for-device', ''),
+            ('adb -s 0123456789abcdef shell pm path android',
+             'package:this.is.a.test.package'),
+            ("adb -s 0123456789abcdef shell 'ls /fake/storage/path'",
+             'nothing\r\n')]):
+        self.device.Reboot(block=True)
+
+
+class DeviceUtilsInstallTest(DeviceUtilsOldImplTest):
+
+  def testInstall_noPriorInstall(self):
+    with mock.patch('os.path.isfile', return_value=True), (
+         mock.patch('pylib.utils.apk_helper.GetPackageName',
+                    return_value='this.is.a.test.package')):
+      with self.assertCallsSequence([
+          ("adb -s 0123456789abcdef shell 'pm path this.is.a.test.package'",
+           ''),
+          ("adb -s 0123456789abcdef install /fake/test/app.apk",
+           'Success\r\n')]):
+        self.device.Install('/fake/test/app.apk', retries=0)
+
+  def testInstall_differentPriorInstall(self):
+    def mockGetFilesChanged(host_path, device_path, ignore_filenames):
+      return [(host_path, device_path)]
+
+    # Pylint raises a false positive "operator not preceded by a space"
+    # warning below.
+    # pylint: disable=C0322
+    with mock.patch('os.path.isfile', return_value=True), (
+         mock.patch('os.path.exists', return_value=True)), (
+         mock.patch('pylib.utils.apk_helper.GetPackageName',
+                    return_value='this.is.a.test.package')), (
+         mock.patch('pylib.constants.GetOutDirectory',
+                    return_value='/fake/test/out')), (
+         mock.patch('pylib.android_commands.AndroidCommands.GetFilesChanged',
+                    side_effect=mockGetFilesChanged)):
+    # pylint: enable=C0322
+      with self.assertCallsSequence([
+          ("adb -s 0123456789abcdef shell 'pm path this.is.a.test.package'",
+           'package:/fake/data/app/this.is.a.test.package.apk\r\n'),
+          # GetFilesChanged is mocked, so its adb calls are omitted.
+          ('adb -s 0123456789abcdef uninstall this.is.a.test.package',
+           'Success\r\n'),
+          ('adb -s 0123456789abcdef install /fake/test/app.apk',
+           'Success\r\n')]):
+        self.device.Install('/fake/test/app.apk', retries=0)
+
+  def testInstall_differentPriorInstall_reinstall(self):
+    def mockGetFilesChanged(host_path, device_path, ignore_filenames):
+      return [(host_path, device_path)]
+
+    # Pylint raises a false positive "operator not preceded by a space"
+    # warning below.
+    # pylint: disable=C0322
+    with mock.patch('os.path.isfile', return_value=True), (
+         mock.patch('pylib.utils.apk_helper.GetPackageName',
+                    return_value='this.is.a.test.package')), (
+         mock.patch('pylib.constants.GetOutDirectory',
+                    return_value='/fake/test/out')), (
+         mock.patch('pylib.android_commands.AndroidCommands.GetFilesChanged',
+                    side_effect=mockGetFilesChanged)):
+    # pylint: enable=C0322
+      with self.assertCallsSequence([
+          ("adb -s 0123456789abcdef shell 'pm path this.is.a.test.package'",
+           'package:/fake/data/app/this.is.a.test.package.apk\r\n'),
+          # GetFilesChanged is mocked, so its adb calls are omitted.
+          ('adb -s 0123456789abcdef install -r /fake/test/app.apk',
+           'Success\r\n')]):
+        self.device.Install('/fake/test/app.apk', reinstall=True, retries=0)
+
+  def testInstall_identicalPriorInstall(self):
+    def mockGetFilesChanged(host_path, device_path, ignore_filenames):
+      return []
+
+    with mock.patch('pylib.utils.apk_helper.GetPackageName',
+                    return_value='this.is.a.test.package'), (
+         mock.patch('pylib.android_commands.AndroidCommands.GetFilesChanged',
+                    side_effect=mockGetFilesChanged)):
+      with self.assertCallsSequence([
+          ("adb -s 0123456789abcdef shell 'pm path this.is.a.test.package'",
+           'package:/fake/data/app/this.is.a.test.package.apk\r\n')
+          # GetFilesChanged is mocked, so its adb calls are omitted.
+          ]):
+        self.device.Install('/fake/test/app.apk', retries=0)
+
+  def testInstall_fails(self):
+    with mock.patch('os.path.isfile', return_value=True), (
+         mock.patch('pylib.utils.apk_helper.GetPackageName',
+                    return_value='this.is.a.test.package')):
+      with self.assertCallsSequence([
+          ("adb -s 0123456789abcdef shell 'pm path this.is.a.test.package'",
+           ''),
+          ("adb -s 0123456789abcdef install /fake/test/app.apk",
+           'Failure\r\n')]):
+        with self.assertRaises(device_errors.CommandFailedError):
+          self.device.Install('/fake/test/app.apk', retries=0)
+
+
+class DeviceUtilsRunShellCommandTest(DeviceUtilsOldImplTest):
+
+  def testRunShellCommand_commandAsList(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'pm list packages'",
+        'package:android\r\n'):
+      self.device.RunShellCommand(['pm', 'list', 'packages'])
+
+  def testRunShellCommand_commandAsString(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'dumpsys wifi'",
+        'Wi-Fi is enabled\r\n'):
+      self.device.RunShellCommand('dumpsys wifi')
+
+  def testRunShellCommand_withSu(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'ls /root'", 'Permission denied\r\n'),
+        ("adb -s 0123456789abcdef shell 'su -c setprop service.adb.root 0'",
+         '')]):
+      self.device.RunShellCommand('setprop service.adb.root 0', as_root=True)
+
+  def testRunShellCommand_withRoot(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'ls /root'", 'hello\r\nworld\r\n'),
+        ("adb -s 0123456789abcdef shell 'setprop service.adb.root 0'", '')]):
+      self.device.RunShellCommand('setprop service.adb.root 0', as_root=True)
+
+  def testRunShellCommand_checkReturn_success(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'echo $ANDROID_DATA; echo %$?'",
+        '/data\r\n%0\r\n'):
+      self.device.RunShellCommand('echo $ANDROID_DATA', check_return=True)
+
+  def testRunShellCommand_checkReturn_failure(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'echo $ANDROID_DATA; echo %$?'",
+        '\r\n%1\r\n'):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.RunShellCommand('echo $ANDROID_DATA', check_return=True)
+
+
+class DeviceUtilsKillAllTest(DeviceUtilsOldImplTest):
+
+  def testKillAll_noMatchingProcesses(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'ps'",
+        'USER   PID   PPID  VSIZE  RSS   WCHAN    PC       NAME\r\n'):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.KillAll('test_process')
+
+  def testKillAll_nonblocking(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'ps'",
+         'USER   PID   PPID  VSIZE  RSS   WCHAN    PC       NAME\r\n'
+         'u0_a1  1234  174   123456 54321 ffffffff 456789ab '
+              'this.is.a.test.process\r\n'),
+        ("adb -s 0123456789abcdef shell 'kill -9 1234'", '')]):
+      self.assertEquals(1,
+          self.device.KillAll('this.is.a.test.process', blocking=False))
+
+  def testKillAll_blocking(self):
+    with mock.patch('time.sleep'):
+      with self.assertCallsSequence([
+          ("adb -s 0123456789abcdef shell 'ps'",
+           'USER   PID   PPID  VSIZE  RSS   WCHAN    PC       NAME\r\n'
+           'u0_a1  1234  174   123456 54321 ffffffff 456789ab '
+                'this.is.a.test.process\r\n'),
+          ("adb -s 0123456789abcdef shell 'kill -9 1234'", ''),
+          ("adb -s 0123456789abcdef shell 'ps'",
+           'USER   PID   PPID  VSIZE  RSS   WCHAN    PC       NAME\r\n'
+           'u0_a1  1234  174   123456 54321 ffffffff 456789ab '
+                'this.is.a.test.process\r\n'),
+          ("adb -s 0123456789abcdef shell 'ps'",
+           'USER   PID   PPID  VSIZE  RSS   WCHAN    PC       NAME\r\n')]):
+        self.assertEquals(1,
+            self.device.KillAll('this.is.a.test.process', blocking=True))
+
+  def testKillAll_root(self):
+    with self.assertCallsSequence([
+          ("adb -s 0123456789abcdef shell 'ps'",
+           'USER   PID   PPID  VSIZE  RSS   WCHAN    PC       NAME\r\n'
+           'u0_a1  1234  174   123456 54321 ffffffff 456789ab '
+                'this.is.a.test.process\r\n'),
+          ("adb -s 0123456789abcdef shell 'ls /root'", 'Permission denied\r\n'),
+          ("adb -s 0123456789abcdef shell 'su -c kill -9 1234'", '')]):
+      self.assertEquals(1,
+          self.device.KillAll('this.is.a.test.process', as_root=True))
+
+  def testKillAll_sigterm(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'ps'",
+         'USER   PID   PPID  VSIZE  RSS   WCHAN    PC       NAME\r\n'
+         'u0_a1  1234  174   123456 54321 ffffffff 456789ab '
+              'this.is.a.test.process\r\n'),
+        ("adb -s 0123456789abcdef shell 'kill -15 1234'", '')]):
+      self.assertEquals(1,
+          self.device.KillAll('this.is.a.test.process', signum=signal.SIGTERM))
+
+
+class DeviceUtilsStartActivityTest(DeviceUtilsOldImplTest):
+
+  def testStartActivity_actionOnly(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW')
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_success(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-n this.is.a.test.package/.Main'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_failure(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-n this.is.a.test.package/.Main'",
+        'Error: Failed to start test activity'):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.StartActivity(test_intent)
+
+  def testStartActivity_blocking(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-W "
+            "-n this.is.a.test.package/.Main'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent, blocking=True)
+
+  def testStartActivity_withCategory(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                category='android.intent.category.HOME')
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-c android.intent.category.HOME "
+            "-n this.is.a.test.package/.Main'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withMultipleCategories(self):
+    # The new implementation will start the activity with all provided
+    # categories. The old one only uses the first category.
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                category=['android.intent.category.HOME',
+                                          'android.intent.category.BROWSABLE'])
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-c android.intent.category.HOME "
+            "-n this.is.a.test.package/.Main'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withData(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                data='http://www.google.com/')
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-n this.is.a.test.package/.Main "
+            "-d \"http://www.google.com/\"'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withStringExtra(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                extras={'foo': 'test'})
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-n this.is.a.test.package/.Main "
+            "--es foo test'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withBoolExtra(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                extras={'foo': True})
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-n this.is.a.test.package/.Main "
+            "--ez foo True'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withIntExtra(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                extras={'foo': 123})
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-n this.is.a.test.package/.Main "
+            "--ei foo 123'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withTraceFile(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-n this.is.a.test.package/.Main "
+            "--start-profiler test_trace_file.out'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent,
+                                trace_file_name='test_trace_file.out')
+
+  def testStartActivity_withForceStop(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-S "
+            "-n this.is.a.test.package/.Main'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent, force_stop=True)
+
+  def testStartActivity_withFlags(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                flags='0x10000000')
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-a android.intent.action.VIEW "
+            "-n this.is.a.test.package/.Main "
+            "-f 0x10000000'",
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+
+class DeviceUtilsBroadcastIntentTest(DeviceUtilsOldImplTest):
+
+  def testBroadcastIntent_noExtras(self):
+    test_intent = intent.Intent(action='test.package.with.an.INTENT')
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am broadcast "
+            "-a test.package.with.an.INTENT '",
+        'Broadcasting: Intent { act=test.package.with.an.INTENT } '):
+      self.device.BroadcastIntent(test_intent)
+
+  def testBroadcastIntent_withExtra(self):
+    test_intent = intent.Intent(action='test.package.with.an.INTENT',
+                                extras={'foo': 'bar'})
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am broadcast "
+            "-a test.package.with.an.INTENT "
+            "-e foo \"bar\"'",
+        'Broadcasting: Intent { act=test.package.with.an.INTENT } '):
+      self.device.BroadcastIntent(test_intent)
+
+  def testBroadcastIntent_withExtra_noValue(self):
+    test_intent = intent.Intent(action='test.package.with.an.INTENT',
+                                extras={'foo': None})
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am broadcast "
+            "-a test.package.with.an.INTENT "
+            "-e foo'",
+        'Broadcasting: Intent { act=test.package.with.an.INTENT } '):
+      self.device.BroadcastIntent(test_intent)
+
+
+class DeviceUtilsGoHomeTest(DeviceUtilsOldImplTest):
+
+  def testGoHome(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am start "
+            "-W "
+            "-a android.intent.action.MAIN "
+            "-c android.intent.category.HOME'",
+        'Starting: Intent { act=android.intent.action.MAIN }\r\n'):
+      self.device.GoHome()
+
+
+class DeviceUtilsForceStopTest(DeviceUtilsOldImplTest):
+
+  def testForceStop(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'am force-stop this.is.a.test.package'",
+        ''):
+      self.device.ForceStop('this.is.a.test.package')
+
+
+class DeviceUtilsClearApplicationStateTest(DeviceUtilsOldImplTest):
+
+  def testClearApplicationState_packageDoesntExist(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'pm path this.package.does.not.exist'",
+        ''):
+      self.device.ClearApplicationState('this.package.does.not.exist')
+
+  def testClearApplicationState_packageExists(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'pm path this.package.exists'",
+         'package:/data/app/this.package.exists.apk'),
+        ("adb -s 0123456789abcdef shell 'pm clear this.package.exists'",
+         'Success\r\n')]):
+      self.device.ClearApplicationState('this.package.exists')
+
+
+class DeviceUtilsSendKeyEventTest(DeviceUtilsOldImplTest):
+
+  def testSendKeyEvent(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'input keyevent 66'",
+        ''):
+      self.device.SendKeyEvent(66)
+
+
+class DeviceUtilsPushChangedFilesTest(DeviceUtilsOldImplTest):
+
+  def testPushChangedFiles_noHostPath(self):
+    with mock.patch('os.path.exists', return_value=False):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.PushChangedFiles('/test/host/path', '/test/device/path')
+
+  def testPushChangedFiles_file_noChange(self):
+    self.device.old_interface._push_if_needed_cache = {}
+
+    host_file_path = '/test/host/path'
+    device_file_path = '/test/device/path'
+
+    mock_fs = MockFileSystem()
+    mock_fs.addMockFile(host_file_path, size=100)
+
+    self.device.old_interface.GetFilesChanged = mock.Mock(return_value=[])
+
+    with mock_fs:
+      # GetFilesChanged is mocked, so its adb calls are omitted.
+      with self.assertNoAdbCalls():
+        self.device.PushChangedFiles(host_file_path, device_file_path)
+
+  def testPushChangedFiles_file_changed(self):
+    self.device.old_interface._push_if_needed_cache = {}
+
+    host_file_path = '/test/host/path'
+    device_file_path = '/test/device/path'
+
+    mock_fs = MockFileSystem()
+    mock_fs.addMockFile(
+        host_file_path, size=100,
+        stat=MockFileSystem.osStatResult(st_mtime=1000000000))
+
+    self.device.old_interface.GetFilesChanged = mock.Mock(
+        return_value=[('/test/host/path', '/test/device/path')])
+
+    with mock_fs:
+      with self.assertCalls('adb -s 0123456789abcdef push '
+          '/test/host/path /test/device/path', '100 B/s (100 B in 1.000s)\r\n'):
+        self.device.PushChangedFiles(host_file_path, device_file_path)
+
+  def testPushChangedFiles_directory_nothingChanged(self):
+    self.device.old_interface._push_if_needed_cache = {}
+
+    host_file_path = '/test/host/path'
+    device_file_path = '/test/device/path'
+
+    mock_fs = MockFileSystem()
+    mock_fs.addMockDirectory(
+        host_file_path, size=256,
+        stat=MockFileSystem.osStatResult(st_mtime=1000000000))
+    mock_fs.addMockFile(
+        host_file_path + '/file1', size=251,
+        stat=MockFileSystem.osStatResult(st_mtime=1000000001))
+    mock_fs.addMockFile(
+        host_file_path + '/file2', size=252,
+        stat=MockFileSystem.osStatResult(st_mtime=1000000002))
+
+    self.device.old_interface.GetFilesChanged = mock.Mock(return_value=[])
+
+    with mock_fs:
+      with self.assertCallsSequence([
+          ("adb -s 0123456789abcdef shell 'mkdir -p \"/test/device/path\"'",
+           '')]):
+        self.device.PushChangedFiles(host_file_path, device_file_path)
+
+  def testPushChangedFiles_directory_somethingChanged(self):
+    self.device.old_interface._push_if_needed_cache = {}
+
+    host_file_path = '/test/host/path'
+    device_file_path = '/test/device/path'
+
+    mock_fs = MockFileSystem()
+    mock_fs.addMockDirectory(
+        host_file_path, size=256,
+        stat=MockFileSystem.osStatResult(st_mtime=1000000000),
+        walk=[('/test/host/path', [], ['file1', 'file2'])])
+    mock_fs.addMockFile(
+        host_file_path + '/file1', size=256,
+        stat=MockFileSystem.osStatResult(st_mtime=1000000001))
+    mock_fs.addMockFile(
+        host_file_path + '/file2', size=256,
+        stat=MockFileSystem.osStatResult(st_mtime=1000000002))
+
+    self.device.old_interface.GetFilesChanged = mock.Mock(
+        return_value=[('/test/host/path/file1', '/test/device/path/file1')])
+
+    with mock_fs:
+      with self.assertCallsSequence([
+          ("adb -s 0123456789abcdef shell 'mkdir -p \"/test/device/path\"'",
+           ''),
+          ('adb -s 0123456789abcdef push '
+              '/test/host/path/file1 /test/device/path/file1',
+           '256 B/s (256 B in 1.000s)\r\n')]):
+        self.device.PushChangedFiles(host_file_path, device_file_path)
+
+  def testPushChangedFiles_directory_everythingChanged(self):
+    self.device.old_interface._push_if_needed_cache = {}
+
+    host_file_path = '/test/host/path'
+    device_file_path = '/test/device/path'
+
+    mock_fs = MockFileSystem()
+    mock_fs.addMockDirectory(
+        host_file_path, size=256,
+        stat=MockFileSystem.osStatResult(st_mtime=1000000000))
+    mock_fs.addMockFile(
+        host_file_path + '/file1', size=256,
+        stat=MockFileSystem.osStatResult(st_mtime=1000000001))
+    mock_fs.addMockFile(
+        host_file_path + '/file2', size=256,
+        stat=MockFileSystem.osStatResult(st_mtime=1000000002))
+
+    self.device.old_interface.GetFilesChanged = mock.Mock(
+        return_value=[('/test/host/path/file1', '/test/device/path/file1'),
+                      ('/test/host/path/file2', '/test/device/path/file2')])
+
+    with mock_fs:
+      with self.assertCallsSequence([
+          ("adb -s 0123456789abcdef shell 'mkdir -p \"/test/device/path\"'",
+           ''),
+          ('adb -s 0123456789abcdef push /test/host/path /test/device/path',
+           '768 B/s (768 B in 1.000s)\r\n')]):
+        self.device.PushChangedFiles(host_file_path, device_file_path)
+
+
+class DeviceUtilsFileExistsTest(DeviceUtilsOldImplTest):
+
+  def testFileExists_usingTest_fileExists(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell "
+            "'test -e \"/data/app/test.file.exists\"; echo $?'",
+        '0\r\n'):
+      self.assertTrue(self.device.FileExists('/data/app/test.file.exists'))
+
+  def testFileExists_usingTest_fileDoesntExist(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell "
+            "'test -e \"/data/app/test.file.does.not.exist\"; echo $?'",
+        '1\r\n'):
+      self.assertFalse(self.device.FileExists(
+          '/data/app/test.file.does.not.exist'))
+
+  def testFileExists_usingLs_fileExists(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell "
+            "'test -e \"/data/app/test.file.exists\"; echo $?'",
+         'test: not found\r\n'),
+        ("adb -s 0123456789abcdef shell "
+            "'ls \"/data/app/test.file.exists\" >/dev/null 2>&1; echo $?'",
+         '0\r\n')]):
+      self.assertTrue(self.device.FileExists('/data/app/test.file.exists'))
+
+  def testFileExists_usingLs_fileDoesntExist(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell "
+            "'test -e \"/data/app/test.file.does.not.exist\"; echo $?'",
+         'test: not found\r\n'),
+        ("adb -s 0123456789abcdef shell "
+            "'ls \"/data/app/test.file.does.not.exist\" "
+            ">/dev/null 2>&1; echo $?'",
+         '1\r\n')]):
+      self.assertFalse(self.device.FileExists(
+          '/data/app/test.file.does.not.exist'))
+
+
+class DeviceUtilsPullFileTest(DeviceUtilsOldImplTest):
+
+  def testPullFile_existsOnDevice(self):
+    with mock.patch('os.path.exists', return_value=True):
+      with self.assertCallsSequence([
+          ('adb -s 0123456789abcdef shell '
+              'ls /data/app/test.file.exists',
+           '/data/app/test.file.exists'),
+          ('adb -s 0123456789abcdef pull '
+              '/data/app/test.file.exists /test/file/host/path',
+           '100 B/s (100 bytes in 1.000s)\r\n')]):
+        self.device.PullFile('/data/app/test.file.exists',
+                             '/test/file/host/path')
+
+  def testPullFile_doesntExistOnDevice(self):
+    with mock.patch('os.path.exists', return_value=True):
+      with self.assertCalls(
+          'adb -s 0123456789abcdef shell '
+              'ls /data/app/test.file.does.not.exist',
+          '/data/app/test.file.does.not.exist: No such file or directory\r\n'):
+        with self.assertRaises(device_errors.CommandFailedError):
+          self.device.PullFile('/data/app/test.file.does.not.exist',
+                               '/test/file/host/path')
+
+
+class DeviceUtilsReadFileTest(DeviceUtilsOldImplTest):
+
+  def testReadFile_exists(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell "
+            "'cat \"/read/this/test/file\" 2>/dev/null'",
+         'this is a test file')]):
+      self.assertEqual(['this is a test file'],
+                       self.device.ReadFile('/read/this/test/file'))
+
+  def testReadFile_doesNotExist(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell "
+            "'cat \"/this/file/does.not.exist\" 2>/dev/null'",
+         ''):
+      self.device.ReadFile('/this/file/does.not.exist')
+
+  def testReadFile_asRoot_withRoot(self):
+    self.device.old_interface._privileged_command_runner = (
+        self.device.old_interface.RunShellCommand)
+    self.device.old_interface._protected_file_access_method_initialized = True
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell "
+            "'cat \"/this/file/must.be.read.by.root\" 2> /dev/null'",
+         'this is a test file\nread by root')]):
+      self.assertEqual(
+          ['this is a test file', 'read by root'],
+          self.device.ReadFile('/this/file/must.be.read.by.root',
+                               as_root=True))
+
+  def testReadFile_asRoot_withSu(self):
+    self.device.old_interface._privileged_command_runner = (
+        self.device.old_interface.RunShellCommandWithSU)
+    self.device.old_interface._protected_file_access_method_initialized = True
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell "
+            "'su -c cat \"/this/file/can.be.read.with.su\" 2> /dev/null'",
+         'this is a test file\nread with su')]):
+      self.assertEqual(
+          ['this is a test file', 'read with su'],
+          self.device.ReadFile('/this/file/can.be.read.with.su',
+                               as_root=True))
+
+  def testReadFile_asRoot_rejected(self):
+    self.device.old_interface._privileged_command_runner = None
+    self.device.old_interface._protected_file_access_method_initialized = True
+    with self.assertRaises(device_errors.CommandFailedError):
+      self.device.ReadFile('/this/file/cannot.be.read.by.user',
+                           as_root=True)
+
+
+class DeviceUtilsWriteFileTest(DeviceUtilsOldImplTest):
+
+  def testWriteFile_basic(self):
+    mock_file = mock.MagicMock(spec=file)
+    mock_file.name = '/tmp/file/to.be.pushed'
+    mock_file.__enter__.return_value = mock_file
+    with mock.patch('tempfile.NamedTemporaryFile',
+                    return_value=mock_file):
+      with self.assertCalls(
+          'adb -s 0123456789abcdef push '
+              '/tmp/file/to.be.pushed /test/file/written.to.device',
+          '100 B/s (100 bytes in 1.000s)\r\n'):
+        self.device.WriteFile('/test/file/written.to.device',
+                              'new test file contents')
+    mock_file.write.assert_called_once_with('new test file contents')
+
+  def testWriteFile_asRoot_withRoot(self):
+    self.device.old_interface._external_storage = '/fake/storage/path'
+    self.device.old_interface._privileged_command_runner = (
+        self.device.old_interface.RunShellCommand)
+    self.device.old_interface._protected_file_access_method_initialized = True
+
+    mock_file = mock.MagicMock(spec=file)
+    mock_file.name = '/tmp/file/to.be.pushed'
+    mock_file.__enter__.return_value = mock_file
+    with mock.patch('tempfile.NamedTemporaryFile',
+                    return_value=mock_file):
+      with self.assertCallsSequence(
+          cmd_ret=[
+              # Create temporary contents file
+              (r"adb -s 0123456789abcdef shell "
+                  "'test -e \"/fake/storage/path/temp_file-\d+-\d+\"; "
+                  "echo \$\?'",
+               '1\r\n'),
+              # Create temporary script file
+              (r"adb -s 0123456789abcdef shell "
+                  "'test -e \"/fake/storage/path/temp_file-\d+-\d+\.sh\"; "
+                  "echo \$\?'",
+               '1\r\n'),
+              # Set contents file
+              (r'adb -s 0123456789abcdef push /tmp/file/to\.be\.pushed '
+                  '/fake/storage/path/temp_file-\d+\d+',
+               '100 B/s (100 bytes in 1.000s)\r\n'),
+              # Set script file
+              (r'adb -s 0123456789abcdef push /tmp/file/to\.be\.pushed '
+                  '/fake/storage/path/temp_file-\d+\d+',
+               '100 B/s (100 bytes in 1.000s)\r\n'),
+              # Call script
+              (r"adb -s 0123456789abcdef shell "
+                  "'sh /fake/storage/path/temp_file-\d+-\d+\.sh'", ''),
+              # Remove device temporaries
+              (r"adb -s 0123456789abcdef shell "
+                  "'rm /fake/storage/path/temp_file-\d+-\d+\.sh'", ''),
+              (r"adb -s 0123456789abcdef shell "
+                  "'rm /fake/storage/path/temp_file-\d+-\d+'", '')],
+          comp=re.match):
+        self.device.WriteFile('/test/file/written.to.device',
+                              'new test file contents', as_root=True)
+
+  def testWriteFile_asRoot_withSu(self):
+    self.device.old_interface._external_storage = '/fake/storage/path'
+    self.device.old_interface._privileged_command_runner = (
+        self.device.old_interface.RunShellCommandWithSU)
+    self.device.old_interface._protected_file_access_method_initialized = True
+
+    mock_file = mock.MagicMock(spec=file)
+    mock_file.name = '/tmp/file/to.be.pushed'
+    mock_file.__enter__.return_value = mock_file
+    with mock.patch('tempfile.NamedTemporaryFile',
+                    return_value=mock_file):
+      with self.assertCallsSequence(
+          cmd_ret=[
+              # Create temporary contents file
+              (r"adb -s 0123456789abcdef shell "
+                  "'test -e \"/fake/storage/path/temp_file-\d+-\d+\"; "
+                  "echo \$\?'",
+               '1\r\n'),
+              # Create temporary script file
+              (r"adb -s 0123456789abcdef shell "
+                  "'test -e \"/fake/storage/path/temp_file-\d+-\d+\.sh\"; "
+                  "echo \$\?'",
+               '1\r\n'),
+              # Set contents file
+              (r'adb -s 0123456789abcdef push /tmp/file/to\.be\.pushed '
+                  '/fake/storage/path/temp_file-\d+\d+',
+               '100 B/s (100 bytes in 1.000s)\r\n'),
+              # Set script file
+              (r'adb -s 0123456789abcdef push /tmp/file/to\.be\.pushed '
+                  '/fake/storage/path/temp_file-\d+\d+',
+               '100 B/s (100 bytes in 1.000s)\r\n'),
+              # Call script
+              (r"adb -s 0123456789abcdef shell "
+                  "'su -c sh /fake/storage/path/temp_file-\d+-\d+\.sh'", ''),
+              # Remove device temporaries
+              (r"adb -s 0123456789abcdef shell "
+                  "'rm /fake/storage/path/temp_file-\d+-\d+\.sh'", ''),
+              (r"adb -s 0123456789abcdef shell "
+                  "'rm /fake/storage/path/temp_file-\d+-\d+'", '')],
+          comp=re.match):
+        self.device.WriteFile('/test/file/written.to.device',
+                              'new test file contents', as_root=True)
+
+  def testWriteFile_asRoot_rejected(self):
+    self.device.old_interface._privileged_command_runner = None
+    self.device.old_interface._protected_file_access_method_initialized = True
+    with self.assertRaises(device_errors.CommandFailedError):
+      self.device.WriteFile('/test/file/no.permissions.to.write',
+                            'new test file contents', as_root=True)
+
+class DeviceUtilsWriteTextFileTest(DeviceUtilsOldImplTest):
+
+  def testWriteTextFileTest_basic(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'echo some.string"
+        " > /test/file/to.write; echo %$?'", '%0\r\n'):
+      self.device.WriteTextFile('/test/file/to.write', 'some.string')
+
+  def testWriteTextFileTest_stringWithSpaces(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'echo '\\''some other string'\\''"
+        " > /test/file/to.write; echo %$?'", '%0\r\n'):
+      self.device.WriteTextFile('/test/file/to.write', 'some other string')
+
+  def testWriteTextFileTest_asRoot_withSu(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'ls /root'", 'Permission denied\r\n'),
+        ("adb -s 0123456789abcdef shell 'su -c echo some.string"
+          " > /test/file/to.write; echo %$?'", '%0\r\n')]):
+      self.device.WriteTextFile('/test/file/to.write', 'some.string',
+                                as_root=True)
+
+  def testWriteTextFileTest_asRoot_withRoot(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'ls /root'", 'hello\r\nworld\r\n'),
+        ("adb -s 0123456789abcdef shell 'echo some.string"
+          " > /test/file/to.write; echo %$?'", '%0\r\n')]):
+      self.device.WriteTextFile('/test/file/to.write', 'some.string',
+                                as_root=True)
+
+  def testWriteTextFileTest_asRoot_rejected(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'ls /root'", 'Permission denied\r\n'),
+        ("adb -s 0123456789abcdef shell 'su -c echo some.string"
+          " > /test/file/to.write; echo %$?'", '%1\r\n')]):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.WriteTextFile('/test/file/to.write', 'some.string',
+                                  as_root=True)
+
+class DeviceUtilsLsTest(DeviceUtilsOldImplTest):
+
+  def testLs_nothing(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'ls -lR /this/file/does.not.exist'",
+         '/this/file/does.not.exist: No such file or directory\r\n'),
+        ("adb -s 0123456789abcdef shell 'date +%z'", '+0000')]):
+      self.assertEqual({}, self.device.Ls('/this/file/does.not.exist'))
+
+  def testLs_file(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'ls -lR /this/is/a/test.file'",
+         '-rw-rw---- testuser testgroup 4096 1970-01-01 00:00 test.file\r\n'),
+        ("adb -s 0123456789abcdef shell 'date +%z'", '+0000')]):
+      self.assertEqual(
+          {'test.file': (4096, datetime.datetime(1970, 1, 1))},
+          self.device.Ls('/this/is/a/test.file'))
+
+  def testLs_directory(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'ls -lR /this/is/a/test.directory'",
+         '\r\n'
+         '/this/is/a/test.directory:\r\n'
+         '-rw-rw---- testuser testgroup 4096 1970-01-01 18:19 test.file\r\n'),
+        ("adb -s 0123456789abcdef shell 'date +%z'", '+0000')]):
+      self.assertEqual(
+          {'test.file': (4096, datetime.datetime(1970, 1, 1, 18, 19))},
+          self.device.Ls('/this/is/a/test.directory'))
+
+  def testLs_directories(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'ls -lR /this/is/a/test.directory'",
+         '\r\n'
+         '/this/is/a/test.directory:\r\n'
+         'drwxr-xr-x testuser testgroup 1970-01-01 00:00 test.subdirectory\r\n'
+         '\r\n'
+         '/this/is/a/test.directory/test.subdirectory:\r\n'
+         '-rw-rw---- testuser testgroup 4096 1970-01-01 00:00 test.file\r\n'),
+        ("adb -s 0123456789abcdef shell 'date +%z'", '-0700')]):
+      self.assertEqual(
+          {'test.subdirectory/test.file':
+              (4096, datetime.datetime(1970, 1, 1, 7, 0, 0))},
+          self.device.Ls('/this/is/a/test.directory'))
+
+
+class DeviceUtilsSetJavaAssertsTest(DeviceUtilsOldImplTest):
+
+  @staticmethod
+  def mockNamedTemporary(name='/tmp/file/property.file',
+                         read_contents=''):
+    mock_file = mock.MagicMock(spec=file)
+    mock_file.name = name
+    mock_file.__enter__.return_value = mock_file
+    mock_file.read.return_value = read_contents
+    return mock_file
+
+  def testSetJavaAsserts_enable(self):
+    mock_file = self.mockNamedTemporary()
+    with mock.patch('tempfile.NamedTemporaryFile',
+                    return_value=mock_file), (
+         mock.patch('__builtin__.open', return_value=mock_file)):
+      with self.assertCallsSequence(
+          [('adb -s 0123456789abcdef shell ls %s' %
+                constants.DEVICE_LOCAL_PROPERTIES_PATH,
+            '%s\r\n' % constants.DEVICE_LOCAL_PROPERTIES_PATH),
+           ('adb -s 0123456789abcdef pull %s %s' %
+                (constants.DEVICE_LOCAL_PROPERTIES_PATH, mock_file.name),
+            '100 B/s (100 bytes in 1.000s)\r\n'),
+           ('adb -s 0123456789abcdef push %s %s' %
+                (mock_file.name, constants.DEVICE_LOCAL_PROPERTIES_PATH),
+            '100 B/s (100 bytes in 1.000s)\r\n'),
+           ('adb -s 0123456789abcdef shell '
+                'getprop dalvik.vm.enableassertions',
+            '\r\n'),
+           ('adb -s 0123456789abcdef shell '
+                'setprop dalvik.vm.enableassertions "all"',
+            '')]):
+        self.assertTrue(self.device.SetJavaAsserts(True))
+
+  def testSetJavaAsserts_disable(self):
+    mock_file = self.mockNamedTemporary(
+        read_contents='dalvik.vm.enableassertions=all\n')
+    with mock.patch('tempfile.NamedTemporaryFile',
+                    return_value=mock_file), (
+         mock.patch('__builtin__.open', return_value=mock_file)):
+      with self.assertCallsSequence(
+          [('adb -s 0123456789abcdef shell ls %s' %
+                constants.DEVICE_LOCAL_PROPERTIES_PATH,
+            '%s\r\n' % constants.DEVICE_LOCAL_PROPERTIES_PATH),
+           ('adb -s 0123456789abcdef pull %s %s' %
+                (constants.DEVICE_LOCAL_PROPERTIES_PATH, mock_file.name),
+            '100 B/s (100 bytes in 1.000s)\r\n'),
+           ('adb -s 0123456789abcdef push %s %s' %
+                (mock_file.name, constants.DEVICE_LOCAL_PROPERTIES_PATH),
+            '100 B/s (100 bytes in 1.000s)\r\n'),
+           ('adb -s 0123456789abcdef shell '
+                'getprop dalvik.vm.enableassertions',
+            'all\r\n'),
+           ('adb -s 0123456789abcdef shell '
+                'setprop dalvik.vm.enableassertions ""',
+            '')]):
+        self.assertTrue(self.device.SetJavaAsserts(False))
+
+  def testSetJavaAsserts_alreadyEnabled(self):
+    mock_file = self.mockNamedTemporary(
+        read_contents='dalvik.vm.enableassertions=all\n')
+    with mock.patch('tempfile.NamedTemporaryFile',
+                    return_value=mock_file), (
+         mock.patch('__builtin__.open', return_value=mock_file)):
+      with self.assertCallsSequence(
+          [('adb -s 0123456789abcdef shell ls %s' %
+                constants.DEVICE_LOCAL_PROPERTIES_PATH,
+            '%s\r\n' % constants.DEVICE_LOCAL_PROPERTIES_PATH),
+           ('adb -s 0123456789abcdef pull %s %s' %
+                (constants.DEVICE_LOCAL_PROPERTIES_PATH, mock_file.name),
+            '100 B/s (100 bytes in 1.000s)\r\n'),
+           ('adb -s 0123456789abcdef shell '
+                'getprop dalvik.vm.enableassertions',
+            'all\r\n')]):
+        self.assertFalse(self.device.SetJavaAsserts(True))
+
+
+class DeviceUtilsGetPropTest(DeviceUtilsOldImplTest):
+
+  def testGetProp_exists(self):
+    with self.assertCalls(
+        'adb -s 0123456789abcdef shell getprop this.is.a.test.property',
+        'test_property_value\r\n'):
+      self.assertEqual('test_property_value',
+                       self.device.GetProp('this.is.a.test.property'))
+
+  def testGetProp_doesNotExist(self):
+    with self.assertCalls(
+        'adb -s 0123456789abcdef shell '
+            'getprop this.property.does.not.exist', ''):
+      self.assertEqual('', self.device.GetProp('this.property.does.not.exist'))
+
+  def testGetProp_cachedRoProp(self):
+    with self.assertCalls(
+        'adb -s 0123456789abcdef shell '
+            'getprop ro.build.type', 'userdebug'):
+      self.assertEqual('userdebug', self.device.GetProp('ro.build.type'))
+      self.assertEqual('userdebug', self.device.GetProp('ro.build.type'))
+
+
+class DeviceUtilsSetPropTest(DeviceUtilsOldImplTest):
+
+  def testSetProp(self):
+    with self.assertCalls(
+        'adb -s 0123456789abcdef shell '
+            'setprop this.is.a.test.property "test_property_value"',
+        ''):
+      self.device.SetProp('this.is.a.test.property', 'test_property_value')
+
+
+class DeviceUtilsGetPidsTest(DeviceUtilsOldImplTest):
+
+  def testGetPids_noMatches(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'ps'",
+        'USER   PID   PPID  VSIZE  RSS   WCHAN    PC       NAME\r\n'
+        'user  1000    100   1024 1024   ffffffff 00000000 no.match\r\n'):
+      self.assertEqual({}, self.device.GetPids('does.not.match'))
+
+  def testGetPids_oneMatch(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'ps'",
+        'USER   PID   PPID  VSIZE  RSS   WCHAN    PC       NAME\r\n'
+        'user  1000    100   1024 1024   ffffffff 00000000 not.a.match\r\n'
+        'user  1001    100   1024 1024   ffffffff 00000000 one.match\r\n'):
+      self.assertEqual({'one.match': '1001'}, self.device.GetPids('one.match'))
+
+  def testGetPids_multipleMatches(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'ps'",
+        'USER   PID   PPID  VSIZE  RSS   WCHAN    PC       NAME\r\n'
+        'user  1000    100   1024 1024   ffffffff 00000000 not\r\n'
+        'user  1001    100   1024 1024   ffffffff 00000000 one.match\r\n'
+        'user  1002    100   1024 1024   ffffffff 00000000 two.match\r\n'
+        'user  1003    100   1024 1024   ffffffff 00000000 three.match\r\n'):
+      self.assertEqual(
+          {'one.match': '1001', 'two.match': '1002', 'three.match': '1003'},
+          self.device.GetPids('match'))
+
+  def testGetPids_exactMatch(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'ps'",
+        'USER   PID   PPID  VSIZE  RSS   WCHAN    PC       NAME\r\n'
+        'user  1000    100   1024 1024   ffffffff 00000000 not.exact.match\r\n'
+        'user  1234    100   1024 1024   ffffffff 00000000 exact.match\r\n'):
+      self.assertEqual(
+          {'not.exact.match': '1000', 'exact.match': '1234'},
+          self.device.GetPids('exact.match'))
+
+
+class DeviceUtilsTakeScreenshotTest(DeviceUtilsOldImplTest):
+
+  def testTakeScreenshot_fileNameProvided(self):
+    mock_fs = MockFileSystem()
+    mock_fs.addMockDirectory('/test/host')
+    mock_fs.addMockFile('/test/host/screenshot.png')
+
+    with mock_fs:
+      with self.assertCallsSequence(
+          cmd_ret=[
+              (r"adb -s 0123456789abcdef shell 'echo \$EXTERNAL_STORAGE'",
+               '/test/external/storage\r\n'),
+              (r"adb -s 0123456789abcdef shell '/system/bin/screencap -p \S+'",
+               ''),
+              (r"adb -s 0123456789abcdef shell ls \S+",
+               '/test/external/storage/screenshot.png\r\n'),
+              (r'adb -s 0123456789abcdef pull \S+ /test/host/screenshot.png',
+               '100 B/s (100 B in 1.000s)\r\n'),
+              (r"adb -s 0123456789abcdef shell 'rm -f \S+'", '')
+          ],
+          comp=re.match):
+        self.device.TakeScreenshot('/test/host/screenshot.png')
+
+
+class DeviceUtilsGetIOStatsTest(DeviceUtilsOldImplTest):
+
+  def testGetIOStats(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'cat \"/proc/diskstats\" 2>/dev/null'",
+        '179 0 mmcblk0 1 2 3 4 5 6 7 8 9 10 11\r\n'):
+      self.assertEqual(
+          {
+            'num_reads': 1,
+            'num_writes': 5,
+            'read_ms': 4,
+            'write_ms': 8,
+          },
+          self.device.GetIOStats())
+
+
+class DeviceUtilsGetMemoryUsageForPidTest(DeviceUtilsOldImplTest):
+
+  def setUp(self):
+    super(DeviceUtilsGetMemoryUsageForPidTest, self).setUp()
+    self.device.old_interface._privileged_command_runner = (
+        self.device.old_interface.RunShellCommand)
+    self.device.old_interface._protected_file_access_method_initialized = True
+
+  def testGetMemoryUsageForPid_validPid(self):
+    with self.assertCallsSequence([
+        ("adb -s 0123456789abcdef shell 'showmap 1234'",
+         '100 101 102 103 104 105 106 107 TOTAL\r\n'),
+        ("adb -s 0123456789abcdef shell "
+            "'cat \"/proc/1234/status\" 2> /dev/null'",
+         'VmHWM: 1024 kB')
+        ]):
+      self.assertEqual(
+          {
+            'Size': 100,
+            'Rss': 101,
+            'Pss': 102,
+            'Shared_Clean': 103,
+            'Shared_Dirty': 104,
+            'Private_Clean': 105,
+            'Private_Dirty': 106,
+            'VmHWM': 1024
+          },
+          self.device.GetMemoryUsageForPid(1234))
+
+  def testGetMemoryUsageForPid_invalidPid(self):
+    with self.assertCalls(
+        "adb -s 0123456789abcdef shell 'showmap 4321'",
+        'cannot open /proc/4321/smaps: No such file or directory\r\n'):
+      self.assertEqual({}, self.device.GetMemoryUsageForPid(4321))
+
+
+class DeviceUtilsStrTest(DeviceUtilsOldImplTest):
+  def testStr_noAdbCalls(self):
+    with self.assertNoAdbCalls():
+      self.assertEqual('0123456789abcdef', str(self.device))
+
+  def testStr_noSerial(self):
+    self.device = device_utils.DeviceUtils(None)
+    with self.assertCalls('adb  get-serialno', '0123456789abcdef'):
+      self.assertEqual('0123456789abcdef', str(self.device))
+
+  def testStr_noSerial_noDevices(self):
+    self.device = device_utils.DeviceUtils(None)
+    with self.assertCalls('adb  get-serialno', 'unknown'), (
+         self.assertRaises(device_errors.NoDevicesError)):
+      str(self.device)
+
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/device/intent.py b/build/android/pylib/device/intent.py
new file mode 100644
index 0000000..3e34f79
--- /dev/null
+++ b/build/android/pylib/device/intent.py
@@ -0,0 +1,79 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manages intents and associated information.
+
+This is generally intended to be used with functions that call Android's
+am command.
+"""
+
+class Intent(object):
+
+  def __init__(self, action='android.intent.action.VIEW', activity=None,
+               category=None, component=None, data=None, extras=None,
+               flags=None, package=None):
+    """Creates an Intent.
+
+    Args:
+      action: A string containing the action.
+      activity: A string that, with |package|, can be used to specify the
+                component.
+      category: A string or list containing any categories.
+      component: A string that specifies the component to send the intent to.
+      data: A string containing a data URI.
+      extras: A dict containing extra parameters to be passed along with the
+              intent.
+      flags: A string containing flags to pass.
+      package: A string that, with |activity|, can be used to specify the
+               component.
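+
+    Example (an illustrative sketch; the package and activity names are
+    hypothetical):
+      i = intent.Intent(package='com.example.app',
+                        activity='com.example.app.MainActivity',
+                        data='http://www.example.com/')
+      # i.component is derived as
+      # 'com.example.app/com.example.app.MainActivity'.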
+    """
+    self._action = action
+    self._activity = activity
+    if isinstance(category, list) or category is None:
+      self._category = category
+    else:
+      self._category = [category]
+    self._component = component
+    self._data = data
+    self._extras = extras
+    self._flags = flags
+    self._package = package
+
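+    # Keep |component| and the (|package|, |activity|) pair consistent:
+    # split a full component string into its package and activity parts,
+    # or join a given package and activity into a component string.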
+    if self._component and '/' in component:
+      self._package, self._activity = component.split('/', 1)
+    elif self._package and self._activity:
+      self._component = '%s/%s' % (package, activity)
+
+  @property
+  def action(self):
+    return self._action
+
+  @property
+  def activity(self):
+    return self._activity
+
+  @property
+  def category(self):
+    return self._category
+
+  @property
+  def component(self):
+    return self._component
+
+  @property
+  def data(self):
+    return self._data
+
+  @property
+  def extras(self):
+    return self._extras
+
+  @property
+  def flags(self):
+    return self._flags
+
+  @property
+  def package(self):
+    return self._package
+
diff --git a/build/android/pylib/device_settings.py b/build/android/pylib/device_settings.py
new file mode 100644
index 0000000..bc39b5d
--- /dev/null
+++ b/build/android/pylib/device_settings.py
@@ -0,0 +1,179 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from pylib import content_settings
+
+_LOCK_SCREEN_SETTINGS_PATH = '/data/system/locksettings.db'
+PASSWORD_QUALITY_UNSPECIFIED = '0'
+
+
+def ConfigureContentSettings(device, desired_settings):
+  """Configures device content setings from a list.
+
+  Many settings are documented at:
+    http://developer.android.com/reference/android/provider/Settings.Global.html
+    http://developer.android.com/reference/android/provider/Settings.Secure.html
+    http://developer.android.com/reference/android/provider/Settings.System.html
+
+  Many others are undocumented.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+    desired_settings: A list of (table, [(key, value), ...]) for all
+        settings to configure.
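+
+    Example (a minimal sketch; the table and key also appear in
+    DETERMINISTIC_DEVICE_SETTINGS below):
+      ConfigureContentSettings(device, [
+          ('settings/system', [('screen_brightness', 5)]),
+      ])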
+  """
+  try:
+    sdk_version = int(device.GetProp('ro.build.version.sdk'))
+  except ValueError:
+    logging.error('Skipping content settings configuration, unknown sdk %s',
+                  device.GetProp('ro.build.version.sdk'))
+    return
+
+  if sdk_version < 16:
+    logging.error('Skipping content settings configuration due to outdated sdk')
+    return
+
+  if device.GetProp('ro.build.type') == 'userdebug':
+    for table, key_value in desired_settings:
+      settings = content_settings.ContentSettings(table, device)
+      for key, value in key_value:
+        settings[key] = value
+      logging.info('\n%s %s', table, (80 - len(table)) * '-')
+      for key, value in sorted(settings.iteritems()):
+        logging.info('\t%s: %s', key, value)
+
+
+def SetLockScreenSettings(device):
+  """Sets lock screen settings on the device.
+
+  On certain device/Android configurations we need to disable the lock screen in
+  a different database. Additionally, the password type must be set to
+  DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED.
+  Lock screen settings are stored in sqlite on the device in:
+      /data/system/locksettings.db
+
+  IMPORTANT: The first column is used as a primary key so that all rows with the
+  same value for that column are removed from the table prior to inserting the
+  new values.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+
+  Raises:
+    Exception if the setting was not properly set.
+  """
+  if (not device.old_interface.FileExistsOnDevice(_LOCK_SCREEN_SETTINGS_PATH) or
+      device.GetProp('ro.build.type') != 'userdebug'):
+    return
+
+  db = _LOCK_SCREEN_SETTINGS_PATH
+  locksettings = [('locksettings', 'lockscreen.disabled', '1'),
+                  ('locksettings', 'lockscreen.password_type',
+                   PASSWORD_QUALITY_UNSPECIFIED),
+                  ('locksettings', 'lockscreen.password_type_alternate',
+                   PASSWORD_QUALITY_UNSPECIFIED)]
+  for table, key, value in locksettings:
+    # Set the lockscreen setting for default user '0'
+    columns = ['name', 'user', 'value']
+    values = [key, '0', value]
+
+    cmd = """begin transaction;
+delete from '%(table)s' where %(primary_key)s='%(primary_value)s';
+insert into '%(table)s' (%(columns)s) values (%(values)s);
+commit transaction;""" % {
+      'table': table,
+      'primary_key': columns[0],
+      'primary_value': values[0],
+      'columns': ', '.join(columns),
+      'values': ', '.join(["'%s'" % value for value in values])
+    }
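+    # For the 'lockscreen.disabled' row, the template above expands to:
+    #   begin transaction;
+    #   delete from 'locksettings' where name='lockscreen.disabled';
+    #   insert into 'locksettings' (name, user, value)
+    #       values ('lockscreen.disabled', '0', '1');
+    #   commit transaction;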
+    output_msg = device.RunShellCommand('sqlite3 %s "%s"' % (db, cmd))
+    if output_msg:
+      print ' '.join(output_msg)
+
+
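+# The lists below are in the (table, [(key, value), ...]) format accepted by
+# ConfigureContentSettings above. A typical call (sketch):
+#   device_settings.ConfigureContentSettings(
+#       device, device_settings.ENABLE_LOCATION_SETTINGS)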
+ENABLE_LOCATION_SETTINGS = [
+  # Note that setting these in this order is required in order for all of
+  # them to take and stick through a reboot.
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 1),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is enabled and allowed for tests.
+    ('location_providers_allowed', 'gps,network'),
+  ]),
+  ('com.google.settings/partner', [
+    ('network_location_opt_in', 1),
+  ])
+]
+
+DISABLE_LOCATION_SETTINGS = [
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 0),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is disabled.
+    ('location_providers_allowed', ''),
+  ]),
+]
+
+DETERMINISTIC_DEVICE_SETTINGS = [
+  ('settings/global', [
+    ('assisted_gps_enabled', 0),
+
+    # Disable "auto time" and "auto time zone" to avoid network-provided time
+    # to overwrite the device's datetime and timezone synchronized from host
+    # when running tests later. See b/6569849.
+    ('auto_time', 0),
+    ('auto_time_zone', 0),
+
+    ('development_settings_enabled', 1),
+
+    # Flag for allowing ActivityManagerService to send ACTION_APP_ERROR intents
+    # on application crashes and ANRs. If this is disabled, the crash/ANR dialog
+    # will never display the "Report" button.
+    # Type: int ( 0 = disallow, 1 = allow )
+    ('send_action_app_error', 0),
+
+    ('stay_on_while_plugged_in', 3),
+
+    ('verifier_verify_adb_installs', 0),
+  ]),
+  ('settings/secure', [
+    ('allowed_geolocation_origins',
+        'http://www.google.co.uk http://www.google.com'),
+
+    # Ensure that we never get random dialogs like "Unfortunately the process
+    # android.process.acore has stopped", which steal the focus, and make our
+    # automation fail (because the dialog steals the focus then mistakenly
+    # receives the injected user input events).
+    ('anr_show_background', 0),
+
+    ('lockscreen.disabled', 1),
+
+    ('screensaver_enabled', 0),
+  ]),
+  ('settings/system', [
+    # Don't want devices to accidentally rotate the screen as that could
+    # affect performance measurements.
+    ('accelerometer_rotation', 0),
+
+    ('lockscreen.disabled', 1),
+
+    # Turn down brightness and disable auto-adjust so that devices run cooler.
+    ('screen_brightness', 5),
+    ('screen_brightness_mode', 0),
+
+    ('user_rotation', 0),
+  ]),
+]
+
+NETWORK_DISABLED_SETTINGS = [
+  ('settings/global', [
+    ('airplane_mode_on', 1),
+    ('wifi_on', 0),
+  ]),
+]
diff --git a/build/android/pylib/efficient_android_directory_copy.sh b/build/android/pylib/efficient_android_directory_copy.sh
new file mode 100755
index 0000000..7021109
--- /dev/null
+++ b/build/android/pylib/efficient_android_directory_copy.sh
@@ -0,0 +1,78 @@
+#!/system/bin/sh
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android shell script to make the destination directory identical with the
+# source directory, without doing unnecessary copies. This assumes that the
+# destination directory was originally a copy of the source directory, and
+# has since been modified.
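+#
+# Usage (a sketch; paths are illustrative):
+#   sh efficient_android_directory_copy.sh /data/local/tmp/src /data/local/tmp/dst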
+
+source=$1
+dest=$2
+echo copying $source to $dest
+
+delete_extra() {
+  # Don't delete symbolic links, since doing so deletes the vital lib link.
+  if [ ! -L "$1" ]
+  then
+    if [ ! -e "$source/$1" ]
+    then
+      echo rm -rf "$dest/$1"
+      rm -rf "$dest/$1"
+    elif [ -d "$1" ]
+    then
+      for f in "$1"/*
+      do
+      delete_extra "$f"
+      done
+    fi
+  fi
+}
+
+copy_if_older() {
+  if [ -d "$1" ] && [ -e "$dest/$1" ]
+  then
+    if [ ! -e "$dest/$1" ]
+    then
+      echo cp -a "$1" "$dest/$1"
+      cp -a "$1" "$dest/$1"
+    else
+      for f in "$1"/*
+      do
+        copy_if_older "$f"
+      done
+    fi
+  elif [ ! -e "$dest/$1" ] || [ "$1" -ot "$dest/$1" ] || [ "$1" -nt "$dest/$1" ]
+  then
+    # Dates differ, so either the destination or the source has changed.
+    echo cp -a "$1" "$dest/$1"
+    cp -a "$1" "$dest/$1"
+  fi
+}
+
+if [ -e "$dest" ]
+then
+  echo cd "$dest"
+  cd "$dest"
+  for f in ./*
+  do
+    if [ -e "$f" ]
+    then
+      delete_extra "$f"
+    fi
+  done
+else
+  echo mkdir "$dest"
+  mkdir "$dest"
+fi
+echo cd "$source"
+cd "$source"
+for f in ./*
+do
+  if [ -e "$f" ]
+  then
+    copy_if_older "$f"
+  fi
+done
diff --git a/build/android/pylib/flag_changer.py b/build/android/pylib/flag_changer.py
new file mode 100644
index 0000000..c0bcadb
--- /dev/null
+++ b/build/android/pylib/flag_changer.py
@@ -0,0 +1,163 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+import pylib.android_commands
+import pylib.device.device_utils
+
+
+class FlagChanger(object):
+  """Changes the flags Chrome runs with.
+
+  There are two different use cases for this file:
+  * Flags are permanently set by calling Set().
+  * Flags can be temporarily set for a particular set of unit tests.  These
+    tests should call Restore() to revert the flags to their original state
+    once the tests have completed.
+  """
+
+  def __init__(self, device, cmdline_file):
+    """Initializes the FlagChanger and records the original arguments.
+
+    Args:
+      device: A DeviceUtils instance.
+      cmdline_file: Path to the command line file on the device.
+    """
+    # TODO(jbudorick) Remove once telemetry switches over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    self._device = device
+    self._cmdline_file = cmdline_file
+
+    # Save the original flags.
+    self._orig_line = self._device.ReadFile(self._cmdline_file)
+    if self._orig_line:
+      self._orig_line = self._orig_line[0].strip()
+
+    # Parse out the flags into a list to facilitate adding and removing flags.
+    self._current_flags = self._TokenizeFlags(self._orig_line)
+
+  def Get(self):
+    """Returns list of current flags."""
+    return self._current_flags
+
+  def Set(self, flags):
+    """Replaces all flags on the current command line with the flags given.
+
+    Args:
+      flags: A list of flags to set, eg. ['--single-process'].
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    self._current_flags = flags
+    self._UpdateCommandLineFile()
+
+  def AddFlags(self, flags):
+    """Appends flags to the command line if they aren't already there.
+
+    Args:
+      flags: A list of flags to add on, eg. ['--single-process'].
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    # Avoid appending flags that are already present.
+    for flag in flags:
+      if flag not in self._current_flags:
+        self._current_flags.append(flag)
+    self._UpdateCommandLineFile()
+
+  def RemoveFlags(self, flags):
+    """Removes flags from the command line, if they exist.
+
+    Args:
+      flags: A list of flags to remove, eg. ['--single-process'].  Note that we
+             expect a complete match when removing flags; if you want to remove
+             a switch with a value, you must use the exact string used to add
+             it in the first place.
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    for flag in flags:
+      if flag in self._current_flags:
+        self._current_flags.remove(flag)
+    self._UpdateCommandLineFile()
+
+  def Restore(self):
+    """Restores the flags to their original state."""
+    self._current_flags = self._TokenizeFlags(self._orig_line)
+    self._UpdateCommandLineFile()
+
+  def _UpdateCommandLineFile(self):
+    """Writes out the command line to the file, or removes it if empty."""
+    logging.info('Current flags: %s', self._current_flags)
+    # Root is not required to write to /data/local/tmp/.
+    use_root = '/data/local/tmp/' not in self._cmdline_file
+    if self._current_flags:
+      # The first command line argument doesn't matter as we are not actually
+      # launching the chrome executable using this command line.
+      cmd_line = ' '.join(['_'] + self._current_flags)
+      self._device.WriteFile(
+          self._cmdline_file, cmd_line, as_root=use_root)
+      file_contents = self._device.ReadFile(
+          self._cmdline_file, as_root=use_root)
+      assert len(file_contents) == 1 and file_contents[0] == cmd_line, (
+          'Failed to set the command line file at %s' % self._cmdline_file)
+    else:
+      self._device.RunShellCommand('rm ' + self._cmdline_file,
+                                   as_root=use_root)
+      assert not self._device.FileExists(self._cmdline_file), (
+          'Failed to remove the command line file at %s' % self._cmdline_file)
+
+  @staticmethod
+  def _TokenizeFlags(line):
+    """Changes the string containing the command line into a list of flags.
+
+    Follows similar logic to CommandLine.java::tokenizeQuotedArguments:
+    * Flags are split using whitespace, unless the whitespace is within a
+      pair of quotation marks.
+    * Unlike the Java version, we keep the quotation marks around switch
+      values since we need them to re-create the file when new flags are
+      appended.
+
+    Args:
+      line: A string containing the entire command line.  The first token is
+            assumed to be the program name.
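+
+    Example (illustrative):
+      _TokenizeFlags('chrome --foo --bar="a b"')
+      # returns ['--foo', '--bar="a b"']: the program name is dropped and
+      # the quotation marks around the switch value are kept.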
+    """
+    if not line:
+      return []
+
+    tokenized_flags = []
+    current_flag = ""
+    within_quotations = False
+
+    # Move through the string character by character and build up each flag
+    # along the way.
+    for c in line.strip():
+      if c == '"':
+        if len(current_flag) > 0 and current_flag[-1] == '\\':
+          # Last char was a backslash; pop it, and treat this " as a literal.
+          current_flag = current_flag[0:-1] + '"'
+        else:
+          within_quotations = not within_quotations
+          current_flag += c
+      elif not within_quotations and (c == ' ' or c == '\t'):
+        if current_flag != "":
+          tokenized_flags.append(current_flag)
+          current_flag = ""
+      else:
+        current_flag += c
+
+    # Tack on the last flag.
+    if not current_flag:
+      if within_quotations:
+        logging.warn('Unterminated quoted argument: ' + line)
+    else:
+      tokenized_flags.append(current_flag)
+
+    # Return everything but the program name.
+    return tokenized_flags[1:]
diff --git a/build/android/pylib/forwarder.py b/build/android/pylib/forwarder.py
new file mode 100644
index 0000000..db6ea03
--- /dev/null
+++ b/build/android/pylib/forwarder.py
@@ -0,0 +1,336 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=W0212
+
+import fcntl
+import logging
+import os
+import psutil
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import valgrind_tools
+
+# TODO(jbudorick) Remove once telemetry gets switched over.
+import pylib.android_commands
+import pylib.device.device_utils
+
+
+def _GetProcessStartTime(pid):
+  return psutil.Process(pid).create_time
+
+
+class _FileLock(object):
+  """With statement-aware implementation of a file lock.
+
+  File locks are needed for cross-process synchronization when the
+  multiprocessing Python module is used.
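+
+  Example (a sketch; the path matches Forwarder._LOCK_PATH below):
+    with _FileLock('/tmp/chrome.forwarder.lock'):
+      pass  # Exclusive section; the lock is released on exit.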
+  """
+  def __init__(self, path):
+    self._fd = -1
+    self._path = path
+
+  def __enter__(self):
+    self._fd = os.open(self._path, os.O_RDONLY | os.O_CREAT)
+    if self._fd < 0:
+      raise Exception('Could not open file %s for reading' % self._path)
+    fcntl.flock(self._fd, fcntl.LOCK_EX)
+
+  def __exit__(self, _exception_type, _exception_value, traceback):
+    fcntl.flock(self._fd, fcntl.LOCK_UN)
+    os.close(self._fd)
+
+
+class Forwarder(object):
+  """Thread-safe class to manage port forwards from the device to the host."""
+
+  _DEVICE_FORWARDER_FOLDER = (constants.TEST_EXECUTABLE_DIR +
+                              '/forwarder/')
+  _DEVICE_FORWARDER_PATH = (constants.TEST_EXECUTABLE_DIR +
+                            '/forwarder/device_forwarder')
+  _LOCK_PATH = '/tmp/chrome.forwarder.lock'
+  _MULTIPROCESSING_ENV_VAR = 'CHROME_FORWARDER_USE_MULTIPROCESSING'
+  # Defined in host_forwarder_main.cc
+  _HOST_FORWARDER_LOG = '/tmp/host_forwarder_log'
+
+  _instance = None
+
+  @staticmethod
+  def UseMultiprocessing():
+    """Tells the forwarder that multiprocessing is used."""
+    os.environ[Forwarder._MULTIPROCESSING_ENV_VAR] = '1'
+
+  @staticmethod
+  def Map(port_pairs, device, tool=None):
+    """Runs the forwarder.
+
+    Args:
+      port_pairs: A list of tuples (device_port, host_port) to forward. Note
+                 that you can specify 0 as a device_port, in which case a
+                 port will be dynamically assigned on the device. You can
+                 get the number of the assigned port using the
+                 DevicePortForHostPort method.
+      device: A DeviceUtils instance.
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+
+    Raises:
+      Exception on failure to forward the port.
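+
+    Example (a minimal sketch; port numbers are illustrative):
+      # Forward device port 8000 to host port 8000, and let the device
+      # dynamically assign a port to pair with host port 9000.
+      Forwarder.Map([(8000, 8000), (0, 9000)], device)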
+    """
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    if not tool:
+      tool = valgrind_tools.CreateTool(None, device)
+    with _FileLock(Forwarder._LOCK_PATH):
+      instance = Forwarder._GetInstanceLocked(tool)
+      instance._InitDeviceLocked(device, tool)
+
+      device_serial = str(device)
+      redirection_commands = [
+          ['--serial-id=' + device_serial, '--map', str(device_port),
+           str(host_port)] for device_port, host_port in port_pairs]
+      logging.info('Forwarding using commands: %s', redirection_commands)
+
+      for redirection_command in redirection_commands:
+        try:
+          (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+              [instance._host_forwarder_path] + redirection_command)
+        except OSError as e:
+          if e.errno == 2:
+            raise Exception('Unable to start host forwarder. Make sure you have'
+                            ' built host_forwarder.')
+          else: raise
+        if exit_code != 0:
+          Forwarder._KillDeviceLocked(device, tool)
+          raise Exception('%s exited with %d:\n%s' % (
+              instance._host_forwarder_path, exit_code, '\n'.join(output)))
+        tokens = output.split(':')
+        if len(tokens) != 2:
+          raise Exception('Unexpected host forwarder output "%s", '
+                          'expected "device_port:host_port"' % output)
+        device_port = int(tokens[0])
+        host_port = int(tokens[1])
+        serial_with_port = (device_serial, device_port)
+        instance._device_to_host_port_map[serial_with_port] = host_port
+        instance._host_to_device_port_map[host_port] = serial_with_port
+        logging.info('Forwarding device port: %d to host port: %d.',
+                     device_port, host_port)
+
+  @staticmethod
+  def UnmapDevicePort(device_port, device):
+    """Unmaps a previously forwarded device port.
+
+    Args:
+      device: A DeviceUtils instance.
+      device_port: A previously forwarded port (through Map()).
+    """
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    with _FileLock(Forwarder._LOCK_PATH):
+      Forwarder._UnmapDevicePortLocked(device_port, device)
+
+  @staticmethod
+  def UnmapAllDevicePorts(device):
+    """Unmaps all the previously forwarded ports for the provided device.
+
+    Args:
+      device: A DeviceUtils instance.
+    """
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    with _FileLock(Forwarder._LOCK_PATH):
+      if not Forwarder._instance:
+        return
+      adb_serial = str(device)
+      if adb_serial not in Forwarder._instance._initialized_devices:
+        return
+      port_map = Forwarder._GetInstanceLocked(
+          None)._device_to_host_port_map
+      for (device_serial, device_port) in port_map.keys():
+        if adb_serial == device_serial:
+          Forwarder._UnmapDevicePortLocked(device_port, device)
+      # There are no more ports mapped, kill the device_forwarder.
+      tool = valgrind_tools.CreateTool(None, device)
+      Forwarder._KillDeviceLocked(device, tool)
+
+  @staticmethod
+  def DevicePortForHostPort(host_port):
+    """Returns the device port that corresponds to a given host port."""
+    with _FileLock(Forwarder._LOCK_PATH):
+      (_device_serial, device_port) = Forwarder._GetInstanceLocked(
+          None)._host_to_device_port_map.get(host_port)
+      return device_port
+
+  @staticmethod
+  def RemoveHostLog():
+    if os.path.exists(Forwarder._HOST_FORWARDER_LOG):
+      os.unlink(Forwarder._HOST_FORWARDER_LOG)
+
+  @staticmethod
+  def GetHostLog():
+    if not os.path.exists(Forwarder._HOST_FORWARDER_LOG):
+      return ''
+    with open(Forwarder._HOST_FORWARDER_LOG, 'r') as f:
+      return f.read()
+
+  @staticmethod
+  def _GetInstanceLocked(tool):
+    """Returns the singleton instance.
+
+    Note that the global lock must be acquired before calling this method.
+
+    Args:
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+    """
+    if not Forwarder._instance:
+      Forwarder._instance = Forwarder(tool)
+    return Forwarder._instance
+
+  def __init__(self, tool):
+    """Constructs a new instance of Forwarder.
+
+    Note that Forwarder is a singleton, so this constructor should be
+    called only once.
+
+    Args:
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+    """
+    assert not Forwarder._instance
+    self._tool = tool
+    self._initialized_devices = set()
+    self._device_to_host_port_map = dict()
+    self._host_to_device_port_map = dict()
+    self._host_forwarder_path = os.path.join(
+        constants.GetOutDirectory(), 'host_forwarder')
+    assert os.path.exists(self._host_forwarder_path), 'Please build forwarder2'
+    self._device_forwarder_path_on_host = os.path.join(
+        constants.GetOutDirectory(), 'forwarder_dist')
+    self._InitHostLocked()
+
+  @staticmethod
+  def _UnmapDevicePortLocked(device_port, device):
+    """Internal method used by UnmapDevicePort().
+
+    Note that the global lock must be acquired before calling this method.
+    """
+    instance = Forwarder._GetInstanceLocked(None)
+    serial = str(device)
+    serial_with_port = (serial, device_port)
+    if not serial_with_port in instance._device_to_host_port_map:
+      logging.error('Trying to unmap non-forwarded port %d' % device_port)
+      return
+    redirection_command = ['--serial-id=' + serial, '--unmap', str(device_port)]
+    (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+        [instance._host_forwarder_path] + redirection_command)
+    if exit_code != 0:
+      logging.error('%s exited with %d:\n%s' % (
+          instance._host_forwarder_path, exit_code, '\n'.join(output)))
+    host_port = instance._device_to_host_port_map[serial_with_port]
+    del instance._device_to_host_port_map[serial_with_port]
+    del instance._host_to_device_port_map[host_port]
+
+  @staticmethod
+  def _GetPidForLock():
+    """Returns the PID used for host_forwarder initialization.
+
+    In case multi-process sharding is used, the PID of the "sharder" is used.
+    The "sharder" is the initial process that forks that is the parent process.
+    By default, multi-processing is not used. In that case the PID of the
+    current process is returned.
+    """
+    use_multiprocessing = Forwarder._MULTIPROCESSING_ENV_VAR in os.environ
+    return os.getpgrp() if use_multiprocessing else os.getpid()
+
+  def _InitHostLocked(self):
+    """Initializes the host forwarder daemon.
+
+    Note that the global lock must be acquired before calling this method. This
+    method kills any existing host_forwarder process that could be stale.
+    """
+    # See if the host_forwarder daemon was already initialized by a concurrent
+    # process or thread (in case multi-process sharding is not used).
+    pid_for_lock = Forwarder._GetPidForLock()
+    fd = os.open(Forwarder._LOCK_PATH, os.O_RDWR | os.O_CREAT)
+    with os.fdopen(fd, 'r+') as pid_file:
+      pid_with_start_time = pid_file.readline()
+      if pid_with_start_time:
+        (pid, process_start_time) = pid_with_start_time.split(':')
+        if pid == str(pid_for_lock):
+          if process_start_time == str(_GetProcessStartTime(pid_for_lock)):
+            return
+      self._KillHostLocked()
+      pid_file.seek(0)
+      pid_file.write(
+          '%s:%s' % (pid_for_lock, str(_GetProcessStartTime(pid_for_lock))))
+
+  def _InitDeviceLocked(self, device, tool):
+    """Initializes the device_forwarder daemon for a specific device (once).
+
+    Note that the global lock must be acquired before calling this method. This
+    method kills any existing device_forwarder daemon on the device that could
+    be stale, pushes the latest version of the daemon (to the device) and starts
+    it.
+
+    Args:
+      device: A DeviceUtils instance.
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+    """
+    device_serial = str(device)
+    if device_serial in self._initialized_devices:
+      return
+    Forwarder._KillDeviceLocked(device, tool)
+    device.PushChangedFiles(
+        self._device_forwarder_path_on_host,
+        Forwarder._DEVICE_FORWARDER_FOLDER)
+    cmd = '%s %s' % (tool.GetUtilWrapper(), Forwarder._DEVICE_FORWARDER_PATH)
+    (exit_code, output) = device.old_interface.GetAndroidToolStatusAndOutput(
+        cmd, lib_path=Forwarder._DEVICE_FORWARDER_FOLDER)
+    if exit_code != 0:
+      raise Exception(
+          'Failed to start device forwarder:\n%s' % '\n'.join(output))
+    self._initialized_devices.add(device_serial)
+
+  def _KillHostLocked(self):
+    """Kills the forwarder process running on the host.
+
+    Note that the global lock must be acquired before calling this method.
+    """
+    logging.info('Killing host_forwarder.')
+    (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+        [self._host_forwarder_path, '--kill-server'])
+    if exit_code != 0:
+      (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+          ['pkill', '-9', 'host_forwarder'])
+      if exit_code != 0:
+        raise Exception('%s exited with %d:\n%s' % (
+              self._host_forwarder_path, exit_code, '\n'.join(output)))
+
+  @staticmethod
+  def _KillDeviceLocked(device, tool):
+    """Kills the forwarder process running on the device.
+
+    Note that the global lock must be acquired before calling this method.
+
+    Args:
+      device: Instance of DeviceUtils for talking to the device.
+      tool: Wrapper tool (e.g. valgrind) that can be used to execute the device
+            forwarder (see valgrind_tools.py).
+    """
+    logging.info('Killing device_forwarder.')
+    Forwarder._instance._initialized_devices.discard(str(device))
+    if not device.FileExists(Forwarder._DEVICE_FORWARDER_PATH):
+      return
+
+    cmd = '%s %s --kill-server' % (tool.GetUtilWrapper(),
+                                   Forwarder._DEVICE_FORWARDER_PATH)
+    device.old_interface.GetAndroidToolStatusAndOutput(
+        cmd, lib_path=Forwarder._DEVICE_FORWARDER_FOLDER)
diff --git a/build/android/pylib/gtest/__init__.py b/build/android/pylib/gtest/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/gtest/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/gtest/filter/OWNERS b/build/android/pylib/gtest/filter/OWNERS
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/build/android/pylib/gtest/filter/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/build/android/pylib/gtest/filter/base_unittests_disabled b/build/android/pylib/gtest/filter/base_unittests_disabled
new file mode 100644
index 0000000..a64dd57
--- /dev/null
+++ b/build/android/pylib/gtest/filter/base_unittests_disabled
@@ -0,0 +1,25 @@
+# List of suppressions
+
+# Android will not support StackTrace.
+StackTrace.*
+#
+# Sometimes this is automatically generated by run_tests.py
+VerifyPathControlledByUserTest.Symlinks
+
+# http://crbug.com/138845
+MessagePumpLibeventTest.TestWatchingFromBadThread
+
+StringPrintfTest.StringPrintfMisc
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringPrintfBounds
+ProcessUtilTest.GetAppOutputRestrictedSIGPIPE
+# TODO(jrg): Fails on bots.  Works locally.  Figure out why.  2/6/12
+FieldTrialTest.*
+# Flaky?
+ScopedJavaRefTest.RefCounts
+# Death tests are not supported with apks.
+*DeathTest*
+
+# http://crbug.com/245043
+StackContainer.BufferAlignment
diff --git a/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled b/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
new file mode 100644
index 0000000..85e8fd6
--- /dev/null
+++ b/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
@@ -0,0 +1,10 @@
+# Additional list of suppressions from emulator
+#
+# Automatically generated by run_tests.py
+PathServiceTest.Get
+SharedMemoryTest.OpenClose
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringPrintfBounds
+StringPrintfTest.StringPrintfMisc
+VerifyPathControlledByUserTest.Symlinks
diff --git a/build/android/pylib/gtest/filter/blink_heap_unittests_disabled b/build/android/pylib/gtest/filter/blink_heap_unittests_disabled
new file mode 100644
index 0000000..7a43fb1
--- /dev/null
+++ b/build/android/pylib/gtest/filter/blink_heap_unittests_disabled
@@ -0,0 +1,2 @@
+# List of suppressions
+
diff --git a/build/android/pylib/gtest/filter/breakpad_unittests_disabled b/build/android/pylib/gtest/filter/breakpad_unittests_disabled
new file mode 100644
index 0000000..cefc64f
--- /dev/null
+++ b/build/android/pylib/gtest/filter/breakpad_unittests_disabled
@@ -0,0 +1,9 @@
+FileIDStripTest.StripSelf
+# crbug.com/303960
+ExceptionHandlerTest.InstructionPointerMemoryNullPointer
+# crbug.com/171419
+MinidumpWriterTest.MappingInfoContained
+# crbug.com/310088
+MinidumpWriterTest.MinidumpSizeLimit
+# crbug.com/375838
+ElfCoreDumpTest.ValidCoreFile
diff --git a/build/android/pylib/gtest/filter/content_browsertests_disabled b/build/android/pylib/gtest/filter/content_browsertests_disabled
new file mode 100644
index 0000000..748f680
--- /dev/null
+++ b/build/android/pylib/gtest/filter/content_browsertests_disabled
@@ -0,0 +1,125 @@
+# List of suppressions
+# Timeouts
+Http/MediaTest.*
+File/MediaTest.*
+WorkerTest.*
+MediaTest.*
+MediaSourceTest.ConfigChangeVideo
+WebGLConformanceTest.*
+MessagePortTest.Tests
+CrossPlatformAccessibilityBrowserTest.*
+DatabaseTest.*
+ResourceDispatcherHostBrowserTest.SyncXMLHttpRequest_DuringUnload
+
+# Crashes
+ResourceDispatcherHostBrowserTest.CrossSiteAfterCrash
+RenderFrameHostManagerTest.IgnoreRendererDebugURLsWhenCrashed
+RenderFrameHostManagerTest.ProcessExitWithSwappedOutViews
+
+# Failures
+RenderViewHostTest.BaseURLParam
+OffTheRecordClipboardTest.ClearContentData
+GpuPixelBrowserTest.*
+FileSystemBrowserTestWithLowQuota.QuotaTest
+ChildProcessSecurityPolicyInProcessBrowserTest.NoLeak
+
+# Needs to start the test server before
+# BrowserTestBase::SetUpCommandLine is called, but on Android
+# the test server needs to be started in SetUpOnMainThread().
+SecurityExploitBrowserTest.SetWebUIProperty
+
+# Plugins are not supported.
+BrowserPluginThreadedCompositorPixelTest.*
+BrowserPluginHostTest.*
+BrowserPluginTest.*
+PluginTest.*
+
+# These used to be test_shell_tests and were ported to content_browsertests in
+# https://codereview.chromium.org/14304004/. Investigate more.
+DomSerializerTests.*
+
+# http://crbug.com/297230
+DumpAccessibilityTreeTest.AccessibilityAriaLevel
+DumpAccessibilityTreeTest.AccessibilityAriaProgressbar
+DumpAccessibilityTreeTest.AccessibilityListMarkers
+DumpAccessibilityTreeTest.AccessibilityUl
+DumpAccessibilityTreeTest.AccessibilityCanvas
+RendererAccessibilityTest.DetachAccessibilityObject
+RendererAccessibilityTest.EditableTextModeFocusEvents
+DumpAccessibilityTreeTest.AccessibilityDialog
+DumpAccessibilityTreeTest.AccessibilityModalDialogClosed
+DumpAccessibilityTreeTest.AccessibilityModalDialogInIframeOpened
+RendererAccessibilityTest.EventOnObjectNotInTree
+
+# http://crbug.com/187500
+RenderViewImplTest.*
+RendererAccessibilityTest.EditableTextModeFocusNotifications
+RendererAccessibilityTest.SendFullAccessibilityTreeOnReload
+RendererAccessibilityTest.HideAccessibilityObject
+RendererAccessibilityTest.ShowAccessibilityObject
+RenderWidgetTest.OnMsgPaintAtSize
+PasswordFormConversionUtilsTest.ValidWebFormElementToPasswordForm
+PasswordFormConversionUtilsTest.InvalidWebFormElementToPasswordForm
+
+# http://crbug.com/215894
+DownloadContentTest.CancelInterruptedDownload
+DownloadContentTest.CancelResumingDownload
+DownloadContentTest.RemoveDownload
+DownloadContentTest.RemoveResumingDownload
+DownloadContentTest.ResumeInterruptedDownload
+DownloadContentTest.ResumeInterruptedDownloadNoRange
+DownloadContentTest.ResumeInterruptedDownloadNoVerifiers
+DownloadContentTest.ResumeInterruptedDownloadBadPrecondition
+DownloadContentTest.ResumeWithDeletedFile
+
+# http://crbug.com/224134
+RenderWidgetHostBrowserTest.GetSnapshotFromRendererTest
+
+# http://crbug.com/386227
+IndexedDBBrowserTest.VersionChangeCrashResilience
+
+# http://crbug.com/386222
+IndexedDBBrowserTest.DoesntHangTest
+
+# http://crbug.com/233118
+IndexedDBBrowserTest.NullKeyPathPersistence
+
+# http://crbug.com/342525
+IndexedDBBrowserTestSingleProcess.RenderThreadShutdownTest
+
+BookmarkletTest.NonEmptyResult
+
+# http://crbug.com/256238
+SignalTest.*
+
+# http://crbug.com/311344
+BrowserGpuChannelHostFactoryTest.AlreadyEstablished
+BrowserGpuChannelHostFactoryTest.Basic
+BrowserGpuChannelHostFactoryTest.CrashAndRecover
+BrowserGpuChannelHostFactoryTest.EstablishAndTerminate
+
+# http://crbug.com/338011
+TouchInputBrowserTest.MultiPointTouchPress
+TouchInputBrowserTest.TouchHandlerConsume
+TouchInputBrowserTest.TouchHandlerNoConsume
+TouchInputBrowserTest.TouchNoHandler
+TouchActionBrowserTest.*
+
+# http://crbug.com/338408
+TracingControllerTest.EnableCaptureAndDisableMonitoring
+
+# http://crbug.com/338023
+SecurityExploitBrowserTest.AttemptDuplicateRenderViewHost
+SecurityExploitBrowserTest.AttemptDuplicateRenderWidgetHost
+
+# http://crbug.com/338411
+FrameTreeBrowserTest.NavigateWithLeftoverFrames
+
+# http://crbug.com/338421
+GinBrowserTest.GinAndGarbageCollection
+
+# http://crbug.com/341995
+TracingControllerTest.EnableCaptureAndDisableMonitoringWithFilePath
+
+# http://crbug.com/343604
+MSE_ClearKey/EncryptedMediaTest.ConfigChangeVideo/0
diff --git a/build/android/pylib/gtest/filter/content_unittests_disabled b/build/android/pylib/gtest/filter/content_unittests_disabled
new file mode 100644
index 0000000..4427849
--- /dev/null
+++ b/build/android/pylib/gtest/filter/content_unittests_disabled
@@ -0,0 +1,16 @@
+# List of suppressions
+
+# crbug.com/104950
+DeviceOrientationProviderTest.ObserverNotRemoved
+DeviceOrientationProviderTest.StartFailing
+# crbug.com/139095
+RenderWidgetTest.OnMsgPaintAtSize
+# crbug.com/147549
+GamepadProviderTest.PollingAccess
+PepperGamepadHostTest.WaitForReply
+# crbug.com/159234
+WebContentsVideoCaptureDeviceTest.*
+# crbug.com/167045
+ContentViewPopupZoomerTest.testPopupZoomerShowsUp
+# crbug.com/254034
+PageStateSerializationTest.BackwardsCompat_v11
diff --git a/build/android/pylib/gtest/filter/gfx_unittests_disabled b/build/android/pylib/gtest/filter/gfx_unittests_disabled
new file mode 100644
index 0000000..ade8b38
--- /dev/null
+++ b/build/android/pylib/gtest/filter/gfx_unittests_disabled
@@ -0,0 +1,27 @@
+CanvasTest.StringSizeEmptyString
+CanvasTest.StringWidth
+FontListTest.FontDescString_Derive
+FontListTest.FontDescString_FromFont
+FontListTest.FontDescString_FromFontNamesStyleAndSize
+FontListTest.FontDescString_FromFontVector
+FontListTest.FontDescString_FromFontWithNonNormalStyle
+FontListTest.Fonts_Derive
+FontListTest.Fonts_DeriveWithSizeDelta
+FontListTest.Fonts_DescStringWithStyleInFlexibleFormat_RoundTrip
+FontListTest.Fonts_FontVector_RoundTrip
+FontListTest.Fonts_FromDescString
+FontListTest.Fonts_FromDescStringInFlexibleFormat
+FontListTest.Fonts_FromDescStringWithStyleInFlexibleFormat
+FontListTest.Fonts_FromFont
+FontListTest.Fonts_FromFontVector
+FontListTest.Fonts_FromFontWithNonNormalStyle
+FontListTest.Fonts_GetHeight_GetBaseline
+FontListTest.Fonts_GetStyle
+FontTest.Ascent
+FontTest.AvgWidths
+FontTest.CapHeight
+FontTest.GetActualFontNameForTesting
+FontTest.Height
+FontTest.LoadArial
+FontTest.LoadArialBold
+TextUtilsTest.GetStringWidth
diff --git a/build/android/pylib/gtest/filter/ipc_tests_disabled b/build/android/pylib/gtest/filter/ipc_tests_disabled
new file mode 100644
index 0000000..e8d0691
--- /dev/null
+++ b/build/android/pylib/gtest/filter/ipc_tests_disabled
@@ -0,0 +1,18 @@
+# Times out
+IPCSyncChannelTest.ChattyServer
+
+# MultiProcessTest related failures. These tests fail if DCHECK is enabled.
+IPCChannelPosixTest.AdvancedConnected
+IPCChannelPosixTest.ResetState
+IPCChannelPosixTest.MultiConnection
+IPCFuzzingTest.SanityTest
+IPCFuzzingTest.MsgBadPayloadArgs
+IPCFuzzingTest.MsgBadPayloadShort
+IPCSendFdsTest.DescriptorTest
+IPCChannelProxyTest.MessageClassFilters
+IPCChannelProxyTest.GlobalAndMessageClassFilters
+IPCChannelProxyTest.FilterRemoval
+IPCChannelTest.ChannelTest
+IPCChannelTest.ChannelProxyTest
+IPCChannelTest.SendMessageInChannelConnected
+SyncSocketTest.SanityTest
diff --git a/build/android/pylib/gtest/filter/media_unittests_disabled b/build/android/pylib/gtest/filter/media_unittests_disabled
new file mode 100644
index 0000000..ed3b9aa
--- /dev/null
+++ b/build/android/pylib/gtest/filter/media_unittests_disabled
@@ -0,0 +1,8 @@
+# List of suppressions
+
+# Death tests are not supported on APK
+# http://crbug.com/138855
+CompositeFilterDeathTest.*
+
+# http://crbug.com/138833
+AesDecryptorTest.*
diff --git a/build/android/pylib/gtest/filter/net_unittests_disabled b/build/android/pylib/gtest/filter/net_unittests_disabled
new file mode 100644
index 0000000..28e1db1
--- /dev/null
+++ b/build/android/pylib/gtest/filter/net_unittests_disabled
@@ -0,0 +1,41 @@
+# List of suppressions.
+
+# Bug: 171812
+MultiThreadedCertVerifierTest.CancelRequest
+
+# Bug: 380340
+SSLServerSocketTest.Handshake
+
+PythonUtils.PythonRunTime
+URLRequestTestHTTP.HTTPSToHTTPRedirectNoRefererTest
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/0
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/1
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/2
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/0
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/1
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/2
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/0
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/1
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/2
+# Fail only on bots.
+HttpCache.RangeGET_Cancel
+HttpCache.RangeGET_Cancel2
+HttpCache.RangeGET_OK
+HttpCache.RangeGET_Previous200
+HttpCache.RangeGET_Revalidate2
+HttpCache.RangeGET_SyncOK
+HttpCache.TypicalGET_ConditionalRequest
+# Death tests are not supported with apks.
+*DeathTest*
+# These are death tests and thus also disabled.
+PrioritizedDispatcherTest.CancelNull
+PrioritizedDispatcherTest.CancelMissing
diff --git a/build/android/pylib/gtest/filter/sync_unit_tests_disabled b/build/android/pylib/gtest/filter/sync_unit_tests_disabled
new file mode 100644
index 0000000..cc4b72d
--- /dev/null
+++ b/build/android/pylib/gtest/filter/sync_unit_tests_disabled
@@ -0,0 +1,4 @@
+SyncHttpBridgeTest.*
+
+# crbug.com/144422
+OnDiskSyncableDirectory.FailInitialWrite
diff --git a/build/android/pylib/gtest/filter/unit_tests_disabled b/build/android/pylib/gtest/filter/unit_tests_disabled
new file mode 100644
index 0000000..6e7be1a
--- /dev/null
+++ b/build/android/pylib/gtest/filter/unit_tests_disabled
@@ -0,0 +1,125 @@
+# List of suppressions
+
+# crbug.com/139429
+BrowserMainTest.WarmConnectionFieldTrial_Invalid
+BrowserMainTest.WarmConnectionFieldTrial_Random
+BrowserMainTest.WarmConnectionFieldTrial_WarmestSocket
+
+# The UDP related tests currently do not work on Android because
+# we lack a UDP forwarder tool.
+NetworkStatsTestUDP.*
+
+# Missing test resource of 16MB.
+HistoryProfileTest.TypicalProfileVersion
+
+# crbug.com/139408
+SQLitePersistentCookieStoreTest.TestDontLoadOldSessionCookies
+SQLitePersistentCookieStoreTest.PersistIsPersistent
+
+# crbug.com/139433
+AutofillTableTest.AutofillProfile*
+AutofillTableTest.UpdateAutofillProfile
+
+# crbug.com/139400
+AutofillProfileTest.*
+CreditCardTest.SetInfoExpirationMonth
+
+# crbug.com/139398
+DownloadItemModelTest.InterruptTooltip
+
+# Tests crashing in the APK
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+DownloadItemModelTest.InterruptStatus
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+WebsiteSettingsTest.OnSiteDataAccessed
+
+# crbug.com/139423
+ValueStoreFrontendTest.GetExistingData
+
+# crbug.com/139421
+ChromeSelectFilePolicyTest.ExpectAsynchronousListenerCall
+
+# http://crbug.com/139033
+ChromeDownloadManagerDelegateTest.StartDownload_PromptAlways
+
+# Extension support is limited on Android.
+# Some of these can be enabled if we register extension related prefs in
+# browser_prefs.cc
+ExtensionTest.*
+ExtensionAPI.*
+ExtensionFileUtilTest.*
+ExtensionPermissionsTest.*
+ExtensionUnpackerTest.*
+ActiveTabTest.*
+ExtensionAppsPromo.*
+ComponentLoaderTest.*
+ExtensionFromUserScript.*
+ExtensionFromWebApp.*
+ExtensionIconManagerTest.*
+ExtensionServiceTest.*
+ExtensionServiceTestSimple.*
+ExtensionSourcePriorityTest.*
+ExtensionSpecialStoragePolicyTest.*
+ExternalPolicyProviderTest.*
+ExternalProviderImplTest.*
+MenuManagerTest.*
+PageActionControllerTest.*
+PermissionsUpdaterTest.*
+ImageLoaderTest.*
+ImageLoadingTrackerTest.*
+ScriptBadgeControllerTest.*
+ExtensionSettingsFrontendTest.*
+ExtensionSettingsSyncTest.*
+ExtensionUpdaterTest.*
+UserScriptListenerTest.*
+WebApplicationTest.GetShortcutInfoForTab
+ExtensionActionIconFactoryTest.*
+
+# crbug.com/139411
+AutocompleteProviderTest.*
+HistoryContentsProviderBodyOnlyTest.*
+HistoryContentsProviderTest.*
+HQPOrderingTest.*
+SearchProviderTest.*
+
+ProtocolHandlerRegistryTest.TestOSRegistrationFailure
+
+# crbug.com/139418
+SQLiteServerBoundCertStoreTest.TestUpgradeV1
+SQLiteServerBoundCertStoreTest.TestUpgradeV2
+
+ProfileSyncComponentsFactoryImplTest.*
+PermissionsTest.GetWarningMessages_Plugins
+ImageOperations.ResizeShouldAverageColors
+
+# crbug.com/138275
+PrerenderTest.*
+RenderWidgetTest.OnMsgPaintAtSize
+
+# crbug.com/139643
+VariationsUtilTest.DisableAfterInitialization
+VariationsUtilTest.AssociateGoogleVariationID
+VariationsUtilTest.NoAssociation
+
+# crbug.com/141473
+AutofillManagerTest.UpdatePasswordSyncState
+AutofillManagerTest.UpdatePasswordGenerationState
+
+# crbug.com/144227
+ExtensionIconImageTest.*
+
+# crbug.com/145843
+EntropyProviderTest.UseOneTimeRandomizationSHA1
+EntropyProviderTest.UseOneTimeRandomizationPermuted
+
+# crbug.com/147500
+ManifestTest.RestrictedKeys
+
+# crbug.com/152599
+SyncSearchEngineDataTypeControllerTest.*
+
+# crbug.com/256259
+DiagnosticsModelTest.RunAll
+
+# Death tests are not supported with apks.
+*DeathTest*
diff --git a/build/android/pylib/gtest/filter/webkit_unit_tests_disabled b/build/android/pylib/gtest/filter/webkit_unit_tests_disabled
new file mode 100644
index 0000000..50292aa
--- /dev/null
+++ b/build/android/pylib/gtest/filter/webkit_unit_tests_disabled
@@ -0,0 +1,28 @@
+# List of suppressions
+
+# crbug.com/159935
+WebCompositorInputHandlerImplTest.gestureFlingAnimates
+WebCompositorInputHandlerImplTest.gestureFlingTransferResets
+WebPageSerializerTest.HTMLNodes
+
+# crbug.com/241730
+ScrollAnimatorNoneTest.CurveMathQuartic
+ScrollAnimatorNoneTest.ScrollDownToBumper
+ScrollAnimatorNoneTest.ScrollQuadraticSmoothed
+ScrollAnimatorNoneTest.ScrollTwiceCubic
+ScrollAnimatorNoneTest.VaryingInputsEquivalencyCoastSteep
+WebViewTest.VisitedLinkCrash
+
+# Disabled until blink roll r151682
+DeferredImageDecoderTest.drawScaledIntoSkPicture
+
+# Disabled until blink roll r173540
+DeferredImageDecoderTest.decodeOnOtherThread
+DeferredImageDecoderTest.drawIntoSkPicture
+DeferredImageDecoderTest.drawIntoSkPictureProgressive
+
+# crbug.com/320005
+CoreAnimationCompositorAnimationsTest.ConvertTimingForCompositorIterationCount
+
+# crbug.com/412145
+TouchActionTest.Pan
diff --git a/build/android/pylib/gtest/gtest_config.py b/build/android/pylib/gtest/gtest_config.py
new file mode 100644
index 0000000..3b51a42
--- /dev/null
+++ b/build/android/pylib/gtest/gtest_config.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration file for android gtest suites."""
+
+# Add new suites here before upgrading them to the stable list below.
+EXPERIMENTAL_TEST_SUITES = [
+    'content_gl_tests',
+    'heap_profiler_unittests',
+    'devtools_bridge_tests',
+]
+
+# Do not modify this list without approval of an android owner.
+# This list determines which suites are run by default, both for local
+# testing and on android trybots running on commit-queue.
+STABLE_TEST_SUITES = [
+    'android_webview_unittests',
+    'base_unittests',
+    'breakpad_unittests',
+    'cc_unittests',
+    'components_unittests',
+    'content_browsertests',
+    'content_unittests',
+    'events_unittests',
+    'gl_tests',
+    'gpu_unittests',
+    'ipc_tests',
+    'media_unittests',
+    'net_unittests',
+    'sandbox_linux_unittests',
+    'sql_unittests',
+    'sync_unit_tests',
+    'ui_base_unittests',
+    'ui_unittests',
+    'unit_tests',
+    'webkit_unit_tests',
+]
+
+# Tests fail in component=shared_library build, which is required for ASan.
+# http://crbug.com/344868
+ASAN_EXCLUDED_TEST_SUITES = [
+    'breakpad_unittests',
+    'sandbox_linux_unittests'
+]
diff --git a/build/android/pylib/gtest/setup.py b/build/android/pylib/gtest/setup.py
new file mode 100644
index 0000000..1e52d3b
--- /dev/null
+++ b/build/android/pylib/gtest/setup.py
@@ -0,0 +1,342 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for GTests."""
+# pylint: disable=W0212
+
+import fnmatch
+import glob
+import logging
+import os
+import shutil
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+
+from pylib.base import base_test_result
+from pylib.base import test_dispatcher
+from pylib.gtest import test_package_apk
+from pylib.gtest import test_package_exe
+from pylib.gtest import test_runner
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                             'common'))
+import unittest_util # pylint: disable=F0401
+
+
+_ISOLATE_FILE_PATHS = {
+    'base_unittests': 'base/base_unittests.isolate',
+    'blink_heap_unittests':
+      'third_party/WebKit/Source/platform/heap/BlinkHeapUnitTests.isolate',
+    'breakpad_unittests': 'breakpad/breakpad_unittests.isolate',
+    'cc_perftests': 'cc/cc_perftests.isolate',
+    'components_unittests': 'components/components_unittests.isolate',
+    'content_browsertests': 'content/content_browsertests.isolate',
+    'content_unittests': 'content/content_unittests.isolate',
+    'media_perftests': 'media/media_perftests.isolate',
+    'media_unittests': 'media/media_unittests.isolate',
+    'net_unittests': 'net/net_unittests.isolate',
+    'sql_unittests': 'sql/sql_unittests.isolate',
+    'ui_base_unittests': 'ui/base/ui_base_tests.isolate',
+    'ui_unittests': 'ui/base/ui_base_tests.isolate',
+    'unit_tests': 'chrome/unit_tests.isolate',
+    'webkit_unit_tests':
+      'third_party/WebKit/Source/web/WebKitUnitTests.isolate',
+}
+
+# Used for filtering large data deps at a finer grain than what's allowed in
+# isolate files since pushing deps to devices is expensive.
+# Wildcards are allowed.
+_DEPS_EXCLUSION_LIST = [
+    'chrome/test/data/extensions/api_test',
+    'chrome/test/data/extensions/secure_shell',
+    'chrome/test/data/firefox*',
+    'chrome/test/data/gpu',
+    'chrome/test/data/image_decoding',
+    'chrome/test/data/import',
+    'chrome/test/data/page_cycler',
+    'chrome/test/data/perf',
+    'chrome/test/data/pyauto_private',
+    'chrome/test/data/safari_import',
+    'chrome/test/data/scroll',
+    'chrome/test/data/third_party',
+    'third_party/hunspell_dictionaries/*.dic',
+    # crbug.com/258690
+    'webkit/data/bmp_decoder',
+    'webkit/data/ico_decoder',
+]
+
+_ISOLATE_SCRIPT = os.path.join(
+    constants.DIR_SOURCE_ROOT, 'tools', 'swarming_client', 'isolate.py')
+
+
+def _GenerateDepsDirUsingIsolate(suite_name, isolate_file_path=None):
+  """Generate the dependency dir for the test suite using isolate.
+
+  Args:
+    suite_name: Name of the test suite (e.g. base_unittests).
+    isolate_file_path: .isolate file path to use. If there is a default .isolate
+                       file path for the suite_name, this will override it.
+  """
+  if os.path.isdir(constants.ISOLATE_DEPS_DIR):
+    shutil.rmtree(constants.ISOLATE_DEPS_DIR)
+
+  if isolate_file_path:
+    if os.path.isabs(isolate_file_path):
+      isolate_abs_path = isolate_file_path
+    else:
+      isolate_abs_path = os.path.join(constants.DIR_SOURCE_ROOT,
+                                      isolate_file_path)
+  else:
+    isolate_rel_path = _ISOLATE_FILE_PATHS.get(suite_name)
+    if not isolate_rel_path:
+      logging.info('Did not find an isolate file for the test suite.')
+      return
+    isolate_abs_path = os.path.join(constants.DIR_SOURCE_ROOT, isolate_rel_path)
+
+  isolated_abs_path = os.path.join(
+      constants.GetOutDirectory(), '%s.isolated' % suite_name)
+  assert os.path.exists(isolate_abs_path), 'Cannot find %s' % isolate_abs_path
+  # This needs to be kept in sync with the cmd line options for isolate.py
+  # in src/build/isolate.gypi.
+  isolate_cmd = [
+      'python', _ISOLATE_SCRIPT,
+      'remap',
+      '--isolate', isolate_abs_path,
+      '--isolated', isolated_abs_path,
+      '--outdir', constants.ISOLATE_DEPS_DIR,
+
+      '--path-variable', 'DEPTH', constants.DIR_SOURCE_ROOT,
+      '--path-variable', 'PRODUCT_DIR', constants.GetOutDirectory(),
+
+      '--config-variable', 'OS', 'android',
+      '--config-variable', 'CONFIGURATION_NAME', constants.GetBuildType(),
+      '--config-variable', 'asan', '0',
+      '--config-variable', 'chromeos', '0',
+      '--config-variable', 'component', 'static_library',
+      '--config-variable', 'fastbuild', '0',
+      '--config-variable', 'icu_use_data_file_flag', '1',
+      '--config-variable', 'lsan', '0',
+      # TODO(maruel): This may not be always true.
+      '--config-variable', 'target_arch', 'arm',
+      '--config-variable', 'use_openssl', '0',
+      '--config-variable', 'use_ozone', '0',
+  ]
+  assert not cmd_helper.RunCmd(isolate_cmd)
+
+  # We rely on the remap command preserving timestamps (it hardlinks the
+  # files). Otherwise, all the data would be re-pushed to the device once we
+  # move to using time diffs instead of md5sums. Perform a sanity check here.
+  for root, _, filenames in os.walk(constants.ISOLATE_DEPS_DIR):
+    if filenames:
+      linked_file = os.path.join(root, filenames[0])
+      orig_file = os.path.join(
+          constants.DIR_SOURCE_ROOT,
+          os.path.relpath(linked_file, constants.ISOLATE_DEPS_DIR))
+      if os.stat(linked_file).st_ino == os.stat(orig_file).st_ino:
+        break
+      else:
+        raise Exception('isolate remap command did not use hardlinks.')
+
+  # Delete excluded files as defined by _DEPS_EXCLUSION_LIST.
+  old_cwd = os.getcwd()
+  try:
+    os.chdir(constants.ISOLATE_DEPS_DIR)
+    excluded_paths = [x for y in _DEPS_EXCLUSION_LIST for x in glob.glob(y)]
+    if excluded_paths:
+      logging.info('Excluding the following from dependency list: %s',
+                   excluded_paths)
+    for p in excluded_paths:
+      if os.path.isdir(p):
+        shutil.rmtree(p)
+      else:
+        os.remove(p)
+  finally:
+    os.chdir(old_cwd)
+
+  # On Android, all pak files need to be in the top-level 'paks' directory.
+  paks_dir = os.path.join(constants.ISOLATE_DEPS_DIR, 'paks')
+  os.mkdir(paks_dir)
+
+  deps_out_dir = os.path.join(
+      constants.ISOLATE_DEPS_DIR,
+      os.path.relpath(os.path.join(constants.GetOutDirectory(), os.pardir),
+                      constants.DIR_SOURCE_ROOT))
+  for root, _, filenames in os.walk(deps_out_dir):
+    for filename in fnmatch.filter(filenames, '*.pak'):
+      shutil.move(os.path.join(root, filename), paks_dir)
+
+  # Move everything in PRODUCT_DIR to top level.
+  deps_product_dir = os.path.join(deps_out_dir, constants.GetBuildType())
+  if os.path.isdir(deps_product_dir):
+    for p in os.listdir(deps_product_dir):
+      shutil.move(os.path.join(deps_product_dir, p), constants.ISOLATE_DEPS_DIR)
+    os.rmdir(deps_product_dir)
+    os.rmdir(deps_out_dir)
+
+
+def _GetDisabledTestsFilterFromFile(suite_name):
+  """Returns a gtest filter based on the *_disabled file.
+
+  Args:
+    suite_name: Name of the test suite (e.g. base_unittests).
+
+  Returns:
+    A gtest filter which excludes disabled tests.
+    Example: '*-StackTrace.*:StringPrintfTest.StringPrintfMisc'
+  """
+  filter_file_path = os.path.join(
+      os.path.abspath(os.path.dirname(__file__)),
+      'filter', '%s_disabled' % suite_name)
+
+  if not filter_file_path or not os.path.exists(filter_file_path):
+    logging.info('No filter file found at %s', filter_file_path)
+    return '*'
+
+  filters = [x for x in [x.strip() for x in file(filter_file_path).readlines()]
+             if x and x[0] != '#']
+  disabled_filter = '*-%s' % ':'.join(filters)
+  logging.info('Applying filter "%s" obtained from %s',
+               disabled_filter, filter_file_path)
+  return disabled_filter
+
+
+def _GetTests(test_options, test_package, devices):
+  """Get a list of tests.
+
+  Args:
+    test_options: A GTestOptions object.
+    test_package: A TestPackageApk object.
+    devices: A list of attached devices.
+
+  Returns:
+    A list of all the tests in the test suite.
+  """
+  def TestListerRunnerFactory(device, _shard_index):
+    class TestListerRunner(test_runner.TestRunner):
+      def RunTest(self, _test):
+        result = base_test_result.BaseTestResult(
+            'gtest_list_tests', base_test_result.ResultType.PASS)
+        self.test_package.Install(self.device)
+        result.test_list = self.test_package.GetAllTests(self.device)
+        results = base_test_result.TestRunResults()
+        results.AddResult(result)
+        return results, None
+    return TestListerRunner(test_options, device, test_package)
+
+  results, _no_retry = test_dispatcher.RunTests(
+      ['gtest_list_tests'], TestListerRunnerFactory, devices)
+  tests = []
+  for r in results.GetAll():
+    tests.extend(r.test_list)
+  return tests
+
+
+def _FilterTestsUsingPrefixes(all_tests, pre=False, manual=False):
+  """Removes tests with disabled prefixes.
+
+  Args:
+    all_tests: List of tests to filter.
+    pre: If True, include tests with PRE_ prefix.
+    manual: If True, include tests with MANUAL_ prefix.
+
+  Returns:
+    List of tests remaining.
+  """
+  filtered_tests = []
+  filter_prefixes = ['DISABLED_', 'FLAKY_', 'FAILS_']
+
+  if not pre:
+    filter_prefixes.append('PRE_')
+
+  if not manual:
+    filter_prefixes.append('MANUAL_')
+
+  for t in all_tests:
+    test_case, test = t.split('.', 1)
+    if not any([test_case.startswith(prefix) or test.startswith(prefix) for
+                prefix in filter_prefixes]):
+      filtered_tests.append(t)
+  return filtered_tests
+
+
+def _FilterDisabledTests(tests, suite_name, has_gtest_filter):
+  """Removes disabled tests from |tests|.
+
+  Applies the following filters in order:
+    1. Remove tests with disabled prefixes.
+    2. Remove tests specified in the *_disabled files in the 'filter' dir
+
+  Args:
+    tests: List of tests.
+    suite_name: Name of the test suite (e.g. base_unittests).
+    has_gtest_filter: Whether a gtest_filter is provided.
+
+  Returns:
+    List of tests remaining.
+  """
+  tests = _FilterTestsUsingPrefixes(
+      tests, has_gtest_filter, has_gtest_filter)
+  tests = unittest_util.FilterTestNames(
+      tests, _GetDisabledTestsFilterFromFile(suite_name))
+
+  return tests
+
+
+def Setup(test_options, devices):
+  """Create the test runner factory and tests.
+
+  Args:
+    test_options: A GTestOptions object.
+    devices: A list of attached devices.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  test_package = test_package_apk.TestPackageApk(test_options.suite_name)
+  if not os.path.exists(test_package.suite_path):
+    exe_test_package = test_package_exe.TestPackageExecutable(
+        test_options.suite_name)
+    if not os.path.exists(exe_test_package.suite_path):
+      raise Exception(
+          'Did not find %s target. Ensure it has been built.\n'
+          '(not found at %s or %s)'
+          % (test_options.suite_name,
+             test_package.suite_path,
+             exe_test_package.suite_path))
+    test_package = exe_test_package
+  logging.warning('Found target %s', test_package.suite_path)
+
+  _GenerateDepsDirUsingIsolate(test_options.suite_name,
+                               test_options.isolate_file_path)
+
+  tests = _GetTests(test_options, test_package, devices)
+
+  # Constructs a new TestRunner with the current options.
+  def TestRunnerFactory(device, _shard_index):
+    return test_runner.TestRunner(
+        test_options,
+        device,
+        test_package)
+
+  if test_options.run_disabled:
+    test_options = test_options._replace(
+        test_arguments=('%s --gtest_also_run_disabled_tests' %
+                        test_options.test_arguments))
+  else:
+    tests = _FilterDisabledTests(tests, test_options.suite_name,
+                                 bool(test_options.gtest_filter))
+  if test_options.gtest_filter:
+    tests = unittest_util.FilterTestNames(tests, test_options.gtest_filter)
+
+  # Coalesce unit tests into a single test per device
+  if test_options.suite_name != 'content_browsertests':
+    num_devices = len(devices)
+    tests = [':'.join(tests[i::num_devices]) for i in xrange(num_devices)]
+    tests = [t for t in tests if t]
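+    # e.g. with two devices and tests ['A.a', 'B.b', 'C.c'], this yields the
+    # per-device gtest filters ['A.a:C.c', 'B.b'].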
+
+  return (TestRunnerFactory, tests)
diff --git a/build/android/pylib/gtest/test_options.py b/build/android/pylib/gtest/test_options.py
new file mode 100644
index 0000000..6f7df0d
--- /dev/null
+++ b/build/android/pylib/gtest/test_options.py
@@ -0,0 +1,18 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the GTestOptions named tuple."""
+
+import collections
+
+GTestOptions = collections.namedtuple('GTestOptions', [
+    'tool',
+    'cleanup_test_files',
+    'push_deps',
+    'gtest_filter',
+    'run_disabled',
+    'test_arguments',
+    'timeout',
+    'isolate_file_path',
+    'suite_name'])
diff --git a/build/android/pylib/gtest/test_package.py b/build/android/pylib/gtest/test_package.py
new file mode 100644
index 0000000..a0a6b64
--- /dev/null
+++ b/build/android/pylib/gtest/test_package.py
@@ -0,0 +1,98 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class representing GTest test packages."""
+# pylint: disable=R0201
+
+
+class TestPackage(object):
+
+  """A helper base class for both APK and stand-alone executables.
+
+  Args:
+    suite_name: Name of the test suite (e.g. base_unittests).
+  """
+  def __init__(self, suite_name):
+    self.suite_name = suite_name
+
+  def ClearApplicationState(self, device):
+    """Clears the application state.
+
+    Args:
+      device: Instance of DeviceUtils.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def CreateCommandLineFileOnDevice(self, device, test_filter, test_arguments):
+    """Creates a test runner script and pushes to the device.
+
+    Args:
+      device: Instance of DeviceUtils.
+      test_filter: A test_filter flag.
+      test_arguments: Additional arguments to pass to the test binary.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def GetAllTests(self, device):
+    """Returns a list of all tests available in the test suite.
+
+    Args:
+      device: Instance of DeviceUtils.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def GetGTestReturnCode(self, _device):
+    return None
+
+  def SpawnTestProcess(self, device):
+    """Spawn the test process.
+
+    Args:
+      device: Instance of DeviceUtils.
+
+    Returns:
+      An instance of pexpect spawn class.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def Install(self, device):
+    """Install the test package to the device.
+
+    Args:
+      device: Instance of DeviceUtils.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  @staticmethod
+  def _ParseGTestListTests(raw_list):
+    """Parses a raw test list as provided by --gtest_list_tests.
+
+    Args:
+      raw_list: The raw test listing with the following format:
+
+      IPCChannelTest.
+        SendMessageInChannelConnected
+      IPCSyncChannelTest.
+        Simple
+        DISABLED_SendWithTimeoutMixedOKAndTimeout
+
+    Returns:
+      A list of all tests. For the above raw listing:
+
+      [IPCChannelTest.SendMessageInChannelConnected, IPCSyncChannelTest.Simple,
+       IPCSyncChannelTest.DISABLED_SendWithTimeoutMixedOKAndTimeout]
+    """
+    ret = []
+    current = ''
+    for test in raw_list:
+      if not test:
+        continue
+      if test[0] != ' ':
+        test_case = test.split()[0]
+        if test_case.endswith('.'):
+          current = test_case
+      elif 'YOU HAVE' not in test:
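+        # Indented lines are test names, except for gtest's trailing
+        # 'YOU HAVE n DISABLED TESTS' banner, which the guard above skips.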
+        test_name = test.split()[0]
+        ret += [current + test_name]
+    return ret
diff --git a/build/android/pylib/gtest/test_package_apk.py b/build/android/pylib/gtest/test_package_apk.py
new file mode 100644
index 0000000..429cd2b
--- /dev/null
+++ b/build/android/pylib/gtest/test_package_apk.py
@@ -0,0 +1,135 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines TestPackageApk to help run APK-based native tests."""
+# pylint: disable=W0212
+
+import logging
+import os
+import shlex
+import sys
+import tempfile
+import time
+
+from pylib import android_commands
+from pylib import constants
+from pylib import pexpect
+from pylib.device import device_errors
+from pylib.device import intent
+from pylib.gtest.test_package import TestPackage
+
+
+class TestPackageApk(TestPackage):
+  """A helper class for running APK-based native tests."""
+
+  def __init__(self, suite_name):
+    """
+    Args:
+      suite_name: Name of the test suite (e.g. base_unittests).
+    """
+    TestPackage.__init__(self, suite_name)
+    if suite_name == 'content_browsertests':
+      self.suite_path = os.path.join(
+          constants.GetOutDirectory(), 'apks', '%s.apk' % suite_name)
+      self._package_info = constants.PACKAGE_INFO['content_browsertests']
+    else:
+      self.suite_path = os.path.join(
+          constants.GetOutDirectory(), '%s_apk' % suite_name,
+          '%s-debug.apk' % suite_name)
+      self._package_info = constants.PACKAGE_INFO['gtest']
+
+  def _CreateCommandLineFileOnDevice(self, device, options):
+    command_line_file = tempfile.NamedTemporaryFile()
+    # GTest expects argv[0] to be the executable path.
+    command_line_file.write(self.suite_name + ' ' + options)
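+    # e.g. when listing tests this writes 'base_unittests --gtest_list_tests',
+    # with the suite name standing in for argv[0].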
+    command_line_file.flush()
+    device.PushChangedFiles(
+        command_line_file.name,
+        self._package_info.cmdline_file)
+
+  def _GetFifo(self):
+    # The test.fifo path is determined by:
+    # testing/android/java/src/org/chromium/native_test/
+    #     ChromeNativeTestActivity.java and
+    # testing/android/native_test_launcher.cc
+    return '/data/data/' + self._package_info.package + '/files/test.fifo'
+
+  def _ClearFifo(self, device):
+    device.RunShellCommand('rm -f ' + self._GetFifo())
+
+  def _WatchFifo(self, device, timeout, logfile=None):
+    for i in range(10):
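+      # Poll with growing sleeps (0s, 1s, ..., 9s), i.e. up to ~45s in total.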
+      if device.FileExists(self._GetFifo()):
+        logging.info('Fifo created.')
+        break
+      time.sleep(i)
+    else:
+      raise device_errors.DeviceUnreachableError(
+          'Unable to find fifo %s on device.' % self._GetFifo())
+    args = shlex.split(device.old_interface.Adb()._target_arg)
+    args += ['shell', 'cat', self._GetFifo()]
+    return pexpect.spawn('adb', args, timeout=timeout, logfile=logfile)
+
+  def _StartActivity(self, device):
+    device.StartActivity(
+        intent.Intent(package=self._package_info.package,
+                      activity=self._package_info.activity,
+                      action='android.intent.action.MAIN'),
+        # No wait since the runner waits for FIFO creation anyway.
+        blocking=False,
+        force_stop=True)
+
+  #override
+  def ClearApplicationState(self, device):
+    device.ClearApplicationState(self._package_info.package)
+    # Content shell creates a profile on the sdcard, which accumulates cache
+    # files over time.
+    if self.suite_name == 'content_browsertests':
+      try:
+        device.RunShellCommand(
+            'rm -r %s/content_shell' % device.GetExternalStoragePath(),
+            timeout=60 * 2)
+      except device_errors.CommandFailedError:
+        # TODO(jbudorick) Handle this exception appropriately once the
+        #                 conversions are done.
+        pass
+
+  #override
+  def CreateCommandLineFileOnDevice(self, device, test_filter, test_arguments):
+    self._CreateCommandLineFileOnDevice(
+        device, '--gtest_filter=%s %s' % (test_filter, test_arguments))
+
+  #override
+  def GetAllTests(self, device):
+    self._CreateCommandLineFileOnDevice(device, '--gtest_list_tests')
+    try:
+      self.tool.SetupEnvironment()
+      # Clear and start monitoring logcat.
+      self._ClearFifo(device)
+      self._StartActivity(device)
+      # Wait for native test to complete.
+      p = self._WatchFifo(device, timeout=30 * self.tool.GetTimeoutScale())
+      p.expect('<<ScopedMainEntryLogger')
+      p.close()
+    finally:
+      self.tool.CleanUpEnvironment()
+    # We need to strip the trailing newline.
+    content = [line.rstrip() for line in p.before.splitlines()]
+    return self._ParseGTestListTests(content)
+
+  #override
+  def SpawnTestProcess(self, device):
+    try:
+      self.tool.SetupEnvironment()
+      self._ClearFifo(device)
+      self._StartActivity(device)
+    finally:
+      self.tool.CleanUpEnvironment()
+    logfile = android_commands.NewLineNormalizer(sys.stdout)
+    return self._WatchFifo(device, timeout=10, logfile=logfile)
+
+  #override
+  def Install(self, device):
+    self.tool.CopyFiles()
+    device.Install(self.suite_path)
diff --git a/build/android/pylib/gtest/test_package_exe.py b/build/android/pylib/gtest/test_package_exe.py
new file mode 100644
index 0000000..5f82aad
--- /dev/null
+++ b/build/android/pylib/gtest/test_package_exe.py
@@ -0,0 +1,151 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines TestPackageExecutable to help run stand-alone executables."""
+
+import logging
+import os
+import sys
+import tempfile
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import pexpect
+from pylib.device import device_errors
+from pylib.gtest.test_package import TestPackage
+
+
+class TestPackageExecutable(TestPackage):
+  """A helper class for running stand-alone executables."""
+
+  _TEST_RUNNER_RET_VAL_FILE = 'gtest_retval'
+
+  def __init__(self, suite_name):
+    """
+    Args:
+      suite_name: Name of the test suite (e.g. base_unittests).
+    """
+    TestPackage.__init__(self, suite_name)
+    self.suite_path = os.path.join(constants.GetOutDirectory(), suite_name)
+    self._symbols_dir = os.path.join(constants.GetOutDirectory(),
+                                     'lib.target')
+
+  #override
+  def GetGTestReturnCode(self, device):
+    ret = None
+    ret_code = 1  # Assume failure if we can't find it
+    ret_code_file = tempfile.NamedTemporaryFile()
+    try:
+      if not device.PullFile(
+          constants.TEST_EXECUTABLE_DIR + '/' +
+          TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE,
+          ret_code_file.name):
+        logging.critical('Unable to pull gtest ret val file %s',
+                         ret_code_file.name)
+        raise ValueError
+      ret_code = file(ret_code_file.name).read()
+      ret = int(ret_code)
+    except ValueError:
+      logging.critical('Error reading gtest ret val file %s [%s]',
+                       ret_code_file.name, ret_code)
+      ret = 1
+    return ret
+
+  @staticmethod
+  def _AddNativeCoverageExports(device):
+    # export GCOV_PREFIX sets the path for native coverage results
+    # export GCOV_PREFIX_STRIP indicates how many initial directory
+    #                          names to strip off the hardwired absolute paths.
+    #                          This value is calculated in buildbot.sh and
+    #                          depends on where the tree is built.
+    # Ex: /usr/local/google/code/chrome will become
+    #     /code/chrome if GCOV_PREFIX_STRIP=3
+    try:
+      depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
+      export_string = ('export GCOV_PREFIX="%s/gcov"\n' %
+                       device.GetExternalStoragePath())
+      export_string += 'export GCOV_PREFIX_STRIP=%s\n' % depth
+      return export_string
+    except KeyError:
+      logging.info('NATIVE_COVERAGE_DEPTH_STRIP is not defined: '
+                   'No native coverage.')
+      return ''
+    except device_errors.CommandFailedError:
+      logging.info('No external storage found: No native coverage.')
+      return ''
+
+  #override
+  def ClearApplicationState(self, device):
+    try:
+      # We don't expect the executable to be running, so we don't attempt
+      # to retry on failure.
+      device.KillAll(self.suite_name, blocking=True, timeout=30, retries=0)
+    except device_errors.CommandFailedError:
+      # KillAll raises an exception if it can't find a process with the given
+      # name. We only care that there is no process with the given name, so
+      # we can safely eat the exception.
+      pass
+
+  #override
+  def CreateCommandLineFileOnDevice(self, device, test_filter, test_arguments):
+    tool_wrapper = self.tool.GetTestWrapper()
+    sh_script_file = tempfile.NamedTemporaryFile()
+    # We need to capture the exit status from the script since adb shell won't
+    # propagate it to us.
+    sh_script_file.write('cd %s\n'
+                         '%s'
+                         '%s %s/%s --gtest_filter=%s %s\n'
+                         'echo $? > %s' %
+                         (constants.TEST_EXECUTABLE_DIR,
+                          self._AddNativeCoverageExports(device),
+                          tool_wrapper, constants.TEST_EXECUTABLE_DIR,
+                          self.suite_name,
+                          test_filter, test_arguments,
+                          TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE))
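+    # The generated script is roughly (hypothetical wrapper/filter):
+    #   cd <TEST_EXECUTABLE_DIR>
+    #   <tool wrapper> <TEST_EXECUTABLE_DIR>/<suite> --gtest_filter=<filter> \
+    #       <extra args>
+    #   echo $? > gtest_retval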
+    sh_script_file.flush()
+    cmd_helper.RunCmd(['chmod', '+x', sh_script_file.name])
+    device.PushChangedFiles(
+        sh_script_file.name,
+        constants.TEST_EXECUTABLE_DIR + '/chrome_test_runner.sh')
+    logging.info('Contents of the test runner script:')
+    for line in open(sh_script_file.name).readlines():
+      logging.info('  ' + line.rstrip())
+
+  #override
+  def GetAllTests(self, device):
+    all_tests = device.RunShellCommand(
+        '%s %s/%s --gtest_list_tests' %
+        (self.tool.GetTestWrapper(),
+         constants.TEST_EXECUTABLE_DIR,
+         self.suite_name))
+    return self._ParseGTestListTests(all_tests)
+
+  #override
+  def SpawnTestProcess(self, device):
+    args = ['adb', '-s', str(device), 'shell', 'sh',
+            constants.TEST_EXECUTABLE_DIR + '/chrome_test_runner.sh']
+    logging.info(args)
+    return pexpect.spawn(args[0], args[1:], logfile=sys.stdout)
+
+  #override
+  def Install(self, device):
+    if self.tool.NeedsDebugInfo():
+      target_name = self.suite_path
+    else:
+      target_name = self.suite_path + '_stripped'
+      if not os.path.isfile(target_name):
+        raise Exception('Did not find %s, build target %s' %
+                        (target_name, self.suite_name + '_stripped'))
+
+      target_mtime = os.stat(target_name).st_mtime
+      source_mtime = os.stat(self.suite_path).st_mtime
+      if target_mtime < source_mtime:
+        raise Exception(
+            'stripped binary (%s, timestamp %d) older than '
+            'source binary (%s, timestamp %d), build target %s' %
+            (target_name, target_mtime, self.suite_path, source_mtime,
+             self.suite_name + '_stripped'))
+
+    test_binary = constants.TEST_EXECUTABLE_DIR + '/' + self.suite_name
+    device.PushChangedFiles(target_name, test_binary)
diff --git a/build/android/pylib/gtest/test_package_test.py b/build/android/pylib/gtest/test_package_test.py
new file mode 100755
index 0000000..c4d552e
--- /dev/null
+++ b/build/android/pylib/gtest/test_package_test.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.gtest import test_package
+
+# pylint: disable=W0212
+
+
+class TestPackageTest(unittest.TestCase):
+
+  def testParseGTestListTests_simple(self):
+    raw_output = [
+      'TestCaseOne.',
+      '  testOne',
+      '  testTwo',
+      'TestCaseTwo.',
+      '  testThree',
+      '  testFour',
+    ]
+    actual = test_package.TestPackage._ParseGTestListTests(raw_output)
+    expected = [
+      'TestCaseOne.testOne',
+      'TestCaseOne.testTwo',
+      'TestCaseTwo.testThree',
+      'TestCaseTwo.testFour',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_typeParameterized_old(self):
+    raw_output = [
+      'TPTestCase/WithTypeParam/0.',
+      '  testOne',
+      '  testTwo',
+    ]
+    actual = test_package.TestPackage._ParseGTestListTests(raw_output)
+    expected = [
+      'TPTestCase/WithTypeParam/0.testOne',
+      'TPTestCase/WithTypeParam/0.testTwo',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_typeParameterized_new(self):
+    raw_output = [
+      'TPTestCase/WithTypeParam/0.  # TypeParam = TypeParam0',
+      '  testOne',
+      '  testTwo',
+    ]
+    actual = test_package.TestPackage._ParseGTestListTests(raw_output)
+    expected = [
+      'TPTestCase/WithTypeParam/0.testOne',
+      'TPTestCase/WithTypeParam/0.testTwo',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_valueParameterized_old(self):
+    raw_output = [
+      'VPTestCase.',
+      '  testWithValueParam/0',
+      '  testWithValueParam/1',
+    ]
+    actual = test_package.TestPackage._ParseGTestListTests(raw_output)
+    expected = [
+      'VPTestCase.testWithValueParam/0',
+      'VPTestCase.testWithValueParam/1',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_valueParameterized_new(self):
+    raw_output = [
+      'VPTestCase.',
+      '  testWithValueParam/0  # GetParam() = 0',
+      '  testWithValueParam/1  # GetParam() = 1',
+    ]
+    actual = test_package.TestPackage._ParseGTestListTests(raw_output)
+    expected = [
+      'VPTestCase.testWithValueParam/0',
+      'VPTestCase.testWithValueParam/1',
+    ]
+    self.assertEqual(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/gtest/test_runner.py b/build/android/pylib/gtest/test_runner.py
new file mode 100644
index 0000000..faffe8f
--- /dev/null
+++ b/build/android/pylib/gtest/test_runner.py
@@ -0,0 +1,198 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+
+from pylib import constants
+from pylib import pexpect
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.device import device_errors
+from pylib.perf import perf_control
+
+
+def _TestSuiteRequiresMockTestServer(suite_name):
+  """Returns True if the test suite requires mock test server."""
+  tests_require_net_test_server = ['unit_tests', 'net_unittests',
+                                   'content_unittests',
+                                   'content_browsertests']
+  return (suite_name in
+          tests_require_net_test_server)
+
+def _TestSuiteRequiresHighPerfMode(suite_name):
+  """Returns True if the test suite requires high performance mode."""
+  return 'perftests' in suite_name
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  def __init__(self, test_options, device, test_package):
+    """Single test suite attached to a single device.
+
+    Args:
+      test_options: A GTestOptions object.
+      device: Device to run the tests.
+      test_package: An instance of TestPackage class.
+    """
+
+    super(TestRunner, self).__init__(device, test_options.tool,
+                                     test_options.push_deps,
+                                     test_options.cleanup_test_files)
+
+    self.test_package = test_package
+    self.test_package.tool = self.tool
+    self._test_arguments = test_options.test_arguments
+
+    timeout = test_options.timeout
+    if timeout == 0:
+      timeout = 60
+    # On a VM (e.g. chromium buildbots), this timeout is way too small.
+    if os.environ.get('BUILDBOT_SLAVENAME'):
+      timeout = timeout * 2
+
+    self._timeout = timeout * self.tool.GetTimeoutScale()
+    if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name):
+      self._perf_controller = perf_control.PerfControl(self.device)
+
+  #override
+  def InstallTestPackage(self):
+    self.test_package.Install(self.device)
+
+  #override
+  def PushDataDeps(self):
+    self.device.WaitUntilFullyBooted(timeout=20)
+    self.tool.CopyFiles()
+    if os.path.exists(constants.ISOLATE_DEPS_DIR):
+      # TODO(frankf): linux_dumper_unittest_helper needs to be in the same dir
+      # as breakpad_unittests exe. Find a better way to do this.
+      if self.test_package.suite_name == 'breakpad_unittests':
+        device_dir = constants.TEST_EXECUTABLE_DIR
+      else:
+        device_dir = self.device.GetExternalStoragePath()
+      for p in os.listdir(constants.ISOLATE_DEPS_DIR):
+        self.device.PushChangedFiles(
+            os.path.join(constants.ISOLATE_DEPS_DIR, p),
+            os.path.join(device_dir, p))
+
+  def _ParseTestOutput(self, p):
+    """Process the test output.
+
+    Args:
+      p: An instance of pexpect spawn class.
+
+    Returns:
+      A TestRunResults object.
+    """
+    results = base_test_result.TestRunResults()
+
+    # Test case statuses.
+    re_run = re.compile('\[ RUN      \] ?(.*)\r\n')
+    re_fail = re.compile('\[  FAILED  \] ?(.*)\r\n')
+    re_ok = re.compile('\[       OK \] ?(.*?) .*\r\n')
+
+    # Test run statuses.
+    re_passed = re.compile('\[  PASSED  \] ?(.*)\r\n')
+    re_runner_fail = re.compile('\[ RUNNER_FAILED \] ?(.*)\r\n')
+    # Signal handlers are installed before starting tests
+    # to output the CRASHED marker when a crash happens.
+    re_crash = re.compile('\[ CRASHED      \](.*)\r\n')
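+    # Typical gtest output matched by the patterns above (abridged):
+    #   [ RUN      ] FooTest.Bar
+    #   [       OK ] FooTest.Bar (12 ms)
+    #   [  PASSED  ] 1 test.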
+
+    log = ''
+    try:
+      while True:
+        full_test_name = None
+        found = p.expect([re_run, re_passed, re_runner_fail],
+                         timeout=self._timeout)
+        if found == 1:  # re_passed
+          break
+        elif found == 2:  # re_runner_fail
+          break
+        else:  # re_run
+          full_test_name = p.match.group(1).replace('\r', '')
+          found = p.expect([re_ok, re_fail, re_crash], timeout=self._timeout)
+          log = p.before.replace('\r', '')
+          if found == 0:  # re_ok
+            if full_test_name == p.match.group(1).replace('\r', ''):
+              results.AddResult(base_test_result.BaseTestResult(
+                  full_test_name, base_test_result.ResultType.PASS,
+                  log=log))
+          elif found == 2:  # re_crash
+            results.AddResult(base_test_result.BaseTestResult(
+                full_test_name, base_test_result.ResultType.CRASH,
+                log=log))
+            break
+          else:  # re_fail
+            results.AddResult(base_test_result.BaseTestResult(
+                full_test_name, base_test_result.ResultType.FAIL, log=log))
+    except pexpect.EOF:
+      logging.error('Test terminated - EOF')
+      # We're here because either the device went offline, or the test harness
+      # crashed without outputting the CRASHED marker (crbug.com/175538).
+      if not self.device.IsOnline():
+        raise device_errors.DeviceUnreachableError(
+            'Device %s went offline.' % str(self.device))
+      if full_test_name:
+        results.AddResult(base_test_result.BaseTestResult(
+            full_test_name, base_test_result.ResultType.CRASH,
+            log=p.before.replace('\r', '')))
+    except pexpect.TIMEOUT:
+      logging.error('Test terminated after %d second timeout.',
+                    self._timeout)
+      if full_test_name:
+        results.AddResult(base_test_result.BaseTestResult(
+            full_test_name, base_test_result.ResultType.TIMEOUT,
+            log=p.before.replace('\r', '')))
+    finally:
+      p.close()
+
+    ret_code = self.test_package.GetGTestReturnCode(self.device)
+    if ret_code:
+      logging.critical(
+          'gtest exit code: %d\npexpect.before: %s\npexpect.after: %s',
+          ret_code, p.before, p.after)
+
+    return results
+
+  #override
+  def RunTest(self, test):
+    test_results = base_test_result.TestRunResults()
+    if not test:
+      return test_results, None
+
+    try:
+      self.test_package.ClearApplicationState(self.device)
+      self.test_package.CreateCommandLineFileOnDevice(
+          self.device, test, self._test_arguments)
+      test_results = self._ParseTestOutput(
+          self.test_package.SpawnTestProcess(self.device))
+    finally:
+      self.CleanupSpawningServerState()
+    # Calculate unknown test results.
+    all_tests = set(test.split(':'))
+    all_tests_ran = set([t.GetName() for t in test_results.GetAll()])
+    unknown_tests = all_tests - all_tests_ran
+    test_results.AddResults(
+        [base_test_result.BaseTestResult(t, base_test_result.ResultType.UNKNOWN)
+         for t in unknown_tests])
+    retry = ':'.join([t.GetName() for t in test_results.GetNotPass()])
+    return test_results, retry
+
+  #override
+  def SetUp(self):
+    """Sets up necessary test enviroment for the test suite."""
+    super(TestRunner, self).SetUp()
+    if _TestSuiteRequiresMockTestServer(self.test_package.suite_name):
+      self.LaunchChromeTestServerSpawner()
+    if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name):
+      self._perf_controller.SetHighPerfMode()
+    self.tool.SetupEnvironment()
+
+  #override
+  def TearDown(self):
+    """Cleans up the test enviroment for the test suite."""
+    if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name):
+      self._perf_controller.SetDefaultPerfMode()
+    self.test_package.ClearApplicationState(self.device)
+    self.tool.CleanUpEnvironment()
+    super(TestRunner, self).TearDown()
diff --git a/build/android/pylib/host_driven/__init__.py b/build/android/pylib/host_driven/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/host_driven/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/host_driven/setup.py b/build/android/pylib/host_driven/setup.py
new file mode 100644
index 0000000..d48f908
--- /dev/null
+++ b/build/android/pylib/host_driven/setup.py
@@ -0,0 +1,202 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Setup for instrumentation host-driven tests."""
+
+import logging
+import os
+import sys
+import types
+
+from pylib.host_driven import test_case
+from pylib.host_driven import test_info_collection
+from pylib.host_driven import test_runner
+
+
+def _GetPythonFiles(root, files):
+  """Returns all files from |files| that end in 'Test.py'.
+
+  Args:
+    root: A directory name with python files.
+    files: A list of file names.
+
+  Returns:
+    A list with all python files that match the testing naming scheme.
+  """
+  return [os.path.join(root, f) for f in files if f.endswith('Test.py')]
+
+
+def _InferImportNameFromFile(python_file):
+  """Given a file, infer the import name for that file.
+
+  Example: /usr/foo/bar/baz.py -> baz.
+
+  Args:
+    python_file: Path to the Python file, ostensibly to import later.
+
+  Returns:
+    The module name for the given file.
+  """
+  return os.path.splitext(os.path.basename(python_file))[0]
+
+
+def _GetTestModules(host_driven_test_root, is_official_build):
+  """Retrieve a list of python modules that match the testing naming scheme.
+
+  Walks the location of host-driven tests, imports them, and provides the list
+  of imported modules to the caller.
+
+  Args:
+    host_driven_test_root: The path to walk, looking for the
+        pythonDrivenTests or host_driven_tests directory.
+    is_official_build: Whether to run only those tests marked 'official'.
+
+  Returns:
+    A list of python modules under |host_driven_test_root| which match the
+    testing naming scheme. Each module should define one or more classes that
+    derive from HostDrivenTestCase.
+  """
+  # By default run all host-driven tests under pythonDrivenTests or
+  # host_driven_tests.
+  host_driven_test_file_list = []
+  for root, _, files in os.walk(host_driven_test_root):
+    if (root.endswith('host_driven_tests') or
+        root.endswith('pythonDrivenTests') or
+        (is_official_build and (root.endswith('pythonDrivenTests/official') or
+                                root.endswith('host_driven_tests/official')))):
+      host_driven_test_file_list += _GetPythonFiles(root, files)
+  host_driven_test_file_list.sort()
+
+  test_module_list = [_GetModuleFromFile(test_file)
+                      for test_file in host_driven_test_file_list]
+  return test_module_list
+
+
+def _GetModuleFromFile(python_file):
+  """Gets the python module associated with a file by importing it.
+
+  Args:
+    python_file: File to import.
+
+  Returns:
+    The module object.
+  """
+  sys.path.append(os.path.dirname(python_file))
+  import_name = _InferImportNameFromFile(python_file)
+  return __import__(import_name)
+
+
+def _GetTestsFromClass(test_case_class, **kwargs):
+  """Returns one test object for each test method in |test_case_class|.
+
+  Test methods are methods on the class which begin with 'test'.
+
+  Args:
+    test_case_class: Class derived from HostDrivenTestCase which contains zero
+        or more test methods.
+    kwargs: Keyword args to pass into the constructor of test cases.
+
+  Returns:
+    A list of test case objects, each initialized for a particular test method.
+  """
+  test_names = [m for m in dir(test_case_class)
+                if _IsTestMethod(m, test_case_class)]
+  return [test_case_class(name, **kwargs) for name in test_names]
+
+
+def _GetTestsFromModule(test_module, **kwargs):
+  """Gets a list of test objects from |test_module|.
+
+  Args:
+    test_module: Module from which to get the set of test methods.
+    kwargs: Keyword args to pass into the constructor of test cases.
+
+  Returns:
+    A list of test case objects each initialized for a particular test method
+    defined in |test_module|.
+  """
+
+  tests = []
+  for name in dir(test_module):
+    attr = getattr(test_module, name)
+    if _IsTestCaseClass(attr):
+      tests.extend(_GetTestsFromClass(attr, **kwargs))
+  return tests
+
+
+def _IsTestCaseClass(test_class):
+  return (type(test_class) is types.TypeType and
+          issubclass(test_class, test_case.HostDrivenTestCase) and
+          test_class is not test_case.HostDrivenTestCase)
+
+
+def _IsTestMethod(attrname, test_case_class):
+  """Checks whether this is a valid test method.
+
+  Args:
+    attrname: The method name.
+    test_case_class: The test case class.
+
+  Returns:
+    True if test_case_class.'attrname' is callable and it starts with 'test';
+    False otherwise.
+  """
+  attr = getattr(test_case_class, attrname)
+  return callable(attr) and attrname.startswith('test')
+
+
+def _GetAllTests(test_root, is_official_build, **kwargs):
+  """Retrieve a list of host-driven tests defined under |test_root|.
+
+  Args:
+    test_root: Path which contains host-driven test files.
+    is_official_build: Whether this is an official build.
+    kwargs: Keyword args to pass into the constructor of test cases.
+
+  Returns:
+    List of test case objects, one for each available test method.
+  """
+  if not test_root:
+    return []
+  all_tests = []
+  test_module_list = _GetTestModules(test_root, is_official_build)
+  for module in test_module_list:
+    all_tests.extend(_GetTestsFromModule(module, **kwargs))
+  return all_tests
+
+
+def InstrumentationSetup(host_driven_test_root, official_build,
+                         instrumentation_options):
+  """Creates a list of host-driven instrumentation tests and a runner factory.
+
+  Args:
+    host_driven_test_root: Directory where the host-driven tests are.
+    official_build: True if this is an official build.
+    instrumentation_options: An InstrumentationOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+
+  test_collection = test_info_collection.TestInfoCollection()
+  all_tests = _GetAllTests(
+      host_driven_test_root, official_build,
+      instrumentation_options=instrumentation_options)
+  test_collection.AddTests(all_tests)
+
+  available_tests = test_collection.GetAvailableTests(
+      instrumentation_options.annotations,
+      instrumentation_options.exclude_annotations,
+      instrumentation_options.test_filter)
+  logging.debug('All available tests: ' + str(
+      [t.tagged_name for t in available_tests]))
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.HostDrivenTestRunner(
+        device, shard_index,
+        instrumentation_options.tool,
+        instrumentation_options.push_deps,
+        instrumentation_options.cleanup_test_files)
+
+  return (TestRunnerFactory, available_tests)
diff --git a/build/android/pylib/host_driven/test_case.py b/build/android/pylib/host_driven/test_case.py
new file mode 100644
index 0000000..fe8c363
--- /dev/null
+++ b/build/android/pylib/host_driven/test_case.py
@@ -0,0 +1,197 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for host-driven test cases.
+
+This test case is intended to serve as the base class for any host-driven
+test cases. It is similar to the Python unittest module in that test cases
+inherit from this class and add methods which will be run as tests.
+
+When a HostDrivenTestCase object is instantiated, its purpose is to run only one
+test method in the derived class. The test runner gives it the name of the test
+method the instance will run. The test runner calls SetUp with the device ID
+which the test method will run against. The test runner runs the test method
+itself, collecting the result, and calls TearDown.
+
+Tests can perform arbitrary Python commands and asserts in test methods. Tests
+that run instrumentation tests can make use of the _RunJavaTestFilters helper
+function to trigger Java tests and convert results into a single host-driven
+test result.
+"""
+
+import logging
+import os
+import time
+
+from pylib import constants
+from pylib import forwarder
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.device import device_utils
+from pylib.instrumentation import test_package
+from pylib.instrumentation import test_result
+from pylib.instrumentation import test_runner
+
+# aka the parent of com.google.android
+BASE_ROOT = 'src' + os.sep
+
+
+class HostDrivenTestCase(object):
+  """Base class for host-driven test cases."""
+
+  _HOST_DRIVEN_TAG = 'HostDriven'
+
+  def __init__(self, test_name, instrumentation_options=None):
+    """Create a test case initialized to run |test_name|.
+
+    Args:
+      test_name: The name of the method to run as the test.
+      instrumentation_options: An InstrumentationOptions object.
+    """
+    class_name = self.__class__.__name__
+    self.adb = None
+    self.cleanup_test_files = False
+    self.device = None
+    self.device_id = ''
+    self.has_forwarded_ports = False
+    self.instrumentation_options = instrumentation_options
+    self.ports_to_forward = []
+    self.push_deps = False
+    self.shard_index = 0
+
+    # Use tagged_name when creating results, so that we can identify host-driven
+    # tests in the overall results.
+    self.test_name = test_name
+    self.qualified_name = '%s.%s' % (class_name, self.test_name)
+    self.tagged_name = '%s_%s' % (self._HOST_DRIVEN_TAG, self.qualified_name)
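+    # For example (illustrative): test_name 'testFoo' in class FooTest yields
+    # qualified_name 'FooTest.testFoo' and tagged_name
+    # 'HostDriven_FooTest.testFoo'.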
+
+  # TODO(bulach): make ports_to_forward not optional and move the Forwarder
+  # mapping here.
+  def SetUp(self, device, shard_index, push_deps,
+            cleanup_test_files, ports_to_forward=None):
+    if not ports_to_forward:
+      ports_to_forward = []
+    self.device_id = device
+    self.shard_index = shard_index
+    self.device = device_utils.DeviceUtils(self.device_id)
+    self.adb = self.device.old_interface
+    self.push_deps = push_deps
+    self.cleanup_test_files = cleanup_test_files
+    if ports_to_forward:
+      self.ports_to_forward = ports_to_forward
+
+  def TearDown(self):
+    pass
+
+  # TODO(craigdh): Remove GetOutDir once references have been removed
+  # downstream.
+  @staticmethod
+  def GetOutDir():
+    return constants.GetOutDirectory()
+
+  def Run(self):
+    logging.info('Running host-driven test: %s', self.tagged_name)
+    # Get the test method on the derived class and execute it.
+    return getattr(self, self.test_name)()
+
+  @staticmethod
+  def __GetHostForwarderLog():
+    return ('-- Begin Full HostForwarder log\n'
+            '%s\n'
+            '--End Full HostForwarder log\n' % forwarder.Forwarder.GetHostLog())
+
+  def __StartForwarder(self):
+    logging.warning('Forwarding ports %s (already forwarded: %s)',
+                    self.ports_to_forward, self.has_forwarded_ports)
+    if self.ports_to_forward and not self.has_forwarded_ports:
+      self.has_forwarded_ports = True
+      tool = valgrind_tools.CreateTool(None, self.device)
+      forwarder.Forwarder.Map([(port, port) for port in self.ports_to_forward],
+                              self.device, tool)
+
+  def __RunJavaTest(self, test, test_pkg, additional_flags=None):
+    """Runs a single Java test in a Java TestRunner.
+
+    Args:
+      test: Fully qualified test name (e.g. foo.bar.TestClass#testMethod).
+      test_pkg: TestPackage object.
+      additional_flags: A list of additional flags to add to the command line.
+
+    Returns:
+      TestRunResults object with a single test result.
+    """
+    # TODO(bulach): move this to SetUp() stage.
+    self.__StartForwarder()
+
+    java_test_runner = test_runner.TestRunner(self.instrumentation_options,
+                                              self.device_id,
+                                              self.shard_index, test_pkg,
+                                              additional_flags=additional_flags)
+    try:
+      java_test_runner.SetUp()
+      return java_test_runner.RunTest(test)[0]
+    finally:
+      java_test_runner.TearDown()
+
+  def _RunJavaTestFilters(self, test_filters, additional_flags=None):
+    """Calls a list of tests and stops at the first test failure.
+
+    This method iterates until either it encounters a non-passing test or it
+    exhausts the list of tests. Then it returns the appropriate overall result.
+
+    Test cases may make use of this method internally to assist in running
+    instrumentation tests. This function relies on instrumentation_options
+    being defined.
+
+    Args:
+      test_filters: A list of Java test filters.
+      additional_flags: A list of additional flags to add to the command line.
+
+    Returns:
+      A TestRunResults object containing an overall result for this set of Java
+      tests. If any Java tests do not pass, this is a fail overall.
+    """
+    test_type = base_test_result.ResultType.PASS
+    log = ''
+
+    test_pkg = test_package.TestPackage(
+        self.instrumentation_options.test_apk_path,
+        self.instrumentation_options.test_apk_jar_path,
+        self.instrumentation_options.test_support_apk_path)
+
+    start_ms = int(time.time() * 1000)
+    done = False
+    for test_filter in test_filters:
+      tests = test_pkg.GetAllMatchingTests(None, None, test_filter)
+      # Filters should always result in >= 1 test.
+      if not tests:
+        raise Exception('Java test filter "%s" returned no tests.'
+                        % test_filter)
+      for test in tests:
+        # We're only running one test at a time, so this TestRunResults object
+        # will hold only one result.
+        java_result = self.__RunJavaTest(test, test_pkg, additional_flags)
+        assert len(java_result.GetAll()) == 1
+        if not java_result.DidRunPass():
+          result = java_result.GetNotPass().pop()
+          log = result.GetLog()
+          log += self.__GetHostForwarderLog()
+          test_type = result.GetType()
+          done = True
+          break
+      if done:
+        break
+    duration_ms = int(time.time() * 1000) - start_ms
+
+    overall_result = base_test_result.TestRunResults()
+    overall_result.AddResult(
+        test_result.InstrumentationTestResult(
+            self.tagged_name, test_type, start_ms, duration_ms, log=log))
+    return overall_result
+
+  def __str__(self):
+    return self.tagged_name
+
+  def __repr__(self):
+    return self.tagged_name
diff --git a/build/android/pylib/host_driven/test_info_collection.py b/build/android/pylib/host_driven/test_info_collection.py
new file mode 100644
index 0000000..c65d417
--- /dev/null
+++ b/build/android/pylib/host_driven/test_info_collection.py
@@ -0,0 +1,144 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing information about the host-driven tests."""
+
+import logging
+import os
+import sys
+
+from pylib.host_driven import tests_annotations
+
+from pylib import constants
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                             'build', 'util', 'lib', 'common'))
+
+import unittest_util # pylint: disable=F0401
+
+class TestInfo(object):
+  """An object containing and representing a test function, plus metadata."""
+
+  def __init__(self, runnable, set_up=None, tear_down=None):
+    # The actual test function/method.
+    self.runnable = runnable
+    # Qualified name of test function/method (e.g. FooModule.testBar).
+    self.qualified_name = self._GetQualifiedName(runnable)
+    # setUp and tearDown functions, if any.
+    self.set_up = set_up
+    self.tear_down = tear_down
+
+  @staticmethod
+  def _GetQualifiedName(runnable):
+    """Helper method to infer a runnable's name and module name.
+
+    Many filters and lists presuppose a format of module_name.testMethodName.
+    To make this easy on everyone, we use some reflection magic to infer this
+    name automatically.
+
+    Args:
+      runnable: the test method to get the qualified name for
+
+    Returns:
+      The qualified name for this runnable, including module and method names.
+    """
+    runnable_name = runnable.__name__
+    # See also tests_annotations.
+    module_name = os.path.splitext(
+        os.path.basename(runnable.__globals__['__file__']))[0]
+    return '.'.join([module_name, runnable_name])
+
+  def __str__(self):
+    return self.qualified_name
+
+
+class TestInfoCollection(object):
+  """A collection of TestInfo objects which facilitates filtering."""
+
+  def __init__(self):
+    """Initialize a new TestInfoCollection."""
+    # Master list of all valid tests.
+    self.all_tests = []
+
+  def AddTests(self, test_infos):
+    """Adds a set of tests to this collection.
+
+    The user may then retrieve them, optionally according to criteria, via
+    GetAvailableTests().
+
+    Args:
+      test_infos: a list of TestInfos representing test functions/methods.
+    """
+    self.all_tests = test_infos
+
+  def GetAvailableTests(self, annotations, exclude_annotations, name_filter):
+    """Get a collection of TestInfos which match the supplied criteria.
+
+    Args:
+      annotations: List of annotations. Each test in the returned list is
+        annotated with at least one of these annotations.
+      exclude_annotations: List of annotations. The tests in the returned
+        list are not annotated with any of these annotations.
+      name_filter: name filter which tests must match, if any
+
+    Returns:
+      List of available tests.
+    """
+    available_tests = self.all_tests
+
+    # Filter out tests which match neither the requested annotations nor the
+    # requested name filter, if any.
+    available_tests = [t for t in available_tests if
+                       self._AnnotationIncludesTest(t, annotations)]
+    if annotations and len(annotations) == 1 and annotations[0] == 'SmallTest':
+      tests_without_annotation = [
+          t for t in self.all_tests if
+          not tests_annotations.AnnotatedFunctions.GetTestAnnotations(
+              t.qualified_name)]
+      test_names = [t.qualified_name for t in tests_without_annotation]
+      logging.warning('The following tests do not contain any annotation. '
+                      'Assuming "SmallTest":\n%s',
+                      '\n'.join(test_names))
+      available_tests += tests_without_annotation
+    if exclude_annotations:
+      excluded_tests = [t for t in available_tests if
+                        self._AnnotationIncludesTest(t, exclude_annotations)]
+      available_tests = list(set(available_tests) - set(excluded_tests))
+
+    if name_filter:
+      available_test_names = unittest_util.FilterTestNames(
+          [t.qualified_name for t in available_tests], name_filter)
+      available_tests = [
+          t for t in available_tests if
+          t.qualified_name in available_test_names]
+    return available_tests
+
+  @staticmethod
+  def _AnnotationIncludesTest(test_info, annotation_filter_list):
+    """Checks whether a given test represented by test_info matches annotation.
+
+    Args:
+      test_info: TestInfo object representing the test
+      annotation_filter_list: list of annotation filters to match (e.g. Smoke)
+
+    Returns:
+      True if no annotation was supplied or the test matches; False otherwise.
+    """
+    if not annotation_filter_list:
+      return True
+    for annotation_filter in annotation_filter_list:
+      filters = annotation_filter.split('=')
+      if len(filters) == 2:
+        key = filters[0]
+        value_list = filters[1].split(',')
+        for value in value_list:
+          if tests_annotations.AnnotatedFunctions.IsAnnotated(
+              key + ':' + value, test_info.qualified_name):
+            return True
+      elif tests_annotations.AnnotatedFunctions.IsAnnotated(
+          annotation_filter, test_info.qualified_name):
+        return True
+    return False
+
diff --git a/build/android/pylib/host_driven/test_runner.py b/build/android/pylib/host_driven/test_runner.py
new file mode 100644
index 0000000..b61fa40
--- /dev/null
+++ b/build/android/pylib/host_driven/test_runner.py
@@ -0,0 +1,133 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs host-driven tests on a particular device."""
+
+import logging
+import sys
+import time
+import traceback
+
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.host_driven import test_case
+from pylib.instrumentation import test_result
+
+
+class HostDrivenExceptionTestResult(test_result.InstrumentationTestResult):
+  """Test result corresponding to a python exception in a host-driven test."""
+
+  def __init__(self, test_name, start_date_ms, exc_info):
+    """Constructs a HostDrivenExceptionTestResult object.
+
+    Args:
+      test_name: name of the test which raised an exception.
+      start_date_ms: the starting time for the test.
+      exc_info: exception info, ostensibly from sys.exc_info().
+    """
+    exc_type, exc_value, exc_traceback = exc_info
+    trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
+                                                    exc_traceback))
+    log_msg = 'Exception:\n' + trace_info
+    duration_ms = int(time.time() * 1000) - start_date_ms
+
+    super(HostDrivenExceptionTestResult, self).__init__(
+        test_name,
+        base_test_result.ResultType.FAIL,
+        start_date_ms,
+        duration_ms,
+        log=str(exc_type) + ' ' + log_msg)
+
+
+class HostDrivenTestRunner(base_test_runner.BaseTestRunner):
+  """Orchestrates running a set of host-driven tests.
+
+  Any Python exceptions in the tests are caught and translated into a failed
+  result, rather than being re-raised on the main thread.
+  """
+
+  #override
+  def __init__(self, device, shard_index, tool, push_deps,
+               cleanup_test_files):
+    """Creates a new HostDrivenTestRunner.
+
+    Args:
+      device: Attached android device.
+      shard_index: Shard index.
+      tool: Name of the Valgrind tool.
+      push_deps: If True, push all dependencies to the device.
+      cleanup_test_files: Whether or not to cleanup test files on device.
+    """
+
+    super(HostDrivenTestRunner, self).__init__(device, tool, push_deps,
+                                               cleanup_test_files)
+
+    # The shard index affords the ability to create unique port numbers (e.g.
+    # DEFAULT_PORT + shard_index) if the test so wishes.
+    self.shard_index = shard_index
+
+  #override
+  def RunTest(self, test):
+    """Sets up and runs a test case.
+
+    Args:
+      test: An object which is ostensibly a subclass of HostDrivenTestCase.
+
+    Returns:
+      A TestRunResults object which contains the result produced by the test
+      and, in the case of a failure, the test that should be retried.
+    """
+
+    assert isinstance(test, test_case.HostDrivenTestCase)
+
+    start_date_ms = int(time.time() * 1000)
+    exception_raised = False
+
+    try:
+      test.SetUp(str(self.device), self.shard_index,
+                 self._push_deps, self._cleanup_test_files)
+    except Exception:
+      logging.exception(
+          'Caught exception while trying to run SetUp() for test: ' +
+          test.tagged_name)
+      # Tests whose SetUp() method has failed are likely to fail, or at least
+      # yield invalid results.
+      exc_info = sys.exc_info()
+      results = base_test_result.TestRunResults()
+      results.AddResult(HostDrivenExceptionTestResult(
+          test.tagged_name, start_date_ms, exc_info))
+      return results, test
+
+    try:
+      results = test.Run()
+    except Exception:
+      # Setting this lets TearDown() avoid stomping on our stack trace from
+      # Run() should TearDown() also raise an exception.
+      exception_raised = True
+      logging.exception('Caught exception while trying to run test: ' +
+                        test.tagged_name)
+      exc_info = sys.exc_info()
+      results = base_test_result.TestRunResults()
+      results.AddResult(HostDrivenExceptionTestResult(
+          test.tagged_name, start_date_ms, exc_info))
+
+    try:
+      test.TearDown()
+    except Exception:
+      logging.exception(
+          'Caught exception while trying to run TearDown() for test: ' +
+          test.tagged_name)
+      if not exception_raised:
+        # Don't stomp the error during the test if TearDown blows up. This is a
+        # trade-off: if the test fails, this will mask any problem with TearDown
+        # until the test is fixed.
+        exc_info = sys.exc_info()
+        results = base_test_result.TestRunResults()
+        results.AddResult(HostDrivenExceptionTestResult(
+            test.tagged_name, start_date_ms, exc_info))
+
+    if not results.DidRunPass():
+      return results, test
+    else:
+      return results, None
diff --git a/build/android/pylib/host_driven/test_server.py b/build/android/pylib/host_driven/test_server.py
new file mode 100644
index 0000000..d6203cc
--- /dev/null
+++ b/build/android/pylib/host_driven/test_server.py
@@ -0,0 +1,120 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Host driven test server controller.
+
+This class controls the startup and shutdown of a python driven test server that
+runs in a separate process.
+
+The server starts up automatically when the object is created.
+
+After it starts up, it is possible to retreive the hostname it started on
+through accessing the member field |host| and the port name through |port|.
+
+For shutting down the server, call TearDown().
+"""
+
+import logging
+import os
+import os.path
+import subprocess
+import time
+import urllib2
+
+from pylib import constants
+
+# NOTE: when adding or modifying these lines, omit any leading slashes!
+# Otherwise os.path.join() will (correctly) treat them as absolute paths
+# instead of relative paths, discarding the preceding path components.
+_PYTHONPATH_DIRS = [
+    'net/tools/testserver/',
+    'third_party/',
+    'third_party/pyftpdlib/src/',
+    'third_party/pywebsocket/src',
+    'third_party/tlslite/',
+]
+
+# Python files in these directories are generated as part of the build.
+# These dirs are located in the out/(Debug|Release) directory.
+# The correct path is determined based on the build type. E.g. out/Debug for
+# debug builds and out/Release for release builds.
+_GENERATED_PYTHONPATH_DIRS = [
+    'pyproto/sync/protocol/',
+    'pyproto/'
+]
+
+_TEST_SERVER_HOST = '127.0.0.1'
+# Paths for supported test server executables.
+TEST_NET_SERVER_PATH = 'net/tools/testserver/testserver.py'
+TEST_SYNC_SERVER_PATH = 'sync/tools/testserver/sync_testserver.py'
+# Parameters to check that the server is up and running.
+TEST_SERVER_CHECK_PARAMS = {
+  TEST_NET_SERVER_PATH: {
+      'url_path': '/',
+      'response': 'Default response given for path'
+  },
+  TEST_SYNC_SERVER_PATH: {
+      'url_path': 'chromiumsync/time',
+      'response': '0123456789'
+  },
+}
+
+class TestServer(object):
+  """Sets up a host driven test server on the host machine.
+
+  For shutting down the server, call TearDown().
+  """
+
+  def __init__(self, shard_index, test_server_port, test_server_path):
+    """Sets up a Python driven test server on the host machine.
+
+    Args:
+      shard_index: Index of the current shard.
+      test_server_port: Base port to run the test server on. The shard index
+                        is added to this port; retrieve the actual port
+                        through the member variable |port|.
+      test_server_path: The path (relative to the root src dir) of the server.
+    """
+    self.host = _TEST_SERVER_HOST
+    self.port = test_server_port + shard_index
+
+    src_dir = constants.DIR_SOURCE_ROOT
+    # Make dirs into a list of absolute paths.
+    abs_dirs = [os.path.join(src_dir, d) for d in _PYTHONPATH_DIRS]
+    # Add the generated python files to the path
+    abs_dirs.extend([os.path.join(src_dir, constants.GetOutDirectory(), d)
+                     for d in _GENERATED_PYTHONPATH_DIRS])
+    current_python_path = os.environ.get('PYTHONPATH')
+    extra_python_path = ':'.join(abs_dirs)
+    if current_python_path:
+      python_path = current_python_path + ':' + extra_python_path
+    else:
+      python_path = extra_python_path
+
+    # NOTE: A separate python process is used to simplify getting the right
+    # system path for finding includes.
+    cmd = ['python', os.path.join(src_dir, test_server_path),
+           '--log-to-console',
+           ('--host=%s' % self.host),
+           ('--port=%d' % self.port)]
+    self._test_server_process = subprocess.Popen(
+          cmd, env={'PYTHONPATH': python_path})
+    test_url = 'http://%s:%d/%s' % (self.host, self.port,
+        TEST_SERVER_CHECK_PARAMS[test_server_path]['url_path'])
+    expected_response = TEST_SERVER_CHECK_PARAMS[test_server_path]['response']
+    retries = 0
+    while retries < 5:
+      try:
+        d = urllib2.urlopen(test_url).read()
+        logging.info('URL %s GOT: %s', test_url, d)
+        if d.startswith(expected_response):
+          break
+      except Exception as e:
+        logging.info('URL %s GOT: %s', test_url, e)
+      time.sleep(retries * 0.1)
+      retries += 1
+
+  def TearDown(self):
+    self._test_server_process.kill()
+    self._test_server_process.wait()
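+
+
+# Minimal usage sketch (illustrative values, not part of this module):
+#
+#   server = TestServer(shard_index=0, test_server_port=8001,
+#                       test_server_path=TEST_NET_SERVER_PATH)
+#   try:
+#     url = 'http://%s:%d/' % (server.host, server.port)
+#     # ... drive tests against |url| ...
+#   finally:
+#     server.TearDown()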
diff --git a/build/android/pylib/host_driven/tests_annotations.py b/build/android/pylib/host_driven/tests_annotations.py
new file mode 100644
index 0000000..5331140
--- /dev/null
+++ b/build/android/pylib/host_driven/tests_annotations.py
@@ -0,0 +1,94 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Annotations for host-driven tests."""
+# pylint: disable=W0212
+
+import os
+
+
+class AnnotatedFunctions(object):
+  """A container for annotated methods."""
+  _ANNOTATED = {}
+
+  @staticmethod
+  def _AddFunction(annotation, function):
+    """Adds an annotated function to our container.
+
+    Args:
+      annotation: the annotation string.
+      function: the function.
+    Returns:
+      The function passed in.
+    """
+    module_name = os.path.splitext(os.path.basename(
+        function.__globals__['__file__']))[0]
+    qualified_function_name = '.'.join([module_name, function.func_name])
+    function_list = AnnotatedFunctions._ANNOTATED.get(annotation, [])
+    function_list.append(qualified_function_name)
+    AnnotatedFunctions._ANNOTATED[annotation] = function_list
+    return function
+
+  @staticmethod
+  def IsAnnotated(annotation, qualified_function_name):
+    """True if function name (module.function) contains the annotation.
+
+    Args:
+      annotation: the annotation string.
+      qualified_function_name: the qualified function name.
+    Returns:
+      True if module.function contains the annotation.
+    """
+    return qualified_function_name in AnnotatedFunctions._ANNOTATED.get(
+        annotation, [])
+
+  @staticmethod
+  def GetTestAnnotations(qualified_function_name):
+    """Returns a list containing all annotations for the given function.
+
+    Args:
+      qualified_function_name: the qualified function name.
+    Returns:
+      List of all annotations for this function.
+    """
+    return [annotation
+            for annotation, tests in AnnotatedFunctions._ANNOTATED.iteritems()
+            if qualified_function_name in tests]
+
+
+# The following functions are annotations used for the host-driven tests.
+def Smoke(function):
+  return AnnotatedFunctions._AddFunction('Smoke', function)
+
+
+def SmallTest(function):
+  return AnnotatedFunctions._AddFunction('SmallTest', function)
+
+
+def MediumTest(function):
+  return AnnotatedFunctions._AddFunction('MediumTest', function)
+
+
+def LargeTest(function):
+  return AnnotatedFunctions._AddFunction('LargeTest', function)
+
+
+def EnormousTest(function):
+  return AnnotatedFunctions._AddFunction('EnormousTest', function)
+
+
+def FlakyTest(function):
+  return AnnotatedFunctions._AddFunction('FlakyTest', function)
+
+
+def DisabledTest(function):
+  return AnnotatedFunctions._AddFunction('DisabledTest', function)
+
+
+def Feature(feature_list):
+  def _AddFeatures(function):
+    for feature in feature_list:
+      AnnotatedFunctions._AddFunction('Feature:%s' % feature, function)
+    return AnnotatedFunctions._AddFunction('Feature', function)
+  return _AddFeatures
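+
+
+# Illustrative usage (hypothetical test module MyTest.py, not part of this
+# file):
+#
+#   @tests_annotations.Smoke
+#   def testFoo(self):
+#     ...
+#
+# _AddFunction keys annotations by '<module basename>.<function name>', so
+# AnnotatedFunctions.IsAnnotated('Smoke', 'MyTest.testFoo') would then
+# return True.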
diff --git a/build/android/pylib/instrumentation/__init__.py b/build/android/pylib/instrumentation/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/instrumentation/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/instrumentation/json_perf_parser.py b/build/android/pylib/instrumentation/json_perf_parser.py
new file mode 100644
index 0000000..ffdfbe7
--- /dev/null
+++ b/build/android/pylib/instrumentation/json_perf_parser.py
@@ -0,0 +1,161 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""A helper module for parsing JSON objects from perf tests results."""
+
+import json
+
+
+def GetAverageRunInfo(json_data, name):
+  """Summarizes TraceEvent JSON data for performance metrics.
+
+  Example JSON Inputs (More tags can be added but these are required):
+  Measuring Duration:
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "S",
+      "name": "TestTrace"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "F",
+      "name": "TestTrace"
+    },
+    ...
+  ]
+
+  Measuring Call Frequency (FPS):
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    ...
+  ]
+
+  Args:
+    json_data: A list of dictionaries, each representing a JSON object.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    A dictionary of result data with the following tags:
+      min: The minimum value tracked.
+      max: The maximum value tracked.
+      average: The average of all the values tracked.
+      count: The number of times the category/name pair was tracked.
+      type: The type of tracking ('Instant' for instant tags and 'Span' for
+            begin/end tags).
+      category: The category filter applied (always 'Java').
+      name: The passed in name filter.
+      data_points: A list of all of the times used to generate this data.
+      units: The units for the values being reported.
+
+  Raises:
+    Exception: if entry contains invalid data.
+  """
+
+  def EntryFilter(entry):
+    return entry['cat'] == 'Java' and entry['name'] == name
+  filtered_entries = filter(EntryFilter, json_data)
+
+  result = {}
+
+  result['min'] = -1
+  result['max'] = -1
+  result['average'] = 0
+  result['count'] = 0
+  result['type'] = 'Unknown'
+  result['category'] = 'Java'
+  result['name'] = name
+  result['data_points'] = []
+  result['units'] = ''
+
+  total_sum = 0
+
+  last_val = 0
+  val_type = None
+  for entry in filtered_entries:
+    if not val_type:
+      if 'mem' in entry:
+        val_type = 'mem'
+
+        def GetVal(entry):
+          return entry['mem']
+
+        result['units'] = 'kb'
+      elif 'ts' in entry:
+        val_type = 'ts'
+
+        def GetVal(entry):
+          return float(entry['ts']) / 1000.0
+
+        result['units'] = 'ms'
+      else:
+        raise Exception('Entry did not contain valid value info: %s' % entry)
+
+    if val_type not in entry:
+      raise Exception('Entry did not contain expected value type "%s" '
+                      'information: %s' % (val_type, entry))
+    val = GetVal(entry)
+    if (entry['ph'] == 'S' and
+        (result['type'] == 'Unknown' or result['type'] == 'Span')):
+      result['type'] = 'Span'
+      last_val = val
+    elif ((entry['ph'] == 'F' and result['type'] == 'Span') or
+          (entry['ph'] == 'I' and (result['type'] == 'Unknown' or
+                                   result['type'] == 'Instant'))):
+      if last_val > 0:
+        delta = val - last_val
+        if result['min'] == -1 or result['min'] > delta:
+          result['min'] = delta
+        if result['max'] == -1 or result['max'] < delta:
+          result['max'] = delta
+        total_sum += delta
+        result['count'] += 1
+        result['data_points'].append(delta)
+      if entry['ph'] == 'I':
+        result['type'] = 'Instant'
+        last_val = val
+  if result['count'] > 0:
+    result['average'] = total_sum / result['count']
+
+  return result
+
+
+def GetAverageRunInfoFromJSONString(json_string, name):
+  """Returns the results from GetAverageRunInfo using a JSON string.
+
+  Args:
+    json_string: The string containing JSON.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  return GetAverageRunInfo(json.loads(json_string), name)
+
+
+def GetAverageRunInfoFromFile(json_file, name):
+  """Returns the results from GetAverageRunInfo using a JSON file.
+
+  Args:
+    json_file: The path to a JSON file.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  with open(json_file, 'r') as f:
+    data = f.read()
+    perf = json.loads(data)
+
+  return GetAverageRunInfo(perf, name)
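+
+
+# Minimal usage sketch (the trace file path and event name are hypothetical):
+#
+#   info = GetAverageRunInfoFromFile('/tmp/trace.json', 'TestTrace')
+#   print '%s: average %f %s over %d samples' % (
+#       info['name'], info['average'], info['units'], info['count'])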
diff --git a/build/android/pylib/instrumentation/setup.py b/build/android/pylib/instrumentation/setup.py
new file mode 100644
index 0000000..57286e2
--- /dev/null
+++ b/build/android/pylib/instrumentation/setup.py
@@ -0,0 +1,41 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for instrumentation tests."""
+
+import logging
+import os
+
+from pylib.instrumentation import test_package
+from pylib.instrumentation import test_runner
+
+
+def Setup(test_options):
+  """Create and return the test runner factory and tests.
+
+  Args:
+    test_options: An InstrumentationOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  if (test_options.coverage_dir and not
+      os.path.exists(test_options.coverage_dir)):
+    os.makedirs(test_options.coverage_dir)
+
+  test_pkg = test_package.TestPackage(test_options.test_apk_path,
+                                      test_options.test_apk_jar_path,
+                                      test_options.test_support_apk_path)
+  tests = test_pkg.GetAllMatchingTests(
+      test_options.annotations,
+      test_options.exclude_annotations,
+      test_options.test_filter)
+  if not tests:
+    logging.error('No instrumentation tests to run with current args.')
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(test_options, device, shard_index,
+                                  test_pkg)
+
+  return (TestRunnerFactory, tests)
diff --git a/build/android/pylib/instrumentation/test_jar.py b/build/android/pylib/instrumentation/test_jar.py
new file mode 100644
index 0000000..964dca7
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_jar.py
@@ -0,0 +1,307 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper class for instrumenation test jar."""
+# pylint: disable=W0702
+
+import logging
+import os
+import pickle
+import re
+import sys
+import tempfile
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.device import device_utils
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                             'build', 'util', 'lib', 'common'))
+
+import unittest_util # pylint: disable=F0401
+
+# If you change the cached output of proguard, increment this number.
+PICKLE_FORMAT_VERSION = 2
+
+
+class TestJar(object):
+  _ANNOTATIONS = frozenset(
+      ['Smoke', 'SmallTest', 'MediumTest', 'LargeTest', 'EnormousTest',
+       'FlakyTest', 'DisabledTest', 'Manual', 'PerfTest', 'HostDrivenTest',
+       'IntegrationTest'])
+  _DEFAULT_ANNOTATION = 'SmallTest'
+  _PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
+  _PROGUARD_SUPERCLASS_RE = re.compile(r'\s*?  Superclass:\s*([\S]+)$')
+  _PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
+  _PROGUARD_ANNOTATION_RE = re.compile(r'\s*?- Annotation \[L(\S*);\]:$')
+  _PROGUARD_ANNOTATION_CONST_RE = (
+      re.compile(r'\s*?- Constant element value.*$'))
+  _PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'\s*?- \S+? \[(.*)\]$')
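+
+  # These patterns are written against proguard's -dump output, which looks
+  # roughly like the following (illustrative, heavily abbreviated):
+  #   - Program class: org/chromium/foo/FooTest
+  #       Superclass:  android/test/InstrumentationTestCase
+  #     - Method:      testBar()V
+  #     - Annotation [Landroid/test/suitebuilder/annotation/SmallTest;]: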
+
+  def __init__(self, jar_path):
+    if not os.path.exists(jar_path):
+      raise Exception('%s not found, please build it' % jar_path)
+
+    self._PROGUARD_PATH = os.path.join(constants.ANDROID_SDK_ROOT,
+                                       'tools/proguard/lib/proguard.jar')
+    if not os.path.exists(self._PROGUARD_PATH):
+      self._PROGUARD_PATH = os.path.join(os.environ['ANDROID_BUILD_TOP'],
+                                         'external/proguard/lib/proguard.jar')
+    self._jar_path = jar_path
+    self._pickled_proguard_name = self._jar_path + '-proguard.pickle'
+    self._test_methods = {}
+    if not self._GetCachedProguardData():
+      self._GetProguardData()
+
+  def _GetCachedProguardData(self):
+    if (os.path.exists(self._pickled_proguard_name) and
+        (os.path.getmtime(self._pickled_proguard_name) >
+         os.path.getmtime(self._jar_path))):
+      logging.info('Loading cached proguard output from %s',
+                   self._pickled_proguard_name)
+      try:
+        with open(self._pickled_proguard_name, 'r') as r:
+          d = pickle.loads(r.read())
+        if d['VERSION'] == PICKLE_FORMAT_VERSION:
+          self._test_methods = d['TEST_METHODS']
+          return True
+      except:
+        logging.warning('Unable to load cached proguard output; '
+                        'reprocessing the jar.')
+    return False
+
+  def _GetProguardData(self):
+    logging.info('Retrieving test methods via proguard.')
+
+    with tempfile.NamedTemporaryFile() as proguard_output:
+      cmd_helper.RunCmd(['java', '-jar',
+                         self._PROGUARD_PATH,
+                         '-injars', self._jar_path,
+                         '-dontshrink',
+                         '-dontoptimize',
+                         '-dontobfuscate',
+                         '-dontpreverify',
+                         '-dump', proguard_output.name])
+
+      clazzez = {}
+
+      annotation = None
+      annotation_has_value = False
+      clazz = None
+      method = None
+
+      for line in proguard_output:
+        if len(line) == 0:
+          annotation = None
+          annotation_has_value = False
+          method = None
+          continue
+
+        m = self._PROGUARD_CLASS_RE.match(line)
+        if m:
+          clazz = m.group(1).replace('/', '.')
+          clazzez[clazz] = {
+            'methods': {},
+            'annotations': {}
+          }
+          annotation = None
+          annotation_has_value = False
+          method = None
+          continue
+
+        if not clazz:
+          continue
+
+        m = self._PROGUARD_SUPERCLASS_RE.match(line)
+        if m:
+          clazzez[clazz]['superclass'] = m.group(1).replace('/', '.')
+          continue
+
+        if clazz.endswith('Test'):
+          m = self._PROGUARD_METHOD_RE.match(line)
+          if m:
+            method = m.group(1)
+            clazzez[clazz]['methods'][method] = {'annotations': {}}
+            annotation = None
+            annotation_has_value = False
+            continue
+
+        m = self._PROGUARD_ANNOTATION_RE.match(line)
+        if m:
+          # Ignore the annotation package.
+          annotation = m.group(1).split('/')[-1]
+          if method:
+            clazzez[clazz]['methods'][method]['annotations'][annotation] = None
+          else:
+            clazzez[clazz]['annotations'][annotation] = None
+          continue
+
+        if annotation:
+          if not annotation_has_value:
+            m = self._PROGUARD_ANNOTATION_CONST_RE.match(line)
+            annotation_has_value = bool(m)
+          else:
+            m = self._PROGUARD_ANNOTATION_VALUE_RE.match(line)
+            if m:
+              if method:
+                clazzez[clazz]['methods'][method]['annotations'][annotation] = (
+                    m.group(1))
+              else:
+                clazzez[clazz]['annotations'][annotation] = m.group(1)
+            annotation_has_value = False
+
+    test_clazzez = ((n, i) for n, i in clazzez.items() if n.endswith('Test'))
+    for clazz_name, clazz_info in test_clazzez:
+      logging.info('Processing %s', clazz_name)
+      c = clazz_name
+      min_sdk_level = None
+
+      while c in clazzez:
+        c_info = clazzez[c]
+        if not min_sdk_level:
+          min_sdk_level = c_info['annotations'].get('MinAndroidSdkLevel', None)
+        c = c_info.get('superclass', None)
+
+      for method_name, method_info in clazz_info['methods'].items():
+        if method_name.startswith('test'):
+          qualified_method = '%s#%s' % (clazz_name, method_name)
+          method_annotations = []
+          for annotation_name, annotation_value in (
+              method_info['annotations'].items()):
+            method_annotations.append(annotation_name)
+            if annotation_value:
+              method_annotations.append(
+                  annotation_name + ':' + annotation_value)
+          self._test_methods[qualified_method] = {
+            'annotations': method_annotations
+          }
+          if min_sdk_level is not None:
+            self._test_methods[qualified_method]['min_sdk_level'] = (
+                int(min_sdk_level))
+
+    logging.info('Storing proguard output to %s', self._pickled_proguard_name)
+    d = {'VERSION': PICKLE_FORMAT_VERSION,
+         'TEST_METHODS': self._test_methods}
+    with open(self._pickled_proguard_name, 'w') as f:
+      f.write(pickle.dumps(d))
+
+
+  @staticmethod
+  def _IsTestMethod(test):
+    class_name, method = test.split('#')
+    return class_name.endswith('Test') and method.startswith('test')
+
+  def GetTestAnnotations(self, test):
+    """Returns a list of all annotations for the given |test|. May be empty."""
+    if not self._IsTestMethod(test) or test not in self._test_methods:
+      return []
+    return self._test_methods[test]['annotations']
+
+  @staticmethod
+  def _AnnotationsMatchFilters(annotation_filter_list, annotations):
+    """Checks if annotations match any of the filters."""
+    if not annotation_filter_list:
+      return True
+    for annotation_filter in annotation_filter_list:
+      filters = annotation_filter.split('=')
+      if len(filters) == 2:
+        key = filters[0]
+        value_list = filters[1].split(',')
+        for value in value_list:
+          if key + ':' + value in annotations:
+            return True
+      elif annotation_filter in annotations:
+        return True
+    return False
+
+  def GetAnnotatedTests(self, annotation_filter_list):
+    """Returns a list of all tests that match the given annotation filters."""
+    return [test for test, attr in self.GetTestMethods().iteritems()
+            if self._IsTestMethod(test) and self._AnnotationsMatchFilters(
+                annotation_filter_list, attr['annotations'])]
+
+  def GetTestMethods(self):
+    """Returns a dict of all test methods and relevant attributes.
+
+    Test methods are retrieved as Class#testMethod.
+    """
+    return self._test_methods
+
+  def _GetTestsMissingAnnotation(self):
+    """Get a list of test methods with no known annotations."""
+    tests_missing_annotations = []
+    for test_method in self.GetTestMethods().iterkeys():
+      annotations_ = frozenset(self.GetTestAnnotations(test_method))
+      if (annotations_.isdisjoint(self._ANNOTATIONS) and
+          not self.IsHostDrivenTest(test_method)):
+        tests_missing_annotations.append(test_method)
+    return sorted(tests_missing_annotations)
+
+  def _IsTestValidForSdkRange(self, test_name, attached_min_sdk_level):
+    required_min_sdk_level = self.GetTestMethods()[test_name].get(
+        'min_sdk_level', None)
+    return (required_min_sdk_level is None or
+            attached_min_sdk_level >= required_min_sdk_level)
+
+  def GetAllMatchingTests(self, annotation_filter_list,
+                          exclude_annotation_list, test_filter):
+    """Get a list of tests matching any of the annotations and the filter.
+
+    Args:
+      annotation_filter_list: List of test annotations. A test must have at
+        least one of these annotations. A test without any annotations is
+        considered to be SmallTest.
+      exclude_annotation_list: List of test annotations. A test must not have
+        any of these annotations.
+      test_filter: Filter used for partial matching on the test method names.
+
+    Returns:
+      List of all matching tests.
+    """
+    if annotation_filter_list:
+      available_tests = self.GetAnnotatedTests(annotation_filter_list)
+      # Include un-annotated tests in SmallTest.
+      if self._DEFAULT_ANNOTATION in annotation_filter_list:
+        for test in self._GetTestsMissingAnnotation():
+          logging.warning(
+              '%s has no annotations. Assuming "%s".', test,
+              self._DEFAULT_ANNOTATION)
+          available_tests.append(test)
+      if exclude_annotation_list:
+        excluded_tests = self.GetAnnotatedTests(exclude_annotation_list)
+        available_tests = list(set(available_tests) - set(excluded_tests))
+    else:
+      available_tests = [m for m in self.GetTestMethods()
+                         if not self.IsHostDrivenTest(m)]
+
+    tests = []
+    if test_filter:
+      # |available_tests| are in adb instrument format: package.path.class#test.
+
+      # Maps a 'class.test' name to each 'package.path.class#test' name.
+      sanitized_test_names = dict([
+          (t.split('.')[-1].replace('#', '.'), t) for t in available_tests])
+      # Filters 'class.test' names and populates |tests| with the corresponding
+      # 'package.path.class#test' names.
+      tests = [
+          sanitized_test_names[t] for t in unittest_util.FilterTestNames(
+              sanitized_test_names.keys(), test_filter.replace('#', '.'))]
+    else:
+      tests = available_tests
+
+    # Filter out any tests with SDK level requirements that don't match the set
+    # of attached devices.
+    sdk_versions = [
+        int(v) for v in
+        device_utils.DeviceUtils.parallel().GetProp(
+            'ro.build.version.sdk').pGet(None)]
+    tests = filter(
+        lambda t: self._IsTestValidForSdkRange(t, min(sdk_versions)),
+        tests)
+
+    return tests
+
+  @staticmethod
+  def IsHostDrivenTest(test):
+    return 'pythonDrivenTests' in test
diff --git a/build/android/pylib/instrumentation/test_options.py b/build/android/pylib/instrumentation/test_options.py
new file mode 100644
index 0000000..23fd82c
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_options.py
@@ -0,0 +1,26 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the InstrumentationOptions named tuple."""
+
+import collections
+
+InstrumentationOptions = collections.namedtuple('InstrumentationOptions', [
+    'tool',
+    'cleanup_test_files',
+    'push_deps',
+    'annotations',
+    'exclude_annotations',
+    'test_filter',
+    'test_data',
+    'save_perf_json',
+    'screenshot_failures',
+    'wait_for_debugger',
+    'coverage_dir',
+    'test_apk',
+    'test_apk_path',
+    'test_apk_jar_path',
+    'test_runner',
+    'test_support_apk_path',
+    'device_flags'])
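+
+# An illustrative construction (all values hypothetical; in practice this
+# tuple is filled in from parsed command-line options):
+#
+#   options = InstrumentationOptions(
+#       tool='asan', cleanup_test_files=True, push_deps=True,
+#       annotations=['SmallTest'], exclude_annotations=['FlakyTest'],
+#       test_filter='FooTest.*', test_data=[], save_perf_json=False,
+#       screenshot_failures=False, wait_for_debugger=False, coverage_dir=None,
+#       test_apk='FooTest', test_apk_path='out/Debug/apks/FooTest.apk',
+#       test_apk_jar_path='out/Debug/lib.java/FooTest.jar', test_runner=None,
+#       test_support_apk_path=None, device_flags=None)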
diff --git a/build/android/pylib/instrumentation/test_package.py b/build/android/pylib/instrumentation/test_package.py
new file mode 100644
index 0000000..f32556f
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_package.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class representing instrumentation test apk and jar."""
+
+import os
+
+from pylib.instrumentation import test_jar
+from pylib.utils import apk_helper
+
+
+class TestPackage(test_jar.TestJar):
+  def __init__(self, apk_path, jar_path, test_support_apk_path):
+    test_jar.TestJar.__init__(self, jar_path)
+
+    if not os.path.exists(apk_path):
+      raise Exception('%s not found, please build it' % apk_path)
+    self._apk_path = apk_path
+    self._apk_name = os.path.splitext(os.path.basename(apk_path))[0]
+    self._package_name = apk_helper.GetPackageName(self._apk_path)
+    self._test_support_apk_path = test_support_apk_path
+
+  def GetApkPath(self):
+    """Returns the absolute path to the APK."""
+    return self._apk_path
+
+  def GetApkName(self):
+    """Returns the name of the apk without the suffix."""
+    return self._apk_name
+
+  def GetPackageName(self):
+    """Returns the package name of this APK."""
+    return self._package_name
+
+  # Override.
+  def Install(self, device):
+    device.Install(self.GetApkPath())
+    if (self._test_support_apk_path and
+        os.path.exists(self._test_support_apk_path)):
+      device.Install(self._test_support_apk_path)
+
diff --git a/build/android/pylib/instrumentation/test_result.py b/build/android/pylib/instrumentation/test_result.py
new file mode 100644
index 0000000..a0eea65
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_result.py
@@ -0,0 +1,34 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import base_test_result
+
+
+class InstrumentationTestResult(base_test_result.BaseTestResult):
+  """Result information for a single instrumentation test."""
+
+  def __init__(self, full_name, test_type, start_date, dur, log=''):
+    """Construct an InstrumentationTestResult object.
+
+    Args:
+      full_name: Full name of the test.
+      test_type: Type of the test result as defined in ResultType.
+      start_date: Date in milliseconds when the test began running.
+      dur: Duration of the test run in milliseconds.
+      log: A string listing any errors.
+    """
+    super(InstrumentationTestResult, self).__init__(full_name, test_type, log)
+    name_pieces = full_name.rsplit('#')
+    if len(name_pieces) > 1:
+      self._test_name = name_pieces[1]
+      self._class_name = name_pieces[0]
+    else:
+      self._class_name = full_name
+      self._test_name = full_name
+    self._start_date = start_date
+    self._dur = dur
+
+  def GetDur(self):
+    """Get the test duration."""
+    return self._dur
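
A quick sketch of the name-splitting behaviour above: instrumentation test
names use '#' to separate the class from the method, and names without a '#'
fall back to the full string for both fields (values are illustrative):

    from pylib.base import base_test_result
    from pylib.instrumentation import test_result

    r = test_result.InstrumentationTestResult(
        'org.chromium.FooTest#testBar',
        base_test_result.ResultType.PASS,
        start_date=0, dur=1500)
    # r._class_name == 'org.chromium.FooTest'
    # r._test_name == 'testBar'
    print(r.GetDur())  # 1500
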
diff --git a/build/android/pylib/instrumentation/test_runner.py b/build/android/pylib/instrumentation/test_runner.py
new file mode 100644
index 0000000..4f8cdcf
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_runner.py
@@ -0,0 +1,511 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class for running instrumentation tests on a single device."""
+
+import logging
+import os
+import re
+import sys
+import time
+
+from pylib import constants
+from pylib import flag_changer
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.device import device_errors
+from pylib.instrumentation import json_perf_parser
+from pylib.instrumentation import test_result
+
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                             'common'))
+import perf_tests_results_helper # pylint: disable=F0401
+
+
+_PERF_TEST_ANNOTATION = 'PerfTest'
+
+
+def _GetDataFilesForTestSuite(suite_basename):
+  """Returns a list of data files/dirs needed by the test suite.
+
+  Args:
+    suite_basename: The test suite basename for which to return file paths.
+
+  Returns:
+    A list of test file and directory paths.
+  """
+  test_files = []
+  if suite_basename in ['ChromeTest', 'ContentShellTest']:
+    test_files += [
+        'net/data/ssl/certificates/',
+    ]
+  return test_files
+
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  """Responsible for running a series of tests connected to a single device."""
+
+  _DEVICE_DATA_DIR = 'chrome/test/data'
+  _DEVICE_COVERAGE_DIR = 'chrome/test/coverage'
+  _HOSTMACHINE_PERF_OUTPUT_FILE = '/tmp/chrome-profile'
+  _DEVICE_PERF_OUTPUT_SEARCH_PREFIX = (constants.DEVICE_PERF_OUTPUT_DIR +
+                                       '/chrome-profile*')
+  _DEVICE_HAS_TEST_FILES = {}
+
+  def __init__(self, test_options, device, shard_index, test_pkg,
+               additional_flags=None):
+    """Create a new TestRunner.
+
+    Args:
+      test_options: An InstrumentationOptions object.
+      device: Attached android device.
+      shard_index: Shard index.
+      test_pkg: A TestPackage object.
+      additional_flags: A list of additional flags to add to the command line.
+    """
+    super(TestRunner, self).__init__(device, test_options.tool,
+                                     test_options.push_deps,
+                                     test_options.cleanup_test_files)
+    self._lighttp_port = constants.LIGHTTPD_RANDOM_PORT_FIRST + shard_index
+
+    self.coverage_device_file = None
+    self.coverage_dir = test_options.coverage_dir
+    self.coverage_host_file = None
+    self.options = test_options
+    self.test_pkg = test_pkg
+    # Use the correct command line file for the package under test.
+    cmdline_file = [a.cmdline_file for a in constants.PACKAGE_INFO.itervalues()
+                    if a.test_package == self.test_pkg.GetPackageName()]
+    assert len(cmdline_file) < 2, 'Multiple packages have the same test package'
+    if len(cmdline_file) and cmdline_file[0]:
+      self.flags = flag_changer.FlagChanger(self.device, cmdline_file[0])
+      if additional_flags:
+        self.flags.AddFlags(additional_flags)
+    else:
+      self.flags = None
+
+  #override
+  def InstallTestPackage(self):
+    self.test_pkg.Install(self.device)
+
+  #override
+  def PushDataDeps(self):
+    # TODO(frankf): Implement a general approach for copying/installing
+    # once across test runners.
+    if TestRunner._DEVICE_HAS_TEST_FILES.get(str(self.device), False):
+      logging.warning('Already copied test files to device %s, skipping.',
+                      str(self.device))
+      return
+
+    test_data = _GetDataFilesForTestSuite(self.test_pkg.GetApkName())
+    if test_data:
+      # Make sure SD card is ready.
+      self.device.WaitUntilFullyBooted(timeout=20)
+      for p in test_data:
+        self.device.PushChangedFiles(
+            os.path.join(constants.DIR_SOURCE_ROOT, p),
+            os.path.join(self.device.GetExternalStoragePath(), p))
+
+    # TODO(frankf): Specify test data in this file as opposed to passing
+    # as command-line.
+    for dest_host_pair in self.options.test_data:
+      dst_src = dest_host_pair.split(':', 1)
+      dst_layer = dst_src[0]
+      host_src = dst_src[1]
+      host_test_files_path = os.path.join(constants.DIR_SOURCE_ROOT,
+                                          host_src)
+      if os.path.exists(host_test_files_path):
+        self.device.PushChangedFiles(
+            host_test_files_path,
+            '%s/%s/%s' % (
+                self.device.GetExternalStoragePath(),
+                TestRunner._DEVICE_DATA_DIR,
+                dst_layer))
+    self.tool.CopyFiles()
+    TestRunner._DEVICE_HAS_TEST_FILES[str(self.device)] = True
+
+  def _GetInstrumentationArgs(self):
+    ret = {}
+    if self.options.wait_for_debugger:
+      ret['debug'] = 'true'
+    if self.coverage_dir:
+      ret['coverage'] = 'true'
+      ret['coverageFile'] = self.coverage_device_file
+
+    return ret
+
+  def _TakeScreenshot(self, test):
+    """Takes a screenshot from the device."""
+    screenshot_name = os.path.join(constants.SCREENSHOTS_DIR, '%s.png' % test)
+    logging.info('Taking screenshot named %s', screenshot_name)
+    self.device.TakeScreenshot(screenshot_name)
+
+  def SetUp(self):
+    """Sets up the test harness and device before all tests are run."""
+    super(TestRunner, self).SetUp()
+    if not self.device.HasRoot():
+      logging.warning('Unable to enable java asserts for %s, non-rooted '
+                      'device', str(self.device))
+    else:
+      if self.device.SetJavaAsserts(True):
+        # TODO(jbudorick) How to best do shell restart after the
+        #                 android_commands refactor?
+        self.device.RunShellCommand('stop')
+        self.device.RunShellCommand('start')
+
+    # Use a different default port for the HTTP server on each shard, since
+    # there can be a race condition when multiple processes try to launch
+    # lighttpd on the same port at the same time.
+    self.LaunchTestHttpServer(
+        os.path.join(constants.DIR_SOURCE_ROOT), self._lighttp_port)
+    if self.flags:
+      self.flags.AddFlags(['--disable-fre', '--enable-test-intents'])
+      if self.options.device_flags:
+        with open(self.options.device_flags) as device_flags_file:
+          stripped_flags = (l.strip() for l in device_flags_file)
+          self.flags.AddFlags([flag for flag in stripped_flags if flag])
+
+  def TearDown(self):
+    """Cleans up the test harness and saves outstanding data from test run."""
+    if self.flags:
+      self.flags.Restore()
+    super(TestRunner, self).TearDown()
+
+  def TestSetup(self, test):
+    """Sets up the test harness for running a particular test.
+
+    Args:
+      test: The name of the test that will be run.
+    """
+    self.SetupPerfMonitoringIfNeeded(test)
+    self._SetupIndividualTestTimeoutScale(test)
+    self.tool.SetupEnvironment()
+
+    # Make sure the forwarder is still running.
+    self._RestartHttpServerForwarderIfNecessary()
+
+    if self.coverage_dir:
+      coverage_basename = '%s.ec' % test
+      self.coverage_device_file = '%s/%s/%s' % (
+          self.device.GetExternalStoragePath(),
+          TestRunner._DEVICE_COVERAGE_DIR, coverage_basename)
+      self.coverage_host_file = os.path.join(
+          self.coverage_dir, coverage_basename)
+
+  def _IsPerfTest(self, test):
+    """Determines whether a test is a performance test.
+
+    Args:
+      test: The name of the test to be checked.
+
+    Returns:
+      Whether the test is annotated as a performance test.
+    """
+    return _PERF_TEST_ANNOTATION in self.test_pkg.GetTestAnnotations(test)
+
+  def SetupPerfMonitoringIfNeeded(self, test):
+    """Sets up performance monitoring if the specified test requires it.
+
+    Args:
+      test: The name of the test to be run.
+    """
+    if not self._IsPerfTest(test):
+      return
+    self.device.old_interface.Adb().SendCommand(
+        'shell rm ' + TestRunner._DEVICE_PERF_OUTPUT_SEARCH_PREFIX)
+    self.device.old_interface.StartMonitoringLogcat()
+
+  def TestTeardown(self, test, result):
+    """Cleans up the test harness after running a particular test.
+
+    Depending on the options of this TestRunner this might handle performance
+    tracking.  This method will only be called if the test passed.
+
+    Args:
+      test: The name of the test that was just run.
+      result: result for this test.
+    """
+
+    self.tool.CleanUpEnvironment()
+
+    # The logic below relies on the test passing.
+    if not result or not result.DidRunPass():
+      return
+
+    self.TearDownPerfMonitoring(test)
+
+    if self.coverage_dir:
+      self.device.PullFile(
+          self.coverage_device_file, self.coverage_host_file)
+      self.device.RunShellCommand(
+          'rm -f %s' % self.coverage_device_file)
+
+  def TearDownPerfMonitoring(self, test):
+    """Cleans up performance monitoring if the specified test required it.
+
+    Args:
+      test: The name of the test that was just run.
+    Raises:
+      Exception: if there's anything wrong with the perf data.
+    """
+    if not self._IsPerfTest(test):
+      return
+    raw_test_name = test.split('#')[1]
+
+    # Wait and grab annotation data so we can figure out which traces to parse
+    regex = self.device.old_interface.WaitForLogMatch(
+        re.compile('\*\*PERFANNOTATION\(' + raw_test_name + '\)\:(.*)'), None)
+
+    # If the test is set to run on a specific device type only (e.g. only a
+    # tablet or phone) and it is being run on the wrong device, the test
+    # just quits and does not do anything.  The java test harness will still
+    # print the appropriate annotation for us, but will add --NORUN-- for
+    # us so we know to ignore the results.
+    # The --NORUN-- tag is managed by MainActivityTestBase.java
+    if regex.group(1) != '--NORUN--':
+
+      # Obtain the relevant perf data.  The data is dumped to a
+      # JSON formatted file.
+      json_string = self.device.ReadFile(
+          '/data/data/com.google.android.apps.chrome/files/PerfTestData.txt',
+          as_root=True)
+
+      if json_string:
+        json_string = '\n'.join(json_string)
+      else:
+        raise Exception('Perf file does not exist or is empty')
+
+      if self.options.save_perf_json:
+        json_local_file = '/tmp/chromium-android-perf-json-' + raw_test_name
+        with open(json_local_file, 'w') as f:
+          f.write(json_string)
+        logging.info('Saving Perf UI JSON from test ' +
+                     test + ' to ' + json_local_file)
+
+      raw_perf_data = regex.group(1).split(';')
+
+      for raw_perf_set in raw_perf_data:
+        if raw_perf_set:
+          perf_set = raw_perf_set.split(',')
+          if len(perf_set) != 3:
+            raise Exception('Unexpected number of tokens in perf annotation '
+                            'string: ' + raw_perf_set)
+
+          # Process the performance data
+          result = json_perf_parser.GetAverageRunInfoFromJSONString(json_string,
+                                                                    perf_set[0])
+          perf_tests_results_helper.PrintPerfResult(perf_set[1], perf_set[2],
+                                                    [result['average']],
+                                                    result['units'])
+
+  def _SetupIndividualTestTimeoutScale(self, test):
+    timeout_scale = self._GetIndividualTestTimeoutScale(test)
+    valgrind_tools.SetChromeTimeoutScale(self.device, timeout_scale)
+
+  def _GetIndividualTestTimeoutScale(self, test):
+    """Returns the timeout scale for the given |test|."""
+    annotations = self.test_pkg.GetTestAnnotations(test)
+    timeout_scale = 1
+    if 'TimeoutScale' in annotations:
+      for annotation in annotations:
+        scale_match = re.match('TimeoutScale:([0-9]+)', annotation)
+        if scale_match:
+          timeout_scale = int(scale_match.group(1))
+    if self.options.wait_for_debugger:
+      timeout_scale *= 100
+    return timeout_scale
+
+  def _GetIndividualTestTimeoutSecs(self, test):
+    """Returns the timeout in seconds for the given |test|."""
+    annotations = self.test_pkg.GetTestAnnotations(test)
+    if 'Manual' in annotations:
+      return 10 * 60 * 60
+    if 'IntegrationTest' in annotations:
+      return 30 * 60
+    if 'External' in annotations:
+      return 10 * 60
+    if 'EnormousTest' in annotations:
+      return 10 * 60
+    if 'LargeTest' in annotations or _PERF_TEST_ANNOTATION in annotations:
+      return 5 * 60
+    if 'MediumTest' in annotations:
+      return 3 * 60
+    if 'SmallTest' in annotations:
+      return 1 * 60
+
+    logging.warn(("Test size not found in annotations for test '{0}', using " +
+                  "1 minute for timeout.").format(test))
+    return 1 * 60
+
+  def _RunTest(self, test, timeout):
+    """Runs a single instrumentation test.
+
+    Args:
+      test: Test class/method.
+      timeout: Timeout time in seconds.
+
+    Returns:
+      The raw output of am instrument as a list of lines.
+    """
+    # Build the 'am instrument' command
+    instrumentation_path = (
+        '%s/%s' % (self.test_pkg.GetPackageName(), self.options.test_runner))
+
+    cmd = ['am', 'instrument', '-r']
+    for k, v in self._GetInstrumentationArgs().iteritems():
+      cmd.extend(['-e', k, "'%s'" % v])
+    cmd.extend(['-e', 'class', "'%s'" % test])
+    cmd.extend(['-w', instrumentation_path])
+    return self.device.RunShellCommand(cmd, timeout=timeout, retries=0)
+
+  @staticmethod
+  def _ParseAmInstrumentRawOutput(raw_output):
+    """Parses the output of an |am instrument -r| call.
+
+    Args:
+      raw_output: the output of an |am instrument -r| call as a list of lines
+    Returns:
+      A 3-tuple containing:
+        - the instrumentation code as an integer
+        - the instrumentation result as a list of lines
+        - the instrumentation statuses received as a list of 2-tuples
+          containing:
+          - the status code as an integer
+          - the bundle dump as a dict mapping string keys to a list of
+            strings, one for each line.
+    """
+    INSTR_STATUS = 'INSTRUMENTATION_STATUS: '
+    INSTR_STATUS_CODE = 'INSTRUMENTATION_STATUS_CODE: '
+    INSTR_RESULT = 'INSTRUMENTATION_RESULT: '
+    INSTR_CODE = 'INSTRUMENTATION_CODE: '
+
+    last = None
+    instr_code = None
+    instr_result = []
+    instr_statuses = []
+    bundle = {}
+    for line in raw_output:
+      if line.startswith(INSTR_STATUS):
+        instr_var = line[len(INSTR_STATUS):]
+        if '=' in instr_var:
+          k, v = instr_var.split('=', 1)
+          bundle[k] = [v]
+          last = INSTR_STATUS
+          last_key = k
+        else:
+          logging.debug('Unknown "%s" line: %s' % (INSTR_STATUS, line))
+
+      elif line.startswith(INSTR_STATUS_CODE):
+        instr_status = line[len(INSTR_STATUS_CODE):]
+        instr_statuses.append((int(instr_status), bundle))
+        bundle = {}
+        last = INSTR_STATUS_CODE
+
+      elif line.startswith(INSTR_RESULT):
+        instr_result.append(line[len(INSTR_RESULT):])
+        last = INSTR_RESULT
+
+      elif line.startswith(INSTR_CODE):
+        instr_code = int(line[len(INSTR_CODE):])
+        last = INSTR_CODE
+
+      elif last == INSTR_STATUS:
+        bundle[last_key].append(line)
+
+      elif last == INSTR_RESULT:
+        instr_result.append(line)
+
+    return (instr_code, instr_result, instr_statuses)
+
+  def _GenerateTestResult(self, test, instr_statuses, start_ms, duration_ms):
+    """Generate the result of |test| from |instr_statuses|.
+
+    Args:
+      test: The name of the test.
+      instr_statuses: A list of 2-tuples containing:
+        - the status code as an integer
+        - the bundle dump as a dict mapping string keys to string values
+        Note that this is the same as the third item in the 3-tuple returned by
+        |_ParseAmInstrumentRawOutput|.
+      start_ms: The start time of the test in milliseconds.
+      duration_ms: The duration of the test in milliseconds.
+    Returns:
+      An InstrumentationTestResult object.
+    """
+    INSTR_STATUS_CODE_START = 1
+    INSTR_STATUS_CODE_OK = 0
+    INSTR_STATUS_CODE_ERROR = -1
+    INSTR_STATUS_CODE_FAIL = -2
+
+    log = ''
+    result_type = base_test_result.ResultType.UNKNOWN
+
+    for status_code, bundle in instr_statuses:
+      if status_code == INSTR_STATUS_CODE_START:
+        pass
+      elif status_code == INSTR_STATUS_CODE_OK:
+        bundle_test = '%s#%s' % (
+            ''.join(bundle.get('class', [''])),
+            ''.join(bundle.get('test', [''])))
+        skipped = ''.join(bundle.get('test_skipped', ['']))
+
+        if (test == bundle_test and
+            result_type == base_test_result.ResultType.UNKNOWN):
+          result_type = base_test_result.ResultType.PASS
+        elif skipped.lower() in ('true', '1', 'yes'):
+          result_type = base_test_result.ResultType.SKIP
+          logging.info('Skipped ' + test)
+      else:
+        if status_code not in (INSTR_STATUS_CODE_ERROR,
+                               INSTR_STATUS_CODE_FAIL):
+          logging.info('Unrecognized status code %d. Handling as an error.',
+                       status_code)
+        result_type = base_test_result.ResultType.FAIL
+        if 'stack' in bundle:
+          log = '\n'.join(bundle['stack'])
+        # Dismiss any error dialogs. Limit the number in case we have an error
+        # loop or we are failing to dismiss.
+        for _ in xrange(10):
+          package = self.device.old_interface.DismissCrashDialogIfNeeded()
+          if not package:
+            break
+          # Assume test package convention of ".test" suffix
+          if package in self.test_pkg.GetPackageName():
+            result_type = base_test_result.ResultType.CRASH
+            break
+
+    return test_result.InstrumentationTestResult(
+        test, result_type, start_ms, duration_ms, log=log)
+
+  #override
+  def RunTest(self, test):
+    results = base_test_result.TestRunResults()
+    timeout = (self._GetIndividualTestTimeoutSecs(test) *
+               self._GetIndividualTestTimeoutScale(test) *
+               self.tool.GetTimeoutScale())
+
+    start_ms = 0
+    duration_ms = 0
+    try:
+      self.TestSetup(test)
+
+      time_ms = lambda: int(time.time() * 1000)
+      start_ms = time_ms()
+      raw_output = self._RunTest(test, timeout)
+      duration_ms = time_ms() - start_ms
+
+      # Parse the test output
+      _, _, statuses = self._ParseAmInstrumentRawOutput(raw_output)
+      result = self._GenerateTestResult(test, statuses, start_ms, duration_ms)
+      results.AddResult(result)
+    except device_errors.CommandTimeoutError as e:
+      results.AddResult(test_result.InstrumentationTestResult(
+          test, base_test_result.ResultType.TIMEOUT, start_ms, duration_ms,
+          log=str(e) or 'No information'))
+    except device_errors.DeviceUnreachableError as e:
+      results.AddResult(test_result.InstrumentationTestResult(
+          test, base_test_result.ResultType.CRASH, start_ms, duration_ms,
+          log=str(e) or 'No information'))
+    self.TestTeardown(test, results)
+    return (results, None if results.DidRunPass() else test)
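
To make the timeout arithmetic in RunTest concrete, here is a worked example
under assumed annotations: a @MediumTest carrying TimeoutScale:2, run with
--wait_for_debugger and a tool whose GetTimeoutScale() returns 1:

    size_timeout = 3 * 60       # 'MediumTest' -> 180 seconds
    annotation_scale = 2 * 100  # TimeoutScale:2, times 100 for the debugger
    tool_scale = 1
    timeout = size_timeout * annotation_scale * tool_scale  # 36000 seconds
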
diff --git a/build/android/pylib/instrumentation/test_runner_test.py b/build/android/pylib/instrumentation/test_runner_test.py
new file mode 100755
index 0000000..1a2b40f
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_runner_test.py
@@ -0,0 +1,271 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Unit tests for instrumentation.TestRunner."""
+
+# pylint: disable=W0212
+
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.instrumentation import test_runner
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock  # pylint: disable=F0401
+
+
+class InstrumentationTestRunnerTest(unittest.TestCase):
+
+  def setUp(self):
+    options = mock.Mock()
+    options.tool = ''
+    package = mock.Mock()
+    self.instance = test_runner.TestRunner(options, None, 0, package)
+
+  def testParseAmInstrumentRawOutput_nothing(self):
+    code, result, statuses = (
+        test_runner.TestRunner._ParseAmInstrumentRawOutput(['']))
+    self.assertEqual(None, code)
+    self.assertEqual([], result)
+    self.assertEqual([], statuses)
+
+  def testParseAmInstrumentRawOutput_noMatchingStarts(self):
+    raw_output = [
+      '',
+      'this.is.a.test.package.TestClass:.',
+      'Test result for =.',
+      'Time: 1.234',
+      '',
+      'OK (1 test)',
+    ]
+
+    code, result, statuses = (
+        test_runner.TestRunner._ParseAmInstrumentRawOutput(raw_output))
+    self.assertEqual(None, code)
+    self.assertEqual([], result)
+    self.assertEqual([], statuses)
+
+  def testParseAmInstrumentRawOutput_resultAndCode(self):
+    raw_output = [
+      'INSTRUMENTATION_RESULT: foo',
+      'bar',
+      'INSTRUMENTATION_CODE: -1',
+    ]
+
+    code, result, _ = (
+        test_runner.TestRunner._ParseAmInstrumentRawOutput(raw_output))
+    self.assertEqual(-1, code)
+    self.assertEqual(['foo', 'bar'], result)
+
+  def testParseAmInstrumentRawOutput_oneStatus(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: foo=1',
+      'INSTRUMENTATION_STATUS: bar=hello',
+      'INSTRUMENTATION_STATUS: world=false',
+      'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass',
+      'INSTRUMENTATION_STATUS: test=testMethod',
+      'INSTRUMENTATION_STATUS_CODE: 0',
+    ]
+
+    _, _, statuses = (
+        test_runner.TestRunner._ParseAmInstrumentRawOutput(raw_output))
+
+    expected = [
+      (0, {
+        'foo': ['1'],
+        'bar': ['hello'],
+        'world': ['false'],
+        'class': ['this.is.a.test.package.TestClass'],
+        'test': ['testMethod'],
+      })
+    ]
+    self.assertEqual(expected, statuses)
+
+  def testParseAmInstrumentRawOutput_multiStatus(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: class=foo',
+      'INSTRUMENTATION_STATUS: test=bar',
+      'INSTRUMENTATION_STATUS_CODE: 1',
+      'INSTRUMENTATION_STATUS: test_skipped=true',
+      'INSTRUMENTATION_STATUS_CODE: 0',
+      'INSTRUMENTATION_STATUS: class=hello',
+      'INSTRUMENTATION_STATUS: test=world',
+      'INSTRUMENTATION_STATUS: stack=',
+      'foo/bar.py (27)',
+      'hello/world.py (42)',
+      'test/file.py (1)',
+      'INSTRUMENTATION_STATUS_CODE: -1',
+    ]
+
+    _, _, statuses = (
+        test_runner.TestRunner._ParseAmInstrumentRawOutput(raw_output))
+
+    expected = [
+      (1, {'class': ['foo'], 'test': ['bar'],}),
+      (0, {'test_skipped': ['true']}),
+      (-1, {
+        'class': ['hello'],
+        'test': ['world'],
+        'stack': ['', 'foo/bar.py (27)', 'hello/world.py (42)',
+                  'test/file.py (1)'],
+      }),
+    ]
+    self.assertEqual(expected, statuses)
+
+  def testParseAmInstrumentRawOutput_statusResultAndCode(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: class=foo',
+      'INSTRUMENTATION_STATUS: test=bar',
+      'INSTRUMENTATION_STATUS_CODE: 1',
+      'INSTRUMENTATION_RESULT: hello',
+      'world',
+      '',
+      '',
+      'INSTRUMENTATION_CODE: 0',
+    ]
+
+    code, result, statuses = (
+        test_runner.TestRunner._ParseAmInstrumentRawOutput(raw_output))
+
+    self.assertEqual(0, code)
+    self.assertEqual(['hello', 'world', '', ''], result)
+    self.assertEqual([(1, {'class': ['foo'], 'test': ['bar']})], statuses)
+
+  def testGenerateTestResult_noStatus(self):
+    result = self.instance._GenerateTestResult(
+        'test.package.TestClass#testMethod', [], 0, 1000)
+    self.assertEqual('test.package.TestClass#testMethod', result.GetName())
+    self.assertEqual(base_test_result.ResultType.UNKNOWN, result.GetType())
+    self.assertEqual('', result.GetLog())
+    self.assertEqual(1000, result.GetDur())
+
+  def testGenerateTestResult_testPassed(self):
+    statuses = [
+      (1, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+      }),
+      (0, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+      }),
+    ]
+    result = self.instance._GenerateTestResult(
+        'test.package.TestClass#testMethod', statuses, 0, 1000)
+    self.assertEqual(base_test_result.ResultType.PASS, result.GetType())
+
+  def testGenerateTestResult_testSkipped_first(self):
+    statuses = [
+      (0, {
+        'test_skipped': ['true'],
+      }),
+      (1, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+      }),
+      (0, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+      }),
+    ]
+    result = self.instance._GenerateTestResult(
+        'test.package.TestClass#testMethod', statuses, 0, 1000)
+    self.assertEqual(base_test_result.ResultType.SKIP, result.GetType())
+
+  def testGenerateTestResult_testSkipped_last(self):
+    statuses = [
+      (1, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+      }),
+      (0, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+      }),
+      (0, {
+        'test_skipped': ['true'],
+      }),
+    ]
+    result = self.instance._GenerateTestResult(
+        'test.package.TestClass#testMethod', statuses, 0, 1000)
+    self.assertEqual(base_test_result.ResultType.SKIP, result.GetType())
+
+  def testGenerateTestResult_testSkipped_false(self):
+    statuses = [
+      (0, {
+        'test_skipped': ['false'],
+      }),
+      (1, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+      }),
+      (0, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+      }),
+    ]
+    result = self.instance._GenerateTestResult(
+        'test.package.TestClass#testMethod', statuses, 0, 1000)
+    self.assertEqual(base_test_result.ResultType.PASS, result.GetType())
+
+  def testGenerateTestResult_testFailed(self):
+    statuses = [
+      (1, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+      }),
+      (-2, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+      }),
+    ]
+    result = self.instance._GenerateTestResult(
+        'test.package.TestClass#testMethod', statuses, 0, 1000)
+    self.assertEqual(base_test_result.ResultType.FAIL, result.GetType())
+
+  def testGenerateTestResult_testCrashed(self):
+    self.instance.test_pkg.GetPackageName = mock.Mock(
+        return_value='generate.test.result.test.package')
+    self.instance.device.old_interface.DismissCrashDialogIfNeeded = mock.Mock(
+        return_value='generate.test.result.test.package')
+    statuses = [
+      (1, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+      }),
+      (-1, {
+        'class': ['test.package.TestClass'],
+        'test': ['testMethod'],
+        'stack': ['', 'foo/bar.py (27)', 'hello/world.py (42)'],
+      }),
+    ]
+    result = self.instance._GenerateTestResult(
+        'test.package.TestClass#testMethod', statuses, 0, 1000)
+    self.assertEqual(base_test_result.ResultType.CRASH, result.GetType())
+    self.assertEqual('\nfoo/bar.py (27)\nhello/world.py (42)', result.GetLog())
+
+  def test_RunTest_verifyAdbShellCommand(self):
+    self.instance.options.test_runner = 'MyTestRunner'
+    self.instance.device.RunShellCommand = mock.Mock()
+    self.instance.test_pkg.GetPackageName = mock.Mock(
+        return_value='test.package')
+    self.instance._GetInstrumentationArgs = mock.Mock(
+        return_value={'test_arg_key': 'test_arg_value'})
+    self.instance._RunTest('test.package.TestClass#testMethod', 100)
+    self.instance.device.RunShellCommand.assert_called_with(
+        ['am', 'instrument', '-r',
+         '-e', 'test_arg_key', "'test_arg_value'",
+         '-e', 'class', "'test.package.TestClass#testMethod'",
+         '-w', 'test.package/MyTestRunner'],
+        timeout=100, retries=0)
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/junit/__init__.py b/build/android/pylib/junit/__init__.py
new file mode 100644
index 0000000..5cac026
--- /dev/null
+++ b/build/android/pylib/junit/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/junit/setup.py b/build/android/pylib/junit/setup.py
new file mode 100644
index 0000000..6ae0006
--- /dev/null
+++ b/build/android/pylib/junit/setup.py
@@ -0,0 +1,18 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.junit import test_runner
+
+def Setup(options):
+  """Creates a test runner factory for junit tests.
+
+  Returns:
+    A (runner_factory, tests) tuple.
+  """
+
+  def TestRunnerFactory(_unused_device, _unused_shard_index):
+    return test_runner.JavaTestRunner(options)
+
+  return (TestRunnerFactory, ['JUnit tests'])
+
diff --git a/build/android/pylib/junit/test_dispatcher.py b/build/android/pylib/junit/test_dispatcher.py
new file mode 100644
index 0000000..b821b75
--- /dev/null
+++ b/build/android/pylib/junit/test_dispatcher.py
@@ -0,0 +1,19 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+def RunTests(tests, runner_factory):
+  """Runs a set of java tests on the host.
+
+  Returns:
+    A tuple containing the results and the exit code.
+  """
+  def run(t):
+    runner = runner_factory(None, None)
+    runner.SetUp()
+    result = runner.RunTest(t)
+    runner.TearDown()
+    return result == 0
+
+  return (None, 0 if all(run(t) for t in tests) else 1)
+
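
A hedged sketch of how these two modules fit together; 'options' stands in for
the parsed junit command-line options and is hypothetical here:

    from pylib.junit import setup, test_dispatcher

    # 'options' comes from the junit command-line parser (assumed).
    runner_factory, tests = setup.Setup(options)
    _, exit_code = test_dispatcher.RunTests(tests, runner_factory)
    # exit_code is 0 only if every runner's RunTest() returned 0.
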
diff --git a/build/android/pylib/junit/test_runner.py b/build/android/pylib/junit/test_runner.py
new file mode 100644
index 0000000..d0803ea
--- /dev/null
+++ b/build/android/pylib/junit/test_runner.py
@@ -0,0 +1,40 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+from pylib import cmd_helper
+from pylib import constants
+
+class JavaTestRunner(object):
+  """Runs java tests on the host."""
+
+  def __init__(self, options):
+    self._package_filter = options.package_filter
+    self._runner_filter = options.runner_filter
+    self._sdk_version = options.sdk_version
+    self._test_filter = options.test_filter
+    self._test_suite = options.test_suite
+
+  def SetUp(self):
+    pass
+
+  def RunTest(self, _test):
+    """Runs junit tests from |self._test_suite|."""
+    command = ['java',
+               '-jar', os.path.join(constants.GetOutDirectory(), 'lib.java',
+                                    '%s.jar' % self._test_suite)]
+    if self._test_filter:
+      command.extend(['-gtest-filter', self._test_filter])
+    if self._package_filter:
+      command.extend(['-package-filter', self._package_filter])
+    if self._runner_filter:
+      command.extend(['-runner-filter', self._runner_filter])
+    if self._sdk_version:
+      command.extend(['-sdk-version', self._sdk_version])
+    return cmd_helper.RunCmd(command)
+
+  def TearDown(self):
+    pass
+
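
With a hypothetical options object (test_suite='content_junit_tests',
test_filter='*Foo*', sdk_version='16'), RunTest above would build and run
roughly this command:

    command = ['java', '-jar',
               'out/Debug/lib.java/content_junit_tests.jar',
               '-gtest-filter', '*Foo*',
               '-sdk-version', '16']
    # equivalent shell invocation:
    #   java -jar out/Debug/lib.java/content_junit_tests.jar \
    #       -gtest-filter '*Foo*' -sdk-version 16
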
diff --git a/build/android/pylib/linker/__init__.py b/build/android/pylib/linker/__init__.py
new file mode 100644
index 0000000..af99437
--- /dev/null
+++ b/build/android/pylib/linker/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/linker/setup.py b/build/android/pylib/linker/setup.py
new file mode 100644
index 0000000..a863a0d
--- /dev/null
+++ b/build/android/pylib/linker/setup.py
@@ -0,0 +1,45 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Setup for linker tests."""
+
+import os
+import sys
+
+from pylib import constants
+from pylib.linker import test_case
+from pylib.linker import test_runner
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                             'common'))
+import unittest_util # pylint: disable=F0401
+
+def Setup(options, _devices):
+  """Creates a list of test cases and a runner factory.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  test_cases = [
+      test_case.LinkerLibraryAddressTest,
+      test_case.LinkerSharedRelroTest,
+      test_case.LinkerRandomizationTest ]
+
+  low_memory_modes = [False, True]
+  all_tests = [t(is_low_memory=m) for t in test_cases for m in low_memory_modes]
+
+  if options.test_filter:
+    all_test_names = [ test.qualified_name for test in all_tests ]
+    filtered_test_names = unittest_util.FilterTestNames(all_test_names,
+                                                        options.test_filter)
+    all_tests = [t for t in all_tests \
+                 if t.qualified_name in filtered_test_names]
+
+  def TestRunnerFactory(device, _shard_index):
+    return test_runner.LinkerTestRunner(
+        device, options.tool, options.push_deps,
+        options.cleanup_test_files)
+
+  return (TestRunnerFactory, all_tests)
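
Setup() therefore produces six test instances before filtering: each of the
three test case classes in both memory modes. A minimal sketch of that
expansion:

    test_cases = [LinkerLibraryAddressTest, LinkerSharedRelroTest,
                  LinkerRandomizationTest]   # from pylib.linker.test_case
    all_tests = [t(is_low_memory=m)
                 for t in test_cases
                 for m in (False, True)]     # 2 memory modes per class
    # all_tests[0].qualified_name ==
    #     'LinkerLibraryAddressTest.ForRegularDevice'
    # all_tests[1].qualified_name ==
    #     'LinkerLibraryAddressTest.ForLowMemoryDevice'
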
diff --git a/build/android/pylib/linker/test_case.py b/build/android/pylib/linker/test_case.py
new file mode 100644
index 0000000..446bc84
--- /dev/null
+++ b/build/android/pylib/linker/test_case.py
@@ -0,0 +1,542 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for linker-specific test cases.
+
+   The custom dynamic linker can only be tested through a custom test case
+   for various technical reasons:
+
+     - It's an 'invisible feature', i.e. it doesn't expose a new API or
+       behaviour, all it does is save RAM when loading native libraries.
+
+     - Checking that it works correctly requires several things that do not
+       fit the existing GTest-based and instrumentation-based tests:
+
+         - Native test code needs to be run in both the browser and renderer
+           process at the same time just after loading native libraries, in
+           a completely asynchronous way.
+
+         - Each test case requires restarting a whole new application process
+           with a different command-line.
+
+         - Enabling test support in the Linker code requires building a special
+           APK with a flag to activate special test-only support code in the
+           Linker code itself.
+
+       Host-driven tests have also been tried, but since they're really
+       sub-classes of instrumentation tests, they didn't work well either.
+
+   To build and run the linker tests, do the following:
+
+     ninja -C out/Debug chromium_linker_test_apk
+     build/android/test_runner.py linker
+
+"""
+# pylint: disable=R0201
+
+import logging
+import os
+import re
+import time
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.device import intent
+
+
+ResultType = base_test_result.ResultType
+
+_PACKAGE_NAME = 'org.chromium.chromium_linker_test_apk'
+_ACTIVITY_NAME = '.ChromiumLinkerTestActivity'
+_COMMAND_LINE_FILE = '/data/local/tmp/chromium-linker-test-command-line'
+
+# Path to the Linker.java source file.
+_LINKER_JAVA_SOURCE_PATH = (
+    'base/android/java/src/org/chromium/base/library_loader/Linker.java')
+
+# A regular expression used to extract the browser shared RELRO configuration
+# from the Java source file above.
+_RE_LINKER_BROWSER_CONFIG = re.compile(
+    r'.*BROWSER_SHARED_RELRO_CONFIG\s+=\s+' +
+        'BROWSER_SHARED_RELRO_CONFIG_(\S+)\s*;.*',
+    re.MULTILINE | re.DOTALL)
+
+# Logcat filters used during each test. Only the 'chromium' one is really
+# needed, but the logs are added to the TestResult in case of error, and
+# it is handy to have the 'chromium_android_linker' ones as well when
+# troubleshooting.
+_LOGCAT_FILTERS = [ '*:s', 'chromium:v', 'chromium_android_linker:v' ]
+#_LOGCAT_FILTERS = [ '*:v' ]  ## DEBUG
+
+# Regular expression used to match status lines in logcat.
+re_status_line = re.compile(r'(BROWSER|RENDERER)_LINKER_TEST: (FAIL|SUCCESS)')
+
+# Regular expression used to match library load addresses in logcat.
+re_library_address = re.compile(
+    r'(BROWSER|RENDERER)_LIBRARY_ADDRESS: (\S+) ([0-9A-Fa-f]+)')
+
+
+def _GetBrowserSharedRelroConfig():
+  """Returns a string corresponding to the Linker's configuration of shared
+     RELRO sections in the browser process. This parses the Java linker source
+     file to get the appropriate information.
+  Returns:
+     None in case of error (e.g. could not locate the source file).
+     'NEVER' if the browser process shall never use shared RELROs.
+     'LOW_RAM_ONLY' if it uses them only on low-end devices.
+     'ALWAYS' if it always uses a shared RELRO.
+  """
+  source_path = \
+      os.path.join(constants.DIR_SOURCE_ROOT, _LINKER_JAVA_SOURCE_PATH)
+  if not os.path.exists(source_path):
+    logging.error('Could not find linker source file: ' + source_path)
+    return None
+
+  with open(source_path) as f:
+    configs = _RE_LINKER_BROWSER_CONFIG.findall(f.read())
+    if not configs:
+      logging.error(
+          'Can\'t find browser shared RELRO configuration value in ' + \
+          source_path)
+      return None
+
+    if configs[0] not in ['NEVER', 'LOW_RAM_ONLY', 'ALWAYS']:
+      logging.error('Unexpected browser config value: ' + configs[0])
+      return None
+
+    logging.info('Found linker browser shared RELRO config: ' + configs[0])
+    return configs[0]
+
+
+def _WriteCommandLineFile(device, command_line, command_line_file):
+  """Create a command-line file on the device. This does not use FlagChanger
+     because its implementation assumes the device has 'su', and thus does
+     not work at all with production devices."""
+  device.RunShellCommand(
+      'echo "%s" > %s' % (command_line, command_line_file))
+
+
+def _CheckLinkerTestStatus(logcat):
+  """Parse the content of |logcat| and checks for both a browser and
+     renderer status line.
+
+  Args:
+    logcat: A string to parse. Can include line separators.
+
+  Returns:
+    A tuple, result[0] is True if there is a complete match, then
+    result[1] and result[2] will be True or False to reflect the
+    test status for the browser and renderer processes, respectively.
+  """
+  browser_found = False
+  renderer_found = False
+  for m in re_status_line.finditer(logcat):
+    process_type, status = m.groups()
+    if process_type == 'BROWSER':
+      browser_found = True
+      browser_success = (status == 'SUCCESS')
+    elif process_type == 'RENDERER':
+      renderer_found = True
+      renderer_success = (status == 'SUCCESS')
+    else:
+      assert False, 'Invalid process type ' + process_type
+
+  if browser_found and renderer_found:
+    return (True, browser_success, renderer_success)
+
+  # Didn't find both status lines.
+  return (False, None, None)
+
+
+def _StartActivityAndWaitForLinkerTestStatus(device, timeout):
+  """Force-start an activity and wait up to |timeout| seconds until the full
+     linker test status lines appear in the logcat, recorded through |device|.
+  Args:
+    device: A DeviceUtils instance.
+    timeout: Timeout in seconds
+  Returns:
+    A (status, logs) tuple, where status is a ResultType constant, and logs
+    is the final logcat output as a string.
+  """
+  # 1. Start recording logcat with appropriate filters.
+  device.old_interface.StartRecordingLogcat(
+      clear=True, filters=_LOGCAT_FILTERS)
+
+  try:
+    # 2. Force-start activity.
+    device.StartActivity(
+        intent.Intent(package=_PACKAGE_NAME, activity=_ACTIVITY_NAME),
+        force_stop=True)
+
+    # 3. Wait up to |timeout| seconds until the test status is in the logcat.
+    num_tries = 0
+    max_tries = timeout
+    found = False
+    while num_tries < max_tries:
+      time.sleep(1)
+      num_tries += 1
+      found, browser_ok, renderer_ok = _CheckLinkerTestStatus(
+          device.old_interface.GetCurrentRecordedLogcat())
+      if found:
+        break
+
+  finally:
+    logs = device.old_interface.StopRecordingLogcat()
+
+  if num_tries >= max_tries:
+    return ResultType.TIMEOUT, logs
+
+  if browser_ok and renderer_ok:
+    return ResultType.PASS, logs
+
+  return ResultType.FAIL, logs
+
+
+class LibraryLoadMap(dict):
+  """A helper class to pretty-print a map of library names to load addresses."""
+  def __str__(self):
+    items = ['\'%s\': 0x%x' % (name, address) for \
+        (name, address) in self.iteritems()]
+    return '{%s}' % (', '.join(items))
+
+  def __repr__(self):
+    return 'LibraryLoadMap(%s)' % self.__str__()
+
+
+class AddressList(list):
+  """A helper class to pretty-print a list of load addresses."""
+  def __str__(self):
+    items = ['0x%x' % address for address in self]
+    return '[%s]' % (', '.join(items))
+
+  def __repr__(self):
+    return 'AddressList(%s)' % self.__str__()
+
+
+def _ExtractLibraryLoadAddressesFromLogcat(logs):
+  """Extract the names and addresses of shared libraries loaded in the
+     browser and renderer processes.
+  Args:
+    logs: A string containing logcat output.
+  Returns:
+    A tuple (browser_libs, renderer_libs), where each item is a map of
+    library names (strings) to library load addresses (ints), for the
+    browser and renderer processes, respectively.
+  """
+  browser_libs = LibraryLoadMap()
+  renderer_libs = LibraryLoadMap()
+  for m in re_library_address.finditer(logs):
+    process_type, lib_name, lib_address = m.groups()
+    lib_address = int(lib_address, 16)
+    if process_type == 'BROWSER':
+      browser_libs[lib_name] = lib_address
+    elif process_type == 'RENDERER':
+      renderer_libs[lib_name] = lib_address
+    else:
+      assert False, 'Invalid process type'
+
+  return browser_libs, renderer_libs
+
+
+def _CheckLoadAddressRandomization(lib_map_list, process_type):
+  """Check that a map of library load addresses is random enough.
+  Args:
+    lib_map_list: a list of dictionaries that map library names (string)
+      to load addresses (int). Each item in the list corresponds to a
+      different run / process start.
+    process_type: a string describing the process type.
+  Returns:
+    (status, logs) tuple, where <status> is True iff the load addresses are
+    randomized, False otherwise, and <logs> is a string containing an error
+    message detailing the libraries that are not randomized properly.
+  """
+  # Collect, for each library, its list of load addresses.
+  lib_addr_map = {}
+  for lib_map in lib_map_list:
+    for lib_name, lib_address in lib_map.iteritems():
+      if lib_name not in lib_addr_map:
+        lib_addr_map[lib_name] = AddressList()
+      lib_addr_map[lib_name].append(lib_address)
+
+  logging.info('%s library load map: %s', process_type, lib_addr_map)
+
+  # For each library, check the randomness of its load addresses.
+  bad_libs = {}
+  for lib_name, lib_address_list in lib_addr_map.iteritems():
+    # If all addresses are different, skip to next item.
+    lib_address_set = set(lib_address_list)
+    # Consider that if there is more than one pair of identical addresses in
+    # the list, then randomization is broken.
+    if len(lib_address_set) < len(lib_address_list) - 1:
+      bad_libs[lib_name] = lib_address_list
+
+  if bad_libs:
+    return False, '%s libraries failed randomization: %s' % \
+        (process_type, bad_libs)
+
+  return True, '%s libraries properly randomized: %s' % \
+      (process_type, lib_addr_map)
+
+
+class LinkerTestCaseBase(object):
+  """Base class for linker test cases."""
+
+  def __init__(self, is_low_memory=False):
+    """Create a test case.
+    Args:
+      is_low_memory: True to simulate a low-memory device, False otherwise.
+    """
+    self.is_low_memory = is_low_memory
+    if is_low_memory:
+      test_suffix = 'ForLowMemoryDevice'
+    else:
+      test_suffix = 'ForRegularDevice'
+    class_name = self.__class__.__name__
+    self.qualified_name = '%s.%s' % (class_name, test_suffix)
+    self.tagged_name = self.qualified_name
+
+  def _RunTest(self, _device):
+    """Run the test, must be overriden.
+    Args:
+      _device: A DeviceUtils interface.
+    Returns:
+      A (status, log) tuple, where <status> is a ResultType constant, and <log>
+      is the logcat output captured during the test in case of error, or None
+      in case of success.
+    """
+    return ResultType.FAIL, 'Unimplemented _RunTest() method!'
+
+  def Run(self, device):
+    """Run the test on a given device.
+    Args:
+      device: The device to run the test on.
+    Returns:
+      A base_test_result.TestRunResults() instance.
+    """
+    margin = 8
+    print '[ %-*s ] %s' % (margin, 'RUN', self.tagged_name)
+    logging.info('Running linker test: %s', self.tagged_name)
+
+    # Create command-line file on device.
+    command_line_flags = ''
+    if self.is_low_memory:
+      command_line_flags = '--low-memory-device'
+    _WriteCommandLineFile(device, command_line_flags, _COMMAND_LINE_FILE)
+
+    # Run the test.
+    status, logs = self._RunTest(device)
+
+    result_text = 'OK'
+    if status == ResultType.FAIL:
+      result_text = 'FAILED'
+    elif status == ResultType.TIMEOUT:
+      result_text = 'TIMEOUT'
+    print '[ %*s ] %s' % (margin, result_text, self.tagged_name)
+
+    results = base_test_result.TestRunResults()
+    results.AddResult(
+        base_test_result.BaseTestResult(
+            self.tagged_name,
+            status,
+            logs))
+
+    return results
+
+  def __str__(self):
+    return self.tagged_name
+
+  def __repr__(self):
+    return self.tagged_name
+
+
+class LinkerSharedRelroTest(LinkerTestCaseBase):
+  """A linker test case to check the status of shared RELRO sections.
+
+    The core of the checks performed here is pretty simple:
+
+      - Clear the logcat and start recording with an appropriate set of filters.
+      - Create the command-line appropriate for the test-case.
+      - Start the activity (always forcing a cold start).
+      - Every second, look at the current content of the filtered logcat lines
+        and look for instances of the following:
+
+            BROWSER_LINKER_TEST: <status>
+            RENDERER_LINKER_TEST: <status>
+
+        where <status> can be either FAIL or SUCCESS. These lines can appear
+        in any order in the logcat. Once both browser and renderer status are
+        found, stop the loop. Otherwise timeout after 30 seconds.
+
+        Note that there can be other lines beginning with BROWSER_LINKER_TEST:
+        and RENDERER_LINKER_TEST: that are not followed by a <status> code.
+
+      - The test case passes if the <status> for both the browser and renderer
+        processes is SUCCESS. Otherwise it's a failure.
+  """
+  def _RunTest(self, device):
+    # Wait up to 30 seconds until the linker test status is in the logcat.
+    return _StartActivityAndWaitForLinkerTestStatus(device, timeout=30)
+
+
+class LinkerLibraryAddressTest(LinkerTestCaseBase):
+  """A test case that verifies library load addresses.
+
+     The point of this check is to ensure that the libraries are loaded
+     according to the following rules:
+
+     - For low-memory devices, they should always be loaded at the same address
+       in both browser and renderer processes, both below 0x4000_0000.
+
+     - For regular devices, the browser process should load libraries above
+       0x4000_0000, and renderer ones below it.
+  """
+  def _RunTest(self, device):
+    result, logs = _StartActivityAndWaitForLinkerTestStatus(device, timeout=30)
+
+    # Return immediately in case of timeout.
+    if result == ResultType.TIMEOUT:
+      return result, logs
+
+    # Collect the library load addresses in the browser and renderer processes.
+    browser_libs, renderer_libs = _ExtractLibraryLoadAddressesFromLogcat(logs)
+
+    logging.info('Browser libraries: %s', browser_libs)
+    logging.info('Renderer libraries: %s', renderer_libs)
+
+    # Check that the same libraries are loaded into both processes:
+    browser_set = set(browser_libs.keys())
+    renderer_set = set(renderer_libs.keys())
+    if browser_set != renderer_set:
+      logging.error('Library set mismatch: browser=%s renderer=%s',
+          browser_libs.keys(), renderer_libs.keys())
+      return ResultType.FAIL, logs
+
+    # And that they are not empty.
+    if not browser_set:
+      logging.error('No libraries loaded in any process!')
+      return ResultType.FAIL, logs
+
+    # Check that the renderer libraries are loaded at 'low' addresses, i.e.
+    # below 0x4000_0000, on every kind of device.
+    memory_boundary = 0x40000000
+    bad_libs = []
+    for lib_name, lib_address in renderer_libs.iteritems():
+      if lib_address >= memory_boundary:
+        bad_libs.append((lib_name, lib_address))
+
+    if bad_libs:
+      logging.error('Renderer libraries loaded at high addresses: %s', bad_libs)
+      return ResultType.FAIL, logs
+
+    browser_config = _GetBrowserSharedRelroConfig()
+    if not browser_config:
+      return ResultType.FAIL, 'Bad linker source configuration'
+
+    if browser_config == 'ALWAYS' or \
+        (browser_config == 'LOW_RAM_ONLY' and self.is_low_memory):
+      # The libraries must all be loaded at the same addresses. This also
+      # implicitly checks that the browser libraries are at low addresses.
+      addr_mismatches = []
+      for lib_name, lib_address in browser_libs.iteritems():
+        lib_address2 = renderer_libs[lib_name]
+        if lib_address != lib_address2:
+          addr_mismatches.append((lib_name, lib_address, lib_address2))
+
+      if addr_mismatches:
+        logging.error('Library load address mismatches: %s',
+            addr_mismatches)
+        return ResultType.FAIL, logs
+
+    # Otherwise, check that libraries are loaded at 'high-addresses'.
+    # Note that for low-memory devices, the previous checks ensure that they
+    # were loaded at low-addresses.
+    else:
+      bad_libs = []
+      for lib_name, lib_address in browser_libs.iteritems():
+        if lib_address < memory_boundary:
+          bad_libs.append((lib_name, lib_address))
+
+      if bad_libs:
+        logging.error('Browser libraries loaded at low addresses: %s', bad_libs)
+        return ResultType.FAIL, logs
+
+    # Everything's ok.
+    return ResultType.PASS, logs
+
+
+class LinkerRandomizationTest(LinkerTestCaseBase):
+  """A linker test case to check that library load address randomization works
+     properly between successive starts of the test program/activity.
+
+     This starts the activity several times (each time forcing a new process
+     creation) and compares the load addresses of the libraries in them to
+     detect that they have changed.
+
+     In theory, two successive runs could (very rarely) use the same load
+     address, so loop 5 times and compare the values across runs. It is
+     assumed that if there is more than one pair of identical addresses, the
+     load addresses are not random enough for this test.
+  """
+  def _RunTest(self, device):
+    max_loops = 5
+    browser_lib_map_list = []
+    renderer_lib_map_list = []
+    logs_list = []
+    for _ in range(max_loops):
+      # Start the activity.
+      result, logs = _StartActivityAndWaitForLinkerTestStatus(
+          device, timeout=30)
+      if result == ResultType.TIMEOUT:
+        # Something bad happened. Return immediately.
+        return result, logs
+
+      # Collect library addresses.
+      browser_libs, renderer_libs = _ExtractLibraryLoadAddressesFromLogcat(logs)
+      browser_lib_map_list.append(browser_libs)
+      renderer_lib_map_list.append(renderer_libs)
+      logs_list.append(logs)
+
+    # Check randomization in the browser libraries.
+    logs = '\n'.join(logs_list)
+
+    browser_status, browser_logs = _CheckLoadAddressRandomization(
+        browser_lib_map_list, 'Browser')
+
+    renderer_status, renderer_logs = _CheckLoadAddressRandomization(
+        renderer_lib_map_list, 'Renderer')
+
+    browser_config = _GetBrowserSharedRelroConfig()
+    if not browser_config:
+      return ResultType.FAIL, 'Bad linker source configuration'
+
+    if not browser_status:
+      if browser_config == 'ALWAYS' or \
+          (browser_config == 'LOW_RAM_ONLY' and self.is_low_memory):
+        return ResultType.FAIL, browser_logs
+
+      # IMPORTANT NOTE: The system's ASLR implementation seems to be very poor
+      # when starting an activity process in a loop with "adb shell am start".
+      #
+      # When simulating a regular device, loading libraries in the browser
+      # process uses a simple mmap(NULL, ...) to let the kernel decide where to
+      # load the file (this is similar to what System.loadLibrary() does).
+      #
+      # Unfortunately, at least in the context of this test, doing so while
+      # restarting the activity with the activity manager very, very often
+      # results in the system using the same load address for all 5 runs, or
+      # sometimes only 4 out of 5.
+      #
+      # This has been tested experimentally on both Android 4.1.2 and 4.3.
+      #
+      # Note that this behaviour doesn't seem to happen when starting an
+      # application 'normally', i.e. when using the application launcher to
+      # start the activity.
+      logging.info('Ignoring system\'s low randomization of browser libraries' +
+                   ' for regular devices')
+
+    if not renderer_status:
+      return ResultType.FAIL, renderer_logs
+
+    return ResultType.PASS, logs
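
The tolerance applied by _CheckLoadAddressRandomization can be restated as:
across the five runs, a single repeated load address is accepted, while two
or more repeats flag the library as badly randomized. A small sketch; the
helper name and the addresses are made up:

    def looks_random(addresses):
        # Mirrors the check above: len(set) < len(list) - 1 means broken.
        return len(set(addresses)) >= len(addresses) - 1

    print(looks_random([0x75000000, 0x75100000, 0x75200000,
                        0x75000000, 0x75300000]))  # True: one duplicate pair
    print(looks_random([0x75000000, 0x75000000, 0x75000000,
                        0x75100000, 0x75200000]))  # False: three identical
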
diff --git a/build/android/pylib/linker/test_runner.py b/build/android/pylib/linker/test_runner.py
new file mode 100644
index 0000000..77d0dec
--- /dev/null
+++ b/build/android/pylib/linker/test_runner.py
@@ -0,0 +1,103 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs linker tests on a particular device."""
+
+import logging
+import os.path
+import sys
+import traceback
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.linker import test_case
+from pylib.utils import apk_helper
+
+
+# Name of the Android package to install for this to work.
+_PACKAGE_NAME = 'ChromiumLinkerTest'
+
+
+class LinkerExceptionTestResult(base_test_result.BaseTestResult):
+  """Test result corresponding to a python exception in a host-custom test."""
+
+  def __init__(self, test_name, exc_info):
+    """Constructs a LinkerExceptionTestResult object.
+
+    Args:
+      test_name: name of the test which raised an exception.
+      exc_info: exception info, ostensibly from sys.exc_info().
+    """
+    exc_type, exc_value, exc_traceback = exc_info
+    trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
+                                                    exc_traceback))
+    log_msg = 'Exception:\n' + trace_info
+
+    super(LinkerExceptionTestResult, self).__init__(
+        test_name,
+        base_test_result.ResultType.FAIL,
+        log = "%s %s" % (exc_type, log_msg))
+
+
+class LinkerTestRunner(base_test_runner.BaseTestRunner):
+  """Orchestrates running a set of linker tests.
+
+  Any Python exceptions in the tests are caught and translated into a failed
+  result, rather than being re-raised on the main thread.
+  """
+
+  #override
+  def __init__(self, device, tool, push_deps, cleanup_test_files):
+    """Creates a new LinkerTestRunner.
+
+    Args:
+      device: Attached android device.
+      tool: Name of the Valgrind tool.
+      push_deps: If True, push all dependencies to the device.
+      cleanup_test_files: Whether or not to cleanup test files on device.
+    """
+
+    super(LinkerTestRunner, self).__init__(device, tool, push_deps,
+                                           cleanup_test_files)
+
+  #override
+  def InstallTestPackage(self):
+    apk_path = os.path.join(
+        constants.GetOutDirectory(), 'apks', '%s.apk' % _PACKAGE_NAME)
+
+    if not os.path.exists(apk_path):
+      raise Exception('%s not found, please build it' % apk_path)
+
+    package_name = apk_helper.GetPackageName(apk_path)
+    self.device.old_interface.ManagedInstall(apk_path, package_name)
+
+  #override
+  def RunTest(self, test):
+    """Sets up and runs a test case.
+
+    Args:
+      test: An object which is ostensibly a subclass of LinkerTestCaseBase.
+
+    Returns:
+      A tuple of (TestRunResults, retry), where retry is the test to be
+      retried on failure, or None if the test passed.
+    """
+
+    assert isinstance(test, test_case.LinkerTestCaseBase)
+
+    try:
+      results = test.Run(self.device)
+    except Exception:
+      logging.exception('Caught exception while trying to run test: ' +
+                        test.tagged_name)
+      exc_info = sys.exc_info()
+      results = base_test_result.TestRunResults()
+      results.AddResult(LinkerExceptionTestResult(
+          test.tagged_name, exc_info))
+
+    if not results.DidRunPass():
+      return results, test
+    else:
+      return results, None
diff --git a/build/android/pylib/monkey/__init__.py b/build/android/pylib/monkey/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/monkey/__init__.py
diff --git a/build/android/pylib/monkey/setup.py b/build/android/pylib/monkey/setup.py
new file mode 100644
index 0000000..fe690a5
--- /dev/null
+++ b/build/android/pylib/monkey/setup.py
@@ -0,0 +1,27 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for monkey tests."""
+
+from pylib.monkey import test_runner
+
+
+def Setup(test_options):
+  """Create and return the test runner factory and tests.
+
+  Args:
+    test_options: A MonkeyOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  # Token to replicate across devices as the "test". The TestRunner does all of
+  # the work to run the test.
+  tests = ['MonkeyTest']
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(
+        test_options, device, shard_index)
+
+  return (TestRunnerFactory, tests)
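+
+
+# Example usage (illustrative; in practice the shared test dispatcher calls
+# Setup() and shards the returned tests across devices):
+#   factory, tests = Setup(monkey_options)
+#   runner = factory(device, 0)
+#   results, _ = runner.RunTest(tests[0])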
diff --git a/build/android/pylib/monkey/test_options.py b/build/android/pylib/monkey/test_options.py
new file mode 100644
index 0000000..54d3d08
--- /dev/null
+++ b/build/android/pylib/monkey/test_options.py
@@ -0,0 +1,16 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the MonkeyOptions named tuple."""
+
+import collections
+
+MonkeyOptions = collections.namedtuple('MonkeyOptions', [
+    'verbose_count',
+    'package',
+    'event_count',
+    'category',
+    'throttle',
+    'seed',
+    'extra_args'])
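+
+
+# Example construction (values are illustrative only):
+#   options = MonkeyOptions(verbose_count=1, package='chrome',
+#                           event_count=1000, category=[], throttle=100,
+#                           seed=None, extra_args='')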
diff --git a/build/android/pylib/monkey/test_runner.py b/build/android/pylib/monkey/test_runner.py
new file mode 100644
index 0000000..19dd339
--- /dev/null
+++ b/build/android/pylib/monkey/test_runner.py
@@ -0,0 +1,95 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs a monkey test on a single device."""
+
+import logging
+import random
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.device import intent
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  """A TestRunner instance runs a monkey test on a single device."""
+
+  def __init__(self, test_options, device, _):
+    super(TestRunner, self).__init__(device, None)
+    self._options = test_options
+    self._package = constants.PACKAGE_INFO[self._options.package].package
+    self._activity = constants.PACKAGE_INFO[self._options.package].activity
+
+  def _LaunchMonkeyTest(self):
+    """Runs monkey test for a given package.
+
+    Returns:
+      Output from the monkey command on the device.
+    """
+
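+    # self._options.throttle is the delay between events in milliseconds, so
+    # event_count * throttle approximates the expected run time; the factor
+    # of 1.5 adds headroom.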
+    timeout_ms = self._options.event_count * self._options.throttle * 1.5
+
+    cmd = ['monkey',
+           '-p %s' % self._package,
+           ' '.join(['-c %s' % c for c in self._options.category]),
+           '--throttle %d' % self._options.throttle,
+           '-s %d' % (self._options.seed or random.randint(1, 100)),
+           '-v ' * self._options.verbose_count,
+           '--monitor-native-crashes',
+           '--kill-process-after-error',
+           self._options.extra_args,
+           '%d' % self._options.event_count]
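+    # For illustration (hypothetical values): with throttle=100, seed=42,
+    # verbose_count=1 and event_count=1000, the joined command resembles:
+    #   monkey -p <package> --throttle 100 -s 42 -v --monitor-native-crashes
+    #       --kill-process-after-error 1000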
+    return self.device.RunShellCommand(' '.join(cmd), timeout=timeout_ms)
+
+  def RunTest(self, test_name):
+    """Run a Monkey test on the device.
+
+    Args:
+      test_name: String to use for logging the test result.
+
+    Returns:
+      A tuple of (TestRunResults, retry).
+    """
+    self.device.StartActivity(
+        intent.Intent(package=self._package, activity=self._activity,
+                      action='android.intent.action.MAIN'),
+        blocking=True, force_stop=True)
+
+    # Chrome crashes are not always caught by the monkey test runner.
+    # Verify that Chrome has the same PID before and after the test.
+    before_pids = self.device.GetPids(self._package)
+
+    # Run the test.
+    output = ''
+    after_pids = {}
+    if before_pids:
+      output = '\n'.join(self._LaunchMonkeyTest())
+      after_pids = self.device.GetPids(self._package)
+
+    crashed = True
+    if self._package not in before_pids:
+      logging.error('Failed to start the process.')
+    elif self._package not in after_pids:
+      logging.error('Process %s has died.', before_pids[self._package])
+    elif before_pids[self._package] != after_pids[self._package]:
+      logging.error('Detected process restart %s -> %s',
+                    before_pids[self._package], after_pids[self._package])
+    else:
+      crashed = False
+
+    results = base_test_result.TestRunResults()
+    success_pattern = 'Events injected: %d' % self._options.event_count
+    if success_pattern in output and not crashed:
+      result = base_test_result.BaseTestResult(
+          test_name, base_test_result.ResultType.PASS, log=output)
+    else:
+      result = base_test_result.BaseTestResult(
+          test_name, base_test_result.ResultType.FAIL, log=output)
+      if 'chrome' in self._options.package:
+        logging.warning('Starting MinidumpUploadService...')
+        try:
+          self.device.old_interface.StartCrashUploadService(self._package)
+        except AssertionError as e:
+          logging.error('Failed to start MinidumpUploadService: %s', e)
+    results.AddResult(result)
+    return results, False
diff --git a/build/android/pylib/perf/__init__.py b/build/android/pylib/perf/__init__.py
new file mode 100644
index 0000000..9228df8
--- /dev/null
+++ b/build/android/pylib/perf/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/perf/cache_control.py b/build/android/pylib/perf/cache_control.py
new file mode 100644
index 0000000..8065cf9
--- /dev/null
+++ b/build/android/pylib/perf/cache_control.py
@@ -0,0 +1,21 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import android_commands
+from pylib.device import device_utils
+
+class CacheControl(object):
+  _DROP_CACHES = '/proc/sys/vm/drop_caches'
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+
+  def DropRamCaches(self):
+    """Drops the filesystem ram caches for performance testing."""
+    self._device.RunShellCommand('sync', as_root=True)
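+    # Writing '3' to drop_caches frees the page cache as well as reclaimable
+    # slab objects such as dentries and inodes (see the kernel's
+    # Documentation/sysctl/vm.txt).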
+    self._device.WriteFile(CacheControl._DROP_CACHES, '3', as_root=True)
diff --git a/build/android/pylib/perf/perf_control.py b/build/android/pylib/perf/perf_control.py
new file mode 100644
index 0000000..d90edf4
--- /dev/null
+++ b/build/android/pylib/perf/perf_control.py
@@ -0,0 +1,115 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import atexit
+import logging
+
+from pylib import android_commands
+from pylib.device import device_utils
+
+class PerfControl(object):
+  """Provides methods for setting the performance mode of a device."""
+  _SCALING_GOVERNOR_FMT = (
+      '/sys/devices/system/cpu/cpu%d/cpufreq/scaling_governor')
+  _CPU_ONLINE_FMT = '/sys/devices/system/cpu/cpu%d/online'
+  _KERNEL_MAX = '/sys/devices/system/cpu/kernel_max'
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+    cpu_files = self._device.RunShellCommand(
+        'ls -d /sys/devices/system/cpu/cpu[0-9]*')
+    self._num_cpu_cores = len(cpu_files)
+    assert self._num_cpu_cores > 0, 'Failed to detect CPUs.'
+    logging.info('Number of CPUs: %d', self._num_cpu_cores)
+    self._have_mpdecision = self._device.FileExists('/system/bin/mpdecision')
+
+  def SetHighPerfMode(self):
+    """Sets the highest possible performance mode for the device."""
+    if not self._device.old_interface.IsRootEnabled():
+      message = 'Need root for performance mode. Results may be NOISY!!'
+      logging.warning(message)
+      # Add an additional warning at exit, such that it's clear that any results
+      # may be different/noisy (due to the lack of intended performance mode).
+      atexit.register(logging.warning, message)
+      return
+    # TODO(epenner): Enable on all devices (http://crbug.com/383566)
+    if 'Nexus 4' == self._device.old_interface.GetProductModel():
+      self._ForceAllCpusOnline(True)
+      if not self._AllCpusAreOnline():
+        logging.warning('Failed to force CPUs online. Results may be NOISY!')
+    self._SetScalingGovernorInternal('performance')
+
+  def SetPerfProfilingMode(self):
+    """Enables all cores for reliable perf profiling."""
+    self._ForceAllCpusOnline(True)
+    self._SetScalingGovernorInternal('performance')
+    if not self._AllCpusAreOnline():
+      if not self._device.old_interface.IsRootEnabled():
+        raise RuntimeError('Need root to force CPUs online.')
+      raise RuntimeError('Failed to force CPUs online.')
+
+  def SetDefaultPerfMode(self):
+    """Sets the performance mode for the device to its default mode."""
+    if not self._device.old_interface.IsRootEnabled():
+      return
+    product_model = self._device.GetProp('ro.product.model')
+    governor_mode = {
+        'GT-I9300': 'pegasusq',
+        'Galaxy Nexus': 'interactive',
+        'Nexus 4': 'ondemand',
+        'Nexus 7': 'interactive',
+        'Nexus 10': 'interactive'
+    }.get(product_model, 'ondemand')
+    self._SetScalingGovernorInternal(governor_mode)
+    self._ForceAllCpusOnline(False)
+
+  def _SetScalingGovernorInternal(self, value):
+    cpu_cores = ' '.join([str(x) for x in range(self._num_cpu_cores)])
+    script = ('for CPU in %s; do\n'
+        '  FILE="/sys/devices/system/cpu/cpu$CPU/cpufreq/scaling_governor"\n'
+        '  test -e $FILE && echo %s > $FILE\n'
+        'done\n') % (cpu_cores, value)
+    logging.info('Setting scaling governor mode: %s', value)
+    self._device.RunShellCommand(script, as_root=True)
+
+  def _AllCpusAreOnline(self):
+    for cpu in range(1, self._num_cpu_cores):
+      online_path = PerfControl._CPU_ONLINE_FMT % cpu
+      # TODO(epenner): Investigate why file may be missing
+      # (http://crbug.com/397118)
+      if (not self._device.FileExists(online_path) or
+          self._device.ReadFile(online_path)[0] == '0'):
+        return False
+    return True
+
+  def _ForceAllCpusOnline(self, force_online):
+    """Enable all CPUs on a device.
+
+    Some vendors (or only Qualcomm?) hot-plug their CPUs, which can add noise
+    to measurements:
+    - In perf, samples are only taken for the CPUs that are online when the
+      measurement is started.
+    - The scaling governor can't be set for an offline CPU and frequency scaling
+      on newly enabled CPUs adds noise to both perf and tracing measurements.
+
+    It appears Qualcomm is the only vendor that hot-plugs CPUs, and on Qualcomm
+    this is done by "mpdecision".
+    """
+    if self._have_mpdecision:
+      script = 'stop mpdecision' if force_online else 'start mpdecision'
+      self._device.RunShellCommand(script, as_root=True)
+
+    if not self._have_mpdecision and not self._AllCpusAreOnline():
+      logging.warning('Unexpected cpu hot plugging detected.')
+
+    if not force_online:
+      return
+
+    for cpu in range(self._num_cpu_cores):
+      online_path = PerfControl._CPU_ONLINE_FMT % cpu
+      self._device.WriteFile(online_path, '1', as_root=True)
diff --git a/build/android/pylib/perf/perf_control_unittest.py b/build/android/pylib/perf/perf_control_unittest.py
new file mode 100644
index 0000000..aa31f68
--- /dev/null
+++ b/build/android/pylib/perf/perf_control_unittest.py
@@ -0,0 +1,41 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# pylint: disable=W0212
+
+import os
+import sys
+import unittest
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+
+from pylib import android_commands
+from pylib.device import device_utils
+from pylib.perf import perf_control
+
+class TestPerfControl(unittest.TestCase):
+  def setUp(self):
+    if not os.getenv('BUILDTYPE'):
+      os.environ['BUILDTYPE'] = 'Debug'
+
+    devices = android_commands.GetAttachedDevices()
+    self.assertGreater(len(devices), 0, 'No device attached!')
+    self._device = device_utils.DeviceUtils(
+        android_commands.AndroidCommands(device=devices[0]))
+
+  def testHighPerfMode(self):
+    perf = perf_control.PerfControl(self._device)
+    try:
+      perf.SetPerfProfilingMode()
+      for cpu in range(perf._num_cpu_cores):
+        path = perf_control.PerfControl._CPU_ONLINE_FMT % cpu
+        self.assertEquals('1',
+                          self._device.ReadFile(path)[0])
+        path = perf_control.PerfControl._SCALING_GOVERNOR_FMT % cpu
+        self.assertEquals('performance',
+                          self._device.ReadFile(path)[0])
+    finally:
+      perf.SetDefaultPerfMode()
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/perf/setup.py b/build/android/pylib/perf/setup.py
new file mode 100644
index 0000000..99c3e19
--- /dev/null
+++ b/build/android/pylib/perf/setup.py
@@ -0,0 +1,97 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for performance tests."""
+
+import json
+import fnmatch
+import logging
+import os
+import shutil
+
+from pylib import android_commands
+from pylib import constants
+from pylib import forwarder
+from pylib.device import device_list
+from pylib.perf import test_runner
+from pylib.utils import test_environment
+
+
+def _GetAllDevices():
+  devices_path = os.path.join(os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+                              device_list.LAST_DEVICES_FILENAME)
+  try:
+    devices = device_list.GetPersistentDeviceList(devices_path)
+  except IOError as e:
+    logging.error('Unable to find %s [%s]', devices_path, e)
+    devices = android_commands.GetAttachedDevices()
+  return sorted(devices)
+
+
+def _GetStepsDictFromSingleStep(test_options):
+  # Running a single command, build the tests structure.
+  steps_dict = {
+    'version': 1,
+    'steps': {
+        'single_step': {
+          'device_affinity': 0,
+          'cmd': test_options.single_step
+        },
+    }
+  }
+  return steps_dict
+
+
+def _GetStepsDict(test_options):
+  if test_options.single_step:
+    return _GetStepsDictFromSingleStep(test_options)
+  if test_options.steps:
+    with file(test_options.steps, 'r') as f:
+      steps = json.load(f)
+
+      # Already using the new format.
+      assert steps['version'] == 1
+      return steps
+
+
+def Setup(test_options):
+  """Create and return the test runner factory and tests.
+
+  Args:
+    test_options: A PerformanceOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests, devices).
+  """
+  # TODO(bulach): remove this once the bot side lands. BUG=318369
+  constants.SetBuildType('Release')
+  if os.path.exists(constants.PERF_OUTPUT_DIR):
+    shutil.rmtree(constants.PERF_OUTPUT_DIR)
+  os.makedirs(constants.PERF_OUTPUT_DIR)
+
+  # Before running the tests, kill any leftover server.
+  test_environment.CleanupLeftoverProcesses()
+  forwarder.Forwarder.UseMultiprocessing()
+
+  # We want to keep device affinity, so return all devices ever seen.
+  all_devices = _GetAllDevices()
+
+  steps_dict = _GetStepsDict(test_options)
+  sorted_step_names = sorted(steps_dict['steps'].keys())
+
+  if test_options.test_filter:
+    sorted_step_names = fnmatch.filter(sorted_step_names,
+                                       test_options.test_filter)
+
+  flaky_steps = []
+  if test_options.flaky_steps:
+    with file(test_options.flaky_steps, 'r') as f:
+      flaky_steps = json.load(f)
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(
+        test_options, device, shard_index, len(all_devices),
+        steps_dict, flaky_steps)
+
+  return (TestRunnerFactory, sorted_step_names, all_devices)
diff --git a/build/android/pylib/perf/surface_stats_collector.py b/build/android/pylib/perf/surface_stats_collector.py
new file mode 100644
index 0000000..a34d87d
--- /dev/null
+++ b/build/android/pylib/perf/surface_stats_collector.py
@@ -0,0 +1,312 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import Queue
+import datetime
+import logging
+import re
+import threading
+from pylib import android_commands
+from pylib.device import device_utils
+
+
+# Log marker containing SurfaceTexture timestamps.
+_SURFACE_TEXTURE_TIMESTAMPS_MESSAGE = 'SurfaceTexture update timestamps'
+_SURFACE_TEXTURE_TIMESTAMP_RE = r'\d+'
+
+_MIN_NORMALIZED_FRAME_LENGTH = 0.5
+
+
+class SurfaceStatsCollector(object):
+  """Collects surface stats for a SurfaceView from the output of SurfaceFlinger.
+
+  Args:
+    device: A DeviceUtils instance.
+  """
+  class Result(object):
+    def __init__(self, name, value, unit):
+      self.name = name
+      self.value = value
+      self.unit = unit
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+    self._collector_thread = None
+    self._use_legacy_method = False
+    self._surface_before = None
+    self._get_data_event = None
+    self._data_queue = None
+    self._stop_event = None
+    self._results = []
+    self._warn_about_empty_data = True
+
+  def DisableWarningAboutEmptyData(self):
+    self._warn_about_empty_data = False
+
+  def Start(self):
+    assert not self._collector_thread
+
+    if self._ClearSurfaceFlingerLatencyData():
+      self._get_data_event = threading.Event()
+      self._stop_event = threading.Event()
+      self._data_queue = Queue.Queue()
+      self._collector_thread = threading.Thread(target=self._CollectorThread)
+      self._collector_thread.start()
+    else:
+      self._use_legacy_method = True
+      self._surface_before = self._GetSurfaceStatsLegacy()
+
+  def Stop(self):
+    self._StorePerfResults()
+    if self._collector_thread:
+      self._stop_event.set()
+      self._collector_thread.join()
+      self._collector_thread = None
+
+  def SampleResults(self):
+    self._StorePerfResults()
+    results = self.GetResults()
+    self._results = []
+    return results
+
+  def GetResults(self):
+    return self._results or self._GetEmptyResults()
+
+  @staticmethod
+  def _GetEmptyResults():
+    return [
+        SurfaceStatsCollector.Result('refresh_period', None, 'seconds'),
+        SurfaceStatsCollector.Result('jank_count', None, 'janks'),
+        SurfaceStatsCollector.Result('max_frame_delay', None, 'vsyncs'),
+        SurfaceStatsCollector.Result('frame_lengths', None, 'vsyncs'),
+        SurfaceStatsCollector.Result('avg_surface_fps', None, 'fps')
+    ]
+
+  @staticmethod
+  def _GetNormalizedDeltas(data, refresh_period, min_normalized_delta=None):
+    deltas = [t2 - t1 for t1, t2 in zip(data, data[1:])]
+    if min_normalized_delta is not None:
+      deltas = filter(lambda d: d / refresh_period >= min_normalized_delta,
+                      deltas)
+    return (deltas, [delta / refresh_period for delta in deltas])
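+
+  # For example (illustrative numbers): with data=[0.0, 0.1, 0.25] and
+  # refresh_period=0.1, the deltas are [0.1, 0.15] and the normalized deltas
+  # are [1.0, 1.5]; min_normalized_delta=0.5 would keep both entries.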
+
+  @staticmethod
+  def _CalculateResults(refresh_period, timestamps, result_suffix):
+    """Returns a list of SurfaceStatsCollector.Result."""
+    frame_count = len(timestamps)
+    seconds = timestamps[-1] - timestamps[0]
+
+    frame_lengths, normalized_frame_lengths = (
+        SurfaceStatsCollector._GetNormalizedDeltas(
+            timestamps, refresh_period, _MIN_NORMALIZED_FRAME_LENGTH))
+    if len(frame_lengths) < frame_count - 1:
+      logging.warning('Skipping frame lengths that are too short.')
+      frame_count = len(frame_lengths) + 1
+    if not frame_lengths:
+      raise Exception('No valid frame lengths found.')
+    _, normalized_changes = (
+        SurfaceStatsCollector._GetNormalizedDeltas(
+            frame_lengths, refresh_period))
+    jankiness = [max(0, round(change)) for change in normalized_changes]
+    pause_threshold = 20
+    jank_count = sum(1 for change in jankiness
+                     if change > 0 and change < pause_threshold)
+    return [
+        SurfaceStatsCollector.Result(
+            'avg_surface_fps' + result_suffix,
+            int(round((frame_count - 1) / seconds)), 'fps'),
+        SurfaceStatsCollector.Result(
+            'jank_count' + result_suffix, jank_count, 'janks'),
+        SurfaceStatsCollector.Result(
+            'max_frame_delay' + result_suffix,
+            round(max(normalized_frame_lengths)),
+            'vsyncs'),
+        SurfaceStatsCollector.Result(
+            'frame_lengths' + result_suffix, normalized_frame_lengths,
+            'vsyncs'),
+    ]
+
+  @staticmethod
+  def _CalculateBuckets(refresh_period, timestamps):
+    results = []
+    for pct in [0.99, 0.5]:
+      sliced = timestamps[min(int(-pct * len(timestamps)), -3) : ]
+      results += SurfaceStatsCollector._CalculateResults(
+          refresh_period, sliced, '_' + str(int(pct * 100)))
+    return results
+
+  def _StorePerfResults(self):
+    if self._use_legacy_method:
+      surface_after = self._GetSurfaceStatsLegacy()
+      td = surface_after['timestamp'] - self._surface_before['timestamp']
+      seconds = td.seconds + td.microseconds / 1e6
+      frame_count = (surface_after['page_flip_count'] -
+                     self._surface_before['page_flip_count'])
+      self._results.append(SurfaceStatsCollector.Result(
+          'avg_surface_fps', int(round(frame_count / seconds)), 'fps'))
+      return
+
+    # Non-legacy method.
+    assert self._collector_thread
+    (refresh_period, timestamps) = self._GetDataFromThread()
+    if not refresh_period or len(timestamps) < 3:
+      if self._warn_about_empty_data:
+        logging.warning('Surface stat data is empty')
+      return
+    self._results.append(SurfaceStatsCollector.Result(
+        'refresh_period', refresh_period, 'seconds'))
+    self._results += self._CalculateResults(refresh_period, timestamps, '')
+    self._results += self._CalculateBuckets(refresh_period, timestamps)
+
+  def _CollectorThread(self):
+    last_timestamp = 0
+    timestamps = []
+    retries = 0
+
+    while not self._stop_event.is_set():
+      self._get_data_event.wait(1)
+      try:
+        refresh_period, new_timestamps = self._GetSurfaceFlingerFrameData()
+        if refresh_period is None or new_timestamps is None:
+          retries += 1
+          if retries < 3:
+            continue
+          if last_timestamp:
+            # Some data has already been collected, but either the app
+            # was closed or there's no new data. Signal the main thread and
+            # wait.
+            self._data_queue.put((None, None))
+            self._stop_event.wait()
+            break
+          raise Exception('Unable to get surface flinger latency data')
+
+        timestamps += [timestamp for timestamp in new_timestamps
+                       if timestamp > last_timestamp]
+        if timestamps:
+          last_timestamp = timestamps[-1]
+
+        if self._get_data_event.is_set():
+          self._get_data_event.clear()
+          self._data_queue.put((refresh_period, timestamps))
+          timestamps = []
+      except Exception as e:
+        # On any error, before aborting, put the exception into _data_queue to
+        # prevent the main thread from waiting at _data_queue.get() indefinitely.
+        self._data_queue.put(e)
+        raise
+
+  def _GetDataFromThread(self):
+    self._get_data_event.set()
+    ret = self._data_queue.get()
+    if isinstance(ret, Exception):
+      raise ret
+    return ret
+
+  def _ClearSurfaceFlingerLatencyData(self):
+    """Clears the SurfaceFlinger latency data.
+
+    Returns:
+      True if SurfaceFlinger latency is supported by the device, otherwise
+      False.
+    """
+    # The command returns nothing if it is supported, otherwise returns many
+    # lines of result just like 'dumpsys SurfaceFlinger'.
+    results = self._device.RunShellCommand(
+        'dumpsys SurfaceFlinger --latency-clear SurfaceView')
+    return not results
+
+  def _GetSurfaceFlingerFrameData(self):
+    """Returns collected SurfaceFlinger frame timing data.
+
+    Returns:
+      A tuple containing:
+      - The display's nominal refresh period in seconds.
+      - A list of timestamps signifying frame presentation times in seconds.
+      The return value may be (None, None) if there was no data collected (for
+      example, if the app was closed before the collector thread has finished).
+    """
+    # adb shell dumpsys SurfaceFlinger --latency <window name>
+    # prints some information about the last 128 frames displayed in
+    # that window.
+    # The data returned looks like this:
+    # 16954612
+    # 7657467895508   7657482691352   7657493499756
+    # 7657484466553   7657499645964   7657511077881
+    # 7657500793457   7657516600576   7657527404785
+    # (...)
+    #
+    # The first line is the refresh period (here 16.95 ms); it is followed
+    # by 128 lines with 3 timestamps in nanoseconds each:
+    # A) when the app started to draw
+    # B) the vsync immediately preceding SF submitting the frame to the h/w
+    # C) timestamp immediately after SF submitted that frame to the h/w
+    #
+    # The difference between the 1st and 3rd timestamp is the frame latency.
+    # A point of interest is when the frame latency crosses a refresh period
+    # boundary, which can be calculated this way:
+    #
+    # ceil((C - A) / refresh-period)
+    #
+    # (each time the number above changes, we have a "jank").
+    # If this happens a lot during an animation, the animation appears
+    # janky, even if it runs at 60 fps on average.
+    #
+    # We use the special "SurfaceView" window name because the statistics for
+    # the activity's main window are not updated when the main web content is
+    # composited into a SurfaceView.
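+    #
+    # Worked example (illustrative numbers): with a refresh period of
+    # 16.95 ms, a frame whose C - A latency is 25 ms spans
+    # ceil(25 / 16.95) = 2 refresh periods, while a neighbouring frame at
+    # 16 ms spans 1; the transition from 1 to 2 counts as one jank.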
+    results = self._device.RunShellCommand(
+        'dumpsys SurfaceFlinger --latency SurfaceView')
+    if not results:
+      return (None, None)
+
+    timestamps = []
+    nanoseconds_per_second = 1e9
+    refresh_period = long(results[0]) / nanoseconds_per_second
+
+    # If a fence associated with a frame is still pending when we query the
+    # latency data, SurfaceFlinger gives the frame a timestamp of INT64_MAX.
+    # Since we only care about completed frames, we will ignore any timestamps
+    # with this value.
+    pending_fence_timestamp = (1 << 63) - 1
+
+    for line in results[1:]:
+      fields = line.split()
+      if len(fields) != 3:
+        continue
+      timestamp = long(fields[1])
+      if timestamp == pending_fence_timestamp:
+        continue
+      timestamp /= nanoseconds_per_second
+      timestamps.append(timestamp)
+
+    return (refresh_period, timestamps)
+
+  def _GetSurfaceStatsLegacy(self):
+    """Legacy method (before JellyBean), returns the current Surface index
+       and timestamp.
+
+    Calculate FPS by measuring the difference of Surface index returned by
+    SurfaceFlinger in a period of time.
+
+    Returns:
+      Dict of {page_flip_count (or 0 if there was an error), timestamp}.
+    """
+    results = self._device.RunShellCommand('service call SurfaceFlinger 1013')
+    assert len(results) == 1
+    match = re.search(r'^Result: Parcel\((\w+)', results[0])
+    cur_surface = 0
+    if match:
+      try:
+        cur_surface = int(match.group(1), 16)
+      except Exception:
+        logging.error('Failed to parse current surface from ' + match.group(1))
+    else:
+      logging.warning('Failed to call SurfaceFlinger surface ' + results[0])
+    return {
+        'page_flip_count': cur_surface,
+        'timestamp': datetime.datetime.now(),
+    }
diff --git a/build/android/pylib/perf/surface_stats_collector_unittest.py b/build/android/pylib/perf/surface_stats_collector_unittest.py
new file mode 100644
index 0000000..e905d73
--- /dev/null
+++ b/build/android/pylib/perf/surface_stats_collector_unittest.py
@@ -0,0 +1,64 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for SurfaceStatsCollector."""
+# pylint: disable=W0212
+
+import unittest
+
+from pylib.perf.surface_stats_collector import SurfaceStatsCollector
+
+
+class TestSurfaceStatsCollector(unittest.TestCase):
+  @staticmethod
+  def _CreateUniformTimestamps(base, num, delta):
+    return [base + i * delta for i in range(1, num + 1)]
+
+  @staticmethod
+  def _CreateDictionaryFromResults(results):
+    dictionary = {}
+    for result in results:
+      dictionary[result.name] = result
+    return dictionary
+
+  def setUp(self):
+    self.refresh_period = 0.1
+
+  def testOneFrameDelta(self):
+    timestamps = self._CreateUniformTimestamps(0, 10, self.refresh_period)
+    results = self._CreateDictionaryFromResults(
+                  SurfaceStatsCollector._CalculateResults(
+                      self.refresh_period, timestamps, ''))
+
+    self.assertEquals(results['avg_surface_fps'].value,
+                      int(round(1 / self.refresh_period)))
+    self.assertEquals(results['jank_count'].value, 0)
+    self.assertEquals(results['max_frame_delay'].value, 1)
+    self.assertEquals(len(results['frame_lengths'].value), len(timestamps) - 1)
+
+  def testAllFramesTooShort(self):
+    timestamps = self._CreateUniformTimestamps(0, 10, self.refresh_period / 100)
+    self.assertRaises(Exception,
+                      SurfaceStatsCollector._CalculateResults,
+                      self.refresh_period, timestamps, '')
+
+  def testSomeFramesTooShort(self):
+    timestamps = self._CreateUniformTimestamps(0, 5, self.refresh_period)
+    # The following timestamps should be skipped.
+    timestamps += self._CreateUniformTimestamps(timestamps[4],
+                                                5,
+                                                self.refresh_period / 100)
+    timestamps += self._CreateUniformTimestamps(timestamps[4],
+                                                5,
+                                                self.refresh_period)
+
+    results = self._CreateDictionaryFromResults(
+                  SurfaceStatsCollector._CalculateResults(
+                      self.refresh_period, timestamps, ''))
+
+    self.assertEquals(len(results['frame_lengths'].value), 9)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/perf/test_options.py b/build/android/pylib/perf/test_options.py
new file mode 100644
index 0000000..b04d748
--- /dev/null
+++ b/build/android/pylib/perf/test_options.py
@@ -0,0 +1,18 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the PerfOptions named tuple."""
+
+import collections
+
+PerfOptions = collections.namedtuple('PerfOptions', [
+    'steps',
+    'flaky_steps',
+    'output_json_list',
+    'print_step',
+    'no_timeout',
+    'test_filter',
+    'dry_run',
+    'single_step',
+])
diff --git a/build/android/pylib/perf/test_runner.py b/build/android/pylib/perf/test_runner.py
new file mode 100644
index 0000000..d74499f
--- /dev/null
+++ b/build/android/pylib/perf/test_runner.py
@@ -0,0 +1,295 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs perf tests.
+
+Our buildbot infrastructure requires each slave to run steps serially.
+This is sub-optimal for android, where these steps can run independently on
+multiple connected devices.
+
+The buildbots will run this script multiple times per cycle:
+- First: all steps listed in --steps will be executed in parallel using all
+connected devices. Step results will be pickled to disk. Each step has a unique
+name. The result code will be ignored if the step name is listed in
+--flaky-steps.
+The buildbot will treat this step as a regular step, and will not process any
+graph data.
+
+- Then, with --print-step STEP_NAME: at this stage, we'll simply print the file
+with the step results previously saved. The buildbot will then process the graph
+data accordingly.
+
+The JSON steps file contains a dictionary in the format:
+{ "version": int,
+  "steps": {
+    "foo": {
+      "device_affinity": int,
+      "cmd": "script_to_execute foo"
+    },
+    "bar": {
+      "device_affinity": int,
+      "cmd": "script_to_execute bar"
+    }
+  }
+}
+
+The JSON flaky steps file contains a list of step names whose results should
+be ignored:
+[
+  "step_name_foo",
+  "step_name_bar"
+]
+
+Note that script_to_execute must accept at least the following option:
+  --device: the serial number to be passed to all adb commands.
+"""
+
+import collections
+import datetime
+import json
+import logging
+import os
+import pickle
+import sys
+import threading
+import time
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import forwarder
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+
+
+def OutputJsonList(json_input, json_output):
+  with file(json_input, 'r') as i:
+    all_steps = json.load(i)
+  step_names = all_steps['steps'].keys()
+  with file(json_output, 'w') as o:
+    o.write(json.dumps(step_names))
+  return 0
+
+
+def PrintTestOutput(test_name):
+  """Helper method to print the output of previously executed test_name.
+
+  Args:
+    test_name: name of the test that has been previously executed.
+
+  Returns:
+    exit code generated by the test step.
+  """
+  file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name)
+  if not os.path.exists(file_name):
+    logging.error('File not found %s', file_name)
+    return 1
+
+  with file(file_name, 'r') as f:
+    persisted_result = pickle.loads(f.read())
+  logging.info('*' * 80)
+  logging.info('Output from:')
+  logging.info(persisted_result['cmd'])
+  logging.info('*' * 80)
+  print persisted_result['output']
+
+  return persisted_result['exit_code']
+
+
+def PrintSummary(test_names):
+  logging.info('*' * 80)
+  logging.info('Sharding summary')
+  device_total_time = collections.defaultdict(int)
+  for test_name in test_names:
+    file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name)
+    if not os.path.exists(file_name):
+      logging.info('%s : No status file found', test_name)
+      continue
+    with file(file_name, 'r') as f:
+      result = pickle.loads(f.read())
+    logging.info('%s : exit_code=%d in %d secs at %s',
+                 result['name'], result['exit_code'], result['total_time'],
+                 result['device'])
+    device_total_time[result['device']] += result['total_time']
+  for device, device_time in device_total_time.iteritems():
+    logging.info('Total for device %s : %d secs', device, device_time)
+  logging.info('Total steps time: %d secs', sum(device_total_time.values()))
+
+
+class _HeartBeatLogger(object):
+  """A file-like object for keeping the buildbot alive."""
+
+  # How often to print the heartbeat on flush().
+  _PRINT_INTERVAL = 30.0
+
+  def __init__(self):
+    self._len = 0
+    self._tick = time.time()
+    self._stopped = threading.Event()
+    self._timer = threading.Thread(target=self._runner)
+    self._timer.start()
+
+  def _runner(self):
+    while not self._stopped.is_set():
+      self.flush()
+      self._stopped.wait(_HeartBeatLogger._PRINT_INTERVAL)
+
+  def write(self, data):
+    self._len += len(data)
+
+  def flush(self):
+    now = time.time()
+    if now - self._tick >= _HeartBeatLogger._PRINT_INTERVAL:
+      self._tick = now
+      print '--single-step output length %d' % self._len
+      sys.stdout.flush()
+
+  def stop(self):
+    self._stopped.set()
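+
+  # Typical use (see _LaunchPerfTest below): pass an instance as |logfile| to
+  # cmd_helper.GetCmdStatusAndOutputWithTimeout(), then call stop() once the
+  # command completes.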
+
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  def __init__(self, test_options, device, shard_index, max_shard, tests,
+      flaky_tests):
+    """A TestRunner instance runs a perf test on a single device.
+
+    Args:
+      test_options: A PerfOptions object.
+      device: Device to run the tests on.
+      shard_index: The index of this device among all shards.
+      max_shard: The total number of shards.
+      tests: A dict mapping test_name to command.
+      flaky_tests: A list of flaky test names.
+    """
+    super(TestRunner, self).__init__(device, None, 'Release')
+    self._options = test_options
+    self._shard_index = shard_index
+    self._max_shard = max_shard
+    self._tests = tests
+    self._flaky_tests = flaky_tests
+
+  @staticmethod
+  def _IsBetter(result):
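+    # A persisted result is only overwritten when the new run did at least as
+    # well: a zero actual_exit_code always wins, otherwise the lower (better)
+    # actual_exit_code is kept.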
+    if result['actual_exit_code'] == 0:
+      return True
+    pickled = os.path.join(constants.PERF_OUTPUT_DIR,
+                           result['name'])
+    if not os.path.exists(pickled):
+      return True
+    with file(pickled, 'r') as f:
+      previous = pickle.loads(f.read())
+    return result['actual_exit_code'] < previous['actual_exit_code']
+
+  @staticmethod
+  def _SaveResult(result):
+    if TestRunner._IsBetter(result):
+      with file(os.path.join(constants.PERF_OUTPUT_DIR,
+                             result['name']), 'w') as f:
+        f.write(pickle.dumps(result))
+
+  def _CheckDeviceAffinity(self, test_name):
+    """Returns True if test_name has affinity for this shard."""
+    affinity = (self._tests['steps'][test_name]['device_affinity'] %
+                self._max_shard)
+    if self._shard_index == affinity:
+      return True
+    logging.info('Skipping %s on %s (step affinity is %s, this shard is %s)',
+                 test_name, self.device_serial, affinity, self._shard_index)
+    return False
+
+  def _LaunchPerfTest(self, test_name):
+    """Runs a perf test.
+
+    Args:
+      test_name: the name of the test to be executed.
+
+    Returns:
+      A tuple containing (Output, base_test_result.ResultType)
+    """
+    if not self._CheckDeviceAffinity(test_name):
+      return '', base_test_result.ResultType.PASS
+
+    try:
+      logging.warning('Unmapping device ports')
+      forwarder.Forwarder.UnmapAllDevicePorts(self.device)
+      self.device.old_interface.RestartAdbdOnDevice()
+    except Exception as e:
+      logging.error('Exception when tearing down device %s', e)
+
+    cmd = ('%s --device %s' %
+           (self._tests['steps'][test_name]['cmd'],
+            self.device_serial))
+    logging.info('%s : %s', test_name, cmd)
+    start_time = datetime.datetime.now()
+
+    timeout = 5400
+    if self._options.no_timeout:
+      timeout = None
+    full_cmd = cmd
+    if self._options.dry_run:
+      full_cmd = 'echo %s' % cmd
+
+    logfile = sys.stdout
+    if self._options.single_step:
+      # Just print a heartbeat so that the outer buildbot scripts won't time
+      # out waiting for output.
+      logfile = _HeartBeatLogger()
+    cwd = os.path.abspath(constants.DIR_SOURCE_ROOT)
+    if full_cmd.startswith('src/'):
+      cwd = os.path.abspath(os.path.join(constants.DIR_SOURCE_ROOT, os.pardir))
+    try:
+      exit_code, output = cmd_helper.GetCmdStatusAndOutputWithTimeout(
+          full_cmd, timeout, cwd=cwd, shell=True, logfile=logfile)
+    except cmd_helper.TimeoutError as e:
+      exit_code = -1
+      output = str(e)
+    finally:
+      if self._options.single_step:
+        logfile.stop()
+    end_time = datetime.datetime.now()
+    if exit_code is None:
+      exit_code = -1
+    logging.info('%s : exit_code=%d in %d secs at %s',
+                 test_name, exit_code, (end_time - start_time).seconds,
+                 self.device_serial)
+    result_type = base_test_result.ResultType.FAIL
+    if exit_code == 0:
+      result_type = base_test_result.ResultType.PASS
+    actual_exit_code = exit_code
+    if test_name in self._flaky_tests:
+      # The exit_code is used at the second stage when printing the test
+      # output. If the test is flaky, force it to 0 to keep that step green
+      # while still feeding data to the perf dashboards.
+      # The result_type is used by the test_dispatcher to retry the test.
+      exit_code = 0
+
+    persisted_result = {
+        'name': test_name,
+        'output': output,
+        'exit_code': exit_code,
+        'actual_exit_code': actual_exit_code,
+        'result_type': result_type,
+        'total_time': (end_time - start_time).seconds,
+        'device': self.device_serial,
+        'cmd': cmd,
+    }
+    self._SaveResult(persisted_result)
+
+    return (output, result_type)
+
+  def RunTest(self, test_name):
+    """Run a perf test on the device.
+
+    Args:
+      test_name: String to use for logging the test result.
+
+    Returns:
+      A tuple of (TestRunResults, retry).
+    """
+    _, result_type = self._LaunchPerfTest(test_name)
+    results = base_test_result.TestRunResults()
+    results.AddResult(base_test_result.BaseTestResult(test_name, result_type))
+    retry = None
+    if not results.DidRunPass():
+      retry = test_name
+    return results, retry
diff --git a/build/android/pylib/perf/thermal_throttle.py b/build/android/pylib/perf/thermal_throttle.py
new file mode 100644
index 0000000..24e1ff4
--- /dev/null
+++ b/build/android/pylib/perf/thermal_throttle.py
@@ -0,0 +1,137 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+from pylib import android_commands
+from pylib.device import device_utils
+
+
+class OmapThrottlingDetector(object):
+  """Class to detect and track thermal throttling on an OMAP 4."""
+  OMAP_TEMP_FILE = ('/sys/devices/platform/omap/omap_temp_sensor.0/'
+                    'temperature')
+
+  @staticmethod
+  def IsSupported(device):
+    return device.FileExists(OmapThrottlingDetector.OMAP_TEMP_FILE)
+
+  def __init__(self, device):
+    self._device = device
+
+  @staticmethod
+  def BecameThrottled(log_line):
+    return 'omap_thermal_throttle' in log_line
+
+  @staticmethod
+  def BecameUnthrottled(log_line):
+    return 'omap_thermal_unthrottle' in log_line
+
+  @staticmethod
+  def GetThrottlingTemperature(log_line):
+    if 'throttle_delayed_work_fn' in log_line:
+      return float([s for s in log_line.split() if s.isdigit()][0]) / 1000.0
+
+  def GetCurrentTemperature(self):
+    tempdata = self._device.ReadFile(OmapThrottlingDetector.OMAP_TEMP_FILE)
+    return float(tempdata[0]) / 1000.0
+
+
+class ExynosThrottlingDetector(object):
+  """Class to detect and track thermal throttling on an Exynos 5."""
+  @staticmethod
+  def IsSupported(device):
+    return device.FileExists('/sys/bus/exynos5-core')
+
+  def __init__(self, device):
+    pass
+
+  @staticmethod
+  def BecameThrottled(log_line):
+    return 'exynos_tmu: Throttling interrupt' in log_line
+
+  @staticmethod
+  def BecameUnthrottled(log_line):
+    return 'exynos_thermal_unthrottle: not throttling' in log_line
+
+  @staticmethod
+  def GetThrottlingTemperature(_log_line):
+    return None
+
+  @staticmethod
+  def GetCurrentTemperature():
+    return None
+
+
+class ThermalThrottle(object):
+  """Class to detect and track thermal throttling.
+
+  Usage:
+    Wait for IsThrottled() to be False before running a test.
+    After running the test, call HasBeenThrottled() to find out whether the
+    test run was affected by thermal throttling.
+  """
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+    self._throttled = False
+    self._detector = None
+    if OmapThrottlingDetector.IsSupported(device):
+      self._detector = OmapThrottlingDetector(device)
+    elif ExynosThrottlingDetector.IsSupported(device):
+      self._detector = ExynosThrottlingDetector(device)
+
+  def HasBeenThrottled(self):
+    """True if there has been any throttling since the last call to
+       HasBeenThrottled or IsThrottled.
+    """
+    return self._ReadLog()
+
+  def IsThrottled(self):
+    """True if currently throttled."""
+    self._ReadLog()
+    return self._throttled
+
+  def _ReadLog(self):
+    if not self._detector:
+      return False
+    has_been_throttled = False
+    serial_number = str(self._device)
+    log = self._device.RunShellCommand('dmesg -c')
+    degree_symbol = unichr(0x00B0)
+    for line in log:
+      if self._detector.BecameThrottled(line):
+        if not self._throttled:
+          logging.warning('>>> Device %s thermally throttled', serial_number)
+        self._throttled = True
+        has_been_throttled = True
+      elif self._detector.BecameUnthrottled(line):
+        if self._throttled:
+          logging.warning('>>> Device %s thermally unthrottled', serial_number)
+        self._throttled = False
+        has_been_throttled = True
+      temperature = self._detector.GetThrottlingTemperature(line)
+      if temperature is not None:
+        logging.info(u'Device %s thermally throttled at %3.1f%sC',
+                     serial_number, temperature, degree_symbol)
+
+    if logging.getLogger().isEnabledFor(logging.DEBUG):
+      # Print current temperature of CPU SoC.
+      temperature = self._detector.GetCurrentTemperature()
+      if temperature is not None:
+        logging.debug(u'Current SoC temperature of %s = %3.1f%sC',
+                      serial_number, temperature, degree_symbol)
+
+      # Print temperature of battery, to give a system temperature
+      dumpsys_log = self._device.RunShellCommand('dumpsys battery')
+      for line in dumpsys_log:
+        if 'temperature' in line:
+          btemp = float([s for s in line.split() if s.isdigit()][0]) / 10.0
+          logging.debug(u'Current battery temperature of %s = %3.1f%sC',
+                        serial_number, btemp, degree_symbol)
+
+    return has_been_throttled
diff --git a/build/android/pylib/pexpect.py b/build/android/pylib/pexpect.py
new file mode 100644
index 0000000..cf59fb0
--- /dev/null
+++ b/build/android/pylib/pexpect.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import absolute_import
+
+import os
+import sys
+
+_CHROME_SRC = os.path.join(
+    os.path.abspath(os.path.dirname(__file__)), '..', '..', '..')
+
+_PEXPECT_PATH = os.path.join(_CHROME_SRC, 'third_party', 'pexpect')
+if _PEXPECT_PATH not in sys.path:
+  sys.path.append(_PEXPECT_PATH)
+
+# pexpect is not available on all platforms. We allow this file to be imported
+# on platforms without pexpect and only fail when pexpect is actually used.
+try:
+  from pexpect import * # pylint: disable=W0401,W0614
+except ImportError:
+  pass
diff --git a/build/android/pylib/ports.py b/build/android/pylib/ports.py
new file mode 100644
index 0000000..34efb52
--- /dev/null
+++ b/build/android/pylib/ports.py
@@ -0,0 +1,176 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions that deal with local and device ports."""
+
+import contextlib
+import fcntl
+import httplib
+import logging
+import os
+import re
+import socket
+import traceback
+
+from pylib import cmd_helper
+from pylib import constants
+
+
+# The following two methods are used to allocate the port source for various
+# types of test servers. Because some net-related tests can run on multiple
+# shards at the same time, it's important to have a mechanism that allocates
+# ports in a process-safe way. Here, we implement safe port allocation by
+# leveraging flock.
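+#
+# The locking pattern below, in minimal form (a sketch assuming POSIX flock
+# semantics):
+#
+#   fp_lock = open(lock_file, 'w')
+#   fcntl.flock(fp_lock, fcntl.LOCK_EX)
+#   try:
+#     # ...read, probe and update the port file...
+#   finally:
+#     fcntl.flock(fp_lock, fcntl.LOCK_UN)
+#     fp_lock.close()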
+def ResetTestServerPortAllocation():
+  """Resets the port allocation to start from TEST_SERVER_PORT_FIRST.
+
+  Returns:
+    True if the reset succeeds, otherwise False.
+  """
+  try:
+    with open(constants.TEST_SERVER_PORT_FILE, 'w') as fp:
+      fp.write('%d' % constants.TEST_SERVER_PORT_FIRST)
+    if os.path.exists(constants.TEST_SERVER_PORT_LOCKFILE):
+      os.unlink(constants.TEST_SERVER_PORT_LOCKFILE)
+    return True
+  except Exception as e:
+    logging.error(e)
+  return False
+
+
+def AllocateTestServerPort():
+  """Allocates a port incrementally.
+
+  Returns:
+    A valid port in the range [TEST_SERVER_PORT_FIRST, TEST_SERVER_PORT_LAST].
+    Returns 0 if no valid port could be allocated.
+  """
+  port = 0
+  ports_tried = []
+  # Initialize outside the try block so that the finally clause below can
+  # always reference fp_lock, even if open() raises.
+  fp_lock = None
+  try:
+    fp_lock = open(constants.TEST_SERVER_PORT_LOCKFILE, 'w')
+    fcntl.flock(fp_lock, fcntl.LOCK_EX)
+    # Get current valid port and calculate next valid port.
+    if not os.path.exists(constants.TEST_SERVER_PORT_FILE):
+      ResetTestServerPortAllocation()
+    with open(constants.TEST_SERVER_PORT_FILE, 'r+') as fp:
+      port = int(fp.read())
+      ports_tried.append(port)
+      while IsHostPortUsed(port):
+        port += 1
+        ports_tried.append(port)
+      if (port > constants.TEST_SERVER_PORT_LAST or
+          port < constants.TEST_SERVER_PORT_FIRST):
+        port = 0
+      else:
+        fp.seek(0, os.SEEK_SET)
+        fp.write('%d' % (port + 1))
+  except Exception as e:
+    logging.info(e)
+  finally:
+    if fp_lock:
+      fcntl.flock(fp_lock, fcntl.LOCK_UN)
+      fp_lock.close()
+  if port:
+    logging.info('Allocated port %d for test server.', port)
+  else:
+    logging.error('Could not allocate port for test server. '
+                  'List of ports tried: %s', str(ports_tried))
+  return port
+
+
+def IsHostPortUsed(host_port):
+  """Checks whether the specified host port is used or not.
+
+  Uses -n -P to inhibit the conversion of host/port numbers to host/port names.
+
+  Args:
+    host_port: Port on host we want to check.
+
+  Returns:
+    True if the port on host is already used, otherwise returns False.
+  """
+  port_info = r'((\*)|(127\.0\.0\.1)|(localhost)):%d' % host_port
+  # TODO(jnd): Find a better way to filter the port. Note that connecting to the
+  # socket and closing it would leave it in the TIME_WAIT state. Setting
+  # SO_LINGER on it and then closing it makes the Python HTTP server crash.
+  re_port = re.compile(port_info, re.MULTILINE)
+  if re_port.search(cmd_helper.GetCmdOutput(['lsof', '-nPi:%d' % host_port])):
+    return True
+  return False
+
+
+def IsDevicePortUsed(device, device_port, state=''):
+  """Checks whether the specified device port is used or not.
+
+  Args:
+    device: A DeviceUtils instance.
+    device_port: Port on device we want to check.
+    state: String of the specified state. Default is empty string, which
+           means any state.
+
+  Returns:
+    True if the port on device is already used, otherwise returns False.
+  """
+  base_url = '127.0.0.1:%d' % device_port
+  netstat_results = device.RunShellCommand('netstat')
+  for single_connect in netstat_results:
+    # Column 3 is the local address, which is the one we want to check.
+    connect_results = single_connect.split()
+    if connect_results[0] != 'tcp':
+      continue
+    if len(connect_results) < 6:
+      raise Exception('Unexpected format while parsing netstat line: ' +
+                      single_connect)
+    is_state_match = connect_results[5] == state if state else True
+    if connect_results[3] == base_url and is_state_match:
+      return True
+  return False
+
+
+def IsHttpServerConnectable(host, port, tries=3, command='GET', path='/',
+                            expected_read='', timeout=2):
+  """Checks whether the specified http server is ready to serve request or not.
+
+  Args:
+    host: Host name of the HTTP server.
+    port: Port number of the HTTP server.
+    tries: How many times we want to test the connection. The default value is
+           3.
+    command: The http command we use to connect to HTTP server. The default
+             command is 'GET'.
+    path: The path we use when connecting to HTTP server. The default path is
+          '/'.
+    expected_read: The content we expect to read from the response. The default
+                   value is ''.
+    timeout: Timeout (in seconds) for each http connection. The default is 2s.
+
+  Returns:
+    A tuple of (connect_status, client_error). connect_status is a boolean
+    indicating whether the server is connectable. client_error is the error
+    message the server returned when connect_status is False.
+  """
+  assert tries >= 1
+  for i in xrange(0, tries):
+    client_error = None
+    try:
+      with contextlib.closing(httplib.HTTPConnection(
+          host, port, timeout=timeout)) as http:
+        # Output some debug information when we have tried more than 2 times.
+        http.set_debuglevel(i >= 2)
+        http.request(command, path)
+        r = http.getresponse()
+        content = r.read()
+        if r.status == 200 and r.reason == 'OK' and content == expected_read:
+          return (True, '')
+        client_error = ('Bad response: %s %s version %s\n  ' %
+                        (r.status, r.reason, r.version) +
+                        '\n  '.join([': '.join(h) for h in r.getheaders()]))
+    except (httplib.HTTPException, socket.error) as e:
+      # Probably connected too quickly: try again.
+      exception_error_msgs = traceback.format_exception_only(type(e), e)
+      if exception_error_msgs:
+        client_error = ''.join(exception_error_msgs)
+  # Only return the last client_error.
+  return (False, client_error or 'Timeout')
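+
+
+# Example usage (illustrative host and port):
+#   ok, err = IsHttpServerConnectable('127.0.0.1', 8000)
+#   if not ok:
+#     logging.error('Server not ready: %s', err)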
diff --git a/build/android/pylib/restart_adbd.sh b/build/android/pylib/restart_adbd.sh
new file mode 100755
index 0000000..393b2eb
--- /dev/null
+++ b/build/android/pylib/restart_adbd.sh
@@ -0,0 +1,20 @@
+#!/system/bin/sh
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android shell script to restart adbd on the device. This has to be run
+# atomically as a shell script because stopping adbd prevents further commands
+# from running (even if called in the same adb shell).
+
+trap '' HUP
+trap '' TERM
+trap '' PIPE
+
+function restart() {
+  stop adbd
+  start adbd
+}
+
+restart &
diff --git a/build/android/pylib/screenshot.py b/build/android/pylib/screenshot.py
new file mode 100644
index 0000000..e21d756
--- /dev/null
+++ b/build/android/pylib/screenshot.py
@@ -0,0 +1,91 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import signal
+import tempfile
+
+from pylib import cmd_helper
+
+# TODO(jbudorick) Remove once telemetry gets switched over.
+import pylib.android_commands
+import pylib.device.device_utils
+
+
+class VideoRecorder(object):
+  """Records a screen capture video from an Android Device (KitKat or newer).
+
+  Args:
+    device: DeviceUtils instance.
+    megabits_per_second: Video bitrate in megabits per second. The allowed
+                         range is 0.1 to 100 Mbps.
+    size: Video frame size tuple (width, height) or None to use the device
+          default.
+    rotate: If True, the video will be rotated 90 degrees.
+  """
+  def __init__(self, device, megabits_per_second=4, size=None,
+               rotate=False):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    self._device = device
+    self._device_file = (
+        '%s/screen-recording.mp4' % device.GetExternalStoragePath())
+    self._recorder = None
+    self._recorder_stdout = None
+    self._is_started = False
+
+    self._args = ['adb']
+    if str(self._device):
+      self._args += ['-s', str(self._device)]
+    self._args += ['shell', 'screenrecord', '--verbose']
+    self._args += ['--bit-rate', str(megabits_per_second * 1000 * 1000)]
+    if size:
+      self._args += ['--size', '%dx%d' % size]
+    if rotate:
+      self._args += ['--rotate']
+    self._args += [self._device_file]
+
+  def Start(self):
+    """Start recording video."""
+    self._recorder_stdout = tempfile.mkstemp()[1]
+    self._recorder = cmd_helper.Popen(
+        self._args, stdout=open(self._recorder_stdout, 'w'))
+    if not self._device.GetPids('screenrecord'):
+      raise RuntimeError('Recording failed. Is your device running Android '
+                         'KitKat or later?')
+
+  def IsStarted(self):
+    if not self._is_started:
+      for line in open(self._recorder_stdout):
+        self._is_started = line.startswith('Content area is ')
+        if self._is_started:
+          break
+    return self._is_started
+
+  def Stop(self):
+    """Stop recording video."""
+    if self._recorder_stdout:
+      os.remove(self._recorder_stdout)
+      self._recorder_stdout = None
+    self._is_started = False
+    if not self._recorder:
+      return
+    self._device.KillAll('screenrecord', signum=signal.SIGINT)
+    self._recorder.wait()
+
+  def Pull(self, host_file=None):
+    """Pull resulting video file from the device.
+
+    Args:
+      host_file: Path to the video file to store on the host.
+    Returns:
+      Output video file name on the host.
+    """
+    host_file_name = host_file or ('screen-recording-%s.mp4' %
+                                   self._device.old_interface.GetTimestamp())
+    host_file_name = os.path.abspath(host_file_name)
+    self._device.old_interface.EnsureHostDirectory(host_file_name)
+    self._device.PullFile(self._device_file, host_file_name)
+    self._device.RunShellCommand('rm -f "%s"' % self._device_file)
+    return host_file_name
diff --git a/build/android/pylib/symbols/PRESUBMIT.py b/build/android/pylib/symbols/PRESUBMIT.py
new file mode 100644
index 0000000..b4d94ae
--- /dev/null
+++ b/build/android/pylib/symbols/PRESUBMIT.py
@@ -0,0 +1,21 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+def CommonChecks(input_api, output_api):
+  output = []
+  output.extend(input_api.canned_checks.RunPylint(input_api, output_api))
+  output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
+      input_api,
+      output_api,
+      input_api.PresubmitLocalPath(),
+      whitelist=[r'^.+_unittest\.py$']))
+  return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return CommonChecks(input_api, output_api)
\ No newline at end of file
diff --git a/build/android/pylib/symbols/__init__.py b/build/android/pylib/symbols/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/symbols/__init__.py
diff --git a/build/android/pylib/symbols/elf_symbolizer.py b/build/android/pylib/symbols/elf_symbolizer.py
new file mode 100644
index 0000000..374063a
--- /dev/null
+++ b/build/android/pylib/symbols/elf_symbolizer.py
@@ -0,0 +1,467 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import datetime
+import logging
+import multiprocessing
+import os
+import posixpath
+import Queue
+import re
+import subprocess
+import sys
+import threading
+import time
+
+
+# addr2line builds a possibly infinite memory cache that can exhaust
+# the computer's memory if allowed to grow for too long. This constant
+# controls how many lookups we do before restarting the process. 4000
+# gives near peak performance without extreme memory usage.
+ADDR2LINE_RECYCLE_LIMIT = 4000
+
+
+class ELFSymbolizer(object):
+  """An uber-fast (multiprocessing, pipelined and asynchronous) ELF symbolizer.
+
+  This class is a frontend for addr2line (part of GNU binutils), designed to
+  symbolize batches of large numbers of symbols for a given ELF file. It
+  supports sharding symbolization against many addr2line instances and
+  pipelining of multiple requests per each instance (in order to hide addr2line
+  internals and OS pipe latencies).
+
+  This class exposes a simple asynchronous interface based on the following
+  three methods:
+  - SymbolizeAsync(): used to request (enqueue) resolution of a given address.
+  - The |callback| method: used to communicate back the symbol information.
+  - Join(): called to conclude the batch to gather the last outstanding results.
+  In essence, before the Join method returns, this class will have issued as
+  many callbacks as the number of SymbolizeAsync() calls. In this regard, note
+  that due to multiprocess sharding, callbacks can be delivered out of order.
+
+  Some background about addr2line:
+  - it is invoked passing the elf path in the cmdline, piping the addresses in
+    its stdin and getting results on its stdout.
+  - it has pretty large response times for the first requests, but it
+    works very well in streaming mode once it has been warmed up.
+  - it doesn't scale by itself (on more cores). However, spawning multiple
+    instances at the same time on the same file is pretty efficient as they
+    keep hitting the pagecache and become mostly CPU bound.
+  - it might hang or crash, mostly for OOM. This class deals with both of these
+    problems.
+
+  Despite the "scary" imports and the multi* words above, (almost) no multi-
+  threading/processing is involved from the python viewpoint. Concurrency
+  here is achieved by spawning several addr2line subprocesses and handling their
+  output pipes asynchronously. Therefore, all the code here (with the exception
+  of the Queue instance in Addr2Line) should be free from mind-blowing
+  thread-safety concerns.
+
+  The multiprocess sharding works as follows:
+  The symbolizer tries to use as few addr2line instances as possible (bounded
+  by |max_concurrent_jobs|) and enqueues all the requests in a single
+  addr2line instance. For a small number of symbols (i.e. dozens), sharding
+  isn't worth the startup cost.
+  The multiprocess logic kicks in as soon as the queues for the existing
+  instances grow. Specifically, once all the existing instances reach the
+  |max_queue_size| bound, a new addr2line instance is spawned.
+  In the case of a very eager producer (i.e. all |max_concurrent_jobs| instances
+  have a backlog of |max_queue_size|), back-pressure is applied on the caller by
+  blocking the SymbolizeAsync method.
+
+  This module has been deliberately designed to be dependency free (w.r.t.
+  other modules in this project) to allow easy reuse in external projects.
+  """
+
+  def __init__(self, elf_file_path, addr2line_path, callback, inlines=False,
+      max_concurrent_jobs=None, addr2line_timeout=30, max_queue_size=50,
+      source_root_path=None, strip_base_path=None):
+    """Args:
+      elf_file_path: path of the elf file to be symbolized.
+      addr2line_path: path of the toolchain's addr2line binary.
+      callback: a callback which will be invoked for each resolved symbol with
+          the two args (sym_info, callback_arg). The former is an instance of
+          |ELFSymbolInfo| and contains the symbol information. The latter is an
+          embedder-provided argument which is passed to SymbolizeAsync().
+      inlines: when True, the ELFSymbolInfo will also contain the details of
+          the outer inlining functions. When False, only the innermost function
+          will be provided.
+      max_concurrent_jobs: Max number of addr2line instances spawned.
+          Parallelize responsibly, addr2line is a memory and I/O monster.
+      max_queue_size: Max number of outstanding requests per addr2line instance.
+      addr2line_timeout: Max time (in seconds) to wait for an addr2line
+          response.
+          After the timeout, the instance will be considered hung and respawned.
+      source_root_path: In some toolchains only the name of the source file is
+          output, without any path information; disambiguation searches through
+          the source directory specified by the |source_root_path| argument
+          for files whose name matches, adding the full path information to the
+          output. For example, if the toolchain outputs "unicode.cc" and there
+          is a file called "unicode.cc" located under |source_root_path|/foo,
+          the tool will replace "unicode.cc" with
+          "|source_root_path|/foo/unicode.cc". If there are multiple files with
+          the same name, disambiguation will fail because the tool cannot
+          determine which of the files was the source of the symbol.
+      strip_base_path: Rebases the symbols' source paths onto
+          |source_root_path| (i.e. replaces |strip_base_path| with
+          |source_root_path|).
+    """
+    assert(os.path.isfile(addr2line_path)), 'Cannot find ' + addr2line_path
+    self.elf_file_path = elf_file_path
+    self.addr2line_path = addr2line_path
+    self.callback = callback
+    self.inlines = inlines
+    self.max_concurrent_jobs = (max_concurrent_jobs or
+                                min(multiprocessing.cpu_count(), 4))
+    self.max_queue_size = max_queue_size
+    self.addr2line_timeout = addr2line_timeout
+    self.requests_counter = 0  # For generating monotonic request IDs.
+    self._a2l_instances = []  # Up to |max_concurrent_jobs| _Addr2Line inst.
+
+    # If necessary, create disambiguation lookup table
+    self.disambiguate = source_root_path is not None
+    self.disambiguation_table = {}
+    self.strip_base_path = strip_base_path
+    if self.disambiguate:
+      self.source_root_path = os.path.abspath(source_root_path)
+      self._CreateDisambiguationTable()
+
+    # Create one addr2line instance. More instances will be created on demand
+    # (up to |max_concurrent_jobs|) depending on the rate of the requests.
+    self._CreateNewA2LInstance()
+
+  def SymbolizeAsync(self, addr, callback_arg=None):
+    """Requests symbolization of a given address.
+
+    This method is not guaranteed to return immediately. It generally does, but
+    in some scenarios (e.g. all addr2line instances have full queues) it can
+    block to create back-pressure.
+
+    Args:
+      addr: address to symbolize.
+      callback_arg: optional argument which will be passed to the |callback|."""
+    assert(isinstance(addr, int))
+
+    # Process all the symbols that have been resolved in the meanwhile.
+    # Essentially, this drains all the addr2line(s) out queues.
+    for a2l_to_purge in self._a2l_instances:
+      a2l_to_purge.ProcessAllResolvedSymbolsInQueue()
+      a2l_to_purge.RecycleIfNecessary()
+
+    # Find the best instance according to this logic:
+    # 1. Find an existing instance with the shortest queue.
+    # 2. If all instances' queues are full but there is room in the pool
+    #    (i.e. < |max_concurrent_jobs|), create a new instance.
+    # 3. If there are already |max_concurrent_jobs| instances and all of them
+    #    have full queues, apply back-pressure.
+
+    # 1.
+    def _SortByQueueSizeAndReqID(a2l):
+      return (a2l.queue_size, a2l.first_request_id)
+    a2l = min(self._a2l_instances, key=_SortByQueueSizeAndReqID)
+
+    # 2.
+    if (a2l.queue_size >= self.max_queue_size and
+        len(self._a2l_instances) < self.max_concurrent_jobs):
+      a2l = self._CreateNewA2LInstance()
+
+    # 3.
+    if a2l.queue_size >= self.max_queue_size:
+      a2l.WaitForNextSymbolInQueue()
+
+    a2l.EnqueueRequest(addr, callback_arg)
+
+  def Join(self):
+    """Waits for all the outstanding requests to complete and terminates."""
+    for a2l in self._a2l_instances:
+      a2l.WaitForIdle()
+      a2l.Terminate()
+
+  def _CreateNewA2LInstance(self):
+    assert(len(self._a2l_instances) < self.max_concurrent_jobs)
+    a2l = ELFSymbolizer.Addr2Line(self)
+    self._a2l_instances.append(a2l)
+    return a2l
+
+  def _CreateDisambiguationTable(self):
+    """ Non-unique file names will result in None entries"""
+    start_time = time.time()
+    logging.info('Collecting information about available source files...')
+    self.disambiguation_table = {}
+
+    for root, _, filenames in os.walk(self.source_root_path):
+      for f in filenames:
+        self.disambiguation_table[f] = (
+            os.path.join(root, f) if f not in self.disambiguation_table
+            else None)
+    logging.info('Finished collecting information about '
+                 'possible files (took %.1f s).',
+                 (time.time() - start_time))
+
+
+  class Addr2Line(object):
+    """A python wrapper around an addr2line instance.
+
+    The communication with the addr2line process looks as follows:
+      [STDIN]         [STDOUT]  (from addr2line's viewpoint)
+    > f001111
+    > f002222
+                    < Symbol::Name(foo, bar) for f001111
+                    < /path/to/source/file.c:line_number
+    > f003333
+                    < Symbol::Name2() for f002222
+                    < /path/to/source/file.c:line_number
+                    < Symbol::Name3() for f003333
+                    < /path/to/source/file.c:line_number
+    """
+
+    SYM_ADDR_RE = re.compile(r'([^:]+):(\?|\d+).*')
+
+    def __init__(self, symbolizer):
+      self._symbolizer = symbolizer
+      self._lib_file_name = posixpath.basename(symbolizer.elf_file_path)
+
+      # The request queue (i.e. addresses pushed to addr2line's stdin and not
+      # yet retrieved on stdout)
+      self._request_queue = collections.deque()
+
+      # This is essentially len(self._request_queue). It has been optimized
+      # into a separate field because it turned out to be a perf hot-spot.
+      self.queue_size = 0
+
+      # Keep track of the number of symbols a process has processed to
+      # avoid a single process growing too big and using all the memory.
+      self._processed_symbols_count = 0
+
+      # Objects required to handle the addr2line subprocess.
+      self._proc = None  # subprocess.Popen(...) instance.
+      self._thread = None  # threading.Thread instance.
+      self._out_queue = None  # Queue.Queue instance (for buffering a2l stdout).
+      self._RestartAddr2LineProcess()
+
+    def EnqueueRequest(self, addr, callback_arg):
+      """Pushes an address to addr2line's stdin (and keeps track of it)."""
+      self._symbolizer.requests_counter += 1  # For global "age" of requests.
+      req_idx = self._symbolizer.requests_counter
+      self._request_queue.append((addr, callback_arg, req_idx))
+      self.queue_size += 1
+      self._WriteToA2lStdin(addr)
+
+    def WaitForIdle(self):
+      """Waits until all the pending requests have been symbolized."""
+      while self.queue_size > 0:
+        self.WaitForNextSymbolInQueue()
+
+    def WaitForNextSymbolInQueue(self):
+      """Waits for the next pending request to be symbolized."""
+      if not self.queue_size:
+        return
+
+      # This outer loop guards against a2l hanging (detecting stdout timeout).
+      while True:
+        start_time = datetime.datetime.now()
+        timeout = datetime.timedelta(seconds=self._symbolizer.addr2line_timeout)
+
+        # The inner loop guards against a2l crashing (checking if it exited).
+        while (datetime.datetime.now() - start_time < timeout):
+          # poll() returns not-None if the process exited. a2l should never
+          # exit spontaneously.
+          if self._proc.poll() is not None:
+            logging.warning('addr2line crashed, respawning (lib: %s).' %
+                            self._lib_file_name)
+            self._RestartAddr2LineProcess()
+            # TODO(primiano): the best thing to do in this case would be
+            # shrinking the pool size as, very likely, addr2line is crashed
+            # due to low memory (and the respawned one will die again soon).
+
+          try:
+            lines = self._out_queue.get(block=True, timeout=0.25)
+          except Queue.Empty:
+            # On timeout (1/4 s.) repeat the inner loop and check whether the
+            # addr2line process crashed or we have waited too long for its
+            # output.
+            continue
+
+          # In nominal conditions, we get straight to this point.
+          self._ProcessSymbolOutput(lines)
+          return
+
+        # If this point is reached, we waited more than |addr2line_timeout|.
+        logging.warning('Hung addr2line process, respawning (lib: %s).' %
+                        self._lib_file_name)
+        self._RestartAddr2LineProcess()
+
+    def ProcessAllResolvedSymbolsInQueue(self):
+      """Consumes all the addr2line output lines produced (without blocking)."""
+      if not self.queue_size:
+        return
+      while True:
+        try:
+          lines = self._out_queue.get_nowait()
+        except Queue.Empty:
+          break
+        self._ProcessSymbolOutput(lines)
+
+    def RecycleIfNecessary(self):
+      """Restarts the process if it has been used for too long.
+
+      A long running addr2line process will consume excessive amounts
+      of memory without any gain in performance."""
+      if self._processed_symbols_count >= ADDR2LINE_RECYCLE_LIMIT:
+        self._RestartAddr2LineProcess()
+
+
+    def Terminate(self):
+      """Kills the underlying addr2line process.
+
+      The poller |_thread| will terminate as well due to the broken pipe."""
+      try:
+        self._proc.kill()
+        self._proc.communicate()  # Essentially wait() without risking deadlock.
+      except Exception:  # An exception while terminating? How interesting.
+        pass
+      self._proc = None
+
+    def _WriteToA2lStdin(self, addr):
+      self._proc.stdin.write('%s\n' % hex(addr))
+      if self._symbolizer.inlines:
+        # In the case of inlines we output an extra blank line, which causes
+        # addr2line to emit a (??,??:0) tuple that we use as a boundary marker.
+        self._proc.stdin.write('\n')
+      self._proc.stdin.flush()
+
+    def _ProcessSymbolOutput(self, lines):
+      """Parses an addr2line symbol output and triggers the client callback."""
+      (_, callback_arg, _) = self._request_queue.popleft()
+      self.queue_size -= 1
+
+      innermost_sym_info = None
+      sym_info = None
+      for (line1, line2) in lines:
+        prev_sym_info = sym_info
+        name = line1 if not line1.startswith('?') else None
+        source_path = None
+        source_line = None
+        m = ELFSymbolizer.Addr2Line.SYM_ADDR_RE.match(line2)
+        if m:
+          if not m.group(1).startswith('?'):
+            source_path = m.group(1)
+            if not m.group(2).startswith('?'):
+              source_line = int(m.group(2))
+        else:
+          logging.warning('Got invalid symbol path from addr2line: %s' % line2)
+
+        # In case disambiguation is on, and needed
+        was_ambiguous = False
+        disambiguated = False
+        if self._symbolizer.disambiguate:
+          if source_path and not posixpath.isabs(source_path):
+            path = self._symbolizer.disambiguation_table.get(source_path)
+            was_ambiguous = True
+            disambiguated = path is not None
+            source_path = path if disambiguated else source_path
+
+          # Use absolute paths (so that paths are consistent, as disambiguation
+          # uses absolute paths)
+          if source_path and not was_ambiguous:
+            source_path = os.path.abspath(source_path)
+
+        if source_path and self._symbolizer.strip_base_path:
+          # Strip the base path
+          source_path = re.sub('^' + self._symbolizer.strip_base_path,
+              self._symbolizer.source_root_path or '', source_path)
+
+        sym_info = ELFSymbolInfo(name, source_path, source_line, was_ambiguous,
+                                 disambiguated)
+        if prev_sym_info:
+          prev_sym_info.inlined_by = sym_info
+        if not innermost_sym_info:
+          innermost_sym_info = sym_info
+
+      self._processed_symbols_count += 1
+      self._symbolizer.callback(innermost_sym_info, callback_arg)
+
+    def _RestartAddr2LineProcess(self):
+      if self._proc:
+        self.Terminate()
+
+      # The only reason for the existence of this Queue (and the corresponding
+      # Thread below) is the lack of a subprocess.stdout.poll_avail_lines().
+      # Essentially this is a pipe able to extract a couple of lines atomically.
+      self._out_queue = Queue.Queue()
+
+      # Start the underlying addr2line process in line buffered mode.
+      cmd = [self._symbolizer.addr2line_path, '--functions', '--demangle',
+          '--exe=' + self._symbolizer.elf_file_path]
+      if self._symbolizer.inlines:
+        cmd += ['--inlines']
+      self._proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE,
+          stdin=subprocess.PIPE, stderr=sys.stderr, close_fds=True)
+
+      # Start the poller thread, which simply moves atomically the lines read
+      # from the addr2line's stdout to the |_out_queue|.
+      self._thread = threading.Thread(
+          target=ELFSymbolizer.Addr2Line.StdoutReaderThread,
+          args=(self._proc.stdout, self._out_queue, self._symbolizer.inlines))
+      self._thread.daemon = True  # Don't prevent early process exit.
+      self._thread.start()
+
+      self._processed_symbols_count = 0
+
+      # Replay the pending requests on the new process (only for the case
+      # of a hung addr2line timing out during the game).
+      for (addr, _, _) in self._request_queue:
+        self._WriteToA2lStdin(addr)
+
+    @staticmethod
+    def StdoutReaderThread(process_pipe, queue, inlines):
+      """The poller thread fn, which moves the addr2line stdout to the |queue|.
+
+      This is the only piece of code not running on the main thread. It merely
+      writes to a Queue, which is thread-safe. In the case of inlines, it
+      detects the ??,??:0 marker and sends the lines atomically, such that the
+      main thread always receives all the lines corresponding to one symbol in
+      one shot."""
+      try:
+        lines_for_one_symbol = []
+        while True:
+          line1 = process_pipe.readline().rstrip('\r\n')
+          line2 = process_pipe.readline().rstrip('\r\n')
+          if not line1 or not line2:
+            break
+          inline_has_more_lines = inlines and (len(lines_for_one_symbol) == 0 or
+                                  (line1 != '??' and line2 != '??:0'))
+          if not inlines or inline_has_more_lines:
+            lines_for_one_symbol += [(line1, line2)]
+          if inline_has_more_lines:
+            continue
+          queue.put(lines_for_one_symbol)
+          lines_for_one_symbol = []
+        process_pipe.close()
+
+      # Every addr2line process will die at some point; let it die silently.
+      except (IOError, OSError):
+        pass
+
+    @property
+    def first_request_id(self):
+      """Returns the request_id of the oldest pending request in the queue."""
+      return self._request_queue[0][2] if self._request_queue else 0
+
+
+class ELFSymbolInfo(object):
+  """The result of the symbolization passed as first arg. of each callback."""
+
+  def __init__(self, name, source_path, source_line, was_ambiguous=False,
+               disambiguated=False):
+    """All the fields here can be None (if addr2line replies with '??')."""
+    self.name = name
+    self.source_path = source_path
+    self.source_line = source_line
+    # In the case of |inlines|=True, the |inlined_by| points to the outer
+    # function inlining the current one (and so on, to form a chain).
+    self.inlined_by = None
+    self.disambiguated = disambiguated
+    self.was_ambiguous = was_ambiguous
+
+  def __str__(self):
+    return '%s [%s:%d]' % (
+        self.name or '??', self.source_path or '??', self.source_line or 0)
diff --git a/build/android/pylib/symbols/elf_symbolizer_unittest.py b/build/android/pylib/symbols/elf_symbolizer_unittest.py
new file mode 100755
index 0000000..e963a34
--- /dev/null
+++ b/build/android/pylib/symbols/elf_symbolizer_unittest.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(__file__))
+import elf_symbolizer
+import mock_addr2line
+
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+                              'mock_addr2line')
+_INCOMPLETE_MOCK_ADDR = 1024 * 1024
+_UNKNOWN_MOCK_ADDR = 2 * 1024 * 1024
+_INLINE_MOCK_ADDR = 3 * 1024 * 1024
+
+
+class ELFSymbolizerTest(unittest.TestCase):
+  def setUp(self):
+    self._callback = functools.partial(
+        ELFSymbolizerTest._SymbolizeCallback, self)
+    self._resolved_addresses = set()
+    # Mute warnings, we expect them due to the crash/hang tests.
+    logging.getLogger().setLevel(logging.ERROR)
+
+  def testParallelism1(self):
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+
+  def testParallelism4(self):
+    self._RunTest(max_concurrent_jobs=4, num_symbols=100)
+
+  def testParallelism8(self):
+    self._RunTest(max_concurrent_jobs=8, num_symbols=100)
+
+  def testCrash(self):
+    os.environ['MOCK_A2L_CRASH_EVERY'] = '99'
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+    os.environ['MOCK_A2L_CRASH_EVERY'] = '0'
+
+  def testHang(self):
+    os.environ['MOCK_A2L_HANG_EVERY'] = '99'
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+    os.environ['MOCK_A2L_HANG_EVERY'] = '0'
+
+  def testInlines(self):
+    """Stimulate the inline processing logic."""
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        inlines=True,
+        max_concurrent_jobs=4)
+
+    for addr in xrange(1000):
+      exp_inline = False
+      exp_unknown = False
+
+      # First 100 addresses with inlines.
+      if addr < 100:
+        addr += _INLINE_MOCK_ADDR
+        exp_inline = True
+
+      # Followed by 100 without inlines.
+      elif addr < 200:
+        pass
+
+      # Followed by 100 interleaved inline and non-inline symbols.
+      elif addr < 300:
+        if addr & 1:
+          addr += _INLINE_MOCK_ADDR
+          exp_inline = True
+
+      # Followed by 100 interleaved inline and unknown symbols.
+      elif addr < 400:
+        if addr & 1:
+          addr += _INLINE_MOCK_ADDR
+          exp_inline = True
+        else:
+          addr += _UNKNOWN_MOCK_ADDR
+          exp_unknown = True
+
+      exp_name = 'mock_sym_for_addr_%d' % addr if not exp_unknown else None
+      exp_source_path = 'mock_src/mock_lib1.so.c' if not exp_unknown else None
+      exp_source_line = addr if not exp_unknown else None
+      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, exp_inline)
+      symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+  def testIncompleteSyminfo(self):
+    """Stimulate the symbol-not-resolved logic."""
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=1)
+
+    # Test symbols with valid name but incomplete path.
+    addr = _INCOMPLETE_MOCK_ADDR
+    exp_name = 'mock_sym_for_addr_%d' % addr
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    # Test symbols with no name or sym info.
+    addr = _UNKNOWN_MOCK_ADDR
+    exp_name = None
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+  def _RunTest(self, max_concurrent_jobs, num_symbols):
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=max_concurrent_jobs,
+        addr2line_timeout=0.5)
+
+    for addr in xrange(num_symbols):
+      exp_name = 'mock_sym_for_addr_%d' % addr
+      exp_source_path = 'mock_src/mock_lib1.so.c'
+      exp_source_line = addr
+      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+      symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+    # Check that all the expected callbacks have been received.
+    for addr in xrange(num_symbols):
+      self.assertIn(addr, self._resolved_addresses)
+      self._resolved_addresses.remove(addr)
+
+    # Check for unexpected callbacks.
+    self.assertEqual(len(self._resolved_addresses), 0)
+
+  def _SymbolizeCallback(self, sym_info, cb_arg):
+    self.assertTrue(isinstance(sym_info, elf_symbolizer.ELFSymbolInfo))
+    self.assertTrue(isinstance(cb_arg, tuple))
+    self.assertEqual(len(cb_arg), 5)
+
+    # Unpack expectations from the callback extra argument.
+    (addr, exp_name, exp_source_path, exp_source_line, exp_inlines) = cb_arg
+    if exp_name is None:
+      self.assertIsNone(sym_info.name)
+    else:
+      self.assertTrue(sym_info.name.startswith(exp_name))
+    self.assertEqual(sym_info.source_path, exp_source_path)
+    self.assertEqual(sym_info.source_line, exp_source_line)
+
+    if exp_inlines:
+      self.assertEqual(sym_info.name, exp_name + '_inner')
+      self.assertEqual(sym_info.inlined_by.name, exp_name + '_middle')
+      self.assertEqual(sym_info.inlined_by.inlined_by.name,
+                       exp_name + '_outer')
+
+    # Check against duplicate callbacks.
+    self.assertNotIn(addr, self._resolved_addresses)
+    self._resolved_addresses.add(addr)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/symbols/mock_addr2line/__init__.py b/build/android/pylib/symbols/mock_addr2line/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/symbols/mock_addr2line/__init__.py
diff --git a/build/android/pylib/symbols/mock_addr2line/mock_addr2line b/build/android/pylib/symbols/mock_addr2line/mock_addr2line
new file mode 100755
index 0000000..cd58f56
--- /dev/null
+++ b/build/android/pylib/symbols/mock_addr2line/mock_addr2line
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple mock for addr2line.
+
+Outputs mock symbol information, with each symbol being a function of the
+original address (so it is easy to double-check consistency in unittests).
+"""
+
+import optparse
+import os
+import posixpath
+import sys
+import time
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-e', '--exe', dest='exe')  # Path of the debug-library.so.
+  # Silently swallow the other unnecessary arguments.
+  parser.add_option('-C', '--demangle', action='store_true')
+  parser.add_option('-f', '--functions', action='store_true')
+  parser.add_option('-i', '--inlines', action='store_true')
+  options, _ = parser.parse_args(argv[1:])
+  lib_file_name = posixpath.basename(options.exe)
+  processed_sym_count = 0
+  crash_every = int(os.environ.get('MOCK_A2L_CRASH_EVERY', 0))
+  hang_every = int(os.environ.get('MOCK_A2L_HANG_EVERY', 0))
+
+  while True:
+    line = sys.stdin.readline().rstrip('\r')
+    if not line:
+      break
+
+    # An empty line should generate '??,??:0' (used as a marker for inlines).
+    if line == '\n':
+      print '??'
+      print '??:0'
+      sys.stdout.flush()
+      continue
+
+    addr = int(line, 16)
+    processed_sym_count += 1
+    if crash_every and processed_sym_count % crash_every == 0:
+      sys.exit(1)
+    if hang_every and processed_sym_count % hang_every == 0:
+      time.sleep(1)
+
+    # Addresses < 1M will return good mock symbol information.
+    if addr < 1024 * 1024:
+      print 'mock_sym_for_addr_%d' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+
+    # Addresses 1M <= x < 2M will return symbols with a name but a missing path.
+    elif addr < 2 * 1024 * 1024:
+      print 'mock_sym_for_addr_%d' % addr
+      print '??:0'
+
+    # Addresses 2M <= x < 3M will return unknown symbol information.
+    elif addr < 3 * 1024 * 1024:
+      print '??'
+      print '??'
+
+    # Addresses 3M <= x < 4M will return inlines.
+    elif addr < 4 * 1024 * 1024:
+      print 'mock_sym_for_addr_%d_inner' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+      print 'mock_sym_for_addr_%d_middle' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+      print 'mock_sym_for_addr_%d_outer' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+
+    sys.stdout.flush()
+
+
+if __name__ == '__main__':
+  main(sys.argv)
\ No newline at end of file
diff --git a/build/android/pylib/system_properties.py b/build/android/pylib/system_properties.py
new file mode 100644
index 0000000..3f16f86
--- /dev/null
+++ b/build/android/pylib/system_properties.py
@@ -0,0 +1,40 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class SystemProperties(dict):
+
+  """A dict interface to interact with device system properties.
+
+  System properties are key/value pairs as exposed by adb shell getprop/setprop.
+
+  This implementation minimizes interaction with the physical device. It is
+  valid for the lifetime of a boot.
+  """
+
+  def __init__(self, android_commands):
+    super(SystemProperties, self).__init__()
+    self._adb = android_commands
+    self._cached_static_properties = {}
+
+  def __getitem__(self, key):
+    if self._IsStatic(key):
+      if key not in self._cached_static_properties:
+        self._cached_static_properties[key] = self._GetProperty(key)
+      return self._cached_static_properties[key]
+    return self._GetProperty(key)
+
+  def __setitem__(self, key, value):
+    # TODO(tonyg): This can fail with no root. Verify that it succeeds.
+    self._adb.SendShellCommand('setprop %s "%s"' % (key, value), retry_count=3)
+
+  @staticmethod
+  def _IsStatic(key):
+    # TODO(tonyg): This list is conservative and could be expanded as needed.
+    return (key.startswith('ro.boot.') or
+            key.startswith('ro.build.') or
+            key.startswith('ro.product.'))
+
+  def _GetProperty(self, key):
+    return self._adb.SendShellCommand('getprop %s' % key, retry_count=3).strip()
diff --git a/build/android/pylib/uiautomator/__init__.py b/build/android/pylib/uiautomator/__init__.py
new file mode 100644
index 0000000..cda7672
--- /dev/null
+++ b/build/android/pylib/uiautomator/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/uiautomator/setup.py b/build/android/pylib/uiautomator/setup.py
new file mode 100644
index 0000000..bd8ffc7
--- /dev/null
+++ b/build/android/pylib/uiautomator/setup.py
@@ -0,0 +1,35 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for uiautomator tests."""
+
+import logging
+
+from pylib.uiautomator import test_package
+from pylib.uiautomator import test_runner
+
+
+def Setup(test_options):
+  """Runs uiautomator tests on connected device(s).
+
+  Args:
+    test_options: A UIAutomatorOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  test_pkg = test_package.TestPackage(test_options.uiautomator_jar,
+                                      test_options.uiautomator_info_jar)
+  tests = test_pkg.GetAllMatchingTests(test_options.annotations,
+                                       test_options.exclude_annotations,
+                                       test_options.test_filter)
+
+  if not tests:
+    logging.error('No uiautomator tests to run with current args.')
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(
+        test_options, device, shard_index, test_pkg)
+
+  return (TestRunnerFactory, tests)
diff --git a/build/android/pylib/uiautomator/test_options.py b/build/android/pylib/uiautomator/test_options.py
new file mode 100644
index 0000000..f3f648b
--- /dev/null
+++ b/build/android/pylib/uiautomator/test_options.py
@@ -0,0 +1,21 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the UIAutomatorOptions named tuple."""
+
+import collections
+
+UIAutomatorOptions = collections.namedtuple('UIAutomatorOptions', [
+    'tool',
+    'cleanup_test_files',
+    'push_deps',
+    'annotations',
+    'exclude_annotations',
+    'test_filter',
+    'test_data',
+    'save_perf_json',
+    'screenshot_failures',
+    'uiautomator_jar',
+    'uiautomator_info_jar',
+    'package'])
diff --git a/build/android/pylib/uiautomator/test_package.py b/build/android/pylib/uiautomator/test_package.py
new file mode 100644
index 0000000..d8558c1
--- /dev/null
+++ b/build/android/pylib/uiautomator/test_package.py
@@ -0,0 +1,27 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class representing uiautomator test package."""
+
+import os
+
+from pylib import constants
+from pylib.instrumentation import test_jar
+
+
+class TestPackage(test_jar.TestJar):
+  def __init__(self, jar_path, jar_info_path):
+    test_jar.TestJar.__init__(self, jar_info_path)
+
+    if not os.path.exists(jar_path):
+      raise Exception('%s not found, please build it' % jar_path)
+    self._jar_path = jar_path
+
+  def GetPackageName(self):
+    """Returns the JAR named that is installed on the device."""
+    return os.path.basename(self._jar_path)
+
+  # Override.
+  def Install(self, device):
+    device.PushChangedFiles(self._jar_path, constants.TEST_EXECUTABLE_DIR)
diff --git a/build/android/pylib/uiautomator/test_runner.py b/build/android/pylib/uiautomator/test_runner.py
new file mode 100644
index 0000000..fa0725d
--- /dev/null
+++ b/build/android/pylib/uiautomator/test_runner.py
@@ -0,0 +1,86 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class for running uiautomator tests on a single device."""
+
+from pylib import constants
+from pylib import flag_changer
+from pylib.device import intent
+from pylib.instrumentation import test_options as instr_test_options
+from pylib.instrumentation import test_runner as instr_test_runner
+
+
+class TestRunner(instr_test_runner.TestRunner):
+  """Responsible for running a series of tests connected to a single device."""
+
+  def __init__(self, test_options, device, shard_index, test_pkg):
+    """Create a new TestRunner.
+
+    Args:
+      test_options: A UIAutomatorOptions object.
+      device: Attached android device.
+      shard_index: Shard index.
+      test_pkg: A TestPackage object.
+    """
+    # Create an InstrumentationOptions object to pass to the super class
+    instrumentation_options = instr_test_options.InstrumentationOptions(
+        test_options.tool,
+        test_options.cleanup_test_files,
+        test_options.push_deps,
+        test_options.annotations,
+        test_options.exclude_annotations,
+        test_options.test_filter,
+        test_options.test_data,
+        test_options.save_perf_json,
+        test_options.screenshot_failures,
+        wait_for_debugger=False,
+        coverage_dir=None,
+        test_apk=None,
+        test_apk_path=None,
+        test_apk_jar_path=None,
+        test_runner=None,
+        test_support_apk_path=None,
+        device_flags=None)
+    super(TestRunner, self).__init__(instrumentation_options, device,
+                                     shard_index, test_pkg)
+
+    cmdline_file = constants.PACKAGE_INFO[test_options.package].cmdline_file
+    self.flags = None
+    if cmdline_file:
+      self.flags = flag_changer.FlagChanger(self.device, cmdline_file)
+    self._package = constants.PACKAGE_INFO[test_options.package].package
+    self._activity = constants.PACKAGE_INFO[test_options.package].activity
+
+  #override
+  def InstallTestPackage(self):
+    self.test_pkg.Install(self.device)
+
+  #override
+  def PushDataDeps(self):
+    pass
+
+  #override
+  def _RunTest(self, test, timeout):
+    self.device.ClearApplicationState(self._package)
+    if self.flags:
+      if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test):
+        self.flags.RemoveFlags(['--disable-fre'])
+      else:
+        self.flags.AddFlags(['--disable-fre'])
+    self.device.StartActivity(
+        intent.Intent(action='android.intent.action.MAIN',
+                      activity=self._activity,
+                      package=self._package),
+        blocking=True,
+        force_stop=True)
+    cmd = ['uiautomator', 'runtest', self.test_pkg.GetPackageName(),
+           '-e', 'class', test]
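+    # The assembled command looks like this (illustrative names):
+    #   uiautomator runtest Tests.jar -e class org.example.FooTest#testBar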
+    return self.device.RunShellCommand(cmd, timeout=timeout, retries=0)
+
+  #override
+  def _GenerateTestResult(self, test, instr_statuses, start_ms, duration_ms):
+    # uiautomator emits its summary status with INSTRUMENTATION_STATUS_CODE,
+    # not INSTRUMENTATION_CODE, so we have to drop it off the list of statuses.
+    return super(TestRunner, self)._GenerateTestResult(
+        test, instr_statuses[:-1], start_ms, duration_ms)
diff --git a/build/android/pylib/utils/__init__.py b/build/android/pylib/utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/utils/__init__.py
diff --git a/build/android/pylib/utils/apk_helper.py b/build/android/pylib/utils/apk_helper.py
new file mode 100644
index 0000000..f5e9cd3
--- /dev/null
+++ b/build/android/pylib/utils/apk_helper.py
@@ -0,0 +1,76 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing utilities for apk packages."""
+
+import os.path
+import re
+
+from pylib import cmd_helper
+from pylib import constants
+
+
+_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
+_MANIFEST_ATTRIBUTE_RE = re.compile(
+    r'\s*A: ([^\(\)= ]*)\([^\(\)= ]*\)="(.*)" \(Raw: .*\)$')
+_MANIFEST_ELEMENT_RE = re.compile(r'\s*(?:E|N): (\S*) .*$')
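+# Illustrative `aapt dump xmltree` lines matched by the patterns above
+# (values are hypothetical):
+#   E: instrumentation (line=9)
+#     A: android:name(0x01010003)="org.example.Runner" (Raw: "org.example.Runner")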
+
+
+def GetPackageName(apk_path):
+  """Returns the package name of the apk."""
+  aapt_cmd = [_AAPT_PATH, 'dump', 'badging', apk_path]
+  aapt_output = cmd_helper.GetCmdOutput(aapt_cmd).split('\n')
+  package_name_re = re.compile(r'package: .*name=\'(\S*)\'')
+  for line in aapt_output:
+    m = package_name_re.match(line)
+    if m:
+      return m.group(1)
+  raise Exception('Failed to determine package name of %s' % apk_path)
+
+
+def _ParseManifestFromApk(apk_path):
+  aapt_cmd = [_AAPT_PATH, 'dump', 'xmltree', apk_path, 'AndroidManifest.xml']
+  aapt_output = cmd_helper.GetCmdOutput(aapt_cmd).split('\n')
+
+  parsed_manifest = {}
+  node_stack = [parsed_manifest]
+  indent = '  '
+
+  for line in aapt_output[1:]:
+    if len(line) == 0:
+      continue
+
+    indent_depth = 0
+    while line[(len(indent) * indent_depth):].startswith(indent):
+      indent_depth += 1
+
+    node_stack = node_stack[:indent_depth]
+    node = node_stack[-1]
+
+    m = _MANIFEST_ELEMENT_RE.match(line[len(indent) * indent_depth:])
+    if m:
+      if not m.group(1) in node:
+        node[m.group(1)] = {}
+      node_stack += [node[m.group(1)]]
+      continue
+
+    m = _MANIFEST_ATTRIBUTE_RE.match(line[len(indent) * indent_depth:])
+    if m:
+      if not m.group(1) in node:
+        node[m.group(1)] = []
+      node[m.group(1)].append(m.group(2))
+      continue
+
+  return parsed_manifest
+
+
+def GetInstrumentationName(
+    apk_path, default='android.test.InstrumentationTestRunner'):
+  """Returns the name of the Instrumentation in the apk."""
+
+  try:
+    manifest_info = _ParseManifestFromApk(apk_path)
+    return manifest_info['manifest']['instrumentation']['android:name'][0]
+  except KeyError:
+    return default
diff --git a/build/android/pylib/utils/command_option_parser.py b/build/android/pylib/utils/command_option_parser.py
new file mode 100644
index 0000000..cf501d0
--- /dev/null
+++ b/build/android/pylib/utils/command_option_parser.py
@@ -0,0 +1,75 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An option parser which handles the first arg as a command.
+
+It also adds other niceties, such as printing a list of commands and an
+example in the usage message.
+"""
+
+import optparse
+import sys
+
+
+class CommandOptionParser(optparse.OptionParser):
+  """Wrapper class for OptionParser to help with listing commands."""
+
+  def __init__(self, *args, **kwargs):
+    """Creates a CommandOptionParser.
+
+    Args:
+      commands_dict: A dictionary mapping command strings to an object defining
+          - add_options_func: Adds options to the option parser
+          - run_command_func: Runs the command itself.
+      example: An example command.
+      everything else: Passed to the optparse.OptionParser constructor.
+    """
+    self.commands_dict = kwargs.pop('commands_dict', {})
+    self.example = kwargs.pop('example', '')
+    if 'usage' not in kwargs:
+      kwargs['usage'] = 'Usage: %prog <command> [options]'
+    optparse.OptionParser.__init__(self, *args, **kwargs)
+
+  #override
+  def get_usage(self):
+    normal_usage = optparse.OptionParser.get_usage(self)
+    command_list = self.get_command_list()
+    example = self.get_example()
+    return self.expand_prog_name(normal_usage + example + command_list)
+
+  def get_command_list(self):
+    if self.commands_dict.keys():
+      return '\nCommands:\n  %s\n' % '\n  '.join(
+          sorted(self.commands_dict.keys()))
+    return ''
+
+  def get_example(self):
+    if self.example:
+      return '\nExample:\n  %s\n' % self.example
+    return ''
+
+
+def ParseAndExecute(option_parser, argv=None):
+  """Parses options/args from argv and runs the specified command.
+
+  Args:
+    option_parser: A CommandOptionParser object.
+    argv: Command line arguments. If None, defaults to sys.argv.
+
+  Returns:
+    An exit code.
+  """
+  if not argv:
+    argv = sys.argv
+
+  if len(argv) < 2 or argv[1] not in option_parser.commands_dict:
+    # Parse args first; if this is '--help', optparse will print help and
+    # exit.
+    option_parser.parse_args(argv)
+    option_parser.error('Invalid command.')
+
+  cmd = option_parser.commands_dict[argv[1]]
+  cmd.add_options_func(option_parser)
+  options, args = option_parser.parse_args(argv)
+  return cmd.run_command_func(argv[1], options, args, option_parser)
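+
+
+# Illustrative wiring (the command object and names are hypothetical):
+#   class _RunCommand(object):
+#     add_options_func = staticmethod(lambda parser: None)
+#     run_command_func = staticmethod(
+#         lambda command, options, args, parser: 0)
+#   parser = CommandOptionParser(commands_dict={'run': _RunCommand()},
+#                                example='%prog run')
+#   sys.exit(ParseAndExecute(parser))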
diff --git a/build/android/pylib/utils/emulator.py b/build/android/pylib/utils/emulator.py
new file mode 100644
index 0000000..81b9c98
--- /dev/null
+++ b/build/android/pylib/utils/emulator.py
@@ -0,0 +1,444 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to start and stop Android emulator.
+
+  Emulator: The class provides the methods to launch/shutdown the emulator with
+            the android virtual device named 'avd_armeabi' .
+"""
+
+import logging
+import os
+import signal
+import subprocess
+import time
+
+# TODO(craigdh): Move these pylib dependencies to pylib/utils/.
+from pylib import android_commands
+from pylib import cmd_helper
+from pylib import constants
+from pylib import pexpect
+from pylib.device import device_utils
+from pylib.utils import time_profile
+
+import errors
+import run_command
+
+# SD card size
+SDCARD_SIZE = '512M'
+
+# Template used to generate config.ini files for the emulator
+CONFIG_TEMPLATE = """avd.ini.encoding=ISO-8859-1
+hw.dPad=no
+hw.lcd.density=320
+sdcard.size=512M
+hw.cpu.arch={hw.cpu.arch}
+hw.device.hash=-708107041
+hw.camera.back=none
+disk.dataPartition.size=800M
+hw.gpu.enabled=yes
+skin.path=720x1280
+skin.dynamic=yes
+hw.keyboard=yes
+hw.ramSize=1024
+hw.device.manufacturer=Google
+hw.sdCard=yes
+hw.mainKeys=no
+hw.accelerometer=yes
+skin.name=720x1280
+abi.type={abi.type}
+hw.trackBall=no
+hw.device.name=Galaxy Nexus
+hw.battery=yes
+hw.sensors.proximity=yes
+image.sysdir.1=system-images/android-{api.level}/{abi.type}/
+hw.sensors.orientation=yes
+hw.audioInput=yes
+hw.camera.front=none
+hw.gps=yes
+vm.heapSize=128
+{extras}"""
+
+CONFIG_REPLACEMENTS = {
+  'x86': {
+    '{hw.cpu.arch}': 'x86',
+    '{abi.type}': 'x86',
+    '{extras}': ''
+  },
+  'arm': {
+    '{hw.cpu.arch}': 'arm',
+    '{abi.type}': 'armeabi-v7a',
+    '{extras}': 'hw.cpu.model=cortex-a8\n'
+  },
+  'mips': {
+    '{hw.cpu.arch}': 'mips',
+    '{abi.type}': 'mips',
+    '{extras}': ''
+  }
+}
+
+class EmulatorLaunchException(Exception):
+  """Emulator failed to launch."""
+  pass
+
+def _KillAllEmulators():
+  """Kill all running emulators that look like ones we started.
+
+  There are odd 'sticky' cases where there can be no emulator process
+  running but a device slot is taken.  A little bot trouble and
+  we're out of room forever.
+  """
+  emulators = android_commands.GetAttachedDevices(hardware=False)
+  if not emulators:
+    return
+  for emu_name in emulators:
+    cmd_helper.RunCmd(['adb', '-s', emu_name, 'emu', 'kill'])
+  logging.info('Emulator killing is async; give a few seconds for all to die.')
+  for _ in range(5):
+    if not android_commands.GetAttachedDevices(hardware=False):
+      return
+    time.sleep(1)
+
+
+def DeleteAllTempAVDs():
+  """Delete all temporary AVDs which are created for tests.
+
+  If a test exits abnormally, temporary AVDs created for testing may be left
+  on the system. Clean these AVDs up.
+  """
+  avds = device_utils.GetAVDs()
+  if not avds:
+    return
+  for avd_name in avds:
+    if 'run_tests_avd' in avd_name:
+      cmd = ['android', '-s', 'delete', 'avd', '--name', avd_name]
+      cmd_helper.RunCmd(cmd)
+      logging.info('Deleted AVD %s', avd_name)
+
+
+class PortPool(object):
+  """Pool for emulator port starting position that changes over time."""
+  _port_min = 5554
+  _port_max = 5585
+  _port_current_index = 0
+
+  @classmethod
+  def port_range(cls):
+    """Return a range of valid ports for emulator use.
+
+    The port must be an even number between 5554 and 5584.  Sometimes
+    a killed emulator "hangs on" to a port long enough to prevent
+    relaunch.  This is especially true on slow machines (like a bot).
+    Cycling through a port start position helps make us resilient."""
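+    # e.g. the first call yields [5554, 5556, ..., 5584]; the next call starts
+    # one position later, yielding [5556, ..., 5584, 5554] (illustrative).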
+    ports = range(cls._port_min, cls._port_max, 2)
+    n = cls._port_current_index
+    cls._port_current_index = (n + 1) % len(ports)
+    return ports[n:] + ports[:n]
+
+
+def _GetAvailablePort():
+  """Returns an available TCP port for the console."""
+  used_ports = []
+  emulators = android_commands.GetAttachedDevices(hardware=False)
+  for emulator in emulators:
+    used_ports.append(emulator.split('-')[1])
+  for port in PortPool.port_range():
+    if str(port) not in used_ports:
+      return port
+
+
+def LaunchTempEmulators(emulator_count, abi, api_level, wait_for_boot=True):
+  """Create and launch temporary emulators and wait for them to boot.
+
+  Args:
+    emulator_count: number of emulators to launch.
+    abi: the emulator target platform
+    api_level: the api level (e.g., 19 for Android v4.4 - KitKat release)
+    wait_for_boot: whether or not to wait for emulators to boot up
+
+  Returns:
+    List of emulators.
+  """
+  emulators = []
+  for n in xrange(emulator_count):
+    t = time_profile.TimeProfile('Emulator launch %d' % n)
+    # Creates a temporary AVD.
+    avd_name = 'run_tests_avd_%d' % n
+    logging.info('Emulator launch %d with avd_name=%s and api=%d',
+        n, avd_name, api_level)
+    emulator = Emulator(avd_name, abi)
+    emulator.CreateAVD(api_level)
+    emulator.Launch(kill_all_emulators=n == 0)
+    t.Stop()
+    emulators.append(emulator)
+  # Wait for all emulators to finish booting.
+  if wait_for_boot:
+    for emulator in emulators:
+      emulator.ConfirmLaunch(True)
+  return emulators
+
+
+def LaunchEmulator(avd_name, abi):
+  """Launch an existing emulator with name avd_name.
+
+  Args:
+    avd_name: name of existing emulator
+    abi: the emulator target platform
+
+  Returns:
+    emulator object.
+  """
+  logging.info('Launching specified emulator with avd_name=%s', avd_name)
+  emulator = Emulator(avd_name, abi)
+  emulator.Launch(kill_all_emulators=True)
+  emulator.ConfirmLaunch(True)
+  return emulator
+
+
+class Emulator(object):
+  """Provides the methods to launch/shutdown the emulator.
+
+  The emulator has the android virtual device named 'avd_armeabi'.
+
+  The emulator could use any even TCP port between 5554 and 5584 for the
+  console communication, and this port will be part of the device name like
+  'emulator-5554'. Assume this is always the case, as the device name is the
+  id of the emulator managed by this class.
+
+  Attributes:
+    emulator: Path of Android's emulator tool.
+    popen: Popen object of the running emulator process.
+    device: Device name of this emulator.
+  """
+
+  # Signals we listen for to kill the emulator on
+  _SIGNALS = (signal.SIGINT, signal.SIGHUP)
+
+  # Time to wait for an emulator launch, in seconds.  This includes
+  # the time to launch the emulator and a wait-for-device command.
+  _LAUNCH_TIMEOUT = 120
+
+  # Timeout interval of wait-for-device command before bouncing to a
+  # process life check.
+  _WAITFORDEVICE_TIMEOUT = 5
+
+  # Time to wait for a "wait for boot complete" (property set on device).
+  _WAITFORBOOT_TIMEOUT = 300
+
+  def __init__(self, avd_name, abi):
+    """Init an Emulator.
+
+    Args:
+      avd_name: name of the AVD to create
+      abi: target platform for the emulator being created; defaults to x86.
+    """
+    android_sdk_root = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk')
+    self.emulator = os.path.join(android_sdk_root, 'tools', 'emulator')
+    self.android = os.path.join(android_sdk_root, 'tools', 'android')
+    self.popen = None
+    self.device_serial = None
+    self.abi = abi
+    self.avd_name = avd_name
+
+  @staticmethod
+  def _DeviceName():
+    """Return our device name."""
+    port = _GetAvailablePort()
+    return ('emulator-%d' % port, port)
+
+  def CreateAVD(self, api_level):
+    """Creates an AVD with the given name.
+
+    Args:
+      api_level: the api level of the image
+
+    Returns:
+      The avd_name.
+    """
+
+    if self.abi == 'arm':
+      abi_option = 'armeabi-v7a'
+    elif self.abi == 'mips':
+      abi_option = 'mips'
+    else:
+      abi_option = 'x86'
+
+    api_target = 'android-%s' % api_level
+
+    avd_command = [
+        self.android,
+        '--silent',
+        'create', 'avd',
+        '--name', self.avd_name,
+        '--abi', abi_option,
+        '--target', api_target,
+        '--sdcard', SDCARD_SIZE,
+        '--force',
+    ]
+    avd_cmd_str = ' '.join(avd_command)
+    logging.info('Create AVD command: %s', avd_cmd_str)
+    avd_process = pexpect.spawn(avd_cmd_str)
+
+    # Instead of creating a custom profile, we overwrite config files.
+    avd_process.expect('Do you wish to create a custom hardware profile')
+    avd_process.sendline('no\n')
+    avd_process.expect('Created AVD \'%s\'' % self.avd_name)
+
+    # Replace current configuration with default Galaxy Nexus config.
+    avds_dir = os.path.join(os.path.expanduser('~'), '.android', 'avd')
+    ini_file = os.path.join(avds_dir, '%s.ini' % self.avd_name)
+    new_config_ini = os.path.join(avds_dir, '%s.avd' % self.avd_name,
+                                  'config.ini')
+
+    # Remove config files with defaults to replace with Google's GN settings.
+    os.unlink(ini_file)
+    os.unlink(new_config_ini)
+
+    # Create new configuration files with Galaxy Nexus by Google settings.
+    with open(ini_file, 'w') as new_ini:
+      new_ini.write('avd.ini.encoding=ISO-8859-1\n')
+      new_ini.write('target=%s\n' % api_target)
+      new_ini.write('path=%s/%s.avd\n' % (avds_dir, self.avd_name))
+      new_ini.write('path.rel=avd/%s.avd\n' % self.avd_name)
+
+    custom_config = CONFIG_TEMPLATE
+    replacements = CONFIG_REPLACEMENTS[self.abi]
+    for key in replacements:
+      custom_config = custom_config.replace(key, replacements[key])
+    custom_config = custom_config.replace('{api.level}', str(api_level))
+
+    with open(new_config_ini, 'w') as new_config:
+      new_config.write(custom_config)
+
+    return self.avd_name
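+
+  # Sketch of the resulting <avd_name>.ini (values hypothetical):
+  #   avd.ini.encoding=ISO-8859-1
+  #   target=android-19
+  #   path=/home/bot/.android/avd/run_tests_avd_0.avd
+  #   path.rel=avd/run_tests_avd_0.avd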
+
+  def _DeleteAVD(self):
+    """Delete the AVD of this emulator."""
+    avd_command = [
+        self.android,
+        '--silent',
+        'delete',
+        'avd',
+        '--name', self.avd_name,
+    ]
+    logging.info('Delete AVD command: %s', ' '.join(avd_command))
+    cmd_helper.RunCmd(avd_command)
+
+  def Launch(self, kill_all_emulators):
+    """Launches the emulator asynchronously. Call ConfirmLaunch() to ensure the
+    emulator is ready for use.
+
+    If the launch fails, an exception will be raised.
+    """
+    if kill_all_emulators:
+      _KillAllEmulators()  # just to be sure
+    self._AggressiveImageCleanup()
+    (self.device_serial, port) = self._DeviceName()
+    emulator_command = [
+        self.emulator,
+        # Speed up emulator launch by 40%.  Really.
+        '-no-boot-anim',
+        # The default /data size is 64M.
+        # That's not enough for 8 unit test bundles and their data.
+        '-partition-size', '512',
+        # Use a familiar name and port.
+        '-avd', self.avd_name,
+        '-port', str(port),
+        # Wipe the data.  We've seen cases where an emulator gets 'stuck' if we
+        # don't do this (every thousand runs or so).
+        '-wipe-data',
+        # Enable GPU by default.
+        '-gpu', 'on',
+        '-qemu', '-m', '1024',
+        ]
+    if self.abi == 'x86':
+      emulator_command.extend([
+          # For the x86 emulator, --enable-kvm fails early if KVM is
+          # unavailable, avoiding accidental runs in a slow mode (i.e.
+          # without hardware virtualization support).
+          '--enable-kvm',
+          ])
+
+    logging.info('Emulator launch command: %s', ' '.join(emulator_command))
+    self.popen = subprocess.Popen(args=emulator_command,
+                                  stderr=subprocess.STDOUT)
+    self._InstallKillHandler()
+
+  @staticmethod
+  def _AggressiveImageCleanup():
+    """Aggressive cleanup of emulator images.
+
+    Experimentally it looks like our current emulator use on the bot
+    leaves image files around in /tmp/android-$USER.  If a "random"
+    name gets reused, we choke with a 'File exists' error.
+    TODO(jrg): is there a less hacky way to accomplish the same goal?
+    """
+    logging.info('Aggressive Image Cleanup')
+    emulator_imagedir = '/tmp/android-%s' % os.environ['USER']
+    if not os.path.exists(emulator_imagedir):
+      return
+    for image in os.listdir(emulator_imagedir):
+      full_name = os.path.join(emulator_imagedir, image)
+      if 'emulator' in full_name:
+        logging.info('Deleting emulator image %s', full_name)
+        os.unlink(full_name)
+
+  def ConfirmLaunch(self, wait_for_boot=False):
+    """Confirm the emulator launched properly.
+
+    Loop on a wait-for-device with a very small timeout.  On each
+    timeout, check the emulator process is still alive.
+    After confirming a wait-for-device can be successful, make sure
+    it returns the right answer.
+    """
+    seconds_waited = 0
+    number_of_waits = 2  # Make sure we can wait-for-device twice.
+    # TODO(jbudorick) Un-handroll this in the implementation switch.
+    adb_cmd = "adb -s %s %s" % (self.device_serial, 'wait-for-device')
+    while seconds_waited < self._LAUNCH_TIMEOUT:
+      try:
+        run_command.RunCommand(adb_cmd,
+                               timeout_time=self._WAITFORDEVICE_TIMEOUT,
+                               retry_count=1)
+        number_of_waits -= 1
+        if not number_of_waits:
+          break
+      except errors.WaitForResponseTimedOutError:
+        seconds_waited += self._WAITFORDEVICE_TIMEOUT
+        adb_cmd = "adb -s %s %s" % (self.device_serial, 'kill-server')
+        run_command.RunCommand(adb_cmd)
+      self.popen.poll()
+      if self.popen.returncode is not None:
+        raise EmulatorLaunchException('EMULATOR DIED')
+    if seconds_waited >= self._LAUNCH_TIMEOUT:
+      raise EmulatorLaunchException('TIMEOUT with wait-for-device')
+    logging.info('Seconds waited on wait-for-device: %d', seconds_waited)
+    if wait_for_boot:
+      # Now that we checked for obvious problems, wait for a boot complete.
+      # Waiting for the package manager is sometimes problematic.
+      # TODO(jbudorick) Convert this once waiting for the package manager and
+      #                 the external storage is no longer problematic.
+      d = device_utils.DeviceUtils(self.device_serial)
+      d.old_interface.WaitForSystemBootCompleted(self._WAITFORBOOT_TIMEOUT)
+
+  def Shutdown(self):
+    """Shuts down the process started by launch."""
+    self._DeleteAVD()
+    if self.popen:
+      self.popen.poll()
+      if self.popen.returncode is None:
+        self.popen.kill()
+      self.popen = None
+
+  def _ShutdownOnSignal(self, _signum, _frame):
+    logging.critical('emulator _ShutdownOnSignal')
+    for sig in self._SIGNALS:
+      signal.signal(sig, signal.SIG_DFL)
+    self.Shutdown()
+    raise KeyboardInterrupt  # print a stack
+
+  def _InstallKillHandler(self):
+    """Install a handler to kill the emulator when we exit unexpectedly."""
+    for sig in self._SIGNALS:
+      signal.signal(sig, self._ShutdownOnSignal)
diff --git a/build/android/pylib/utils/findbugs.py b/build/android/pylib/utils/findbugs.py
new file mode 100644
index 0000000..d946f31
--- /dev/null
+++ b/build/android/pylib/utils/findbugs.py
@@ -0,0 +1,252 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import re
+import shlex
+import subprocess
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+
+
+def _PrintMessage(warnings, title, action, known_bugs_file):
+  if warnings:
+    print
+    print '*' * 80
+    print '%s warnings.' % title
+    print '%s %s' % (action, known_bugs_file)
+    print '-' * 80
+    for warning in warnings:
+      print warning
+    print '-' * 80
+    print
+
+
+def _StripLineNumbers(current_warnings):
+  re_line = r':\[line.*?\]$'
+  return [re.sub(re_line, '', x) for x in current_warnings]
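+
+# Example (warning text hypothetical): a line such as
+#   'M V EI: Foo.getBar() may expose internal representation:[line 42]'
+# is reduced to everything before ':[line 42]', so baselines stay stable
+# when unrelated edits shift line numbers.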
+
+
+def _DiffKnownWarnings(current_warnings_set, known_bugs_file):
+  with open(known_bugs_file, 'r') as known_bugs:
+    known_bugs_set = set(known_bugs.read().splitlines())
+
+  new_warnings = current_warnings_set - known_bugs_set
+  _PrintMessage(sorted(new_warnings), 'New', 'Please fix, or perhaps add to',
+                known_bugs_file)
+
+  obsolete_warnings = known_bugs_set - current_warnings_set
+  _PrintMessage(sorted(obsolete_warnings), 'Obsolete', 'Please remove from',
+                known_bugs_file)
+
+  count = len(new_warnings) + len(obsolete_warnings)
+  if count:
+    print '*** %d FindBugs warning%s! ***' % (count, 's' * (count > 1))
+    if new_warnings:
+      print '*** %d: new ***' % len(new_warnings)
+    if obsolete_warnings:
+      print '*** %d: obsolete ***' % len(obsolete_warnings)
+    print
+    print 'Alternatively, rebaseline with the --rebaseline command line option.'
+    print
+  else:
+    print 'No new FindBugs warnings.'
+  print
+  return count
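+
+# Example: if the current run produces warnings {A, B} and the known-bugs
+# file contains {B, C}, A is printed as new, C as obsolete, and 2 is
+# returned.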
+
+
+def _Rebaseline(current_warnings_set, known_bugs_file):
+  with open(known_bugs_file, 'w') as known_bugs:
+    for warning in sorted(current_warnings_set):
+      print >> known_bugs, warning
+  return 0
+
+
+def _GetChromeJars(release_version):
+  version = 'Debug'
+  if release_version:
+    version = 'Release'
+  path = os.path.join(constants.DIR_SOURCE_ROOT,
+                      os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+                      version,
+                      'lib.java')
+  cmd = 'find %s -name "*.jar"' % path
+  out = cmd_helper.GetCmdOutput(shlex.split(cmd))
+  out = [p for p in out.splitlines() if not p.endswith('.dex.jar')]
+  if not out:
+    print 'No classes found in %s' % path
+  return ' '.join(out)
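+
+# Example (jar names hypothetical): out/Debug/lib.java may contain both
+# base_java.jar and base_java.dex.jar; only the former is kept, since
+# FindBugs analyzes JVM bytecode rather than dex.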
+
+
+def _Run(exclude, known_bugs, classes_to_analyze, auxiliary_classes,
+        rebaseline, release_version, findbug_args):
+  """Run the FindBugs.
+
+  Args:
+    exclude: the exclude xml file, refer to FindBugs's -exclude command option.
+    known_bugs: the text file of known bugs. The bugs in it will not be
+                reported.
+    classes_to_analyze: the list of classes need to analyze, refer to FindBug's
+                        -onlyAnalyze command line option.
+    auxiliary_classes: the classes help to analyze, refer to FindBug's
+                       -auxclasspath command line option.
+    rebaseline: True if the known_bugs file needs rebaseline.
+    release_version: True if the release version needs check, otherwise check
+                     debug version.
+    findbug_args: addtional command line options needs pass to Findbugs.
+  """
+
+  chrome_src = constants.DIR_SOURCE_ROOT
+  sdk_root = constants.ANDROID_SDK_ROOT
+  sdk_version = constants.ANDROID_SDK_VERSION
+
+  system_classes = []
+  system_classes.append(os.path.join(sdk_root, 'platforms',
+                                     'android-%s' % sdk_version, 'android.jar'))
+  if auxiliary_classes:
+    for classes in auxiliary_classes:
+      system_classes.append(os.path.abspath(classes))
+
+  findbugs_javacmd = 'java'
+  findbugs_home = os.path.join(chrome_src, 'third_party', 'findbugs')
+  findbugs_jar = os.path.join(findbugs_home, 'lib', 'findbugs.jar')
+  findbugs_pathsep = ':'
+  findbugs_maxheap = '768'
+
+  cmd = '%s ' % findbugs_javacmd
+  cmd = '%s -classpath %s%s' % (cmd, findbugs_jar, findbugs_pathsep)
+  cmd = '%s -Xmx%sm ' % (cmd, findbugs_maxheap)
+  cmd = '%s -Dfindbugs.home="%s" ' % (cmd, findbugs_home)
+  cmd = '%s -jar %s ' % (cmd, findbugs_jar)
+
+  cmd = '%s -textui -sortByClass ' % cmd
+  cmd = '%s -pluginList %s' % (cmd, os.path.join(chrome_src, 'tools', 'android',
+                                                 'findbugs_plugin', 'lib',
+                                                 'chromiumPlugin.jar'))
+  if system_classes:
+    cmd = '%s -auxclasspath %s ' % (cmd, ':'.join(system_classes))
+
+  if classes_to_analyze:
+    cmd = '%s -onlyAnalyze %s ' % (cmd, classes_to_analyze)
+
+  if exclude:
+    cmd = '%s -exclude %s ' % (cmd, os.path.abspath(exclude))
+
+  if findbug_args:
+    cmd = '%s %s ' % (cmd, findbug_args)
+
+  chrome_classes = _GetChromeJars(release_version)
+  if not chrome_classes:
+    return 1
+  cmd = '%s %s ' % (cmd, chrome_classes)
+
+  print
+  print '*' * 80
+  print 'Command used to run findbugs:'
+  print cmd
+  print '*' * 80
+  print
+
+  proc = subprocess.Popen(shlex.split(cmd),
+                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  out, _err = proc.communicate()
+  current_warnings_set = set(_StripLineNumbers(filter(None, out.splitlines())))
+
+  if rebaseline:
+    return _Rebaseline(current_warnings_set, known_bugs)
+  else:
+    return _DiffKnownWarnings(current_warnings_set, known_bugs)
+
+def Run(options):
+  exclude_file = None
+  known_bugs_file = None
+
+  if options.exclude:
+    exclude_file = options.exclude
+  elif options.base_dir:
+    exclude_file = os.path.join(options.base_dir, 'findbugs_exclude.xml')
+
+  if options.known_bugs:
+    known_bugs_file = options.known_bugs
+  elif options.base_dir:
+    known_bugs_file = os.path.join(options.base_dir, 'findbugs_known_bugs.txt')
+
+  auxclasspath = None
+  if options.auxclasspath:
+    auxclasspath = options.auxclasspath.split(':')
+  return _Run(exclude_file, known_bugs_file, options.only_analyze, auxclasspath,
+              options.rebaseline, options.release_build, options.findbug_args)
+
+
+def GetCommonParser():
+  parser = optparse.OptionParser()
+  parser.add_option('-r',
+                    '--rebaseline',
+                    action='store_true',
+                    dest='rebaseline',
+                    help='Rebaseline known findbugs issues.')
+
+  parser.add_option('-a',
+                    '--auxclasspath',
+                    action='store',
+                    default=None,
+                    dest='auxclasspath',
+                    help='Set aux classpath for analysis.')
+
+  parser.add_option('-o',
+                    '--only-analyze',
+                    action='store',
+                    default=None,
+                    dest='only_analyze',
+                    help='Only analyze the given classes and packages.')
+
+  parser.add_option('-e',
+                    '--exclude',
+                    action='store',
+                    default=None,
+                    dest='exclude',
+                    help='Exclude bugs matching given filter.')
+
+  parser.add_option('-k',
+                    '--known-bugs',
+                    action='store',
+                    default=None,
+                    dest='known_bugs',
+                    help='Do not report the bugs listed in the given file.')
+
+  parser.add_option('-l',
+                    '--release-build',
+                    action='store_true',
+                    dest='release_build',
+                    help='Analyze release build instead of debug.')
+
+  parser.add_option('-f',
+                    '--findbug-args',
+                    action='store',
+                    default=None,
+                    dest='findbug_args',
+                    help='Additional findbug arguments.')
+
+  parser.add_option('-b',
+                    '--base-dir',
+                    action='store',
+                    default=None,
+                    dest='base_dir',
+                    help='Base directory for configuration file.')
+
+  return parser
+
+
+def main():
+  parser = GetCommonParser()
+  options, _ = parser.parse_args()
+
+  return Run(options)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/pylib/utils/flakiness_dashboard_results_uploader.py b/build/android/pylib/utils/flakiness_dashboard_results_uploader.py
new file mode 100644
index 0000000..246c83b
--- /dev/null
+++ b/build/android/pylib/utils/flakiness_dashboard_results_uploader.py
@@ -0,0 +1,179 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Uploads the results to the flakiness dashboard server."""
+# pylint: disable=E1002,R0201
+
+import logging
+import os
+import shutil
+import tempfile
+import xml.dom.minidom
+import xml.parsers.expat
+
+
+# TODO(craigdh): pylib/utils/ should not depend on pylib/.
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import json_results_generator
+from pylib.utils import repo_utils
+
+
+
+class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase):
+  """Writes test results to a JSON file and handles uploading that file to
+  the test results server.
+  """
+  def __init__(self, builder_name, build_name, build_number, tmp_folder,
+               test_results_map, test_results_server, test_type, master_name):
+    super(JSONResultsGenerator, self).__init__(
+        builder_name=builder_name,
+        build_name=build_name,
+        build_number=build_number,
+        results_file_base_path=tmp_folder,
+        builder_base_url=None,
+        test_results_map=test_results_map,
+        svn_repositories=(('webkit', 'third_party/WebKit'),
+                          ('chrome', '.')),
+        test_results_server=test_results_server,
+        test_type=test_type,
+        master_name=master_name)
+
+  #override
+  def _GetModifierChar(self, test_name):
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    return self._test_results_map[test_name].modifier
+
+  #override
+  def _GetSVNRevision(self, in_directory):
+    """Returns the git/svn revision for the given directory.
+
+    Args:
+      in_directory: The directory relative to src.
+    """
+    def _is_git_directory(in_directory):
+      """Returns true if the given directory is in a git repository.
+
+      Args:
+        in_directory: The directory path to be tested.
+      """
+      if os.path.exists(os.path.join(in_directory, '.git')):
+        return True
+      parent = os.path.dirname(in_directory)
+      if parent == constants.DIR_SOURCE_ROOT or parent == in_directory:
+        return False
+      return _is_git_directory(parent)
+
+    in_directory = os.path.join(constants.DIR_SOURCE_ROOT, in_directory)
+
+    if not os.path.exists(os.path.join(in_directory, '.svn')):
+      if _is_git_directory(in_directory):
+        return repo_utils.GetGitHeadSHA1(in_directory)
+      else:
+        return ''
+
+    output = cmd_helper.GetCmdOutput(['svn', 'info', '--xml'], cwd=in_directory)
+    try:
+      dom = xml.dom.minidom.parseString(output)
+      return dom.getElementsByTagName('entry')[0].getAttribute('revision')
+    except xml.parsers.expat.ExpatError:
+      return ''
+
+
+class ResultsUploader(object):
+  """Handles uploading buildbot tests results to the flakiness dashboard."""
+  def __init__(self, tests_type):
+    self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
+    self._builder_name = os.environ.get('BUILDBOT_BUILDERNAME')
+    self._tests_type = tests_type
+
+    if not self._build_number or not self._builder_name:
+      raise Exception('You should not be uploading test results to the server '
+                      'from your local machine.')
+
+    upstream = (tests_type != 'Chromium_Android_Instrumentation')
+    if upstream:
+      # TODO(frankf): Use factory properties (see buildbot/bb_device_steps.py)
+      # This requires passing the actual master name (e.g. 'ChromiumFYI' not
+      # 'chromium.fyi').
+      from slave import slave_utils # pylint: disable=F0401
+      self._build_name = slave_utils.SlaveBuildName(constants.DIR_SOURCE_ROOT)
+      self._master_name = slave_utils.GetActiveMaster()
+    else:
+      self._build_name = 'chromium-android'
+      buildbot_branch = os.environ.get('BUILDBOT_BRANCH')
+      if not buildbot_branch:
+        buildbot_branch = 'master'
+      self._master_name = '%s-%s' % (self._build_name, buildbot_branch)
+
+    self._test_results_map = {}
+
+  def AddResults(self, test_results):
+    # TODO(frankf): Differentiate between fail/crash/timeouts.
+    conversion_map = [
+        (test_results.GetPass(), False,
+            json_results_generator.JSONResultsGeneratorBase.PASS_RESULT),
+        (test_results.GetFail(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetCrash(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetTimeout(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetUnknown(), True,
+            json_results_generator.JSONResultsGeneratorBase.NO_DATA_RESULT),
+        ]
+
+    for results_list, failed, modifier in conversion_map:
+      for single_test_result in results_list:
+        test_result = json_results_generator.TestResult(
+            test=single_test_result.GetName(),
+            failed=failed,
+            elapsed_time=single_test_result.GetDur() / 1000)
+        # The WebKit TestResult object sets the modifier based on the test
+        # name. Since we don't use the same test naming convention as WebKit,
+        # the modifier will be wrong, so we need to overwrite it.
+        test_result.modifier = modifier
+
+        self._test_results_map[single_test_result.GetName()] = test_result
+
+  def Upload(self, test_results_server):
+    if not self._test_results_map:
+      return
+
+    tmp_folder = tempfile.mkdtemp()
+
+    try:
+      results_generator = JSONResultsGenerator(
+          builder_name=self._builder_name,
+          build_name=self._build_name,
+          build_number=self._build_number,
+          tmp_folder=tmp_folder,
+          test_results_map=self._test_results_map,
+          test_results_server=test_results_server,
+          test_type=self._tests_type,
+          master_name=self._master_name)
+
+      json_files = ["incremental_results.json", "times_ms.json"]
+      results_generator.GenerateJSONOutput()
+      results_generator.GenerateTimesMSFile()
+      results_generator.UploadJSONFiles(json_files)
+    except Exception as e:
+      logging.error("Uploading results to test server failed: %s." % e)
+    finally:
+      shutil.rmtree(tmp_folder)
+
+
+def Upload(results, flakiness_dashboard_server, test_type):
+  """Reports test results to the flakiness dashboard for Chrome for Android.
+
+  Args:
+    results: test results.
+    flakiness_dashboard_server: the server to upload the results to.
+    test_type: the type of the tests (as displayed by the flakiness dashboard).
+  """
+  uploader = ResultsUploader(test_type)
+  uploader.AddResults(results)
+  uploader.Upload(flakiness_dashboard_server)
diff --git a/build/android/pylib/utils/host_path_finder.py b/build/android/pylib/utils/host_path_finder.py
new file mode 100644
index 0000000..aea51a9
--- /dev/null
+++ b/build/android/pylib/utils/host_path_finder.py
@@ -0,0 +1,23 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+from pylib import constants
+
+
+def GetMostRecentHostPath(file_name):
+  """Returns the most recent existing full path for the given file name.
+
+  Returns: An empty string if no path could be found.
+  """
+  out_dir = os.path.join(
+      constants.DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'))
+  candidate_paths = map(
+      lambda build_type: os.path.join(out_dir, build_type, file_name),
+      ['Debug', 'Release'])
+  candidate_paths = filter(os.path.exists, candidate_paths)
+  candidate_paths = sorted(candidate_paths, key=os.path.getmtime, reverse=True)
+  candidate_paths.append('')
+  return candidate_paths[0]
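+
+# Example (file name hypothetical): GetMostRecentHostPath('md5sum_bin_host')
+# returns out/Release/md5sum_bin_host when the Release copy is newer than the
+# Debug one, and '' when neither output directory contains the file.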
diff --git a/build/android/pylib/utils/host_utils.py b/build/android/pylib/utils/host_utils.py
new file mode 100644
index 0000000..580721f
--- /dev/null
+++ b/build/android/pylib/utils/host_utils.py
@@ -0,0 +1,16 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+
+def GetRecursiveDiskUsage(path):
+  """Returns the disk usage in bytes of |path|. Similar to `du -sb |path|`."""
+  running_size = os.path.getsize(path)
+  if os.path.isdir(path):
+    for root, dirs, files in os.walk(path):
+      running_size += sum([os.path.getsize(os.path.join(root, f))
+                           for f in files + dirs])
+  return running_size
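+
+# Example: for a directory holding a single 10-byte file, the result is the
+# directory entry's own size (often 4096 bytes) plus 10.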
+
diff --git a/build/android/pylib/utils/json_results_generator.py b/build/android/pylib/utils/json_results_generator.py
new file mode 100644
index 0000000..52446b1
--- /dev/null
+++ b/build/android/pylib/utils/json_results_generator.py
@@ -0,0 +1,697 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# Tools/Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py
+# Tools/Scripts/webkitpy/common/net/file_uploader.py
+#
+
+import json
+import logging
+import mimetypes
+import os
+import time
+import urllib2
+
+_log = logging.getLogger(__name__)
+
+_JSON_PREFIX = 'ADD_RESULTS('
+_JSON_SUFFIX = ');'
+
+
+def HasJSONWrapper(string):
+  return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX)
+
+
+def StripJSONWrapper(json_content):
+  # FIXME: Kill this code once the server returns json instead of jsonp.
+  if HasJSONWrapper(json_content):
+    return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)]
+  return json_content
+
+
+def WriteJSON(json_object, file_path, callback=None):
+  # Specify separators in order to get compact encoding.
+  json_string = json.dumps(json_object, separators=(',', ':'))
+  if callback:
+    json_string = callback + '(' + json_string + ');'
+  with open(file_path, 'w') as fp:
+    fp.write(json_string)
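+
+# Example: WriteJSON({'a': 1}, path, callback='ADD_RESULTS') writes
+#   ADD_RESULTS({"a":1});
+# which HasJSONWrapper()/StripJSONWrapper() above can detect and undo.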
+
+
+def ConvertTrieToFlatPaths(trie, prefix=None):
+  """Flattens the trie of paths, prepending a prefix to each."""
+  result = {}
+  for name, data in trie.iteritems():
+    if prefix:
+      name = prefix + '/' + name
+
+    if data and 'results' not in data:
+      result.update(ConvertTrieToFlatPaths(data, name))
+    else:
+      result[name] = data
+
+  return result
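+
+# Example: {'foo': {'bar': {'results': []}}} flattens to
+# {'foo/bar': {'results': []}}; leaf dicts that contain a 'results' key are
+# kept intact.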
+
+
+def AddPathToTrie(path, value, trie):
+  """Inserts a single path and value into a directory trie structure."""
+  if '/' not in path:
+    trie[path] = value
+    return
+
+  directory, _slash, rest = path.partition('/')
+  if directory not in trie:
+    trie[directory] = {}
+  AddPathToTrie(rest, value, trie[directory])
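+
+# Example: AddPathToTrie('foo/bar/baz.html', 7, trie) stores
+# trie['foo']['bar']['baz.html'] = 7, creating intermediate dicts as needed.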
+
+
+def TestTimingsTrie(individual_test_timings):
+  """Breaks a test name into dicts by directory
+
+  foo/bar/baz.html: 1ms
+  foo/bar/baz1.html: 3ms
+
+  becomes
+  foo: {
+      bar: {
+          baz.html: 1,
+          baz1.html: 3
+      }
+  }
+  """
+  trie = {}
+  for test_result in individual_test_timings:
+    test = test_result.test_name
+
+    AddPathToTrie(test, int(1000 * test_result.test_run_time), trie)
+
+  return trie
+
+
+class TestResult(object):
+  """A simple class that represents a single test result."""
+
+  # Test modifier constants.
+  (NONE, FAILS, FLAKY, DISABLED) = range(4)
+
+  def __init__(self, test, failed=False, elapsed_time=0):
+    self.test_name = test
+    self.failed = failed
+    self.test_run_time = elapsed_time
+
+    test_name = test
+    try:
+      test_name = test.split('.')[1]
+    except IndexError:
+      _log.warn('Invalid test name: %s.', test)
+
+    if test_name.startswith('FAILS_'):
+      self.modifier = self.FAILS
+    elif test_name.startswith('FLAKY_'):
+      self.modifier = self.FLAKY
+    elif test_name.startswith('DISABLED_'):
+      self.modifier = self.DISABLED
+    else:
+      self.modifier = self.NONE
+
+  def Fixable(self):
+    return self.failed or self.modifier == self.DISABLED
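+
+  # Example: TestResult('suite.FLAKY_testFoo').modifier == TestResult.FLAKY,
+  # and TestResult('suite.testFoo', failed=True).Fixable() is True.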
+
+
+class JSONResultsGeneratorBase(object):
+  """A JSON results generator for generic tests."""
+
+  MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750
+  # Min time (seconds) that will be added to the JSON.
+  MIN_TIME = 1
+
+  # Note that in non-chromium tests those chars are used to indicate
+  # test modifiers (FAILS, FLAKY, etc) but not actual test results.
+  PASS_RESULT = 'P'
+  SKIP_RESULT = 'X'
+  FAIL_RESULT = 'F'
+  FLAKY_RESULT = 'L'
+  NO_DATA_RESULT = 'N'
+
+  MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT,
+                      TestResult.DISABLED: SKIP_RESULT,
+                      TestResult.FAILS: FAIL_RESULT,
+                      TestResult.FLAKY: FLAKY_RESULT}
+
+  VERSION = 4
+  VERSION_KEY = 'version'
+  RESULTS = 'results'
+  TIMES = 'times'
+  BUILD_NUMBERS = 'buildNumbers'
+  TIME = 'secondsSinceEpoch'
+  TESTS = 'tests'
+
+  FIXABLE_COUNT = 'fixableCount'
+  FIXABLE = 'fixableCounts'
+  ALL_FIXABLE_COUNT = 'allFixableCount'
+
+  RESULTS_FILENAME = 'results.json'
+  TIMES_MS_FILENAME = 'times_ms.json'
+  INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json'
+
+  # line too long pylint: disable=C0301
+  URL_FOR_TEST_LIST_JSON = (
+      'http://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&master=%s')
+  # pylint: enable=C0301
+
+  def __init__(self, builder_name, build_name, build_number,
+               results_file_base_path, builder_base_url,
+               test_results_map, svn_repositories=None,
+               test_results_server=None,
+               test_type='',
+               master_name=''):
+    """Modifies the results.json file. Grabs it off the archive directory
+    if it is not found locally.
+
+    Args:
+      builder_name: the builder name (e.g. Webkit).
+      build_name: the build name (e.g. webkit-rel).
+      build_number: the build number.
+      results_file_base_path: Absolute path to the directory containing the
+          results json file.
+      builder_base_url: the URL where we have the archived test results.
+          If this is None no archived results will be retrieved.
+      test_results_map: A dictionary that maps test_name to TestResult.
+      svn_repositories: A (json_field_name, svn_path) pair for SVN
+          repositories that tests rely on.  The SVN revision will be
+          included in the JSON with the given json_field_name.
+      test_results_server: server that hosts test results json.
+      test_type: test type string (e.g. 'layout-tests').
+      master_name: the name of the buildbot master.
+    """
+    self._builder_name = builder_name
+    self._build_name = build_name
+    self._build_number = build_number
+    self._builder_base_url = builder_base_url
+    self._results_directory = results_file_base_path
+
+    self._test_results_map = test_results_map
+    self._test_results = test_results_map.values()
+
+    self._svn_repositories = svn_repositories
+    if not self._svn_repositories:
+      self._svn_repositories = {}
+
+    self._test_results_server = test_results_server
+    self._test_type = test_type
+    self._master_name = master_name
+
+    self._archived_results = None
+
+  def GenerateJSONOutput(self):
+    json_object = self.GetJSON()
+    if json_object:
+      file_path = (
+          os.path.join(
+              self._results_directory,
+              self.INCREMENTAL_RESULTS_FILENAME))
+      WriteJSON(json_object, file_path)
+
+  def GenerateTimesMSFile(self):
+    times = TestTimingsTrie(self._test_results_map.values())
+    file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME)
+    WriteJSON(times, file_path)
+
+  def GetJSON(self):
+    """Gets the results for the results.json file."""
+    results_json, error = self._GetArchivedJSONResults()
+    if error:
+      # If there was an error, don't write a results.json file at all,
+      # as it would lose all the information on the bot.
+      _log.error('Archive directory is inaccessible. Not '
+                 'modifying or clobbering the results.json '
+                 'file: ' + str(error))
+      return None
+
+    builder_name = self._builder_name
+    if results_json and builder_name not in results_json:
+      _log.debug('Builder name (%s) is not in the results.json file.'
+                 % builder_name)
+
+    self._ConvertJSONToCurrentVersion(results_json)
+
+    if builder_name not in results_json:
+      results_json[builder_name] = (
+          self._CreateResultsForBuilderJSON())
+
+    results_for_builder = results_json[builder_name]
+
+    if builder_name:
+      self._InsertGenericMetaData(results_for_builder)
+
+    self._InsertFailureSummaries(results_for_builder)
+
+    # Update the all failing tests with result type and time.
+    tests = results_for_builder[self.TESTS]
+    all_failing_tests = self._GetFailedTestNames()
+    all_failing_tests.update(ConvertTrieToFlatPaths(tests))
+
+    for test in all_failing_tests:
+      self._InsertTestTimeAndResult(test, tests)
+
+    return results_json
+
+  def SetArchivedResults(self, archived_results):
+    self._archived_results = archived_results
+
+  def UploadJSONFiles(self, json_files):
+    """Uploads the given json_files to the test_results_server (if the
+    test_results_server is given)."""
+    if not self._test_results_server:
+      return
+
+    if not self._master_name:
+      _log.error(
+          '--test-results-server was set, but --master-name was not.  Not '
+          'uploading JSON files.')
+      return
+
+    _log.info('Uploading JSON files for builder: %s', self._builder_name)
+    attrs = [('builder', self._builder_name),
+             ('testtype', self._test_type),
+             ('master', self._master_name)]
+
+    files = [(json_file, os.path.join(self._results_directory, json_file))
+             for json_file in json_files]
+
+    url = 'http://%s/testfile/upload' % self._test_results_server
+    # Set uploading timeout in case appengine server is having problems.
+    # 120 seconds are more than enough to upload test results.
+    uploader = _FileUploader(url, 120)
+    try:
+      response = uploader.UploadAsMultipartFormData(files, attrs)
+      if response:
+        if response.code == 200:
+          _log.info('JSON uploaded.')
+        else:
+          _log.debug(
+              "JSON upload failed, %d: '%s'" %
+              (response.code, response.read()))
+      else:
+        _log.error('JSON upload failed; no response returned')
+    except Exception as err:
+      _log.error('Upload failed: %s' % err)
+      return
+
+  def _GetTestTiming(self, test_name):
+    """Returns test timing data (elapsed time) in second
+    for the given test_name."""
+    if test_name in self._test_results_map:
+      # Floor for now to get time in seconds.
+      return int(self._test_results_map[test_name].test_run_time)
+    return 0
+
+  def _GetFailedTestNames(self):
+    """Returns a set of failed test names."""
+    return set([r.test_name for r in self._test_results if r.failed])
+
+  def _GetModifierChar(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier in self.MODIFIER_TO_CHAR:
+      return self.MODIFIER_TO_CHAR[test_result.modifier]
+
+    return self.__class__.PASS_RESULT
+
+  def _get_result_char(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier == TestResult.DISABLED:
+      return self.__class__.SKIP_RESULT
+
+    if test_result.failed:
+      return self.__class__.FAIL_RESULT
+
+    return self.__class__.PASS_RESULT
+
+  def _GetSVNRevision(self, in_directory):
+    """Returns the svn revision for the given directory.
+
+    Args:
+      in_directory: The directory where svn is to be run.
+    """
+    # This is overridden in flakiness_dashboard_results_uploader.py.
+    raise NotImplementedError()
+
+  def _GetArchivedJSONResults(self):
+    """Download JSON file that only contains test
+    name list from test-results server. This is for generating incremental
+    JSON so the file generated has info for tests that failed before but
+    pass or are skipped from current run.
+
+    Returns (archived_results, error) tuple where error is None if results
+    were successfully read.
+    """
+    results_json = {}
+    old_results = None
+    error = None
+
+    if not self._test_results_server:
+      return {}, None
+
+    results_file_url = (self.URL_FOR_TEST_LIST_JSON %
+                        (urllib2.quote(self._test_results_server),
+                         urllib2.quote(self._builder_name),
+                         self.RESULTS_FILENAME,
+                         urllib2.quote(self._test_type),
+                         urllib2.quote(self._master_name)))
+
+    try:
+      # FIXME: We should talk to the network via a Host object.
+      results_file = urllib2.urlopen(results_file_url)
+      old_results = results_file.read()
+    except urllib2.HTTPError as http_error:
+      # A non-4xx status code means the bot is hosed for some reason
+      # and we can't grab the results.json file off of it.
+      if (http_error.code < 400 or http_error.code >= 500):
+        error = http_error
+    except urllib2.URLError as url_error:
+      error = url_error
+
+    if old_results:
+      # Strip the prefix and suffix so we can get the actual JSON object.
+      old_results = StripJSONWrapper(old_results)
+
+      try:
+        results_json = json.loads(old_results)
+      except Exception:
+        _log.debug('results.json was not valid JSON. Clobbering.')
+        # The JSON file is not valid JSON. Just clobber the results.
+        results_json = {}
+    else:
+      _log.debug('Old JSON results do not exist. Starting fresh.')
+      results_json = {}
+
+    return results_json, error
+
+  def _InsertFailureSummaries(self, results_for_builder):
+    """Inserts aggregate pass/failure statistics into the JSON.
+    This method reads self._test_results and generates
+    FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+    """
+    # Insert the number of tests that failed or skipped.
+    fixable_count = len([r for r in self._test_results if r.Fixable()])
+    self._InsertItemIntoRawList(results_for_builder,
+                                fixable_count, self.FIXABLE_COUNT)
+
+    # Create a test modifiers (FAILS, FLAKY etc) summary dictionary.
+    entry = {}
+    for test_name in self._test_results_map.iterkeys():
+      result_char = self._GetModifierChar(test_name)
+      entry[result_char] = entry.get(result_char, 0) + 1
+
+    # Insert the pass/skip/failure summary dictionary.
+    self._InsertItemIntoRawList(results_for_builder, entry,
+                                self.FIXABLE)
+
+    # Insert the number of all the tests that are supposed to pass.
+    all_test_count = len(self._test_results)
+    self._InsertItemIntoRawList(results_for_builder,
+                                all_test_count, self.ALL_FIXABLE_COUNT)
+
+  def _InsertItemIntoRawList(self, results_for_builder, item, key):
+    """Inserts the item into the list with the given key in the results for
+    this builder. Creates the list if no such list exists.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+      item: Number or string to insert into the list.
+      key: Key in results_for_builder for the list to insert into.
+    """
+    if key in results_for_builder:
+      raw_list = results_for_builder[key]
+    else:
+      raw_list = []
+
+    raw_list.insert(0, item)
+    raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG]
+    results_for_builder[key] = raw_list
+
+  def _InsertItemRunLengthEncoded(self, item, encoded_results):
+    """Inserts the item into the run-length encoded results.
+
+    Args:
+      item: String or number to insert.
+      encoded_results: run-length encoded results. An array of arrays, e.g.
+          [[3,'A'],[1,'Q']] encodes AAAQ.
+    """
+    if encoded_results and item == encoded_results[0][1]:
+      num_results = encoded_results[0][0]
+      if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+        encoded_results[0][0] = num_results + 1
+    else:
+      # Use a list instead of a class for the run-length encoding since
+      # we want the serialized form to be concise.
+      encoded_results.insert(0, [1, item])
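+
+  # Example: inserting 'A' into [[3, 'A'], [1, 'Q']] gives
+  # [[4, 'A'], [1, 'Q']]; inserting 'Q' gives [[1, 'Q'], [3, 'A'], [1, 'Q']]
+  # (the newest result sits at the front).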
+
+  def _InsertGenericMetaData(self, results_for_builder):
+    """ Inserts generic metadata (such as version number, current time etc)
+    into the JSON.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for
+          a single builder.
+    """
+    self._InsertItemIntoRawList(results_for_builder,
+                                self._build_number, self.BUILD_NUMBERS)
+
+    # Include SVN revisions for the given repositories.
+    for (name, path) in self._svn_repositories:
+      # Note: for JSON file's backward-compatibility we use 'chrome' rather
+      # than 'chromium' here.
+      lowercase_name = name.lower()
+      if lowercase_name == 'chromium':
+        lowercase_name = 'chrome'
+      self._InsertItemIntoRawList(results_for_builder,
+                                  self._GetSVNRevision(path),
+                                  lowercase_name + 'Revision')
+
+    self._InsertItemIntoRawList(results_for_builder,
+                                int(time.time()),
+                                self.TIME)
+
+  def _InsertTestTimeAndResult(self, test_name, tests):
+    """ Insert a test item with its results to the given tests dictionary.
+
+    Args:
+      tests: Dictionary containing test result entries.
+    """
+
+    result = self._get_result_char(test_name)
+    test_time = self._GetTestTiming(test_name)
+
+    this_test = tests
+    for segment in test_name.split('/'):
+      if segment not in this_test:
+        this_test[segment] = {}
+      this_test = this_test[segment]
+
+    if not this_test:
+      self._PopulateResultsAndTimesJSON(this_test)
+
+    if self.RESULTS in this_test:
+      self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS])
+    else:
+      this_test[self.RESULTS] = [[1, result]]
+
+    if self.TIMES in this_test:
+      self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES])
+    else:
+      this_test[self.TIMES] = [[1, test_time]]
+
+  def _ConvertJSONToCurrentVersion(self, results_json):
+    """If the JSON does not match the current version, converts it to the
+    current version and adds in the new version number.
+    """
+    if self.VERSION_KEY in results_json:
+      archive_version = results_json[self.VERSION_KEY]
+      if archive_version == self.VERSION:
+        return
+    else:
+      archive_version = 3
+
+    # version 3->4
+    if archive_version == 3:
+      for results in results_json.values():
+        self._ConvertTestsToTrie(results)
+
+    results_json[self.VERSION_KEY] = self.VERSION
+
+  def _ConvertTestsToTrie(self, results):
+    if self.TESTS not in results:
+      return
+
+    test_results = results[self.TESTS]
+    test_results_trie = {}
+    for test in test_results.iterkeys():
+      single_test_result = test_results[test]
+      AddPathToTrie(test, single_test_result, test_results_trie)
+
+    results[self.TESTS] = test_results_trie
+
+  def _PopulateResultsAndTimesJSON(self, results_and_times):
+    results_and_times[self.RESULTS] = []
+    results_and_times[self.TIMES] = []
+    return results_and_times
+
+  def _CreateResultsForBuilderJSON(self):
+    results_for_builder = {}
+    results_for_builder[self.TESTS] = {}
+    return results_for_builder
+
+  def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list):
+    """Removes items from the run-length encoded list after the final
+    item that exceeds the max number of builds to track.
+
+    Args:
+      encoded_list: run-length encoded results. An array of arrays, e.g.
+          [[3,'A'],[1,'Q']] encodes AAAQ.
+    """
+    num_builds = 0
+    index = 0
+    for result in encoded_list:
+      num_builds = num_builds + result[0]
+      index = index + 1
+      if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+        return encoded_list[:index]
+    return encoded_list
+
+  def _NormalizeResultsJSON(self, test, test_name, tests):
+    """ Prune tests where all runs pass or tests that no longer exist and
+    truncate all results to maxNumberOfBuilds.
+
+    Args:
+      test: ResultsAndTimes object for this test.
+      test_name: Name of the test.
+      tests: The JSON object with all the test results for this builder.
+    """
+    test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds(
+        test[self.RESULTS])
+    test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds(
+        test[self.TIMES])
+
+    is_all_pass = self._IsResultsAllOfType(test[self.RESULTS],
+                                           self.PASS_RESULT)
+    is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS],
+                                              self.NO_DATA_RESULT)
+    max_time = max([test_time[1] for test_time in test[self.TIMES]])
+
+    # Remove all passes/no-data from the results to reduce noise and
+    # filesize. If a test passes every run, but takes > MIN_TIME to run,
+    # don't throw away the data.
+    if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME):
+      del tests[test_name]
+
+  # method could be a function pylint: disable=R0201
+  def _IsResultsAllOfType(self, results, result_type):
+    """Returns whether all the results are of the given type
+    (e.g. all passes)."""
+    return len(results) == 1 and results[0][1] == result_type
+
+
+class _FileUploader(object):
+
+  def __init__(self, url, timeout_seconds):
+    self._url = url
+    self._timeout_seconds = timeout_seconds
+
+  def UploadAsMultipartFormData(self, files, attrs):
+    file_objs = []
+    for filename, path in files:
+      with open(path, 'rb') as fp:
+        file_objs.append(('file', filename, fp.read()))
+
+    # FIXME: We should use the same variable names for the formal and actual
+    # parameters.
+    content_type, data = _EncodeMultipartFormData(attrs, file_objs)
+    return self._UploadData(content_type, data)
+
+  def _UploadData(self, content_type, data):
+    start = time.time()
+    end = start + self._timeout_seconds
+    while time.time() < end:
+      try:
+        request = urllib2.Request(self._url, data,
+                                  {'Content-Type': content_type})
+        return urllib2.urlopen(request)
+      except urllib2.HTTPError as e:
+        _log.warn("Received HTTP status %s loading \"%s\".  "
+                  'Retrying in 10 seconds...' % (e.code, e.filename))
+        time.sleep(10)
+
+
+def _GetMIMEType(filename):
+  return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
+# FIXME: Rather than taking tuples, this function should take more
+# structured data.
+def _EncodeMultipartFormData(fields, files):
+  """Encode form fields for multipart/form-data.
+
+  Args:
+    fields: A sequence of (name, value) elements for regular form fields.
+    files: A sequence of (name, filename, value) elements for data to be
+           uploaded as files.
+  Returns:
+    (content_type, body) ready for httplib.HTTP instance.
+
+  Source:
+    http://code.google.com/p/rietveld/source/browse/trunk/upload.py
+  """
+  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
+  CRLF = '\r\n'
+  lines = []
+
+  for key, value in fields:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"' % key)
+    lines.append('')
+    if isinstance(value, unicode):
+      value = value.encode('utf-8')
+    lines.append(value)
+
+  for key, filename, value in files:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"; '
+                 'filename="%s"' % (key, filename))
+    lines.append('Content-Type: %s' % _GetMIMEType(filename))
+    lines.append('')
+    if isinstance(value, unicode):
+      value = value.encode('utf-8')
+    lines.append(value)
+
+  lines.append('--' + BOUNDARY + '--')
+  lines.append('')
+  body = CRLF.join(lines)
+  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
+  return content_type, body
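+
+
+# Example: _EncodeMultipartFormData([('builder', 'Webkit')],
+# [('file', 'results.json', '{}')]) returns a content type of
+# 'multipart/form-data; boundary=-M-A-G-I-C---B-O-U-N-D-A-R-Y-' and a
+# CRLF-joined body holding one form-data part per field and per file.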
diff --git a/build/android/pylib/utils/json_results_generator_unittest.py b/build/android/pylib/utils/json_results_generator_unittest.py
new file mode 100644
index 0000000..41ab77b
--- /dev/null
+++ b/build/android/pylib/utils/json_results_generator_unittest.py
@@ -0,0 +1,213 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# webkitpy/layout_tests/layout_package/json_results_generator_unittest.py
+#
+
+import unittest
+import json
+
+from pylib.utils import json_results_generator
+
+
+class JSONGeneratorTest(unittest.TestCase):
+
+  def setUp(self):
+    self.builder_name = 'DUMMY_BUILDER_NAME'
+    self.build_name = 'DUMMY_BUILD_NAME'
+    self.build_number = 'DUMMY_BUILD_NUMBER'
+
+    # For archived results.
+    self._json = None
+    self._num_runs = 0
+    self._tests_set = set([])
+    self._test_timings = {}
+    self._failed_count_map = {}
+
+    self._PASS_count = 0
+    self._DISABLED_count = 0
+    self._FLAKY_count = 0
+    self._FAILS_count = 0
+    self._fixable_count = 0
+
+    self._orig_write_json = json_results_generator.WriteJSON
+
+    # unused arguments ... pylint: disable=W0613
+    def _WriteJSONStub(json_object, file_path, callback=None):
+      pass
+
+    json_results_generator.WriteJSON = _WriteJSONStub
+
+  def tearDown(self):
+    json_results_generator.WriteJSON = self._orig_write_json
+
+  def _TestJSONGeneration(self, passed_tests_list, failed_tests_list):
+    tests_set = set(passed_tests_list) | set(failed_tests_list)
+
+    DISABLED_tests = set([t for t in tests_set
+                          if t.startswith('DISABLED_')])
+    FLAKY_tests = set([t for t in tests_set
+                       if t.startswith('FLAKY_')])
+    FAILS_tests = set([t for t in tests_set
+                       if t.startswith('FAILS_')])
+    PASS_tests = tests_set - (DISABLED_tests | FLAKY_tests | FAILS_tests)
+
+    failed_tests = set(failed_tests_list) - DISABLED_tests
+    failed_count_map = dict([(t, 1) for t in failed_tests])
+
+    test_timings = {}
+    i = 0
+    for test in tests_set:
+      test_timings[test] = float(self._num_runs * 100 + i)
+      i += 1
+
+    test_results_map = dict()
+    for test in tests_set:
+      test_results_map[test] = json_results_generator.TestResult(
+          test, failed=(test in failed_tests),
+          elapsed_time=test_timings[test])
+
+    generator = json_results_generator.JSONResultsGeneratorBase(
+        self.builder_name, self.build_name, self.build_number,
+        '',
+        None,   # don't fetch past json results archive
+        test_results_map)
+
+    # Test incremental json results
+    incremental_json = generator.GetJSON()
+    self._VerifyJSONResults(
+        tests_set,
+        test_timings,
+        failed_count_map,
+        len(PASS_tests),
+        len(DISABLED_tests),
+        len(FLAKY_tests),
+        len(DISABLED_tests | failed_tests),
+        incremental_json,
+        1)
+
+    # We don't verify the results here, but at least we make sure the code
+    # runs without errors.
+    generator.GenerateJSONOutput()
+    generator.GenerateTimesMSFile()
+
+  def _VerifyJSONResults(self, tests_set, test_timings, failed_count_map,
+                         PASS_count, DISABLED_count, FLAKY_count,
+                         fixable_count, json_obj, num_runs):
+    # Aliasing to a short name for better access to its constants.
+    JRG = json_results_generator.JSONResultsGeneratorBase
+
+    self.assertIn(JRG.VERSION_KEY, json_obj)
+    self.assertIn(self.builder_name, json_obj)
+
+    buildinfo = json_obj[self.builder_name]
+    self.assertIn(JRG.FIXABLE, buildinfo)
+    self.assertIn(JRG.TESTS, buildinfo)
+    self.assertEqual(len(buildinfo[JRG.BUILD_NUMBERS]), num_runs)
+    self.assertEqual(buildinfo[JRG.BUILD_NUMBERS][0], self.build_number)
+
+    if tests_set or DISABLED_count:
+      fixable = {}
+      for fixable_items in buildinfo[JRG.FIXABLE]:
+        for (result_type, count) in fixable_items.iteritems():
+          if result_type in fixable:
+            fixable[result_type] = fixable[result_type] + count
+          else:
+            fixable[result_type] = count
+
+      if PASS_count:
+        self.assertEqual(fixable[JRG.PASS_RESULT], PASS_count)
+      else:
+        self.assertTrue(JRG.PASS_RESULT not in fixable or
+                        fixable[JRG.PASS_RESULT] == 0)
+      if DISABLED_count:
+        self.assertEqual(fixable[JRG.SKIP_RESULT], DISABLED_count)
+      else:
+        self.assertTrue(JRG.SKIP_RESULT not in fixable or
+                        fixable[JRG.SKIP_RESULT] == 0)
+      if FLAKY_count:
+        self.assertEqual(fixable[JRG.FLAKY_RESULT], FLAKY_count)
+      else:
+        self.assertTrue(JRG.FLAKY_RESULT not in fixable or
+                        fixable[JRG.FLAKY_RESULT] == 0)
+
+    if failed_count_map:
+      tests = buildinfo[JRG.TESTS]
+      for test_name in failed_count_map.iterkeys():
+        test = self._FindTestInTrie(test_name, tests)
+
+        failed = 0
+        for result in test[JRG.RESULTS]:
+          if result[1] == JRG.FAIL_RESULT:
+            failed += result[0]
+        self.assertEqual(failed_count_map[test_name], failed)
+
+        timing_count = 0
+        for timings in test[JRG.TIMES]:
+          if timings[1] == test_timings[test_name]:
+            timing_count = timings[0]
+        self.assertEqual(1, timing_count)
+
+    if fixable_count:
+      self.assertEqual(sum(buildinfo[JRG.FIXABLE_COUNT]), fixable_count)
+
+  def _FindTestInTrie(self, path, trie):
+    nodes = path.split('/')
+    sub_trie = trie
+    for node in nodes:
+      self.assertIn(node, sub_trie)
+      sub_trie = sub_trie[node]
+    return sub_trie
+
+  def testJSONGeneration(self):
+    self._TestJSONGeneration([], [])
+    self._TestJSONGeneration(['A1', 'B1'], [])
+    self._TestJSONGeneration([], ['FAILS_A2', 'FAILS_B2'])
+    self._TestJSONGeneration(['DISABLED_A3', 'DISABLED_B3'], [])
+    self._TestJSONGeneration(['A4'], ['B4', 'FAILS_C4'])
+    self._TestJSONGeneration(['DISABLED_C5', 'DISABLED_D5'], ['A5', 'B5'])
+    self._TestJSONGeneration(
+        ['A6', 'B6', 'FAILS_C6', 'DISABLED_E6', 'DISABLED_F6'],
+        ['FAILS_D6'])
+
+    # Generate JSON with the same test sets. (Both incremental results and
+    # archived results must be updated appropriately.)
+    self._TestJSONGeneration(
+        ['A', 'FLAKY_B', 'DISABLED_C'],
+        ['FAILS_D', 'FLAKY_E'])
+    self._TestJSONGeneration(
+        ['A', 'DISABLED_C', 'FLAKY_E'],
+        ['FLAKY_B', 'FAILS_D'])
+    self._TestJSONGeneration(
+        ['FLAKY_B', 'DISABLED_C', 'FAILS_D'],
+        ['A', 'FLAKY_E'])
+
+  def testHierarchicalJSONGeneration(self):
+    # FIXME: Re-work tests to be more comprehensible and comprehensive.
+    self._TestJSONGeneration(['foo/A'], ['foo/B', 'bar/C'])
+
+  def testTestTimingsTrie(self):
+    individual_test_timings = []
+    individual_test_timings.append(
+        json_results_generator.TestResult(
+            'foo/bar/baz.html',
+            elapsed_time=1.2))
+    individual_test_timings.append(
+        json_results_generator.TestResult('bar.html', elapsed_time=0.0001))
+    trie = json_results_generator.TestTimingsTrie(individual_test_timings)
+
+    expected_trie = {
+        'bar.html': 0,
+        'foo': {
+            'bar': {
+                'baz.html': 1200,
+            }
+        }
+    }
+
+    self.assertEqual(json.dumps(trie), json.dumps(expected_trie))
diff --git a/build/android/pylib/utils/parallelizer.py b/build/android/pylib/utils/parallelizer.py
new file mode 100644
index 0000000..9323c21
--- /dev/null
+++ b/build/android/pylib/utils/parallelizer.py
@@ -0,0 +1,242 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Wrapper that allows method execution in parallel.
+
+This class wraps a list of objects of the same type, emulates their
+interface, and executes any functions called on the objects in parallel
+in ReraiserThreads.
+
+This means that, given a list of objects:
+
+  class Foo:
+    def __init__(self):
+      self.baz = Baz()
+
+    def bar(self, my_param):
+      # do something
+
+  list_of_foos = [Foo(), Foo(), Foo()]
+
+we can take a sequential operation on that list of objects:
+
+  for f in list_of_foos:
+    f.bar('Hello')
+
+and run it in parallel across all of the objects:
+
+  Parallelizer(list_of_foos).bar('Hello')
+
+It can also handle (non-method) attributes of objects, so that this:
+
+  for f in list_of_foos:
+    f.baz.myBazMethod()
+
+can be run in parallel with:
+
+  Parallelizer(list_of_foos).baz.myBazMethod()
+
+Because it emulates the interface of the wrapped objects, a Parallelizer
+can be passed to a method or function that takes objects of that type:
+
+  def DoesSomethingWithFoo(the_foo):
+    the_foo.bar('Hello')
+    the_foo.bar('world')
+    the_foo.baz.myBazMethod()
+
+  DoesSomethingWithFoo(Parallelizer(list_of_foos))
+
+Note that this class spins up a thread for each object. Using this class
+to parallelize operations that are already fast will incur a net performance
+penalty.
+
+"""
+# pylint: disable=W0613
+
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 3
+
+
+class Parallelizer(object):
+  """Allows parallel execution of method calls across a group of objects."""
+
+  def __init__(self, objs):
+    assert (objs is not None and len(objs) > 0), (
+        "Passed None or an empty list to 'Parallelizer'")
+    self._orig_objs = objs
+    self._objs = objs
+
+  def __getattr__(self, name):
+    """Emulate getting the |name| attribute of |self|.
+
+    Args:
+      name: The name of the attribute to retrieve.
+    Returns:
+      A Parallelizer emulating the |name| attribute of |self|.
+    """
+    # Finish any outstanding asynchronous operations first.
+    self.pGet(None)
+
+    r = type(self)(self._orig_objs)
+    r._objs = [getattr(o, name) for o in self._objs]
+    return r
+
+  def __getitem__(self, index):
+    """Emulate getting the value of |self| at |index|.
+
+    Returns:
+      A Parallelizer emulating the value of |self| at |index|.
+    """
+    self.pGet(None)
+
+    r = type(self)(self._orig_objs)
+    r._objs = [o[index] for o in self._objs]
+    return r
+
+  def __call__(self, *args, **kwargs):
+    """Emulate calling |self| with |args| and |kwargs|.
+
+    Note that this call is asynchronous. Call pFinish on the return value to
+    block until the call finishes.
+
+    Returns:
+      A Parallelizer wrapping the ReraiserThreadGroup running the call in
+      parallel.
+    Raises:
+      AttributeError if the wrapped objects aren't callable.
+    """
+    self.pGet(None)
+
+    if not self._objs:
+      raise AttributeError('Nothing to call.')
+    for o in self._objs:
+      if not callable(o):
+        raise AttributeError("'%s' is not callable" % o.__name__)
+
+    r = type(self)(self._orig_objs)
+    r._objs = reraiser_thread.ReraiserThreadGroup(
+        [reraiser_thread.ReraiserThread(
+            o, args=args, kwargs=kwargs,
+            name='%s.%s' % (str(d), o.__name__))
+         for d, o in zip(self._orig_objs, self._objs)])
+    r._objs.StartAll() # pylint: disable=W0212
+    return r
+
+  def pFinish(self, timeout):
+    """Finish any outstanding asynchronous operations.
+
+    Args:
+      timeout: The maximum number of seconds to wait for an individual
+               result to return, or None to wait forever.
+    Returns:
+      self, now emulating the return values.
+    """
+    self._assertNoShadow('pFinish')
+    if isinstance(self._objs, reraiser_thread.ReraiserThreadGroup):
+      self._objs.JoinAll()
+      self._objs = self._objs.GetAllReturnValues(
+          watchdog_timer.WatchdogTimer(timeout))
+    return self
+
+  def pGet(self, timeout):
+    """Get the current wrapped objects.
+
+    Args:
+      timeout: Same as |pFinish|.
+    Returns:
+      A list of the results, in the order of the original wrapped objects.
+    Raises:
+      Any exception raised by any of the called functions.
+    """
+    self._assertNoShadow('pGet')
+    self.pFinish(timeout)
+    return self._objs
+
+  def pMap(self, f, *args, **kwargs):
+    """Map a function across the current wrapped objects in parallel.
+
+    This calls f(o, *args, **kwargs) for each o in the set of wrapped objects.
+
+    Note that this call is asynchronous. Call pFinish on the return value to
+    block until the call finishes.
+
+    Args:
+      f: The function to call.
+      args: The positional args to pass to f.
+      kwargs: The keyword args to pass to f.
+    Returns:
+      A Parallelizer wrapping the ReraiserThreadGroup running the map in
+      parallel.
+    """
+    self._assertNoShadow('pMap')
+    r = type(self)(self._orig_objs)
+    r._objs = reraiser_thread.ReraiserThreadGroup(
+        [reraiser_thread.ReraiserThread(
+            f, args=tuple([o] + list(args)), kwargs=kwargs,
+            name='%s(%s)' % (f.__name__, d))
+         for d, o in zip(self._orig_objs, self._objs)])
+    r._objs.StartAll() # pylint: disable=W0212
+    return r
+
+  def _assertNoShadow(self, attr_name):
+    """Ensures that |attr_name| isn't shadowing part of the wrapped obejcts.
+
+    If the wrapped objects _do_ have an |attr_name| attribute, it will be
+    inaccessible to clients.
+
+    Args:
+      attr_name: The attribute to check.
+    Raises:
+      AssertionError if the wrapped objects have an attribute named
+      '_assertNoShadow' or 'pGet'.
+    """
+    if isinstance(self._objs, reraiser_thread.ReraiserThreadGroup):
+      assert(not hasattr(self._objs, '_assertNoShadow'))
+      assert(not hasattr(self._objs, 'pGet'))
+    else:
+      assert(not any(hasattr(o, '_assertNoShadow') for o in self._objs))
+      assert(not any(hasattr(o, 'pGet') for o in self._objs))
+
+
+class SyncParallelizer(Parallelizer):
+  """A Parallelizer that blocks on function calls."""
+
+  #override
+  def __call__(self, *args, **kwargs):
+    """Emulate calling |self| with |args| and |kwargs|.
+
+    Note that this call is synchronous.
+
+    Returns:
+      A Parallelizer emulating the value returned from calling |self| with
+      |args| and |kwargs|.
+    Raises:
+      AttributeError if the wrapped objects aren't callable.
+    """
+    r = super(SyncParallelizer, self).__call__(*args, **kwargs)
+    r.pFinish(None)
+    return r
+
+  #override
+  def pMap(self, f, *args, **kwargs):
+    """Map a function across the current wrapped objects in parallel.
+
+    This calls f(o, *args, **kwargs) for each o in the set of wrapped objects.
+
+    Note that this call is synchronous.
+
+    Args:
+      f: The function to call.
+      args: The positional args to pass to f.
+      kwargs: The keyword args to pass to f.
+    Returns:
+      A Parallelizer emulating the values returned from the map.
+    """
+    r = super(SyncParallelizer, self).pMap(f, *args, **kwargs)
+    r.pFinish(None)
+    return r
+
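+# Illustrative usage (a minimal sketch; 'job' and the two-second timeout are
+# assumptions, not part of this module):
+#
+#   def job(s):
+#     return s.upper()
+#
+#   # Calls 'upper' on each wrapped string in its own thread.
+#   upper = Parallelizer(['a', 'b']).upper().pGet(2)     # ['A', 'B']
+#   # pMap applies an arbitrary function to each wrapped object.
+#   mapped = Parallelizer(['a', 'b']).pMap(job).pGet(2)  # ['A', 'B']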
diff --git a/build/android/pylib/utils/parallelizer_test.py b/build/android/pylib/utils/parallelizer_test.py
new file mode 100644
index 0000000..6e0c7e7
--- /dev/null
+++ b/build/android/pylib/utils/parallelizer_test.py
@@ -0,0 +1,166 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the contents of parallelizer.py."""
+
+# pylint: disable=W0212
+# pylint: disable=W0613
+
+import os
+import tempfile
+import time
+import unittest
+
+from pylib.utils import parallelizer
+
+
+class ParallelizerTestObject(object):
+  """Class used to test parallelizer.Parallelizer."""
+
+  parallel = parallelizer.Parallelizer
+
+  def __init__(self, thing, completion_file_name=None):
+    self._thing = thing
+    self._completion_file_name = completion_file_name
+    self.helper = ParallelizerTestObjectHelper(thing)
+
+  @staticmethod
+  def doReturn(what):
+    return what
+
+  @classmethod
+  def doRaise(cls, what):
+    raise what
+
+  def doSetTheThing(self, new_thing):
+    self._thing = new_thing
+
+  def doReturnTheThing(self):
+    return self._thing
+
+  def doRaiseTheThing(self):
+    raise self._thing
+
+  def doRaiseIfExceptionElseSleepFor(self, sleep_duration):
+    if isinstance(self._thing, Exception):
+      raise self._thing
+    time.sleep(sleep_duration)
+    self._write_completion_file()
+    return self._thing
+
+  def _write_completion_file(self):
+    if self._completion_file_name:
+      with open(self._completion_file_name, 'w+b') as completion_file:
+        completion_file.write('complete')
+
+  def __getitem__(self, index):
+    return self._thing[index]
+
+  def __str__(self):
+    return type(self).__name__
+
+
+class ParallelizerTestObjectHelper(object):
+
+  def __init__(self, thing):
+    self._thing = thing
+
+  def doReturnStringThing(self):
+    return str(self._thing)
+
+
+class ParallelizerTest(unittest.TestCase):
+
+  def testInitWithNone(self):
+    with self.assertRaises(AssertionError):
+      parallelizer.Parallelizer(None)
+
+  def testInitEmptyList(self):
+    with self.assertRaises(AssertionError):
+      parallelizer.Parallelizer([])
+
+  def testMethodCall(self):
+    test_data = ['abc_foo', 'def_foo', 'ghi_foo']
+    expected = ['abc_bar', 'def_bar', 'ghi_bar']
+    r = parallelizer.Parallelizer(test_data).replace('_foo', '_bar').pGet(0.1)
+    self.assertEquals(expected, r)
+
+  def testMutate(self):
+    devices = [ParallelizerTestObject(True) for _ in xrange(0, 10)]
+    self.assertTrue(all(d.doReturnTheThing() for d in devices))
+    ParallelizerTestObject.parallel(devices).doSetTheThing(False).pFinish(1)
+    self.assertTrue(not any(d.doReturnTheThing() for d in devices))
+
+  def testAllReturn(self):
+    devices = [ParallelizerTestObject(True) for _ in xrange(0, 10)]
+    results = ParallelizerTestObject.parallel(
+        devices).doReturnTheThing().pGet(1)
+    self.assertTrue(isinstance(results, list))
+    self.assertEquals(10, len(results))
+    self.assertTrue(all(results))
+
+  def testAllRaise(self):
+    devices = [ParallelizerTestObject(Exception('thing %d' % i))
+               for i in xrange(0, 10)]
+    p = ParallelizerTestObject.parallel(devices).doRaiseTheThing()
+    with self.assertRaises(Exception):
+      p.pGet(1)
+
+  def testOneFailOthersComplete(self):
+    parallel_device_count = 10
+    exception_index = 7
+    exception_msg = 'thing %d' % exception_index
+
+    try:
+      completion_files = [tempfile.NamedTemporaryFile(delete=False)
+                          for _ in xrange(0, parallel_device_count)]
+      devices = [
+          ParallelizerTestObject(
+              i if i != exception_index else Exception(exception_msg),
+              completion_files[i].name)
+          for i in xrange(0, parallel_device_count)]
+      for f in completion_files:
+        f.close()
+      p = ParallelizerTestObject.parallel(devices)
+      with self.assertRaises(Exception) as e:
+        p.doRaiseIfExceptionElseSleepFor(2).pGet(3)
+      self.assertTrue(exception_msg in str(e.exception))
+      for i in xrange(0, parallel_device_count):
+        with open(completion_files[i].name) as f:
+          if i == exception_index:
+            self.assertEquals('', f.read())
+          else:
+            self.assertEquals('complete', f.read())
+    finally:
+      for f in completion_files:
+        os.remove(f.name)
+
+  def testReusable(self):
+    devices = [ParallelizerTestObject(True) for _ in xrange(0, 10)]
+    p = ParallelizerTestObject.parallel(devices)
+    results = p.doReturn(True).pGet(1)
+    self.assertTrue(all(results))
+    results = p.doReturn(True).pGet(1)
+    self.assertTrue(all(results))
+    with self.assertRaises(Exception):
+      results = p.doRaise(Exception('reusableTest')).pGet(1)
+
+  def testContained(self):
+    devices = [ParallelizerTestObject(i) for i in xrange(0, 10)]
+    results = (ParallelizerTestObject.parallel(devices).helper
+        .doReturnStringThing().pGet(1))
+    self.assertTrue(isinstance(results, list))
+    self.assertEquals(10, len(results))
+    for i in xrange(0, 10):
+      self.assertEquals(str(i), results[i])
+
+  def testGetItem(self):
+    devices = [ParallelizerTestObject(range(i, i+10)) for i in xrange(0, 10)]
+    results = ParallelizerTestObject.parallel(devices)[9].pGet(1)
+    self.assertEquals(range(9, 19), results)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/utils/repo_utils.py b/build/android/pylib/utils/repo_utils.py
new file mode 100644
index 0000000..e0c7d2c
--- /dev/null
+++ b/build/android/pylib/utils/repo_utils.py
@@ -0,0 +1,16 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import cmd_helper
+
+
+def GetGitHeadSHA1(in_directory):
+  """Returns the git hash tag for the given directory.
+
+  Args:
+    in_directory: The directory where git is to be run.
+  """
+  command_line = ['git', 'log', '-1', '--pretty=format:%H']
+  output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
+  return output[0:40]
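+# Illustrative usage (the path is an assumption, not from this module):
+#
+#   sha1 = GetGitHeadSHA1('src')  # a 40-character hex string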
diff --git a/build/android/pylib/utils/report_results.py b/build/android/pylib/utils/report_results.py
new file mode 100644
index 0000000..b13b9bc
--- /dev/null
+++ b/build/android/pylib/utils/report_results.py
@@ -0,0 +1,108 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing utility functions for reporting results."""
+
+import logging
+import os
+import re
+
+from pylib import constants
+from pylib.utils import flakiness_dashboard_results_uploader
+
+
+def _LogToFile(results, test_type, suite_name):
+  """Log results to local files which can be used for aggregation later."""
+  log_file_path = os.path.join(constants.GetOutDirectory(), 'test_logs')
+  if not os.path.exists(log_file_path):
+    os.mkdir(log_file_path)
+  full_file_name = os.path.join(
+      log_file_path, re.sub(r'\W', '_', test_type).lower() + '.log')
+  if not os.path.exists(full_file_name):
+    with open(full_file_name, 'w') as log_file:
+      print >> log_file, '\n%s results for %s build %s:' % (
+          test_type, os.environ.get('BUILDBOT_BUILDERNAME'),
+          os.environ.get('BUILDBOT_BUILDNUMBER'))
+
+  logging.info('Writing results to %s.' % full_file_name)
+  with open(full_file_name, 'a') as log_file:
+    shortened_suite_name = suite_name[:25] + (suite_name[25:] and '...')
+    print >> log_file, '%s%s' % (shortened_suite_name.ljust(30),
+                                 results.GetShortForm())
+
+
+def _LogToFlakinessDashboard(results, test_type, test_package,
+                             flakiness_server):
+  """Upload results to the flakiness dashboard"""
+  logging.info('Upload results for test type "%s", test package "%s" to %s' %
+               (test_type, test_package, flakiness_server))
+
+  # TODO(frankf): Enable uploading for gtests.
+  if test_type != 'Instrumentation':
+    logging.warning('Invalid test type.')
+    return
+
+  try:
+    if flakiness_server == constants.UPSTREAM_FLAKINESS_SERVER:
+      assert test_package in ['ContentShellTest',
+                              'ChromeShellTest',
+                              'AndroidWebViewTest']
+      dashboard_test_type = ('%s_instrumentation_tests' %
+                             test_package.lower().rstrip('test'))
+    # Downstream server.
+    else:
+      dashboard_test_type = 'Chromium_Android_Instrumentation'
+
+    flakiness_dashboard_results_uploader.Upload(
+        results, flakiness_server, dashboard_test_type)
+  except Exception as e:
+    logging.error(e)
+
+
+def LogFull(results, test_type, test_package, annotation=None,
+            flakiness_server=None):
+  """Log the tests results for the test suite.
+
+  The results will be logged three different ways:
+    1. Log to stdout.
+    2. Log to local files for aggregating multiple test steps
+       (on buildbots only).
+    3. Log to flakiness dashboard (on buildbots only).
+
+  Args:
+    results: An instance of TestRunResults object.
+    test_type: Type of the test (e.g. 'Instrumentation', 'Unit test', etc.).
+    test_package: Test package name (e.g. 'ipc_tests' for gtests,
+                  'ContentShellTest' for instrumentation tests)
+    annotation: If instrumentation test type, this is a list of annotations
+                (e.g. ['Smoke', 'SmallTest']).
+    flakiness_server: If provided, upload the results to the flakiness
+                      dashboard at this URL.
+  """
+  if not results.DidRunPass():
+    logging.critical('*' * 80)
+    logging.critical('Detailed Logs')
+    logging.critical('*' * 80)
+    for line in results.GetLogs().splitlines():
+      logging.critical(line)
+  logging.critical('*' * 80)
+  logging.critical('Summary')
+  logging.critical('*' * 80)
+  for line in results.GetGtestForm().splitlines():
+    logging.critical(line)
+  logging.critical('*' * 80)
+
+  if os.environ.get('BUILDBOT_BUILDERNAME'):
+    # It is possible to have multiple buildbot steps for the same
+    # instrumentation test package using different annotations.
+    if annotation and len(annotation) == 1:
+      suite_name = annotation[0]
+    else:
+      suite_name = test_package
+    _LogToFile(results, test_type, suite_name)
+
+    if flakiness_server:
+      _LogToFlakinessDashboard(results, test_type, test_package,
+                               flakiness_server)
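+# Illustrative call (a sketch; 'results' must be a TestRunResults instance,
+# and the package/annotation values are assumptions based on the docstring):
+#
+#   LogFull(results, 'Instrumentation', 'ContentShellTest',
+#           annotation=['Smoke'],
+#           flakiness_server=constants.UPSTREAM_FLAKINESS_SERVER)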
diff --git a/build/android/pylib/utils/reraiser_thread.py b/build/android/pylib/utils/reraiser_thread.py
new file mode 100644
index 0000000..2964bef
--- /dev/null
+++ b/build/android/pylib/utils/reraiser_thread.py
@@ -0,0 +1,156 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Thread and ThreadGroup that reraise exceptions on the main thread."""
+# pylint: disable=W0212
+
+import logging
+import sys
+import threading
+import traceback
+
+from pylib.utils import watchdog_timer
+
+
+class TimeoutError(Exception):
+  """Module-specific timeout exception."""
+  pass
+
+
+def LogThreadStack(thread):
+  """Log the stack for the given thread.
+
+  Args:
+    thread: a threading.Thread instance.
+  """
+  stack = sys._current_frames()[thread.ident]
+  logging.critical('*' * 80)
+  logging.critical('Stack dump for thread \'%s\'', thread.name)
+  logging.critical('*' * 80)
+  for filename, lineno, name, line in traceback.extract_stack(stack):
+    logging.critical('File: "%s", line %d, in %s', filename, lineno, name)
+    if line:
+      logging.critical('  %s', line.strip())
+  logging.critical('*' * 80)
+
+
+class ReraiserThread(threading.Thread):
+  """Thread class that can reraise exceptions."""
+
+  def __init__(self, func, args=None, kwargs=None, name=None):
+    """Initialize thread.
+
+    Args:
+      func: callable to call on a new thread.
+      args: list of positional arguments for callable, defaults to empty.
+      kwargs: dictionary of keyword arguments for callable, defaults to empty.
+      name: thread name, defaults to Thread-N.
+    """
+    super(ReraiserThread, self).__init__(name=name)
+    if not args:
+      args = []
+    if not kwargs:
+      kwargs = {}
+    self.daemon = True
+    self._func = func
+    self._args = args
+    self._kwargs = kwargs
+    self._ret = None
+    self._exc_info = None
+
+  def ReraiseIfException(self):
+    """Reraise exception if an exception was raised in the thread."""
+    if self._exc_info:
+      raise self._exc_info[0], self._exc_info[1], self._exc_info[2]
+
+  def GetReturnValue(self):
+    """Reraise exception if present, otherwise get the return value."""
+    self.ReraiseIfException()
+    return self._ret
+
+  #override
+  def run(self):
+    """Overrides Thread.run() to add support for reraising exceptions."""
+    try:
+      self._ret = self._func(*self._args, **self._kwargs)
+    except: # pylint: disable=W0702
+      self._exc_info = sys.exc_info()
+
+
+class ReraiserThreadGroup(object):
+  """A group of ReraiserThread objects."""
+
+  def __init__(self, threads=None):
+    """Initialize thread group.
+
+    Args:
+      threads: a list of ReraiserThread objects; defaults to empty.
+    """
+    if not threads:
+      threads = []
+    self._threads = threads
+
+  def Add(self, thread):
+    """Add a thread to the group.
+
+    Args:
+      thread: a ReraiserThread object.
+    """
+    self._threads.append(thread)
+
+  def StartAll(self):
+    """Start all threads."""
+    for thread in self._threads:
+      thread.start()
+
+  def _JoinAll(self, watcher=watchdog_timer.WatchdogTimer(None)):
+    """Join all threads without stack dumps.
+
+    Reraises exceptions raised by the child threads and supports breaking
+    immediately on exceptions raised on the main thread.
+
+    Args:
+      watcher: Watchdog object providing timeout, by default waits forever.
+    """
+    alive_threads = self._threads[:]
+    while alive_threads:
+      for thread in alive_threads[:]:
+        if watcher.IsTimedOut():
+          raise TimeoutError('Timed out waiting for %d of %d threads.' %
+                             (len(alive_threads), len(self._threads)))
+        # Allow the main thread to periodically check for interrupts.
+        thread.join(0.1)
+        if not thread.isAlive():
+          alive_threads.remove(thread)
+    # All threads are allowed to complete before reraising exceptions.
+    for thread in self._threads:
+      thread.ReraiseIfException()
+
+  def JoinAll(self, watcher=watchdog_timer.WatchdogTimer(None)):
+    """Join all threads.
+
+    Reraises exceptions raised by the child threads and supports breaking
+    immediately on exceptions raised on the main thread. Unfinished threads'
+    stacks will be logged on watchdog timeout.
+
+    Args:
+      watcher: Watchdog object providing timeout, by default waits forever.
+    """
+    try:
+      self._JoinAll(watcher)
+    except TimeoutError:
+      for thread in (t for t in self._threads if t.isAlive()):
+        LogThreadStack(thread)
+      raise
+
+  def GetAllReturnValues(self, watcher=watchdog_timer.WatchdogTimer(None)):
+    """Get all return values, joining all threads if necessary.
+
+    Args:
+      watcher: same as in |JoinAll|. Only used if threads are alive.
+    """
+    if any([t.isAlive() for t in self._threads]):
+      self.JoinAll(watcher)
+    return [t.GetReturnValue() for t in self._threads]
+
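+# Illustrative usage (a minimal sketch; 'job_a' and 'job_b' are assumed
+# callables, not part of this module):
+#
+#   group = ReraiserThreadGroup(
+#       [ReraiserThread(f) for f in (job_a, job_b)])
+#   group.StartAll()
+#   values = group.GetAllReturnValues()  # reraises any child exception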
diff --git a/build/android/pylib/utils/reraiser_thread_unittest.py b/build/android/pylib/utils/reraiser_thread_unittest.py
new file mode 100644
index 0000000..2392d0e
--- /dev/null
+++ b/build/android/pylib/utils/reraiser_thread_unittest.py
@@ -0,0 +1,96 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for reraiser_thread.py."""
+
+import threading
+import unittest
+
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+
+class TestException(Exception):
+  pass
+
+
+class TestReraiserThread(unittest.TestCase):
+  """Tests for reraiser_thread.ReraiserThread."""
+  def testNominal(self):
+    result = [None, None]
+
+    def f(a, b=None):
+      result[0] = a
+      result[1] = b
+
+    thread = reraiser_thread.ReraiserThread(f, [1], {'b': 2})
+    thread.start()
+    thread.join()
+    self.assertEqual(result[0], 1)
+    self.assertEqual(result[1], 2)
+
+  def testRaise(self):
+    def f():
+      raise TestException
+
+    thread = reraiser_thread.ReraiserThread(f)
+    thread.start()
+    thread.join()
+    with self.assertRaises(TestException):
+      thread.ReraiseIfException()
+
+
+class TestReraiserThreadGroup(unittest.TestCase):
+  """Tests for reraiser_thread.ReraiserThreadGroup."""
+  def testInit(self):
+    ran = [False] * 5
+    def f(i):
+      ran[i] = True
+
+    group = reraiser_thread.ReraiserThreadGroup(
+        [reraiser_thread.ReraiserThread(f, args=[i]) for i in range(5)])
+    group.StartAll()
+    group.JoinAll()
+    for v in ran:
+      self.assertTrue(v)
+
+  def testAdd(self):
+    ran = [False] * 5
+    def f(i):
+      ran[i] = True
+
+    group = reraiser_thread.ReraiserThreadGroup()
+    for i in xrange(5):
+      group.Add(reraiser_thread.ReraiserThread(f, args=[i]))
+    group.StartAll()
+    group.JoinAll()
+    for v in ran:
+      self.assertTrue(v)
+
+  def testJoinRaise(self):
+    def f():
+      raise TestException
+    group = reraiser_thread.ReraiserThreadGroup(
+        [reraiser_thread.ReraiserThread(f) for _ in xrange(5)])
+    group.StartAll()
+    with self.assertRaises(TestException):
+      group.JoinAll()
+
+  def testJoinTimeout(self):
+    def f():
+      pass
+    event = threading.Event()
+    def g():
+      event.wait()
+    group = reraiser_thread.ReraiserThreadGroup(
+        [reraiser_thread.ReraiserThread(g),
+         reraiser_thread.ReraiserThread(f)])
+    group.StartAll()
+    with self.assertRaises(reraiser_thread.TimeoutError):
+      group.JoinAll(watchdog_timer.WatchdogTimer(0.01))
+    event.set()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/utils/run_tests_helper.py b/build/android/pylib/utils/run_tests_helper.py
new file mode 100644
index 0000000..43f654d
--- /dev/null
+++ b/build/android/pylib/utils/run_tests_helper.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions common to native, java and host-driven test runners."""
+
+import logging
+import sys
+import time
+
+
+class CustomFormatter(logging.Formatter):
+  """Custom log formatter."""
+
+  #override
+  def __init__(self, fmt='%(threadName)-4s  %(message)s'):
+    # Can't use super() because in older Python versions logging.Formatter does
+    # not inherit from object.
+    logging.Formatter.__init__(self, fmt=fmt)
+    self._creation_time = time.time()
+
+  #override
+  def format(self, record):
+    # Can't use super() because in older Python versions logging.Formatter does
+    # not inherit from object.
+    msg = logging.Formatter.format(self, record)
+    if 'MainThread' in msg[:19]:
+      msg = msg.replace('MainThread', 'Main', 1)
+    timediff = time.time() - self._creation_time
+    return '%s %8.3fs %s' % (record.levelname[0], timediff, msg)
+
+
+def SetLogLevel(verbose_count):
+  """Sets log level as |verbose_count|."""
+  log_level = logging.WARNING  # Default.
+  if verbose_count == 1:
+    log_level = logging.INFO
+  elif verbose_count >= 2:
+    log_level = logging.DEBUG
+  logger = logging.getLogger()
+  logger.setLevel(log_level)
+  custom_handler = logging.StreamHandler(sys.stdout)
+  custom_handler.setFormatter(CustomFormatter())
+  logging.getLogger().addHandler(custom_handler)
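+# Illustrative usage (the verbosity value is an assumption):
+#
+#   SetLogLevel(2)          # root logger at DEBUG
+#   logging.debug('hello')  # formatted by CustomFormatter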
diff --git a/build/android/pylib/utils/test_environment.py b/build/android/pylib/utils/test_environment.py
new file mode 100644
index 0000000..4d88a45
--- /dev/null
+++ b/build/android/pylib/utils/test_environment.py
@@ -0,0 +1,46 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import psutil
+import signal
+
+from pylib.device import device_errors
+from pylib.device import device_utils
+
+
+def _KillWebServers():
+  for s in [signal.SIGTERM, signal.SIGINT, signal.SIGQUIT, signal.SIGKILL]:
+    signalled = []
+    for server in ['lighttpd', 'webpagereplay']:
+      for p in psutil.process_iter():
+        try:
+          if server not in ' '.join(p.cmdline):
+            continue
+          logging.info('Killing %s %s %s', s, server, p.pid)
+          p.send_signal(s)
+          signalled.append(p)
+        except Exception as e:
+          logging.warning('Failed killing %s %s %s', server, p.pid, e)
+    for p in signalled:
+      try:
+        p.wait(1)
+      except Exception as e:
+        logging.warning('Failed waiting for %s to die. %s', p.pid, e)
+
+
+def CleanupLeftoverProcesses():
+  """Clean up the test environment, restarting fresh adb and HTTP daemons."""
+  _KillWebServers()
+  device_utils.RestartServer()
+  p = device_utils.DeviceUtils.parallel()
+  p.old_interface.RestartAdbdOnDevice()
+  try:
+    p.EnableRoot()
+  except device_errors.CommandFailedError as e:
+    # TODO(jbudorick) Handle this exception appropriately after interface
+    #                 conversions are finished.
+    logging.error(str(e))
+  p.WaitUntilFullyBooted()
+
diff --git a/build/android/pylib/utils/time_profile.py b/build/android/pylib/utils/time_profile.py
new file mode 100644
index 0000000..45da7ff
--- /dev/null
+++ b/build/android/pylib/utils/time_profile.py
@@ -0,0 +1,26 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+
+class TimeProfile(object):
+  """Class for simple profiling of action, with logging of cost."""
+
+  def __init__(self, description):
+    self._starttime = None
+    self._description = description
+    self.Start()
+
+  def Start(self):
+    self._starttime = time.time()
+
+  def Stop(self):
+    """Stop profiling and dump a log."""
+    if self._starttime:
+      stoptime = time.time()
+      logging.info('%fsec to perform %s',
+                   stoptime - self._starttime, self._description)
+      self._starttime = None
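+# Illustrative usage (the description string is an assumption):
+#
+#   timer = TimeProfile('device flash')  # starts timing immediately
+#   time.sleep(1)                        # the work being measured
+#   timer.Stop()                         # logs the elapsed time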
diff --git a/build/android/pylib/utils/timeout_retry.py b/build/android/pylib/utils/timeout_retry.py
new file mode 100644
index 0000000..bc7e891
--- /dev/null
+++ b/build/android/pylib/utils/timeout_retry.py
@@ -0,0 +1,50 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A utility to run functions with timeouts and retries."""
+# pylint: disable=W0702
+
+import threading
+
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+
+def Run(func, timeout, retries, args=None, kwargs=None):
+  """Runs the passed function in a separate thread with timeouts and retries.
+
+  Args:
+    func: the function to be wrapped.
+    timeout: the timeout in seconds for each try.
+    retries: the number of retries.
+    args: list of positional args to pass to |func|.
+    kwargs: dictionary of keyword args to pass to |func|.
+
+  Returns:
+    The return value of func(*args, **kwargs).
+  """
+  if not args:
+    args = []
+  if not kwargs:
+    kwargs = {}
+
+  # The return value is stored in a one-element list because the nested
+  # function cannot rebind a name in its enclosing scope (Python 2 has no
+  # 'nonlocal'); mutating the list's contents works around that.
+  ret = [None]
+  def RunOnTimeoutThread():
+    ret[0] = func(*args, **kwargs)
+
+  while True:
+    try:
+      name = 'TimeoutThread-for-%s' % threading.current_thread().name
+      thread_group = reraiser_thread.ReraiserThreadGroup(
+          [reraiser_thread.ReraiserThread(RunOnTimeoutThread, name=name)])
+      thread_group.StartAll()
+      thread_group.JoinAll(watchdog_timer.WatchdogTimer(timeout))
+      return ret[0]
+    except:
+      if retries <= 0:
+        raise
+      retries -= 1
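+# Illustrative usage (a sketch; '_FlakyFetch' and 'DownloadSomehow' are
+# assumed helpers, not part of this module):
+#
+#   def _FlakyFetch(url):
+#     return DownloadSomehow(url)
+#
+#   result = Run(_FlakyFetch, timeout=30, retries=3,
+#                args=['http://example.com'])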
diff --git a/build/android/pylib/utils/timeout_retry_unittest.py b/build/android/pylib/utils/timeout_retry_unittest.py
new file mode 100644
index 0000000..dc36c42
--- /dev/null
+++ b/build/android/pylib/utils/timeout_retry_unittest.py
@@ -0,0 +1,52 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for timeout_and_retry.py."""
+
+import unittest
+
+from pylib.utils import reraiser_thread
+from pylib.utils import timeout_retry
+
+
+class TestException(Exception):
+  pass
+
+
+def _NeverEnding(tries):
+  tries[0] += 1
+  while True:
+    pass
+
+
+def _CountTries(tries):
+  tries[0] += 1
+  raise TestException
+
+
+class TestRun(unittest.TestCase):
+  """Tests for timeout_retry.Run."""
+
+  def testRun(self):
+    self.assertTrue(timeout_retry.Run(
+        lambda x: x, 30, 3, [True], {}))
+
+  def testTimeout(self):
+    tries = [0]
+    self.assertRaises(reraiser_thread.TimeoutError,
+        timeout_retry.Run, lambda: _NeverEnding(tries), 0, 3)
+    self.assertEqual(tries[0], 4)
+
+  def testRetries(self):
+    tries = [0]
+    self.assertRaises(TestException,
+        timeout_retry.Run, lambda: _CountTries(tries), 30, 3)
+    self.assertEqual(tries[0], 4)
+
+  def testReturnValue(self):
+    self.assertTrue(timeout_retry.Run(lambda: True, 30, 3))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/utils/watchdog_timer.py b/build/android/pylib/utils/watchdog_timer.py
new file mode 100644
index 0000000..d14dabb
--- /dev/null
+++ b/build/android/pylib/utils/watchdog_timer.py
@@ -0,0 +1,37 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""WatchdogTimer timeout objects."""
+
+import time
+
+
+class WatchdogTimer(object):
+  """A resetable timeout-based watchdog.
+
+  This object is threadsafe.
+  """
+
+  def __init__(self, timeout):
+    """Initializes the watchdog.
+
+    Args:
+      timeout: The timeout in seconds. If None, the watchdog never times out.
+    """
+    self._start_time = time.time()
+    self._timeout = timeout
+
+  def Reset(self):
+    """Resets the timeout countdown."""
+    self._start_time = time.time()
+
+  def IsTimedOut(self):
+    """Whether the watchdog has timed out.
+
+    Returns:
+      True if the watchdog has timed out, False otherwise.
+    """
+    if self._timeout is None:
+      return False
+    return time.time() - self._start_time > self._timeout
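+# Illustrative usage (a sketch; '_StillWorking' is an assumed predicate):
+#
+#   watchdog = WatchdogTimer(5)
+#   while _StillWorking() and not watchdog.IsTimedOut():
+#     time.sleep(0.1)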
diff --git a/build/android/pylib/utils/xvfb.py b/build/android/pylib/utils/xvfb.py
new file mode 100644
index 0000000..cb9d50e
--- /dev/null
+++ b/build/android/pylib/utils/xvfb.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=W0702
+
+import os
+import signal
+import subprocess
+import sys
+import time
+
+
+def _IsLinux():
+  """Return True if on Linux; else False."""
+  return sys.platform.startswith('linux')
+
+
+class Xvfb(object):
+  """Class to start and stop Xvfb if relevant.  Nop if not Linux."""
+
+  def __init__(self):
+    self._pid = 0
+
+  def Start(self):
+    """Start Xvfb and set an appropriate DISPLAY environment.  Linux only.
+
+    Copied from tools/code_coverage/coverage_posix.py
+    """
+    if not _IsLinux():
+      return
+    proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
+                             '-ac'],
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    self._pid = proc.pid
+    if not self._pid:
+      raise Exception('Could not start Xvfb')
+    os.environ['DISPLAY'] = ':9'
+
+    # Now confirm, giving a chance for it to start if needed.
+    for _ in range(10):
+      proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
+      _, retcode = os.waitpid(proc.pid, 0)
+      if retcode == 0:
+        break
+      time.sleep(0.25)
+    if retcode != 0:
+      raise Exception('Could not confirm Xvfb happiness')
+
+  def Stop(self):
+    """Stop Xvfb if needed.  Linux only."""
+    if self._pid:
+      try:
+        os.kill(self._pid, signal.SIGKILL)
+      except:
+        pass
+      del os.environ['DISPLAY']
+      self._pid = 0
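+# Illustrative usage (a sketch; 'RunTestsThatNeedX' is an assumed callable):
+#
+#   xvfb = Xvfb()
+#   xvfb.Start()
+#   try:
+#     RunTestsThatNeedX()
+#   finally:
+#     xvfb.Stop()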
diff --git a/build/android/pylib/valgrind_tools.py b/build/android/pylib/valgrind_tools.py
new file mode 100644
index 0000000..69f351a
--- /dev/null
+++ b/build/android/pylib/valgrind_tools.py
@@ -0,0 +1,278 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Classes in this file define additional actions that need to be taken to run a
+test under some kind of runtime error detection tool.
+
+The interface is intended to be used as follows.
+
+1. For tests that simply run a native process (i.e. no activity is spawned):
+
+Call tool.CopyFiles().
+Prepend test command line with tool.GetTestWrapper().
+
+2. For tests that spawn an activity:
+
+Call tool.CopyFiles().
+Call tool.SetupEnvironment().
+Run the test as usual.
+Call tool.CleanUpEnvironment().
+"""
+# pylint: disable=R0201
+
+import glob
+import logging
+import os.path
+import subprocess
+import sys
+
+from pylib.constants import DIR_SOURCE_ROOT
+from pylib.device import device_errors
+
+
+def SetChromeTimeoutScale(device, scale):
+  """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale."""
+  path = '/data/local/tmp/chrome_timeout_scale'
+  if not scale or scale == 1.0:
+    # Delete if scale is None/0.0/1.0 since the default timeout scale is 1.0
+    device.RunShellCommand('rm %s' % path)
+  else:
+    device.WriteFile(path, '%f' % scale, as_root=True)
+
+
+class BaseTool(object):
+  """A tool that does nothing."""
+
+  def __init__(self):
+    """Does nothing."""
+    pass
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ''
+
+  def GetUtilWrapper(self):
+    """Returns the wrapper name for the utilities.
+
+    Returns:
+       A string that is to be prepended to the command line of utility
+    processes (forwarder, etc.).
+    """
+    return ''
+
+  def CopyFiles(self):
+    """Copies tool-specific files to the device, create directories, etc."""
+    pass
+
+  def SetupEnvironment(self):
+    """Sets up the system environment for a test.
+
+    This is a good place to set system properties.
+    """
+    pass
+
+  def CleanUpEnvironment(self):
+    """Cleans up environment."""
+    pass
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 1.0
+
+  def NeedsDebugInfo(self):
+    """Whether this tool requires debug info.
+
+    Returns:
+      True if this tool can not work with stripped binaries.
+    """
+    return False
+
+
+class AddressSanitizerTool(BaseTool):
+  """AddressSanitizer tool."""
+
+  WRAPPER_NAME = '/system/bin/asanwrapper'
+  # Disable the memcmp overlap check. There are blobs (GL drivers) on some
+  # Android devices that use memcmp on overlapping regions; nothing we can
+  # do about that.
+  EXTRA_OPTIONS = 'strict_memcmp=0,use_sigaltstack=1'
+
+  def __init__(self, device):
+    super(AddressSanitizerTool, self).__init__()
+    self._device = device
+    # Configure AndroidCommands to run utils (such as md5sum_bin) under ASan.
+    # This is required because ASan is a compiler-based tool, and md5sum
+    # includes instrumented code from base.
+    device.old_interface.SetUtilWrapper(self.GetUtilWrapper())
+    libs = glob.glob(os.path.join(DIR_SOURCE_ROOT,
+                                  'third_party/llvm-build/Release+Asserts/',
+                                  'lib/clang/*/lib/linux/',
+                                  'libclang_rt.asan-arm-android.so'))
+    assert len(libs) == 1
+    self._lib = libs[0]
+
+  def CopyFiles(self):
+    """Copies ASan tools to the device."""
+    subprocess.call([os.path.join(DIR_SOURCE_ROOT,
+                                  'tools/android/asan/asan_device_setup.sh'),
+                     '--device', str(self._device),
+                     '--lib', self._lib,
+                     '--extra-options', AddressSanitizerTool.EXTRA_OPTIONS])
+    self._device.WaitUntilFullyBooted()
+
+  def GetTestWrapper(self):
+    return AddressSanitizerTool.WRAPPER_NAME
+
+  def GetUtilWrapper(self):
+    """Returns the wrapper for utilities, such as forwarder.
+
+    The AddressSanitizer wrapper must be added to all instrumented binaries,
+    including the forwarder and the like. This can be removed if such
+    binaries were built without instrumentation.
+    """
+    return self.GetTestWrapper()
+
+  def SetupEnvironment(self):
+    try:
+      self._device.EnableRoot()
+    except device_errors.CommandFailedError as e:
+      # Try to set the timeout scale anyway.
+      # TODO(jbudorick) Handle this exception appropriately after interface
+      #                 conversions are finished.
+      logging.error(str(e))
+    SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
+
+  def CleanUpEnvironment(self):
+    SetChromeTimeoutScale(self._device, None)
+
+  def GetTimeoutScale(self):
+    # Very slow startup.
+    return 20.0
+
+
+class ValgrindTool(BaseTool):
+  """Base abstract class for Valgrind tools."""
+
+  VG_DIR = '/data/local/tmp/valgrind'
+  VGLOGS_DIR = '/data/local/tmp/vglogs'
+
+  def __init__(self, device):
+    super(ValgrindTool, self).__init__()
+    self._device = device
+    # exactly 31 chars, SystemProperties::PROP_NAME_MAX
+    self._wrap_properties = ['wrap.com.google.android.apps.ch',
+                             'wrap.org.chromium.native_test']
+
+  def CopyFiles(self):
+    """Copies Valgrind tools to the device."""
+    self._device.RunShellCommand(
+        'rm -r %s; mkdir %s' % (ValgrindTool.VG_DIR, ValgrindTool.VG_DIR))
+    self._device.RunShellCommand(
+        'rm -r %s; mkdir %s' % (ValgrindTool.VGLOGS_DIR,
+                                ValgrindTool.VGLOGS_DIR))
+    files = self.GetFilesForTool()
+    for f in files:
+      self._device.PushChangedFiles(
+          os.path.join(DIR_SOURCE_ROOT, f),
+          os.path.join(ValgrindTool.VG_DIR, os.path.basename(f)))
+
+  def SetupEnvironment(self):
+    """Sets up device environment."""
+    self._device.RunShellCommand('chmod 777 /data/local/tmp')
+    self._device.RunShellCommand('setenforce 0')
+    for prop in self._wrap_properties:
+      self._device.RunShellCommand(
+          'setprop %s "logwrapper %s"' % (prop, self.GetTestWrapper()))
+    SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
+
+  def CleanUpEnvironment(self):
+    """Cleans up device environment."""
+    for prop in self._wrap_properties:
+      self._device.RunShellCommand('setprop %s ""' % (prop,))
+    SetChromeTimeoutScale(self._device, None)
+
+  def GetFilesForTool(self):
+    """Returns a list of file names for the tool."""
+    raise NotImplementedError()
+
+  def NeedsDebugInfo(self):
+    """Whether this tool requires debug info.
+
+    Returns:
+      True if this tool can not work with stripped binaries.
+    """
+    return True
+
+
+class MemcheckTool(ValgrindTool):
+  """Memcheck tool."""
+
+  def __init__(self, device):
+    super(MemcheckTool, self).__init__(device)
+
+  def GetFilesForTool(self):
+    """Returns a list of file names for the tool."""
+    return ['tools/valgrind/android/vg-chrome-wrapper.sh',
+            'tools/valgrind/memcheck/suppressions.txt',
+            'tools/valgrind/memcheck/suppressions_android.txt']
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper.sh'
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 30.0
+
+
+class TSanTool(ValgrindTool):
+  """ThreadSanitizer tool. See http://code.google.com/p/data-race-test ."""
+
+  def __init__(self, device):
+    super(TSanTool, self).__init__(device)
+
+  def GetFilesForTool(self):
+    """Returns a list of file names for the tool."""
+    return ['tools/valgrind/android/vg-chrome-wrapper-tsan.sh',
+            'tools/valgrind/tsan/suppressions.txt',
+            'tools/valgrind/tsan/suppressions_android.txt',
+            'tools/valgrind/tsan/ignores.txt']
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper-tsan.sh'
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 30.0
+
+
+TOOL_REGISTRY = {
+    'memcheck': MemcheckTool,
+    'memcheck-renderer': MemcheckTool,
+    'tsan': TSanTool,
+    'tsan-renderer': TSanTool,
+    'asan': AddressSanitizerTool,
+}
+
+
+def CreateTool(tool_name, device):
+  """Creates a tool with the specified tool name.
+
+  Args:
+    tool_name: Name of the tool to create.
+    device: A DeviceUtils instance.
+  Returns:
+    A tool for the specified tool_name.
+  """
+  if not tool_name:
+    return BaseTool()
+
+  ctor = TOOL_REGISTRY.get(tool_name)
+  if ctor:
+    return ctor(device)
+  else:
+    print 'Unknown tool %s, available tools: %s' % (
+        tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+    sys.exit(1)
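+# Illustrative flow for a native test (a sketch following the module
+# docstring; 'device' and 'test_binary' are assumptions):
+#
+#   tool = CreateTool('asan', device)
+#   tool.CopyFiles()
+#   command_line = '%s %s' % (tool.GetTestWrapper(), test_binary)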
diff --git a/build/android/rezip.gyp b/build/android/rezip.gyp
new file mode 100644
index 0000000..0dacffc
--- /dev/null
+++ b/build/android/rezip.gyp
@@ -0,0 +1,44 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Build the rezip build tool.
+{
+  'targets': [
+    {
+      'target_name': 'rezip_apk_jar',
+      'type': 'none',
+      'variables': {
+        'java_in_dir': 'rezip',
+        'compile_stamp': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)/compile.stamp',
+        'javac_jar_path': '<(PRODUCT_DIR)/lib.java/rezip_apk.jar',
+      },
+      'actions': [
+        {
+          'action_name': 'javac_<(_target_name)',
+          'message': 'Compiling <(_target_name) java sources',
+          'variables': {
+            'java_sources': ['>!@(find >(java_in_dir) -name "*.java")'],
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/javac.py',
+            '>@(java_sources)',
+          ],
+          'outputs': [
+            '<(compile_stamp)',
+            '<(javac_jar_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/javac.py',
+            '--classpath=',
+            '--classes-dir=<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+            '--jar-path=<(javac_jar_path)',
+            '--stamp=<(compile_stamp)',
+            '>@(java_sources)',
+          ]
+        },
+      ],
+    }
+  ],
+}
diff --git a/build/android/rezip/RezipApk.java b/build/android/rezip/RezipApk.java
new file mode 100644
index 0000000..fcb0703
--- /dev/null
+++ b/build/android/rezip/RezipApk.java
@@ -0,0 +1,454 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+import java.util.jar.JarOutputStream;
+import java.util.regex.Pattern;
+import java.util.zip.CRC32;
+
+/**
+ * Command line tool used to build APKs which support loading the native code library
+ * directly from the APK file. To construct the APK we rename the native library by
+ * adding the prefix "crazy." to the filename. This is done to prevent the Android
+ * Package Manager from extracting the library. The native code must be page aligned
+ * and uncompressed. The page alignment is implemented by adding a zero-filled file
+ * in front of the native code library. This tool is designed so that running
+ * SignApk and/or zipalign on the resulting APK does not break the page alignment.
+ * This is achieved by outputting the filenames in the same canonical order used
+ * by SignApk and adding the same alignment fields added by zipalign.
+ */
+class RezipApk {
+    // Alignment to use for non-compressed files (must match zipalign).
+    private static final int ALIGNMENT = 4;
+
+    // Alignment to use for non-compressed *.so files
+    private static final int LIBRARY_ALIGNMENT = 4096;
+
+    // Files matching this pattern are not copied to the output when adding alignment.
+    // When reordering and verifying the APK they are copied to the end of the file.
+    private static Pattern sMetaFilePattern =
+            Pattern.compile("^(META-INF/((.*)[.](SF|RSA|DSA)|com/android/otacert))|(" +
+                            Pattern.quote(JarFile.MANIFEST_NAME) + ")$");
+
+    // Pattern for matching a shared library in the APK
+    private static Pattern sLibraryPattern = Pattern.compile("^lib/[^/]*/lib.*[.]so$");
+    // Pattern for matching the crazy linker in the APK
+    private static Pattern sCrazyLinkerPattern =
+            Pattern.compile("^lib/[^/]*/libchromium_android_linker.so$");
+    // Pattern for matching a crazy loaded shared library in the APK
+    private static Pattern sCrazyLibraryPattern =
+            Pattern.compile("^lib/[^/]*/crazy.lib.*[.]so$");
+
+    private static boolean isLibraryFilename(String filename) {
+        return sLibraryPattern.matcher(filename).matches() &&
+                !sCrazyLinkerPattern.matcher(filename).matches();
+    }
+
+    private static boolean isCrazyLibraryFilename(String filename) {
+        return sCrazyLibraryPattern.matcher(filename).matches();
+    }
+
+    private static String renameLibraryForCrazyLinker(String filename) {
+        int lastSlash = filename.lastIndexOf('/');
+        // We rename the library, so that the Android Package Manager
+        // no longer extracts the library.
+        return filename.substring(0, lastSlash + 1) + "crazy." + filename.substring(lastSlash + 1);
+    }
+
+    /**
+     * Wraps another output stream, counting the number of bytes written.
+     */
+    private static class CountingOutputStream extends OutputStream {
+        private long mCount = 0;
+        private OutputStream mOut;
+
+        public CountingOutputStream(OutputStream out) {
+            this.mOut = out;
+        }
+
+        /** Returns the number of bytes written. */
+        public long getCount() {
+            return mCount;
+        }
+
+        @Override public void write(byte[] b, int off, int len) throws IOException {
+            mOut.write(b, off, len);
+            mCount += len;
+        }
+
+        @Override public void write(int b) throws IOException {
+            mOut.write(b);
+            mCount++;
+        }
+
+        @Override public void close() throws IOException {
+            mOut.close();
+        }
+
+        @Override public void flush() throws IOException {
+            mOut.flush();
+        }
+    }
+
+    private static String outputName(JarEntry entry, boolean rename) {
+        String inName = entry.getName();
+        if (rename && entry.getSize() > 0 && isLibraryFilename(inName)) {
+            return renameLibraryForCrazyLinker(inName);
+        }
+        return inName;
+    }
+
+    /**
+     * Comparator used to sort jar entries from the input file.
+     * Sorting is done based on the output filename (which may be renamed).
+     * Filenames are in natural string order, except that filenames matching
+     * the meta-file pattern are always after other files. This is so the manifest
+     * and signature are at the end of the file after any alignment file.
+     */
+    private static class EntryComparator implements Comparator<JarEntry> {
+        private boolean mRename;
+
+        public EntryComparator(boolean rename) {
+            mRename = rename;
+        }
+
+        @Override
+        public int compare(JarEntry j1, JarEntry j2) {
+            String o1 = outputName(j1, mRename);
+            String o2 = outputName(j2, mRename);
+            boolean o1Matches = sMetaFilePattern.matcher(o1).matches();
+            boolean o2Matches = sMetaFilePattern.matcher(o2).matches();
+            if (o1Matches != o2Matches) {
+                return o1Matches ? 1 : -1;
+            } else {
+                return o1.compareTo(o2);
+            }
+        }
+    }
+
+    // Build an ordered list of jar entries. The jar entries from the input are
+    // sorted based on the output filenames (which may be renamed). If |omitMetaFiles|
+    // is true do not include the jar entries for the META-INF files.
+    // Entries are ordered in the deterministic order used by SignApk.
+    private static List<JarEntry> getOutputFileOrderEntries(
+            JarFile jar, boolean omitMetaFiles, boolean rename) {
+        List<JarEntry> entries = new ArrayList<JarEntry>();
+        for (Enumeration<JarEntry> e = jar.entries(); e.hasMoreElements(); ) {
+            JarEntry entry = e.nextElement();
+            if (entry.isDirectory()) {
+                continue;
+            }
+            if (omitMetaFiles &&
+                sMetaFilePattern.matcher(entry.getName()).matches()) {
+                continue;
+            }
+            entries.add(entry);
+        }
+
+        // We sort the input entries by name. When present, META-INF files
+        // are sorted to the end.
+        Collections.sort(entries, new EntryComparator(rename));
+        return entries;
+    }
+
+    /**
+     * Add a zero-filled alignment file at this point in the zip file.
+     * The added file will be placed before |name| and after |prevName|.
+     * The size of the alignment file is such that the location of the
+     * file |name| will be on a LIBRARY_ALIGNMENT boundary.
+     *
+     * Note this arrangement is devised so that running SignApk and/or zipalign on the resulting
+     * file will not alter the alignment.
+     *
+     * @param offset number of bytes into the output file at this point.
+     * @param timestamp time in millis since the epoch to include in the header.
+     * @param name the name of the library filename.
+     * @param prevName the name of the previous file in the archive (or null).
+     * @param out jar output stream to write the alignment file to.
+     *
+     * @throws IOException if the output file can not be written.
+     */
+    private static void addAlignmentFile(
+            long offset, long timestamp, String name, String prevName,
+            JarOutputStream out) throws IOException {
+
+        // Compute the start and alignment of the library, as if it was next.
+        int headerSize = JarFile.LOCHDR + name.length();
+        long libOffset = offset + headerSize;
+        int libNeeded = LIBRARY_ALIGNMENT - (int) (libOffset % LIBRARY_ALIGNMENT);
+        if (libNeeded == LIBRARY_ALIGNMENT) {
+            // Already aligned; no need to add an alignment file.
+            return;
+        }
+
+        // Check that there is not another file between the library and the
+        // alignment file.
+        String alignName = name.substring(0, name.length() - 2) + "align";
+        if (prevName != null && prevName.compareTo(alignName) >= 0) {
+            throw new UnsupportedOperationException(
+                "Unable to insert alignment file, because there is "
+                + "another file in front of the file to be aligned. "
+                + "Other file: " + prevName + " Alignment file: " + alignName
+                + " file: " + name);
+        }
+
+        // Compute the size of the alignment file header.
+        headerSize = JarFile.LOCHDR + alignName.length();
+        // We are going to add an alignment file of type STORED. This file
+        // will itself induce a zipalign alignment adjustment.
+        int extraNeeded =
+                (ALIGNMENT - (int) ((offset + headerSize) % ALIGNMENT)) % ALIGNMENT;
+        headerSize += extraNeeded;
+
+        if (libNeeded < headerSize + 1) {
+            // The gap is too small to hold the alignment file header; add another page.
+            libNeeded += LIBRARY_ALIGNMENT;
+        }
+        // Compute the size of the alignment file.
+        libNeeded -= headerSize;
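+        // Worked example (illustrative; assumes LIBRARY_ALIGNMENT = 4096,
+        // ALIGNMENT = 4, and JarFile.LOCHDR = 30): for offset = 1000 and the
+        // 27-char name "lib/armeabi/crazy.libfoo.so", libNeeded starts as
+        // 4096 - ((1000 + 30 + 27) % 4096) = 3039. The 30-char alignment name
+        // gives headerSize = 60 with extraNeeded = 0, so libNeeded becomes
+        // 2979. The alignment entry then ends at 1000 + 60 + 2979 = 4039, and
+        // the 57-byte library header that follows places the library data at
+        // exactly offset 4096.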
+
+        // Build the header for the alignment file.
+        byte[] zeroBuffer = new byte[libNeeded];
+        JarEntry alignEntry = new JarEntry(alignName);
+        alignEntry.setMethod(JarEntry.STORED);
+        alignEntry.setSize(libNeeded);
+        alignEntry.setTime(timestamp);
+        CRC32 crc = new CRC32();
+        crc.update(zeroBuffer);
+        alignEntry.setCrc(crc.getValue());
+
+        if (extraNeeded != 0) {
+            alignEntry.setExtra(new byte[extraNeeded]);
+        }
+
+        // Output the alignment file.
+        out.putNextEntry(alignEntry);
+        out.write(zeroBuffer);
+        out.closeEntry();
+        out.flush();
+    }
+
+    // Make a JarEntry for the output file corresponding to the input file.
+    // The output entry will be named |name| and is always uncompressed
+    // (STORED). If the input is not STORED, it must be inflated to compute
+    // the CRC and size of the output entry.
+    private static JarEntry makeStoredEntry(String name, JarEntry inEntry, JarFile in)
+            throws IOException {
+        JarEntry outEntry = new JarEntry(name);
+        outEntry.setMethod(JarEntry.STORED);
+
+        if (inEntry.getMethod() == JarEntry.STORED) {
+            outEntry.setCrc(inEntry.getCrc());
+            outEntry.setSize(inEntry.getSize());
+        } else {
+            // We are inflating the file. We need to compute the CRC and size.
+            byte[] buffer = new byte[4096];
+            CRC32 crc = new CRC32();
+            int size = 0;
+            int num;
+            InputStream data = in.getInputStream(inEntry);
+            while ((num = data.read(buffer)) > 0) {
+                crc.update(buffer, 0, num);
+                size += num;
+            }
+            data.close();
+            outEntry.setCrc(crc.getValue());
+            outEntry.setSize(size);
+        }
+        return outEntry;
+    }
+
+    /**
+     * Copy the contents of the input APK file to the output APK file. If |rename| is
+     * true then non-empty libraries (*.so) in the input will be renamed by prefixing
+     * "crazy.". This is done to prevent the Android Package Manager extracting the
+     * library. Note the crazy linker itself is not renamed, for bootstrapping reasons.
+     * Empty libraries are not renamed (they are in the APK to work around a bug where
+     * the Android Package Manager fails to delete old versions when upgrading).
+     * There must be exactly one "crazy" library in the output stream. The "crazy"
+     * library will be uncompressed and page aligned in the output stream. Page
+     * alignment is implemented by adding a zero filled file, regular alignment is
+     * implemented by adding a zero filled extra field to the zip file header. If
+     * |addAlignment| is true a page alignment file is added, otherwise the "crazy"
+     * library must already be page aligned. Care is taken so that the output is generated
+     * in the same way as SignApk. This is important so that running SignApk and
+     * zipalign on the output does not break the page alignment. The archive must
+     * not contain a "*.apk" as SignApk has special nested signing logic that we do
+     * not support.
+     *
+     * @param in The input APK File.
+     * @param out The output APK stream.
+     * @param countOut Counting output stream (to measure the current offset).
+     * @param addAlignment Whether to add the alignment file or just check.
+     * @param rename Whether to rename libraries to be "crazy".
+     *
+     * @throws IOException if the output file cannot be written.
+     */
+    private static void rezip(
+            JarFile in, JarOutputStream out, CountingOutputStream countOut,
+            boolean addAlignment, boolean rename) throws IOException {
+
+        List<JarEntry> entries = getOutputFileOrderEntries(in, addAlignment, rename);
+        long timestamp = System.currentTimeMillis();
+        byte[] buffer = new byte[4096];
+        boolean firstEntry = true;
+        String prevName = null;
+        int numCrazy = 0;
+        for (JarEntry inEntry : entries) {
+            // Rename files, if specified.
+            String name = outputName(inEntry, rename);
+            if (name.endsWith(".apk")) {
+                throw new UnsupportedOperationException(
+                        "Nested APKs are not supported: " + name);
+            }
+
+            // Build the header.
+            JarEntry outEntry = null;
+            boolean isCrazy = isCrazyLibraryFilename(name);
+            if (isCrazy) {
+                // "crazy" libraries are alway output uncompressed (STORED).
+                outEntry = makeStoredEntry(name, inEntry, in);
+                numCrazy++;
+                if (numCrazy > 1) {
+                    throw new UnsupportedOperationException(
+                            "Found more than one library\n"
+                            + "Multiple libraries are not supported for APKs that use "
+                            + "'load_library_from_zip_file'.\n"
+                            + "See crbug/388223.\n"
+                            + "Note, check that your build is clean.\n"
+                            + "An unclean build can incorrectly incorporate old "
+                            + "libraries in the APK.");
+                }
+            } else if (inEntry.getMethod() == JarEntry.STORED) {
+                // Preserve the STORED method of the input entry.
+                outEntry = new JarEntry(inEntry);
+                outEntry.setExtra(null);
+            } else {
+                // Create a new entry so that the compressed length is recomputed.
+                outEntry = new JarEntry(name);
+            }
+            outEntry.setTime(timestamp);
+
+            // Compute and add alignment
+            long offset = countOut.getCount();
+            if (firstEntry) {
+                // The first entry in a jar file has an extra field of
+                // four bytes that you can't get rid of; any extra
+                // data you specify in the JarEntry is appended to
+                // these forced four bytes.  This is JAR_MAGIC in
+                // JarOutputStream; the bytes are 0xfeca0000.
+                firstEntry = false;
+                offset += 4;
+            }
+            if (outEntry.getMethod() == JarEntry.STORED) {
+                if (isCrazy) {
+                    if (addAlignment) {
+                        addAlignmentFile(offset, timestamp, name, prevName, out);
+                    }
+                    // We check that we did indeed get to a page boundary.
+                    offset = countOut.getCount() + JarFile.LOCHDR + name.length();
+                    if ((offset % LIBRARY_ALIGNMENT) != 0) {
+                        throw new AssertionError(
+                                "Library was not page aligned when verifying page alignment. "
+                                + "Library name: " + name + " Expected alignment: "
+                                + LIBRARY_ALIGNMENT + " Offset: " + offset + " Error: "
+                                + (offset % LIBRARY_ALIGNMENT));
+                    }
+                } else {
+                    // This is equivalent to zipalign.
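+                    // E.g. (illustrative): with ALIGNMENT = 4, a local header
+                    // ending at offset 1062 gets a 2-byte extra field so the
+                    // entry data starts at 1064, a multiple of ALIGNMENT --
+                    // the same adjustment zipalign would make.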
+                    offset += JarFile.LOCHDR + name.length();
+                    int needed = (ALIGNMENT - (int) (offset % ALIGNMENT)) % ALIGNMENT;
+                    if (needed != 0) {
+                        outEntry.setExtra(new byte[needed]);
+                    }
+                }
+            }
+            out.putNextEntry(outEntry);
+
+            // Copy the data from the input to the output
+            int num;
+            InputStream data = in.getInputStream(inEntry);
+            while ((num = data.read(buffer)) > 0) {
+                out.write(buffer, 0, num);
+            }
+            data.close();
+            out.closeEntry();
+            out.flush();
+            prevName = name;
+        }
+        if (numCrazy == 0) {
+            throw new AssertionError("There was no crazy library in the archive");
+        }
+    }
+
+    private static void usage() {
+        System.err.println(
+                "Usage: prealignapk (addalignment|reorder) input.apk output.apk");
+        System.err.println(
+                "\"crazy\" libraries are always inflated in the output");
+        System.err.println(
+                "  renamealign  - rename libraries with \"crazy.\" prefix and add alignment file");
+        System.err.println(
+                "  align        - add alignment file");
+        System.err.println(
+                "  reorder      - re-creates canonical ordering and checks alignment");
+        System.exit(2);
+    }
+
+    public static void main(String[] args) throws IOException {
+        if (args.length != 3) usage();
+
+        boolean addAlignment = false;
+        boolean rename = false;
+        if (args[0].equals("renamealign")) {
+            // Normal case. Before signing we rename the library and add an alignment file.
+            addAlignment = true;
+            rename = true;
+        } else if (args[0].equals("align")) {
+            // LGPL compliance case. Before signing, we add an alignment file to a
+            // reconstructed APK which already contains the "crazy" library.
+            addAlignment = true;
+            rename = false;
+        } else if (args[0].equals("reorder")) {
+            // Normal case. After jarsigning we write the file in the canonical order and check.
+            addAlignment = false;
+        } else {
+            usage();
+        }
+
+        String inputFilename = args[1];
+        String outputFilename = args[2];
+
+        JarFile inputJar = null;
+        FileOutputStream outputFile = null;
+
+        try {
+            inputJar = new JarFile(new File(inputFilename), true);
+            outputFile = new FileOutputStream(outputFilename);
+
+            CountingOutputStream outCount = new CountingOutputStream(outputFile);
+            JarOutputStream outputJar = new JarOutputStream(outCount);
+
+            // Match the compression level used by SignApk.
+            outputJar.setLevel(9);
+
+            rezip(inputJar, outputJar, outCount, addAlignment, rename);
+            outputJar.close();
+        } finally {
+            if (inputJar != null) inputJar.close();
+            if (outputFile != null) outputFile.close();
+        }
+    }
+}
diff --git a/build/android/screenshot.py b/build/android/screenshot.py
new file mode 100755
index 0000000..fb1aee1
--- /dev/null
+++ b/build/android/screenshot.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Takes a screenshot or a screen video capture from an Android device."""
+
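+# Sample usage (illustrative device serial and file names):
+#   screenshot.py --device 0123456789ABCDEF screenshot.png
+#   screenshot.py --video --bitrate 8 --size 720x1280 capture.mp4
+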
+import logging
+import optparse
+import os
+import sys
+
+from pylib import android_commands
+from pylib import screenshot
+from pylib.device import device_utils
+
+def _PrintMessage(heading, eol='\n'):
+  sys.stdout.write('%s%s' % (heading, eol))
+  sys.stdout.flush()
+
+
+def _CaptureScreenshot(device, host_file):
+  host_file = device.TakeScreenshot(host_file)
+  _PrintMessage('Screenshot written to %s' % os.path.abspath(host_file))
+
+
+def _CaptureVideo(device, host_file, options):
+  size = tuple(map(int, options.size.split('x'))) if options.size else None
+  recorder = screenshot.VideoRecorder(device,
+                                      megabits_per_second=options.bitrate,
+                                      size=size,
+                                      rotate=options.rotate)
+  try:
+    recorder.Start()
+    _PrintMessage('Recording. Press Enter to stop...', eol='')
+    raw_input()
+  finally:
+    recorder.Stop()
+  host_file = recorder.Pull(host_file)
+  _PrintMessage('Video written to %s' % os.path.abspath(host_file))
+
+
+def main():
+  # Parse options.
+  parser = optparse.OptionParser(description=__doc__,
+                                 usage='screenshot.py [options] [filename]')
+  parser.add_option('-d', '--device', metavar='ANDROID_DEVICE', help='Serial '
+                    'number of Android device to use.', default=None)
+  parser.add_option('-f', '--file', help='Save result to file instead of '
+                    'generating a timestamped file name.', metavar='FILE')
+  parser.add_option('-v', '--verbose', help='Verbose logging.',
+                    action='store_true')
+  video_options = optparse.OptionGroup(parser, 'Video capture')
+  video_options.add_option('--video', help='Enable video capturing. Requires '
+                           'Android KitKat or later', action='store_true')
+  video_options.add_option('-b', '--bitrate', help='Bitrate in megabits/s, '
+                           'from 0.1 to 100 mbps, %default mbps by default.',
+                           default=4, type='float')
+  video_options.add_option('-r', '--rotate', help='Rotate video by 90 degrees.',
+                           default=False, action='store_true')
+  video_options.add_option('-s', '--size', metavar='WIDTHxHEIGHT',
+                           help='Frame size to use instead of the device '
+                           'screen size.', default=None)
+  parser.add_option_group(video_options)
+
+  (options, args) = parser.parse_args()
+
+  if options.verbose:
+    logging.getLogger().setLevel(logging.DEBUG)
+
+  if not options.device and len(android_commands.GetAttachedDevices()) > 1:
+    parser.error('Multiple devices are attached. '
+                 'Please specify device serial number with --device.')
+
+  if len(args) > 1:
+    parser.error('Too many positional arguments.')
+  host_file = args[0] if args else options.file
+  device = device_utils.DeviceUtils(options.device)
+
+  if options.video:
+    _CaptureVideo(device, host_file, options)
+  else:
+    _CaptureScreenshot(device, host_file)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/setup.gyp b/build/android/setup.gyp
new file mode 100644
index 0000000..7dce19d
--- /dev/null
+++ b/build/android/setup.gyp
@@ -0,0 +1,82 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'conditions': [
+    ['component == "shared_library"', {
+      'targets': [
+        {
+          # These libraries from the Android NDK are required to be packaged
+          # with any APK that is built with them. build/java_apk.gypi expects
+          # any libraries that should be packaged with the APK to be in
+          # <(SHARED_LIB_DIR).
+          'target_name': 'copy_system_libraries',
+          'type': 'none',
+          'copies': [
+            {
+              'destination': '<(SHARED_LIB_DIR)/',
+              'files': [
+                '<(android_stlport_libs_dir)/libstlport_shared.so',
+              ],
+            },
+          ],
+        },
+      ],
+    }],
+  ],
+  'targets': [
+    {
+      'target_name': 'get_build_device_configurations',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'get configurations',
+          'inputs': [
+            'gyp/util/build_device.py',
+            'gyp/get_device_configuration.py',
+          ],
+          'outputs': [
+            '<(build_device_config_path)',
+            '<(build_device_config_path).fake',
+          ],
+          'action': [
+            'python', 'gyp/get_device_configuration.py',
+            '--output=<(build_device_config_path)',
+          ],
+        }
+      ],
+    },
+    {
+      # Target for creating common output build directories. Creating output
+      # dirs beforehand ensures that build scripts can assume these folders
+      # exist and that there are no race conditions resulting from build
+      # scripts trying to create these directories.
+      # The build/java.gypi target depends on this target.
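+      # E.g. (illustrative) another target can depend on it via:
+      #   'dependencies': ['<(DEPTH)/build/android/setup.gyp:build_output_dirs'],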
+      'target_name': 'build_output_dirs',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'create_java_output_dirs',
+          'variables' : {
+            'output_dirs' : [
+              '<(PRODUCT_DIR)/apks',
+              '<(PRODUCT_DIR)/lib.java',
+              '<(PRODUCT_DIR)/test.lib.java',
+            ]
+          },
+          'inputs' : [],
+          # By not specifying any outputs, we ensure that this command isn't
+          # re-run when the output directories are touched (i.e. apks are
+          # written to them).
+          'outputs': [''],
+          'action': [
+            'mkdir',
+            '-p',
+            '<@(output_dirs)',
+          ],
+        },
+      ],
+    }, # build_output_dirs
+  ]
+}
+
diff --git a/build/android/strip_native_libraries.gypi b/build/android/strip_native_libraries.gypi
new file mode 100644
index 0000000..bdffcfd
--- /dev/null
+++ b/build/android/strip_native_libraries.gypi
@@ -0,0 +1,54 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that strips
+# native libraries.
+#
+# To use this, include it in an action within a gyp target, e.g.:
+#  'actions': [
+#    {
+#      'action_name': 'strip_native_libraries',
+#      'variables': {
+#        'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#        'input_paths': 'files to be added to the list of inputs',
+#        'stamp': 'file to touch when the action is complete',
+#        'stripped_libraries_dir': 'directory to store stripped libraries',
+#      },
+#      'includes': [ '../../build/android/strip_native_libraries.gypi' ],
+#    },
+#  ],
+#
+
+{
+  'message': 'Stripping libraries for <(_target_name)',
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/strip_library_for_device.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'conditions': [
+    ['component == "shared_library"', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/strip_library_for_device.py',
+    '--android-strip=<(android_strip)',
+    '--android-strip-arg=--strip-unneeded',
+    '--stripped-libraries-dir=<(stripped_libraries_dir)',
+    '--libraries-dir=<(SHARED_LIB_DIR),<(PRODUCT_DIR)',
+    '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+    '--stamp=<(stamp)',
+  ],
+}
diff --git a/build/android/surface_stats.py b/build/android/surface_stats.py
new file mode 100755
index 0000000..74bfdce
--- /dev/null
+++ b/build/android/surface_stats.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command line tool for continuously printing Android graphics surface
+statistics on the console.
+"""
+
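+# Sample usage (illustrative device serial):
+#   surface_stats.py --device 0123456789ABCDEF --fields all --delay 0.5
+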
+import collections
+import optparse
+import sys
+import time
+
+from pylib.device import device_utils
+from pylib.perf import surface_stats_collector
+from pylib.utils import run_tests_helper
+
+
+_FIELD_FORMAT = {
+  'jank_count (janks)': '%d',
+  'max_frame_delay (vsyncs)': '%d',
+  'avg_surface_fps (fps)': '%.2f',
+  'frame_lengths (vsyncs)': '%.3f',
+  'refresh_period (seconds)': '%.6f',
+}
+
+
+def _MergeResults(results, fields):
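+  # Merges the samples for each field and averages them. E.g. (illustrative):
+  # two avg_surface_fps samples of 58.0 and 60.0 merge to
+  # {'avg_surface_fps (fps)': 59.0}.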
+  merged_results = collections.defaultdict(list)
+  for result in results:
+    if ((fields != ['all'] and result.name not in fields) or
+        result.value is None):
+      continue
+    name = '%s (%s)' % (result.name, result.unit)
+    if isinstance(result.value, list):
+      value = result.value
+    else:
+      value = [result.value]
+    merged_results[name] += value
+  for name, values in merged_results.iteritems():
+    merged_results[name] = sum(values) / float(len(values))
+  return merged_results
+
+
+def _GetTerminalHeight():
+  try:
+    import fcntl, termios, struct
+  except ImportError:
+    return 0
+  height, _, _, _ = struct.unpack('HHHH',
+      fcntl.ioctl(0, termios.TIOCGWINSZ,
+          struct.pack('HHHH', 0, 0, 0, 0)))
+  return height
+
+
+def _PrintColumnTitles(results):
+  for name in results.keys():
+    print '%s ' % name,
+  print
+  for name in results.keys():
+    print '%s ' % ('-' * len(name)),
+  print
+
+
+def _PrintResults(results):
+  for name, value in results.iteritems():
+    value = _FIELD_FORMAT.get(name, '%s') % value
+    print value.rjust(len(name)) + ' ',
+  print
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='Usage: %prog [options]',
+                                 description=__doc__)
+  parser.add_option('-v',
+                    '--verbose',
+                    dest='verbose_count',
+                    default=0,
+                    action='count',
+                    help='Verbose level (multiple times for more)')
+  parser.add_option('--device',
+                    help='Serial number of device we should use.')
+  parser.add_option('-f',
+                    '--fields',
+                    dest='fields',
+                    default='jank_count,max_frame_delay,avg_surface_fps,'
+                        'frame_lengths',
+                    help='Comma separated list of fields to display or "all".')
+  parser.add_option('-d',
+                    '--delay',
+                    dest='delay',
+                    default=1,
+                    type='float',
+                    help='Time in seconds to sleep between updates.')
+
+  options, _ = parser.parse_args(argv)
+  run_tests_helper.SetLogLevel(options.verbose_count)
+
+  device = device_utils.DeviceUtils(options.device)
+  collector = surface_stats_collector.SurfaceStatsCollector(device)
+  collector.DisableWarningAboutEmptyData()
+
+  fields = options.fields.split(',')
+  row_count = None
+
+  try:
+    collector.Start()
+    while True:
+      time.sleep(options.delay)
+      results = collector.SampleResults()
+      results = _MergeResults(results, fields)
+
+      if not results:
+        continue
+
+      terminal_height = _GetTerminalHeight()
+      if row_count is None or (terminal_height and
+          row_count >= terminal_height - 3):
+        _PrintColumnTitles(results)
+        row_count = 0
+
+      _PrintResults(results)
+      row_count += 1
+  except KeyboardInterrupt:
+    sys.exit(0)
+  finally:
+    collector.Stop()
+
+
+if __name__ == '__main__':
+  main(sys.argv)
diff --git a/build/android/symbolize.py b/build/android/symbolize.py
new file mode 100755
index 0000000..cb5d475
--- /dev/null
+++ b/build/android/symbolize.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Symbolizes stack traces generated by Chromium for Android.
+
+Sample usage:
+  adb logcat chromium:V | symbolize.py
+"""
+
+import os
+import re
+import sys
+
+from pylib import constants
+
+# Uses symbol.py from third_party/android_platform, not Python's own.
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                            'third_party/android_platform/development/scripts'))
+import symbol
+
+# Sample output from base/debug/stack_trace_android.cc
+#00 0x693cd34f /path/to/some/libfoo.so+0x0007434f
+TRACE_LINE = re.compile('(?P<frame>\#[0-9]+ 0x[0-9a-f]{8,8}) '
+                        '(?P<lib>[^+]+)\+0x(?P<addr>[0-9a-f]{8,8})')
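+# For the sample line above, the named groups are (illustrative):
+#   frame='#00 0x693cd34f', lib='/path/to/some/libfoo.so', addr='0007434f'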
+
+class Symbolizer(object):
+  def __init__(self, output):
+    self._output = output
+
+  def write(self, data):
+    while True:
+      match = re.search(TRACE_LINE, data)
+      if not match:
+        self._output.write(data)
+        break
+
+      frame = match.group('frame')
+      lib = match.group('lib')
+      addr = match.group('addr')
+
+      # TODO(scherkus): Doing a single lookup per line is pretty slow,
+      # especially with larger libraries. Consider caching strategies such as:
+      # 1) Have Python load the libraries and do symbol lookups instead of
+      #    calling out to addr2line each time.
+      # 2) Have Python keep multiple addr2line instances open as subprocesses,
+      #    piping addresses and reading back symbols as we find them
+      # 3) Read ahead the entire stack trace until we find no more, then batch
+      #    the symbol lookups.
+      #
+      # TODO(scherkus): These results are memoized, which could result in
+      # incorrect lookups when running this script on long-lived instances
+      # (e.g., adb logcat) when doing incremental development. Consider clearing
+      # the cache when modification timestamp of libraries change.
+      sym = symbol.SymbolInformation(lib, addr, False)[0][0]
+
+      if not sym:
+        post = match.end('addr')
+        self._output.write(data[:post])
+        data = data[post:]
+        continue
+
+      pre = match.start('frame')
+      post = match.end('addr')
+
+      self._output.write(data[:pre])
+      self._output.write(frame)
+      self._output.write(' ')
+      self._output.write(sym)
+
+      data = data[post:]
+
+  def flush(self):
+    self._output.flush()
+
+
+def main():
+  symbolizer = Symbolizer(sys.stdout)
+  for line in sys.stdin:
+    symbolizer.write(line)
+  symbolizer.flush()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/symbolize_test.py b/build/android/symbolize_test.py
new file mode 100755
index 0000000..826d852
--- /dev/null
+++ b/build/android/symbolize_test.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittest for symbolize.py.
+
+This test uses test libraries generated by the Android g++ toolchain.
+
+Should things break, you can recreate the libraries and get the updated
+addresses and demangled names by running the following:
+  cd test/symbolize/
+  make
+  nm -gC *.so
+"""
+
+import StringIO
+import unittest
+
+import symbolize
+
+LIB_A_PATH = '/build/android/tests/symbolize/liba.so'
+LIB_B_PATH = '/build/android/tests/symbolize/libb.so'
+
+def RunSymbolizer(text):
+  output = StringIO.StringIO()
+  s = symbolize.Symbolizer(output)
+  s.write(text)
+  return output.getvalue()
+
+
+class SymbolizerUnittest(unittest.TestCase):
+  def testSingleLineNoMatch(self):
+    # Leading '#' is required.
+    expected = '00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Whitespace should be exactly one space.
+    expected = '#00  0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000  ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Decimal stack frame numbers are required.
+    expected = '#0a 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Hexadecimal addresses are required.
+    expected = '#00 0xghijklmn ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+0xghijklmn\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Addresses must be exactly 8 characters.
+    expected = '#00 0x0000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x000000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    expected = '#00 0x0000000 ' + LIB_A_PATH + '+0x0000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x000000000 ' + LIB_A_PATH + '+0x000000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Addresses must be prefixed with '0x'.
+    expected = '#00 00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Library name is required.
+    expected = '#00 0x00000000\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 +0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Library name must be followed by offset with no spaces around '+'.
+    expected = '#00 0x00000000 ' + LIB_A_PATH + ' +0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+ 0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + ' 0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+  def testSingleLine(self):
+    text = '#00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    expected = '#00 0x00000000 A::Bar(char const*)\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testSingleLineWithSurroundingText(self):
+    text = 'LEFT #00 0x00000000 ' + LIB_A_PATH + '+0x00000254 RIGHT\n'
+    expected = 'LEFT #00 0x00000000 A::Bar(char const*) RIGHT\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testMultipleLinesSameLibrary(self):
+    text = '#00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    text += '#01 0x00000000 ' + LIB_A_PATH + '+0x00000234\n'
+    expected = '#00 0x00000000 A::Bar(char const*)\n'
+    expected += '#01 0x00000000 A::Foo(int)\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testMultipleLinesDifferentLibrary(self):
+    text = '#00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    text += '#01 0x00000000 ' + LIB_B_PATH + '+0x00000234\n'
+    expected = '#00 0x00000000 A::Bar(char const*)\n'
+    expected += '#01 0x00000000 B::Baz(float)\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testMultipleLinesWithSurroundingTextEverywhere(self):
+    text = 'TOP\n'
+    text += 'LEFT #00 0x00000000 ' + LIB_A_PATH + '+0x00000254 RIGHT\n'
+    text += 'LEFT #01 0x00000000 ' + LIB_B_PATH + '+0x00000234 RIGHT\n'
+    text += 'BOTTOM\n'
+    expected = 'TOP\n'
+    expected += 'LEFT #00 0x00000000 A::Bar(char const*) RIGHT\n'
+    expected += 'LEFT #01 0x00000000 B::Baz(float) RIGHT\n'
+    expected += 'BOTTOM\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/test_runner.py b/build/android/test_runner.py
new file mode 100755
index 0000000..5303463
--- /dev/null
+++ b/build/android/test_runner.py
@@ -0,0 +1,889 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs all types of tests from one unified interface."""
+
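+# Sample invocations (illustrative; see the per-command options below):
+#   test_runner.py gtest -s base_unittests
+#   test_runner.py instrumentation --test-apk=ChromeShellTest
+#   test_runner.py uiautomator --test-jar=chrome_shell_uiautomator_tests \
+#       --package=chrome_shell
+#   test_runner.py monkey --package=chrome_shell
+#   test_runner.py perf --steps perf_steps.json
+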
+import collections
+import logging
+import optparse
+import os
+import shutil
+import signal
+import sys
+import threading
+
+from pylib import android_commands
+from pylib import constants
+from pylib import forwarder
+from pylib import ports
+from pylib.base import base_test_result
+from pylib.base import test_dispatcher
+from pylib.gtest import gtest_config
+from pylib.gtest import setup as gtest_setup
+from pylib.gtest import test_options as gtest_test_options
+from pylib.linker import setup as linker_setup
+from pylib.host_driven import setup as host_driven_setup
+from pylib.instrumentation import setup as instrumentation_setup
+from pylib.instrumentation import test_options as instrumentation_test_options
+from pylib.junit import setup as junit_setup
+from pylib.junit import test_dispatcher as junit_dispatcher
+from pylib.monkey import setup as monkey_setup
+from pylib.monkey import test_options as monkey_test_options
+from pylib.perf import setup as perf_setup
+from pylib.perf import test_options as perf_test_options
+from pylib.perf import test_runner as perf_test_runner
+from pylib.uiautomator import setup as uiautomator_setup
+from pylib.uiautomator import test_options as uiautomator_test_options
+from pylib.utils import apk_helper
+from pylib.utils import command_option_parser
+from pylib.utils import report_results
+from pylib.utils import reraiser_thread
+from pylib.utils import run_tests_helper
+
+
+def AddCommonOptions(option_parser):
+  """Adds all common options to |option_parser|."""
+
+  group = optparse.OptionGroup(option_parser, 'Common Options')
+  default_build_type = os.environ.get('BUILDTYPE', 'Debug')
+  group.add_option('--debug', action='store_const', const='Debug',
+                   dest='build_type', default=default_build_type,
+                   help=('If set, run test suites under out/Debug. '
+                         'Default is env var BUILDTYPE or Debug.'))
+  group.add_option('--release', action='store_const',
+                   const='Release', dest='build_type',
+                   help=('If set, run test suites under out/Release.'
+                         ' Default is env var BUILDTYPE or Debug.'))
+  group.add_option('-c', dest='cleanup_test_files',
+                   help='Clean up test files on the device after the run',
+                   action='store_true')
+  group.add_option('--num_retries', dest='num_retries', type='int',
+                   default=2,
+                   help=('Number of retries for a test before '
+                         'giving up.'))
+  group.add_option('-v',
+                   '--verbose',
+                   dest='verbose_count',
+                   default=0,
+                   action='count',
+                   help='Verbose level (multiple times for more)')
+  group.add_option('--tool',
+                   dest='tool',
+                   help=('Run the test under a tool '
+                         '(use --tool help to list them)'))
+  group.add_option('--flakiness-dashboard-server',
+                   dest='flakiness_dashboard_server',
+                   help=('Address of the server that is hosting the '
+                         'Chrome for Android flakiness dashboard.'))
+  group.add_option('--skip-deps-push', dest='push_deps',
+                   action='store_false', default=True,
+                   help=('Do not push dependencies to the device. '
+                         'Use this at your own risk to speed up test '
+                         'execution on a local machine.'))
+  group.add_option('-d', '--device', dest='test_device',
+                   help=('Target device for the test suite '
+                         'to run on.'))
+  option_parser.add_option_group(group)
+
+
+def ProcessCommonOptions(options):
+  """Processes and handles all common options."""
+  run_tests_helper.SetLogLevel(options.verbose_count)
+  constants.SetBuildType(options.build_type)
+
+
+def AddGTestOptions(option_parser):
+  """Adds gtest options to |option_parser|."""
+
+  option_parser.usage = '%prog gtest [options]'
+  option_parser.commands_dict = {}
+  option_parser.example = '%prog gtest -s base_unittests'
+
+  # TODO(gkanwar): Make this option required
+  option_parser.add_option('-s', '--suite', dest='suite_name',
+                           help=('Executable name of the test suite to run '
+                                 '(use -s help to list them).'))
+  option_parser.add_option('-f', '--gtest_filter', '--gtest-filter',
+                           dest='test_filter',
+                           help='googletest-style filter string.')
+  option_parser.add_option('--gtest_also_run_disabled_tests',
+                           '--gtest-also-run-disabled-tests',
+                           dest='run_disabled', action='store_true',
+                           help='Also run disabled tests if applicable.')
+  option_parser.add_option('-a', '--test-arguments', dest='test_arguments',
+                           default='',
+                           help='Additional arguments to pass to the test.')
+  option_parser.add_option('-t', dest='timeout',
+                           help='Timeout to wait for each test',
+                           type='int',
+                           default=60)
+  option_parser.add_option('--isolate_file_path',
+                           '--isolate-file-path',
+                           dest='isolate_file_path',
+                           help='.isolate file path to override the default '
+                                'path')
+  # TODO(gkanwar): Move these to Common Options once we have the plumbing
+  # in our other test types to handle these commands
+  AddCommonOptions(option_parser)
+
+
+def AddLinkerTestOptions(option_parser):
+  option_parser.usage = '%prog linker'
+  option_parser.commands_dict = {}
+  option_parser.example = '%prog linker'
+
+  option_parser.add_option('-f', '--gtest-filter', dest='test_filter',
+                           help='googletest-style filter string.')
+  AddCommonOptions(option_parser)
+
+
+def ProcessGTestOptions(options):
+  """Intercept test suite help to list test suites.
+
+  Args:
+    options: Command line options.
+  """
+  if options.suite_name == 'help':
+    print 'Available test suites are:'
+    for test_suite in (gtest_config.STABLE_TEST_SUITES +
+                       gtest_config.EXPERIMENTAL_TEST_SUITES):
+      print test_suite
+    sys.exit(0)
+
+  # Convert to a list, assuming all test suites if nothing was specified.
+  # TODO(gkanwar): Require having a test suite
+  if options.suite_name:
+    options.suite_name = [options.suite_name]
+  else:
+    options.suite_name = [s for s in gtest_config.STABLE_TEST_SUITES]
+
+
+def AddJavaTestOptions(option_parser):
+  """Adds the Java test options to |option_parser|."""
+
+  option_parser.add_option('-f', '--test-filter', dest='test_filter',
+                           help=('Test filter (if not fully qualified, '
+                                 'will run all matches).'))
+  option_parser.add_option(
+      '-A', '--annotation', dest='annotation_str',
+      help=('Comma-separated list of annotations. Run only tests with any of '
+            'the given annotations. An annotation can be either a key or a '
+            'key-value pair. A test that has no annotation is considered '
+            '"SmallTest".'))
+  option_parser.add_option(
+      '-E', '--exclude-annotation', dest='exclude_annotation_str',
+      help=('Comma-separated list of annotations. Exclude tests with these '
+            'annotations.'))
+  option_parser.add_option(
+      '--screenshot', dest='screenshot_failures', action='store_true',
+      help='Capture screenshots of test failures')
+  option_parser.add_option(
+      '--save-perf-json', action='store_true',
+      help='Saves the JSON file for each UI Perf test.')
+  option_parser.add_option(
+      '--official-build', action='store_true', help='Run official build tests.')
+  option_parser.add_option(
+      '--test_data', '--test-data', action='append', default=[],
+      help=('Each instance defines a directory of test data that should be '
+            'copied to the target(s) before running the tests. The argument '
+            'should be of the form <target>:<source>, where <target> is relative to '
+            'the device data directory, and <source> is relative to the '
+            'chromium build directory.'))
+
+
+def ProcessJavaTestOptions(options):
+  """Processes options/arguments and populates |options| with defaults."""
+
+  if options.annotation_str:
+    options.annotations = options.annotation_str.split(',')
+  elif options.test_filter:
+    options.annotations = []
+  else:
+    options.annotations = ['Smoke', 'SmallTest', 'MediumTest', 'LargeTest',
+                           'EnormousTest', 'IntegrationTest']
+
+  if options.exclude_annotation_str:
+    options.exclude_annotations = options.exclude_annotation_str.split(',')
+  else:
+    options.exclude_annotations = []
+
+
+def AddInstrumentationTestOptions(option_parser):
+  """Adds Instrumentation test options to |option_parser|."""
+
+  option_parser.usage = '%prog instrumentation [options]'
+  option_parser.commands_dict = {}
+  option_parser.example = ('%prog instrumentation '
+                           '--test-apk=ChromeShellTest')
+
+  AddJavaTestOptions(option_parser)
+  AddCommonOptions(option_parser)
+
+  option_parser.add_option('-j', '--java-only', action='store_true',
+                           default=False, help='Run only the Java tests.')
+  option_parser.add_option('-p', '--python-only', action='store_true',
+                           default=False,
+                           help='Run only the host-driven tests.')
+  option_parser.add_option('--host-driven-root',
+                           help='Root of the host-driven tests.')
+  option_parser.add_option('-w', '--wait_debugger', dest='wait_for_debugger',
+                           action='store_true',
+                           help='Wait for debugger.')
+  option_parser.add_option(
+      '--test-apk', dest='test_apk',
+      help=('The name of the apk containing the tests '
+            '(without the .apk extension; e.g. "ContentShellTest").'))
+  option_parser.add_option('--coverage-dir',
+                           help=('Directory in which to place all generated '
+                                 'EMMA coverage files.'))
+  option_parser.add_option('--device-flags', dest='device_flags', default='',
+                           help='The relative filepath to a file containing '
+                                'command-line flags to set on the device')
+
+
+def ProcessInstrumentationOptions(options, error_func):
+  """Processes options/arguments and populate |options| with defaults.
+
+  Args:
+    options: optparse.Options object.
+    error_func: Function to call with the error message in case of an error.
+
+  Returns:
+    An InstrumentationOptions named tuple which contains all options relevant to
+    instrumentation tests.
+  """
+
+  ProcessJavaTestOptions(options)
+
+  if options.java_only and options.python_only:
+    error_func('Options java_only (-j) and python_only (-p) '
+               'are mutually exclusive.')
+  options.run_java_tests = True
+  options.run_python_tests = True
+  if options.java_only:
+    options.run_python_tests = False
+  elif options.python_only:
+    options.run_java_tests = False
+
+  if not options.host_driven_root:
+    options.run_python_tests = False
+
+  if not options.test_apk:
+    error_func('--test-apk must be specified.')
+
+  options.test_apk_path = os.path.join(
+      constants.GetOutDirectory(),
+      constants.SDK_BUILD_APKS_DIR,
+      '%s.apk' % options.test_apk)
+  options.test_apk_jar_path = os.path.join(
+      constants.GetOutDirectory(),
+      constants.SDK_BUILD_TEST_JAVALIB_DIR,
+      '%s.jar' % options.test_apk)
+  options.test_support_apk_path = '%sSupport%s' % (
+      os.path.splitext(options.test_apk_path))
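+  # E.g. (illustrative): for --test-apk=ChromeShellTest, test_apk_path ends in
+  # ChromeShellTest.apk and test_support_apk_path in ChromeShellTestSupport.apk.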
+
+  options.test_runner = apk_helper.GetInstrumentationName(options.test_apk_path)
+
+  return instrumentation_test_options.InstrumentationOptions(
+      options.tool,
+      options.cleanup_test_files,
+      options.push_deps,
+      options.annotations,
+      options.exclude_annotations,
+      options.test_filter,
+      options.test_data,
+      options.save_perf_json,
+      options.screenshot_failures,
+      options.wait_for_debugger,
+      options.coverage_dir,
+      options.test_apk,
+      options.test_apk_path,
+      options.test_apk_jar_path,
+      options.test_runner,
+      options.test_support_apk_path,
+      options.device_flags
+      )
+
+
+def AddUIAutomatorTestOptions(option_parser):
+  """Adds UI Automator test options to |option_parser|."""
+
+  option_parser.usage = '%prog uiautomator [options]'
+  option_parser.commands_dict = {}
+  option_parser.example = (
+      '%prog uiautomator --test-jar=chrome_shell_uiautomator_tests'
+      ' --package=chrome_shell')
+  option_parser.add_option(
+      '--package',
+      help=('Package under test. Possible values: %s' %
+            constants.PACKAGE_INFO.keys()))
+  option_parser.add_option(
+      '--test-jar', dest='test_jar',
+      help=('The name of the dexed jar containing the tests (without the '
+            '.dex.jar extension). Alternatively, this can be a full path '
+            'to the jar.'))
+
+  AddJavaTestOptions(option_parser)
+  AddCommonOptions(option_parser)
+
+
+def ProcessUIAutomatorOptions(options, error_func):
+  """Processes UIAutomator options/arguments.
+
+  Args:
+    options: optparse.Options object.
+    error_func: Function to call with the error message in case of an error.
+
+  Returns:
+    A UIAutomatorOptions named tuple which contains all options relevant to
+    uiautomator tests.
+  """
+
+  ProcessJavaTestOptions(options)
+
+  if not options.package:
+    error_func('--package is required.')
+
+  if options.package not in constants.PACKAGE_INFO:
+    error_func('Invalid package.')
+
+  if not options.test_jar:
+    error_func('--test-jar must be specified.')
+
+  if os.path.exists(options.test_jar):
+    # The dexed JAR is fully qualified; assume the info JAR lives alongside.
+    options.uiautomator_jar = options.test_jar
+  else:
+    options.uiautomator_jar = os.path.join(
+        constants.GetOutDirectory(),
+        constants.SDK_BUILD_JAVALIB_DIR,
+        '%s.dex.jar' % options.test_jar)
+  options.uiautomator_info_jar = (
+      options.uiautomator_jar[:options.uiautomator_jar.find('.dex.jar')] +
+      '_java.jar')
+
+  return uiautomator_test_options.UIAutomatorOptions(
+      options.tool,
+      options.cleanup_test_files,
+      options.push_deps,
+      options.annotations,
+      options.exclude_annotations,
+      options.test_filter,
+      options.test_data,
+      options.save_perf_json,
+      options.screenshot_failures,
+      options.uiautomator_jar,
+      options.uiautomator_info_jar,
+      options.package)
+
+
+def AddJUnitTestOptions(option_parser):
+  """Adds junit test options to |option_parser|."""
+  option_parser.usage = '%prog junit -s [test suite name]'
+  option_parser.commands_dict = {}
+
+  option_parser.add_option(
+      '-s', '--test-suite', dest='test_suite',
+      help=('JUnit test suite to run.'))
+  option_parser.add_option(
+      '-f', '--test-filter', dest='test_filter',
+      help='Filters tests googletest-style.')
+  option_parser.add_option(
+      '--package-filter', dest='package_filter',
+      help='Filters tests by package.')
+  option_parser.add_option(
+      '--runner-filter', dest='runner_filter',
+      help='Filters tests by runner class. Must be fully qualified.')
+  option_parser.add_option(
+      '--sdk-version', dest='sdk_version', type='int',
+      help='The Android SDK version.')
+  AddCommonOptions(option_parser)
+
+
+def ProcessJUnitTestOptions(options, error_func):
+  """Processes all JUnit test options."""
+  if not options.test_suite:
+    error_func('No test suite specified.')
+  return options
+
+
+def AddMonkeyTestOptions(option_parser):
+  """Adds monkey test options to |option_parser|."""
+
+  option_parser.usage = '%prog monkey [options]'
+  option_parser.commands_dict = {}
+  option_parser.example = (
+      '%prog monkey --package=chrome_shell')
+
+  option_parser.add_option(
+      '--package',
+      help=('Package under test. Possible values: %s' %
+            constants.PACKAGE_INFO.keys()))
+  option_parser.add_option(
+      '--event-count', default=10000, type='int',
+      help='Number of events to generate [default: %default].')
+  option_parser.add_option(
+      '--category', default='',
+      help='A list of allowed categories.')
+  option_parser.add_option(
+      '--throttle', default=100, type='int',
+      help='Delay between events (ms) [default: %default]. ')
+  option_parser.add_option(
+      '--seed', type='int',
+      help=('Seed value for pseudo-random generator. Same seed value generates '
+            'the same sequence of events. Seed is randomized by default.'))
+  option_parser.add_option(
+      '--extra-args', default='',
+      help=('String of other args to pass to the command verbatim '
+            '[default: "%default"].'))
+
+  AddCommonOptions(option_parser)
+
+
+def ProcessMonkeyTestOptions(options, error_func):
+  """Processes all monkey test options.
+
+  Args:
+    options: optparse.Options object.
+    error_func: Function to call with the error message in case of an error.
+
+  Returns:
+    A MonkeyOptions named tuple which contains all options relevant to
+    monkey tests.
+  """
+  if not options.package:
+    error_func('--package is required.')
+
+  if options.package not in constants.PACKAGE_INFO:
+    error_func('Invalid package.')
+
+  category = options.category
+  if category:
+    category = options.category.split(',')
+
+  return monkey_test_options.MonkeyOptions(
+      options.verbose_count,
+      options.package,
+      options.event_count,
+      category,
+      options.throttle,
+      options.seed,
+      options.extra_args)
+
+
+def AddPerfTestOptions(option_parser):
+  """Adds perf test options to |option_parser|."""
+
+  option_parser.usage = '%prog perf [options]'
+  option_parser.commands_dict = {}
+  option_parser.example = ('%prog perf '
+                           '[--single-step -- command args] or '
+                           '[--steps perf_steps.json] or '
+                           '[--print-step step]')
+
+  option_parser.add_option(
+      '--single-step',
+      action='store_true',
+      help='Execute the given command with retries, but only print the result '
+           'for the "most successful" round.')
+  option_parser.add_option(
+      '--steps',
+      help='JSON file containing the list of commands to run.')
+  option_parser.add_option(
+      '--flaky-steps',
+      help=('A JSON file containing steps that are flaky '
+            'and whose exit codes will be ignored.'))
+  option_parser.add_option(
+      '--output-json-list',
+      help='Write a simple list of names from --steps into the given file.')
+  option_parser.add_option(
+      '--print-step',
+      help='The name of a previously executed perf step to print.')
+  option_parser.add_option(
+      '--no-timeout', action='store_true',
+      help=('Do not impose a timeout. Each perf step is responsible for '
+            'implementing the timeout logic.'))
+  option_parser.add_option(
+      '-f', '--test-filter',
+      help=('Test filter (will match against the names listed in --steps).'))
+  option_parser.add_option(
+      '--dry-run',
+      action='store_true',
+      help='Just print the steps without executing.')
+  AddCommonOptions(option_parser)
+
+
+def ProcessPerfTestOptions(options, args, error_func):
+  """Processes all perf test options.
+
+  Args:
+    options: optparse.Options object.
+    args: List of remaining command-line arguments.
+    error_func: Function to call with the error message in case of an error.
+
+  Returns:
+    A PerfOptions named tuple which contains all options relevant to
+    perf tests.
+  """
+  # Exactly one of steps, print_step, or single_step must be provided.
+  count = len(filter(None,
+                     [options.steps, options.print_step, options.single_step]))
+  if count != 1:
+    error_func('Please specify one of: --steps, --print-step, --single-step.')
+  single_step = None
+  if options.single_step:
+    single_step = ' '.join(args[2:])
+  return perf_test_options.PerfOptions(
+      options.steps, options.flaky_steps, options.output_json_list,
+      options.print_step, options.no_timeout, options.test_filter,
+      options.dry_run, single_step)
+
+
+def _RunGTests(options, devices):
+  """Subcommand of RunTestsCommands which runs gtests."""
+  ProcessGTestOptions(options)
+
+  exit_code = 0
+  for suite_name in options.suite_name:
+    # TODO(gkanwar): Move this into ProcessGTestOptions once we require -s for
+    # the gtest command.
+    gtest_options = gtest_test_options.GTestOptions(
+        options.tool,
+        options.cleanup_test_files,
+        options.push_deps,
+        options.test_filter,
+        options.run_disabled,
+        options.test_arguments,
+        options.timeout,
+        options.isolate_file_path,
+        suite_name)
+    runner_factory, tests = gtest_setup.Setup(gtest_options, devices)
+
+    results, test_exit_code = test_dispatcher.RunTests(
+        tests, runner_factory, devices, shard=True, test_timeout=None,
+        num_retries=options.num_retries)
+
+    if test_exit_code and exit_code != constants.ERROR_EXIT_CODE:
+      exit_code = test_exit_code
+
+    report_results.LogFull(
+        results=results,
+        test_type='Unit test',
+        test_package=suite_name,
+        flakiness_server=options.flakiness_dashboard_server)
+
+  if os.path.isdir(constants.ISOLATE_DEPS_DIR):
+    shutil.rmtree(constants.ISOLATE_DEPS_DIR)
+
+  return exit_code
+
+
+def _RunLinkerTests(options, devices):
+  """Subcommand of RunTestsCommands which runs linker tests."""
+  runner_factory, tests = linker_setup.Setup(options, devices)
+
+  results, exit_code = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=True, test_timeout=60,
+      num_retries=options.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Linker test',
+      test_package='ChromiumLinkerTest')
+
+  return exit_code
+
+
+def _RunInstrumentationTests(options, error_func, devices):
+  """Subcommand of RunTestsCommands which runs instrumentation tests."""
+  instrumentation_options = ProcessInstrumentationOptions(options, error_func)
+
+  if len(devices) > 1 and options.wait_for_debugger:
+    logging.warning('Debugger cannot be sharded; using first available device')
+    devices = devices[:1]
+
+  results = base_test_result.TestRunResults()
+  exit_code = 0
+
+  if options.run_java_tests:
+    runner_factory, tests = instrumentation_setup.Setup(instrumentation_options)
+
+    test_results, exit_code = test_dispatcher.RunTests(
+        tests, runner_factory, devices, shard=True, test_timeout=None,
+        num_retries=options.num_retries)
+
+    results.AddTestRunResults(test_results)
+
+  if options.run_python_tests:
+    runner_factory, tests = host_driven_setup.InstrumentationSetup(
+        options.host_driven_root, options.official_build,
+        instrumentation_options)
+
+    if tests:
+      test_results, test_exit_code = test_dispatcher.RunTests(
+          tests, runner_factory, devices, shard=True, test_timeout=None,
+          num_retries=options.num_retries)
+
+      results.AddTestRunResults(test_results)
+
+      # Only allow exit code escalation
+      if test_exit_code and exit_code != constants.ERROR_EXIT_CODE:
+        exit_code = test_exit_code
+
+  if options.device_flags:
+    options.device_flags = os.path.join(constants.DIR_SOURCE_ROOT,
+                                        options.device_flags)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Instrumentation',
+      test_package=os.path.basename(options.test_apk),
+      annotation=options.annotations,
+      flakiness_server=options.flakiness_dashboard_server)
+
+  return exit_code
+
+
+def _RunUIAutomatorTests(options, error_func, devices):
+  """Subcommand of RunTestsCommands which runs uiautomator tests."""
+  uiautomator_options = ProcessUIAutomatorOptions(options, error_func)
+
+  runner_factory, tests = uiautomator_setup.Setup(uiautomator_options)
+
+  results, exit_code = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=True, test_timeout=None,
+      num_retries=options.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='UIAutomator',
+      test_package=os.path.basename(options.test_jar),
+      annotation=options.annotations,
+      flakiness_server=options.flakiness_dashboard_server)
+
+  return exit_code
+
+
+def _RunJUnitTests(options, error_func):
+  """Subcommand of RunTestsCommand which runs junit tests."""
+  junit_options = ProcessJUnitTestOptions(options, error_func)
+  runner_factory, tests = junit_setup.Setup(junit_options)
+  _, exit_code = junit_dispatcher.RunTests(tests, runner_factory)
+
+  return exit_code
+
+
+def _RunMonkeyTests(options, error_func, devices):
+  """Subcommand of RunTestsCommands which runs monkey tests."""
+  monkey_options = ProcessMonkeyTestOptions(options, error_func)
+
+  runner_factory, tests = monkey_setup.Setup(monkey_options)
+
+  results, exit_code = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=False, test_timeout=None,
+      num_retries=options.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Monkey',
+      test_package='Monkey')
+
+  return exit_code
+
+
+def _RunPerfTests(options, args, error_func):
+  """Subcommand of RunTestsCommands which runs perf tests."""
+  perf_options = ProcessPerfTestOptions(options, args, error_func)
+
+  # Just save a simple json with a list of test names.
+  if perf_options.output_json_list:
+    return perf_test_runner.OutputJsonList(
+        perf_options.steps, perf_options.output_json_list)
+
+  # Just print the results from a single previously executed step.
+  if perf_options.print_step:
+    return perf_test_runner.PrintTestOutput(perf_options.print_step)
+
+  runner_factory, tests, devices = perf_setup.Setup(perf_options)
+
+  # shard=False means that each device will get the full list of tests
+  # and then each one will decide its own affinity.
+  # shard=True means each device will pop the next available test from a
+  # queue, which increases throughput but has no affinity.
+  results, _ = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=False, test_timeout=None,
+      num_retries=options.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Perf',
+      test_package='Perf')
+
+  if perf_options.single_step:
+    return perf_test_runner.PrintTestOutput('single_step')
+
+  perf_test_runner.PrintSummary(tests)
+
+  # Always return 0 on the sharding stage. Individual tests exit_code
+  # will be returned on the print_step stage.
+  return 0
+
+
+def _GetAttachedDevices(test_device=None):
+  """Get all attached devices.
+
+  Args:
+    test_device: Name of a specific device to use.
+
+  Returns:
+    A list of attached devices.
+  """
+  attached_devices = android_commands.GetAttachedDevices()
+  if test_device:
+    assert test_device in attached_devices, (
+        'Did not find device %s among attached devices. Attached devices: %s'
+        % (test_device, ', '.join(attached_devices)))
+    attached_devices = [test_device]
+
+  assert attached_devices, 'No devices attached.'
+
+  return sorted(attached_devices)
+
+
+def RunTestsCommand(command, options, args, option_parser):
+  """Checks test type and dispatches to the appropriate function.
+
+  Args:
+    command: String indicating the command that was received to trigger
+        this function.
+    options: optparse options dictionary.
+    args: List of extra args from optparse.
+    option_parser: optparse.OptionParser object.
+
+  Returns:
+    Integer indicating exit code.
+
+  Raises:
+    Exception: Unknown command name passed in, or an exception from an
+        individual test runner.
+  """
+
+  # Check for extra arguments
+  if len(args) > 2 and command != 'perf':
+    option_parser.error('Unrecognized arguments: %s' % (' '.join(args[2:])))
+    return constants.ERROR_EXIT_CODE
+  if command == 'perf':
+    if ((options.single_step and len(args) <= 2) or
+        (not options.single_step and len(args) > 2)):
+      option_parser.error('Unrecognized arguments: %s' % (' '.join(args)))
+      return constants.ERROR_EXIT_CODE
+
+  ProcessCommonOptions(options)
+
+  devices = _GetAttachedDevices(options.test_device)
+
+  forwarder.Forwarder.RemoveHostLog()
+  if not ports.ResetTestServerPortAllocation():
+    raise Exception('Failed to reset test server port.')
+
+  if command == 'gtest':
+    return _RunGTests(options, devices)
+  elif command == 'linker':
+    return _RunLinkerTests(options, devices)
+  elif command == 'instrumentation':
+    return _RunInstrumentationTests(options, option_parser.error, devices)
+  elif command == 'uiautomator':
+    return _RunUIAutomatorTests(options, option_parser.error, devices)
+  elif command == 'junit':
+    return _RunJUnitTests(options, option_parser.error)
+  elif command == 'monkey':
+    return _RunMonkeyTests(options, option_parser.error, devices)
+  elif command == 'perf':
+    return _RunPerfTests(options, args, option_parser.error)
+  else:
+    raise Exception('Unknown test type.')
+
+
+def HelpCommand(command, _options, args, option_parser):
+  """Display help for a certain command, or overall help.
+
+  Args:
+    command: String indicating the command that was received to trigger
+        this function.
+    _options: optparse options dictionary. Unused.
+    args: List of extra args from optparse.
+    option_parser: optparse.OptionParser object.
+
+  Returns:
+    Integer indicating exit code.
+  """
+  # If we don't have any args, display overall help
+  if len(args) < 3:
+    option_parser.print_help()
+    return 0
+  # If we have too many args, print an error
+  if len(args) > 3:
+    option_parser.error('Unrecognized arguments: %s' % (' '.join(args[3:])))
+    return constants.ERROR_EXIT_CODE
+
+  command = args[2]
+
+  if command not in VALID_COMMANDS:
+    option_parser.error('Unrecognized command.')
+
+  # Treat the help command as a special case. We don't care about showing a
+  # specific help page for itself.
+  if command == 'help':
+    option_parser.print_help()
+    return 0
+
+  VALID_COMMANDS[command].add_options_func(option_parser)
+  option_parser.usage = '%prog ' + command + ' [options]'
+  option_parser.commands_dict = {}
+  option_parser.print_help()
+
+  return 0
+
+
+# Define a named tuple for the values in the VALID_COMMANDS dictionary so the
+# syntax is a bit prettier. The tuple is two functions: (add options, run
+# command).
+CommandFunctionTuple = collections.namedtuple(
+    'CommandFunctionTuple', ['add_options_func', 'run_command_func'])
+VALID_COMMANDS = {
+    'gtest': CommandFunctionTuple(AddGTestOptions, RunTestsCommand),
+    'instrumentation': CommandFunctionTuple(
+        AddInstrumentationTestOptions, RunTestsCommand),
+    'uiautomator': CommandFunctionTuple(
+        AddUIAutomatorTestOptions, RunTestsCommand),
+    'junit': CommandFunctionTuple(
+        AddJUnitTestOptions, RunTestsCommand),
+    'monkey': CommandFunctionTuple(
+        AddMonkeyTestOptions, RunTestsCommand),
+    'perf': CommandFunctionTuple(
+        AddPerfTestOptions, RunTestsCommand),
+    'linker': CommandFunctionTuple(
+        AddLinkerTestOptions, RunTestsCommand),
+    'help': CommandFunctionTuple(lambda option_parser: None, HelpCommand)
+    }
+
+
+def DumpThreadStacks(_signal, _frame):
+  for thread in threading.enumerate():
+    reraiser_thread.LogThreadStack(thread)
+
+
+def main():
+  signal.signal(signal.SIGUSR1, DumpThreadStacks)
+  option_parser = command_option_parser.CommandOptionParser(
+      commands_dict=VALID_COMMANDS)
+  return command_option_parser.ParseAndExecute(option_parser)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
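The dispatch table above is the whole plumbing of this runner: each
sub-command maps to an (add options, run) pair invoked through a shared
option parser. A minimal sketch of the same pattern, with a hypothetical
'echo' command standing in for the real test types:

    # Sketch of the CommandFunctionTuple dispatch used by the runner above.
    # The 'echo' command and its two handlers are illustrative only.
    import collections
    import optparse
    import sys

    CommandFunctionTuple = collections.namedtuple(
        'CommandFunctionTuple', ['add_options_func', 'run_command_func'])

    def AddEchoOptions(parser):
      parser.add_option('--message', default='hello')

    def RunEchoCommand(command, options, args, option_parser):
      print options.message
      return 0

    COMMANDS = {'echo': CommandFunctionTuple(AddEchoOptions, RunEchoCommand)}

    def main(argv):
      command = argv[1] if len(argv) > 1 else 'echo'
      parser = optparse.OptionParser()
      COMMANDS[command].add_options_func(parser)
      options, args = parser.parse_args(argv[2:])
      return COMMANDS[command].run_command_func(command, options, args, parser)

    if __name__ == '__main__':
      sys.exit(main(sys.argv))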
diff --git a/build/android/tests/multiple_proguards/AndroidManifest.xml b/build/android/tests/multiple_proguards/AndroidManifest.xml
new file mode 100644
index 0000000..1794712
--- /dev/null
+++ b/build/android/tests/multiple_proguards/AndroidManifest.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  - Copyright 2013 Google Inc.
+  -
+  - Licensed under the Apache License, Version 2.0 (the "License"); you may not
+  - use this file except in compliance with the License. You may obtain a copy
+  - of the License at
+  -
+  - http://www.apache.org/licenses/LICENSE-2.0
+  -
+  - Unless required by applicable law or agreed to in writing, software
+  - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  - License for the specific language governing permissions and limitations
+  - under the License.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+      package="dummy"
+      android:versionCode="1"
+      android:versionName="1.0">
+    <application android:label="dummy">
+        <activity android:name="dummy.DummyActivity"
+                  android:label="dummy">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+</manifest>
diff --git a/build/android/tests/multiple_proguards/multiple_proguards.gyp b/build/android/tests/multiple_proguards/multiple_proguards.gyp
new file mode 100644
index 0000000..48a5d7b
--- /dev/null
+++ b/build/android/tests/multiple_proguards/multiple_proguards.gyp
@@ -0,0 +1,34 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'variables': {
+    'chromium_code': 1,
+  },
+  'targets': [
+    {
+      'target_name': 'multiple_proguards_test_apk',
+      'type': 'none',
+      'variables': {
+        'app_manifest_version_name%': '<(android_app_version_name)',
+        'java_in_dir': '.',
+        'proguard_enabled': 'true',
+        'proguard_flags_paths': [
+          # Both of these proguard?.flags files need to be part of the build
+          # to suppress the two warnings produced by
+          # src/dummy/DummyActivity.java; otherwise the build will fail.
+          'proguard1.flags',
+          'proguard2.flags',
+        ],
+        'R_package': 'dummy',
+        'R_package_relpath': 'dummy',
+        'apk_name': 'MultipleProguards',
+        # This is a build-only test. There's nothing to install.
+        'gyp_managed_install': 0,
+        # The Java code produces warnings, so force the build to not show them.
+        'chromium_code': 0,
+      },
+      'includes': [ '../../../../build/java_apk.gypi' ],
+    },
+  ],
+}
diff --git a/build/android/tests/multiple_proguards/proguard1.flags b/build/android/tests/multiple_proguards/proguard1.flags
new file mode 100644
index 0000000..95a814c
--- /dev/null
+++ b/build/android/tests/multiple_proguards/proguard1.flags
@@ -0,0 +1 @@
+-dontwarn sun.misc.Unsafe
diff --git a/build/android/tests/multiple_proguards/proguard2.flags b/build/android/tests/multiple_proguards/proguard2.flags
new file mode 100644
index 0000000..ceac62b
--- /dev/null
+++ b/build/android/tests/multiple_proguards/proguard2.flags
@@ -0,0 +1 @@
+-dontwarn sun.reflect.Reflection
diff --git a/build/android/tests/multiple_proguards/src/dummy/DummyActivity.java b/build/android/tests/multiple_proguards/src/dummy/DummyActivity.java
new file mode 100644
index 0000000..72f20f4
--- /dev/null
+++ b/build/android/tests/multiple_proguards/src/dummy/DummyActivity.java
@@ -0,0 +1,26 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package dummy;
+
+import android.app.Activity;
+
+/**
+ * Dummy activity to build apk.
+ *
+ * This class is created to ensure that proguard will produce two separate warnings.
+ */
+public class DummyActivity extends Activity {
+    private static void doBadThings1() {
+        try {
+            sun.misc.Unsafe.getUnsafe();
+        } catch (Exception e) {
+            throw new Error(e);
+        }
+    }
+
+    private static void doBadThings2() {
+        sun.reflect.Reflection.getCallerClass(2);
+    }
+}
diff --git a/build/android/tests/multiple_proguards/src/dummy/NativeLibraries.java b/build/android/tests/multiple_proguards/src/dummy/NativeLibraries.java
new file mode 100644
index 0000000..56cd734
--- /dev/null
+++ b/build/android/tests/multiple_proguards/src/dummy/NativeLibraries.java
@@ -0,0 +1,17 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.base.library_loader;
+
+/**
+ * This is a complete dummy, required because base now requires a version of
+ * NativeLibraries to build, but doesn't include it in its jar file.
+ */
+public class NativeLibraries {
+    public static boolean USE_LINKER = false;
+    public static boolean USE_LIBRARY_IN_ZIP_FILE = false;
+    public static boolean ENABLE_LINKER_TESTS = false;
+    static final String[] LIBRARIES = {};
+    static String VERSION_NUMBER = "";
+}
diff --git a/build/android/tests/symbolize/Makefile b/build/android/tests/symbolize/Makefile
new file mode 100644
index 0000000..5178a04
--- /dev/null
+++ b/build/android/tests/symbolize/Makefile
@@ -0,0 +1,11 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+TOOLCHAIN=../../../../third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi-
+CXX=$(TOOLCHAIN)g++
+
+lib%.so: %.cc
+	$(CXX) -nostdlib -g -fPIC -shared $< -o $@
+
+all: liba.so libb.so
diff --git a/build/android/tests/symbolize/a.cc b/build/android/tests/symbolize/a.cc
new file mode 100644
index 0000000..f0c7ca4
--- /dev/null
+++ b/build/android/tests/symbolize/a.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class A {
+ public:
+  A();
+  void Foo(int i);
+  void Bar(const char* c);
+};
+
+A::A() {}
+void A::Foo(int i) {}
+void A::Bar(const char* c) {}
diff --git a/build/android/tests/symbolize/b.cc b/build/android/tests/symbolize/b.cc
new file mode 100644
index 0000000..db87520
--- /dev/null
+++ b/build/android/tests/symbolize/b.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class B {
+ public:
+  B();
+  void Baz(float f);
+  void Qux(double d);
+};
+
+B::B() {}
+void B::Baz(float f) {}
+void B::Qux(double d) {}
diff --git a/build/android/tests/symbolize/liba.so b/build/android/tests/symbolize/liba.so
new file mode 100644
index 0000000..79cb739
--- /dev/null
+++ b/build/android/tests/symbolize/liba.so
Binary files differ
diff --git a/build/android/tests/symbolize/libb.so b/build/android/tests/symbolize/libb.so
new file mode 100644
index 0000000..7cf01d4
--- /dev/null
+++ b/build/android/tests/symbolize/libb.so
Binary files differ
diff --git a/build/android/tombstones.py b/build/android/tombstones.py
new file mode 100755
index 0000000..fd060ad
--- /dev/null
+++ b/build/android/tombstones.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Find the most recent tombstone file(s) on all connected devices
+# and prints their stacks.
+#
+# Assumes tombstone file was created with current symbols.
+
+import datetime
+import multiprocessing
+import os
+import re
+import subprocess
+import sys
+import optparse
+
+from pylib import android_commands
+from pylib.device import device_utils
+
+
+def _ListTombstones(device):
+  """List the tombstone files on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+
+  Yields:
+    Tuples of (tombstone filename, date time of file on device).
+  """
+  lines = device.RunShellCommand('TZ=UTC su -c ls -a -l /data/tombstones')
+  for line in lines:
+    if 'tombstone' in line and 'No such file or directory' not in line:
+      details = line.split()
+      t = datetime.datetime.strptime(details[-3] + ' ' + details[-2],
+                                     '%Y-%m-%d %H:%M')
+      yield details[-1], t
+
+
+def _GetDeviceDateTime(device):
+  """Determine the date time on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+
+  Returns:
+    A datetime instance.
+  """
+  device_now_string = device.RunShellCommand('TZ=UTC date')
+  return datetime.datetime.strptime(
+      device_now_string[0], '%a %b %d %H:%M:%S %Z %Y')
+
+
+def _GetTombstoneData(device, tombstone_file):
+  """Retrieve the tombstone data from the device
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: the tombstone to retrieve
+
+  Returns:
+    A list of lines
+  """
+  return device.ReadFile('/data/tombstones/' + tombstone_file, as_root=True)
+
+
+def _EraseTombstone(device, tombstone_file):
+  """Deletes a tombstone from the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: the tombstone to delete.
+  """
+  return device.RunShellCommand(
+      'rm /data/tombstones/' + tombstone_file, as_root=True)
+
+
+def _DeviceAbiToArch(device_abi):
+  # The order of this list is significant: it must find the more specific
+  # match (e.g., arm64) before the less specific one (e.g., arm).
+  arches = ['arm64', 'arm', 'x86_64', 'x86', 'mips']
+  for arch in arches:
+    if arch in device_abi:
+      return arch
+  raise RuntimeError('Unknown device ABI: %s' % device_abi)
+
+
+def _ResolveSymbols(tombstone_data, include_stack, device_abi):
+  """Run the stack tool for given tombstone input.
+
+  Args:
+    tombstone_data: a list of strings of tombstone data.
+    include_stack: boolean whether to include stack data in output.
+    device_abi: the default ABI of the device which generated the tombstone.
+
+  Yields:
+    A string for each line of resolved stack output.
+  """
+  # Check whether the tombstone data has an ABI listed; if so, use it in
+  # preference to the device's default ABI.
+  for line in tombstone_data:
+    found_abi = re.search('ABI: \'(.+?)\'', line)
+    if found_abi:
+      device_abi = found_abi.group(1)
+  arch = _DeviceAbiToArch(device_abi)
+  if not arch:
+    return
+
+  stack_tool = os.path.join(os.path.dirname(__file__), '..', '..',
+                            'third_party', 'android_platform', 'development',
+                            'scripts', 'stack')
+  proc = subprocess.Popen([stack_tool, '--arch', arch], stdin=subprocess.PIPE,
+                          stdout=subprocess.PIPE)
+  output = proc.communicate(input='\n'.join(tombstone_data))[0]
+  for line in output.split('\n'):
+    if not include_stack and 'Stack Data:' in line:
+      break
+    yield line
+
+
+def _ResolveTombstone(tombstone):
+  lines = []
+  lines += [tombstone['file'] + ' created on ' + str(tombstone['time']) +
+            ', about this long ago: ' +
+            (str(tombstone['device_now'] - tombstone['time']) +
+            ' Device: ' + tombstone['serial'])]
+  print '\n'.join(lines)
+  print 'Resolving...'
+  lines += _ResolveSymbols(tombstone['data'], tombstone['stack'],
+                           tombstone['device_abi'])
+  return lines
+
+
+def _ResolveTombstones(jobs, tombstones):
+  """Resolve a list of tombstones.
+
+  Args:
+    jobs: the number of jobs to use with multiprocess.
+    tombstones: a list of tombstones.
+  """
+  if not tombstones:
+    print 'No device attached?  Or no tombstones?'
+    return
+  if len(tombstones) == 1:
+    data = _ResolveTombstone(tombstones[0])
+  else:
+    pool = multiprocessing.Pool(processes=jobs)
+    data = pool.map(_ResolveTombstone, tombstones)
+    data = ['\n'.join(d) for d in data]
+  print '\n'.join(data)
+
+
+def _GetTombstonesForDevice(device, options):
+  """Returns a list of tombstones on a given device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    options: command line arguments from OptParse
+  """
+  ret = []
+  all_tombstones = list(_ListTombstones(device))
+  if not all_tombstones:
+    print 'No device attached?  Or no tombstones?'
+    return ret
+
+  # Sort the tombstones in date order, descending
+  all_tombstones.sort(cmp=lambda a, b: cmp(b[1], a[1]))
+
+  # Only resolve the most recent unless --all-tombstones given.
+  tombstones = all_tombstones if options.all_tombstones else [all_tombstones[0]]
+
+  device_now = _GetDeviceDateTime(device)
+  for tombstone_file, tombstone_time in tombstones:
+    ret += [{'serial': str(device),
+             'device_abi': device.GetProp('ro.product.cpu.abi'),
+             'device_now': device_now,
+             'time': tombstone_time,
+             'file': tombstone_file,
+             'stack': options.stack,
+             'data': _GetTombstoneData(device, tombstone_file)}]
+
+  # Erase all the tombstones if desired.
+  if options.wipe_tombstones:
+    for tombstone_file, _ in all_tombstones:
+      _EraseTombstone(device, tombstone_file)
+
+  return ret
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--device',
+                    help='The serial number of the device. If not specified '
+                         'will use all devices.')
+  parser.add_option('-a', '--all-tombstones', action='store_true',
+                    help="""Resolve symbols for all tombstones, rather than just
+                         the most recent""")
+  parser.add_option('-s', '--stack', action='store_true',
+                    help='Also include symbols for stack data')
+  parser.add_option('-w', '--wipe-tombstones', action='store_true',
+                    help='Erase all tombstones from device after processing')
+  parser.add_option('-j', '--jobs', type='int',
+                    default=4,
+                    help='Number of jobs to use when processing multiple '
+                         'crash stacks.')
+  options, _ = parser.parse_args()
+
+  if options.device:
+    devices = [options.device]
+  else:
+    devices = android_commands.GetAttachedDevices()
+
+  tombstones = []
+  for device_serial in devices:
+    device = device_utils.DeviceUtils(device_serial)
+    tombstones += _GetTombstonesForDevice(device, options)
+
+  _ResolveTombstones(options.jobs, tombstones)
+
+if __name__ == '__main__':
+  sys.exit(main())
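The tombstone listing above leans on parsing 'ls -a -l' output into
(filename, timestamp) pairs. A standalone sketch of that parse, fed a canned
line instead of live device output (the sample line is made up but follows
the same column layout):

    # Parse one 'ls -l'-style line the way _ListTombstones does: the last
    # three whitespace-separated fields are date, time, and filename.
    import datetime

    SAMPLE = '-rw------- system system 45859 2013-04-16 18:27 tombstone_00'

    def parse_tombstone_line(line):
      details = line.split()
      t = datetime.datetime.strptime(details[-3] + ' ' + details[-2],
                                     '%Y-%m-%d %H:%M')
      return details[-1], t

    print parse_tombstone_line(SAMPLE)
    # ('tombstone_00', datetime.datetime(2013, 4, 16, 18, 27))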
diff --git a/build/android/update_verification.py b/build/android/update_verification.py
new file mode 100755
index 0000000..fe89567
--- /dev/null
+++ b/build/android/update_verification.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs semi-automated update testing on a non-rooted device."""
+import logging
+import optparse
+import os
+import shutil
+import sys
+import time
+
+from pylib import android_commands
+from pylib.device import device_utils
+
+def _SaveAppData(device, package_name, from_apk=None, data_dir=None):
+  def _BackupAppData(data_dir=None):
+    device.old_interface.Adb().SendCommand('backup %s' % package_name)
+    backup_file = os.path.join(os.getcwd(), 'backup.ab')
+    assert os.path.exists(backup_file), 'Backup failed.'
+    if data_dir:
+      if not os.path.isdir(data_dir):
+        os.makedirs(data_dir)
+      shutil.move(backup_file, data_dir)
+      backup_file = os.path.join(data_dir, 'backup.ab')
+    print 'Application data saved to %s' % backup_file
+
+  if from_apk:
+    logging.info('Installing %s...', from_apk)
+    # TODO(jbudorick) Switch to AdbWrapper.Install on the impl switch.
+    output = device.old_interface.Install(from_apk, reinstall=True)
+    if 'Success' not in output:
+      raise Exception('Unable to install %s. output: %s' % (from_apk, output))
+
+  raw_input('Set the application state. Once ready, press enter and '
+            'select "Backup my data" on the device.')
+  _BackupAppData(data_dir)
+
+
+def _VerifyAppUpdate(device, to_apk, app_data, from_apk=None):
+  def _RestoreAppData():
+    assert os.path.exists(app_data), 'Backup file does not exist!'
+    device.old_interface.Adb().SendCommand('restore %s' % app_data)
+    # It seems restore command is not synchronous.
+    time.sleep(15)
+
+  if from_apk:
+    logging.info('Installing %s...', from_apk)
+    # TODO(jbudorick) Switch to AdbWrapper.Install on the impl switch.
+    output = device.old_interface.Install(from_apk, reinstall=True)
+    if 'Success' not in output:
+      raise Exception('Unable to install %s. output: %s' % (from_apk, output))
+
+  logging.info('Restoring the application data...')
+  raw_input('Press enter and select "Restore my data" on the device.')
+  _RestoreAppData()
+
+  logging.info('Verifying that %s cannot be installed side-by-side...',
+               to_apk)
+  # TODO(jbudorick) Switch to AdbWrapper.Install on the impl switch.
+  output = device.old_interface.Install(to_apk)
+  if 'INSTALL_FAILED_ALREADY_EXISTS' not in output:
+    if 'Success' in output:
+      raise Exception('Package name has changed! output: %s' % output)
+    else:
+      raise Exception(output)
+
+  logging.info('Verifying that %s can be overinstalled...', to_apk)
+  # TODO(jbudorick) Switch to AdbWrapper.Install on the impl switch.
+  output = device.old_interface.Install(to_apk, reinstall=True)
+  if 'Success' not in output:
+    raise Exception('Unable to install %s.\n output: %s' % (to_apk, output))
+  logging.info('Successfully updated to the new apk. Please verify that '
+               'the application data is preserved.')
+
+
+def main():
+  logger = logging.getLogger()
+  logger.setLevel(logging.DEBUG)
+  desc = (
+      'Performs semi-automated application update verification testing. '
+      'When given --save, it takes a snapshot of the application data '
+      'on the device. (A dialog on the device will prompt the user to grant '
+      'permission to backup the data.) Otherwise, it performs the update '
+      'testing as follows: '
+      '1. Installs the |from-apk| (optional). '
+      '2. Restores the previously stored snapshot of application data '
+      'given by |app-data| '
+      '(A dialog on the device will prompt the user to grant permission to '
+      'restore the data.) '
+      '3. Verifies that |to-apk| cannot be installed side-by-side. '
+      '4. Verifies that |to-apk| can replace |from-apk|.')
+  parser = optparse.OptionParser(description=desc)
+  parser.add_option('--package-name', help='Package name for the application.')
+  parser.add_option('--save', action='store_true',
+                    help=('Save a snapshot of application data. '
+                          'This will be saved as backup.ab in the '
+                          'current directory if the |app-data| directory '
+                          'is not specified.'))
+  parser.add_option('--from-apk',
+                    help=('APK to update from. This is optional if you already '
+                          'have the app installed.'))
+  parser.add_option('--to-apk', help='APK to update to.')
+  parser.add_option('--app-data',
+                    help=('Path to the application data to be restored or the '
+                          'directory where the data should be saved.'))
+  (options, args) = parser.parse_args()
+
+  if args:
+    parser.print_help(sys.stderr)
+    parser.error('Unknown arguments: %s.' % args)
+
+  devices = android_commands.GetAttachedDevices()
+  if len(devices) != 1:
+    parser.error('Exactly 1 device must be attached.')
+  device = device_utils.DeviceUtils(devices[0])
+
+  if options.from_apk:
+    assert os.path.isfile(options.from_apk)
+
+  if options.save:
+    if not options.package_name:
+      parser.print_help(sys.stderr)
+      parser.error('Missing --package-name.')
+    _SaveAppData(device, options.package_name, from_apk=options.from_apk,
+                 data_dir=options.app_data)
+  else:
+    if not options.to_apk or not options.app_data:
+      parser.print_help(sys.stderr)
+      parser.error('Missing --to-apk or --app-data.')
+    assert os.path.isfile(options.to_apk)
+    assert os.path.isfile(options.app_data)
+    _VerifyAppUpdate(device, options.to_apk, options.app_data,
+                     from_apk=options.from_apk)
+
+
+if __name__ == '__main__':
+  main()
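Under the hood, the save/restore halves of this script ride on adb's backup
and restore commands. A rough sketch of the same round trip using plain
'adb' subprocess calls instead of pylib's old_interface wrapper (the package
name is a placeholder, and both commands wait on a confirmation dialog on
the device):

    # Back up and restore one package's app data via adb. Sketch only;
    # the real script above goes through pylib's Adb().SendCommand().
    import os
    import subprocess

    def backup_app_data(package_name):
      # Writes backup.ab into the current directory once the user accepts
      # the backup dialog on the device.
      subprocess.call(['adb', 'backup', package_name])
      assert os.path.exists('backup.ab'), 'Backup failed.'
      return os.path.abspath('backup.ab')

    def restore_app_data(backup_file):
      # As in the script above, the restore is not synchronous with the
      # dialog, so callers should allow time for it to finish.
      subprocess.call(['adb', 'restore', backup_file])

    backup = backup_app_data('com.example.app')  # hypothetical package
    restore_app_data(backup)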
diff --git a/build/android/write_ordered_libraries.gypi b/build/android/write_ordered_libraries.gypi
new file mode 100644
index 0000000..1b52e71
--- /dev/null
+++ b/build/android/write_ordered_libraries.gypi
@@ -0,0 +1,43 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# generates a json file with the list of dependent libraries needed for a given
+# shared library or executable.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'actions': [
+#      'variables': {
+#        'input_libraries': 'shared library or executable to process',
+#        'ordered_libraries_file': 'file to generate'
+#      },
+#      'includes': [ '../../build/android/write_ordered_libraries.gypi' ],
+#    ],
+#  },
+#
+
+{
+  'action_name': 'ordered_libraries_<(_target_name)<(subtarget)',
+  'message': 'Writing dependency ordered libraries for <(_target_name)',
+  'variables': {
+    'input_libraries%': [],
+    'subtarget%': '',
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/write_ordered_libraries.py',
+    '<@(input_libraries)',
+  ],
+  'outputs': [
+    '<(ordered_libraries_file)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/write_ordered_libraries.py',
+    '--input-libraries=<(input_libraries)',
+    '--libraries-dir=<(SHARED_LIB_DIR),<(PRODUCT_DIR)',
+    '--readelf=<(android_readelf)',
+    '--output=<(ordered_libraries_file)',
+  ],
+}
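The action above ultimately asks write_ordered_libraries.py to walk each
shared library's direct dependencies (hence the --readelf flag). A hedged
sketch of the core of that computation, reading NEEDED entries with
'readelf -d'; the regex and the sample library path are illustrative, not
the script's actual code:

    # List the NEEDED (direct dependency) entries of a shared object.
    import re
    import subprocess

    def needed_libraries(library_path, readelf='readelf'):
      output = subprocess.check_output([readelf, '-d', library_path])
      return re.findall(r'\(NEEDED\)\s+Shared library: \[([^\]]+)\]', output)

    # e.g. needed_libraries('/usr/lib/libpng.so') -> ['libz.so.1', ...]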
diff --git a/build/apk_fake_jar.gypi b/build/apk_fake_jar.gypi
new file mode 100644
index 0000000..128b84c
--- /dev/null
+++ b/build/apk_fake_jar.gypi
@@ -0,0 +1,15 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java in a consistent manner.
+
+{
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['>(apk_output_jar_path)'],
+      'library_dexed_jars_paths': ['>(apk_output_jar_path)'],
+    },
+  },
+}
diff --git a/build/apk_test.gypi b/build/apk_test.gypi
new file mode 100644
index 0000000..95cce37
--- /dev/null
+++ b/build/apk_test.gypi
@@ -0,0 +1,40 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build APK based test suites.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'test_suite_name_apk',
+#   'type': 'none',
+#   'variables': {
+#     'test_suite_name': 'test_suite_name',  # string
+#     'input_jars_paths': ['/path/to/test_suite.jar', ... ],  # list
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+
+{
+  'dependencies': [
+    '<(DEPTH)/base/base.gyp:base_java',
+    '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+  ],
+  'conditions': [
+     ['OS == "android"', {
+       'variables': {
+         # These are used to configure java_apk.gypi included below.
+         'apk_name': '<(test_suite_name)',
+         'intermediate_dir': '<(PRODUCT_DIR)/<(test_suite_name)_apk',
+         'final_apk_path': '<(intermediate_dir)/<(test_suite_name)-debug.apk',
+         'java_in_dir': '<(DEPTH)/testing/android/java',
+         'native_lib_target': 'lib<(test_suite_name)',
+         # TODO(yfriedman, cjhopman): Support managed installs for gtests.
+         'gyp_managed_install': 0,
+       },
+       'includes': [ 'java_apk.gypi' ],
+     }],  # OS == "android"
+  ],  # conditions
+}
diff --git a/build/apply_locales.py b/build/apply_locales.py
new file mode 100755
index 0000000..6af7280
--- /dev/null
+++ b/build/apply_locales.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO: remove this script when GYP has for loops
+
+import sys
+import optparse
+
+def main(argv):
+
+  parser = optparse.OptionParser()
+  usage = 'usage: %s [options ...] format_string locale_list'
+  parser.set_usage(usage.replace('%s', '%prog'))
+  parser.add_option('-d', dest='dash_to_underscore', action="store_true",
+                    default=False,
+                    help='map "en-US" to "en" and "-" to "_" in locales')
+
+  (options, arglist) = parser.parse_args(argv)
+
+  if len(arglist) < 3:
+    print 'ERROR: need string and list of locales'
+    return 1
+
+  str_template = arglist[1]
+  locales = arglist[2:]
+
+  results = []
+  for locale in locales:
+    # For Cocoa to find the locale at runtime, it needs to use '_' instead
+    # of '-' (http://crbug.com/20441).  Also, 'en-US' should be represented
+    # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
+    if options.dash_to_underscore:
+      if locale == 'en-US':
+        locale = 'en'
+      locale = locale.replace('-', '_')
+    results.append(str_template.replace('ZZLOCALE', locale))
+
+  # Quote each element so filename spaces don't mess up GYP's attempt to parse
+  # it into a list.
+  print ' '.join(["'%s'" % x for x in results])
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
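The substitution above, distilled: with -d, 'en-US' collapses to 'en' and
dashes become underscores before ZZLOCALE is replaced. For example:

    # What apply_locales.py -d 'locale_ZZLOCALE.pak' en-US pt-BR produces;
    # the script itself prints these quoted and space-joined on one line.
    template = 'locale_ZZLOCALE.pak'
    for locale in ['en-US', 'pt-BR']:
      if locale == 'en-US':
        locale = 'en'
      locale = locale.replace('-', '_')
      print "'%s'" % template.replace('ZZLOCALE', locale)
    # 'locale_en.pak'
    # 'locale_pt_BR.pak'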
diff --git a/build/asan.saves b/build/asan.saves
new file mode 100644
index 0000000..0c4e4ed
--- /dev/null
+++ b/build/asan.saves
@@ -0,0 +1,23 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file lists symbols that should not be stripped by Xcode from the binaries
+# built for Mac OS X using AddressSanitizer
+# (http://dev.chromium.org/developers/testing/addresssanitizer).
+
+___asan_init
+___asan_handle_no_return
+___asan_register_global
+___asan_register_globals
+___asan_unregister_globals
+___asan_report_load1
+___asan_report_load2
+___asan_report_load4
+___asan_report_load8
+___asan_report_load16
+___asan_report_store1
+___asan_report_store2
+___asan_report_store4
+___asan_report_store8
+___asan_report_store16
diff --git a/build/branding_value.sh b/build/branding_value.sh
new file mode 100755
index 0000000..9fcb550
--- /dev/null
+++ b/build/branding_value.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a wrapper for fetching values from the BRANDING files.  Pass the
+# value of GYP's branding variable followed by the key you want and the right
+# file is checked.
+#
+#  branding_value.sh Chromium COPYRIGHT
+#  branding_value.sh Chromium PRODUCT_FULLNAME
+#
+
+set -e
+
+if [ $# -ne 2 ] ;  then
+  echo "error: expect two arguments, branding and key" >&2
+  exit 1
+fi
+
+BUILD_BRANDING=$1
+THE_KEY=$2
+
+# pushd/popd are bash-isms not guaranteed under /bin/sh, so resolve the
+# script directory in a subshell instead.
+BUILD_DIR=$(cd "$(dirname "${0}")" && pwd)
+
+TOP="${BUILD_DIR}/.."
+
+case ${BUILD_BRANDING} in
+  Chromium)
+    BRANDING_FILE="${TOP}/chrome/app/theme/chromium/BRANDING"
+    ;;
+  Chrome)
+    BRANDING_FILE="${TOP}/chrome/app/theme/google_chrome/BRANDING"
+    ;;
+  *)
+    echo "error: unknown branding: ${BUILD_BRANDING}" >&2
+    exit 1
+    ;;
+esac
+
+BRANDING_VALUE=$(sed -n -e "s/^${THE_KEY}=\(.*\)\$/\1/p" "${BRANDING_FILE}")
+
+if [ -z "${BRANDING_VALUE}" ] ; then
+  echo "error: failed to find key '${THE_KEY}'" >&2
+  exit 1
+fi
+
+echo "${BRANDING_VALUE}"
diff --git a/build/build-ctags.sh b/build/build-ctags.sh
new file mode 100755
index 0000000..825da27
--- /dev/null
+++ b/build/build-ctags.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [[ a"`ctags --version | head -1 | grep \"^Exuberant Ctags\"`" == "a" ]]; then
+  cat <<EOF
+  You must be using Exuberant Ctags, not just standard GNU ctags. If you are on
+  Debian or a related flavor of Linux, you may want to try running
+  apt-get install exuberant-ctags.
+EOF
+  exit
+fi
+
+CHROME_SRC_DIR="$PWD"
+
+fail() {
+  echo "Failed to create ctags for $1"
+  exit 1
+}
+
+ctags_cmd() {
+  echo "ctags --languages=C++ $1 --exclude=.git -R -f .tmp_tags"
+}
+
+build_dir() {
+  local extraexcludes=""
+  if [[ a"$1" == "a--extra-excludes" ]]; then
+    extraexcludes="--exclude=third_party --exclude=build --exclude=out"
+    shift
+  fi
+
+  cd "$CHROME_SRC_DIR/$1" || fail $1
+  # Discard error messages, since they are almost always about components
+  # that you simply haven't built (NaCl, for example).
+  $(ctags_cmd "$extraexcludes") 2> /dev/null || fail $1
+  mv -f .tmp_tags .tags
+}
+
+# We always build the top level but leave all submodules as optional.
+build_dir --extra-excludes "" "top level"
+
+# Build any other directories that are listed on the command line.
+for dir in "$@"; do
+  build_dir "$dir"
+done
diff --git a/build/build_config.h b/build/build_config.h
new file mode 100644
index 0000000..5534846
--- /dev/null
+++ b/build/build_config.h
@@ -0,0 +1,159 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file adds defines about the platform we're currently building on.
+//  Operating System:
+//    OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX) / OS_NACL
+//  Compiler:
+//    COMPILER_MSVC / COMPILER_GCC
+//  Processor:
+//    ARCH_CPU_X86 / ARCH_CPU_X86_64 / ARCH_CPU_X86_FAMILY (X86 or X86_64)
+//    ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
+
+#ifndef BUILD_BUILD_CONFIG_H_
+#define BUILD_BUILD_CONFIG_H_
+
+// A set of macros to use for platform detection.
+#if defined(__native_client__)
+// __native_client__ must be first, so that other OS_ defines are not set.
+#define OS_NACL 1
+#elif defined(ANDROID)
+#define OS_ANDROID 1
+#elif defined(__APPLE__)
+// Only include TargetConditionals after testing ANDROID, as some Android
+// builds on Mac don't have this header available and it isn't needed unless
+// the target is really Mac/iOS.
+#include <TargetConditionals.h>
+#define OS_MACOSX 1
+#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#define OS_IOS 1
+#endif  // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#elif defined(__linux__)
+#define OS_LINUX 1
+// include a system header to pull in features.h for glibc/uclibc macros.
+#include <unistd.h>
+#if defined(__GLIBC__) && !defined(__UCLIBC__)
+// we really are using glibc, not uClibc pretending to be glibc
+#define LIBC_GLIBC 1
+#endif
+#elif defined(_WIN32)
+#define OS_WIN 1
+#define TOOLKIT_VIEWS 1
+#elif defined(__FreeBSD__)
+#define OS_FREEBSD 1
+#elif defined(__OpenBSD__)
+#define OS_OPENBSD 1
+#elif defined(__sun)
+#define OS_SOLARIS 1
+#elif defined(__QNXNTO__)
+#define OS_QNX 1
+#else
+#error Please add support for your platform in build/build_config.h
+#endif
+
+#if defined(USE_OPENSSL) && defined(USE_NSS)
+#error Cannot use both OpenSSL and NSS
+#endif
+
+// For access to standard BSD features, use OS_BSD instead of a
+// more specific macro.
+#if defined(OS_FREEBSD) || defined(OS_OPENBSD)
+#define OS_BSD 1
+#endif
+
+// For access to standard POSIXish features, use OS_POSIX instead of a
+// more specific macro.
+#if defined(OS_MACOSX) || defined(OS_LINUX) || defined(OS_FREEBSD) ||     \
+    defined(OS_OPENBSD) || defined(OS_SOLARIS) || defined(OS_ANDROID) ||  \
+    defined(OS_NACL) || defined(OS_QNX)
+#define OS_POSIX 1
+#endif
+
+// Use tcmalloc
+#if (defined(OS_WIN) || defined(OS_LINUX) || defined(OS_ANDROID)) && \
+    !defined(NO_TCMALLOC)
+#define USE_TCMALLOC 1
+#endif
+
+// Compiler detection.
+#if defined(__GNUC__)
+#define COMPILER_GCC 1
+#elif defined(_MSC_VER)
+#define COMPILER_MSVC 1
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+// Processor architecture detection.  For more info on what's defined, see:
+//   http://msdn.microsoft.com/en-us/library/b0084kay.aspx
+//   http://www.agner.org/optimize/calling_conventions.pdf
+//   or with gcc, run: "echo | gcc -E -dM -"
+#if defined(_M_X64) || defined(__x86_64__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86_64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(_M_IX86) || defined(__i386__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__ARMEL__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARMEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__aarch64__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARM64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__pnacl__)
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__MIPSEL__)
+#if defined(__LP64__)
+#define ARCH_CPU_MIPS64_FAMILY 1
+#define ARCH_CPU_MIPS64EL 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#else
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPSEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#endif
+#else
+#error Please add support for your architecture in build/build_config.h
+#endif
+
+// Type detection for wchar_t.
+#if defined(OS_WIN)
+#define WCHAR_T_IS_UTF16
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+    defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff)
+#define WCHAR_T_IS_UTF32
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+    defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff)
+// On POSIX, we'll detect short wchar_t, but projects aren't guaranteed to
+// compile in this mode (in particular, Chrome doesn't). This is intended for
+// other projects using base that manage their own dependencies and make sure
+// short wchar works for them.
+#define WCHAR_T_IS_UTF16
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+#if defined(OS_ANDROID)
+// The compiler thinks std::string::const_iterator and "const char*" are
+// equivalent types.
+#define STD_STRING_ITERATOR_IS_CHAR_POINTER
+// The compiler thinks base::string16::const_iterator and "char16*" are
+// equivalent types.
+#define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER
+#endif
+
+#endif  // BUILD_BUILD_CONFIG_H_
diff --git a/build/check_return_value.py b/build/check_return_value.py
new file mode 100755
index 0000000..c659d1e
--- /dev/null
+++ b/build/check_return_value.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This program wraps an arbitrary command and prints "1" if the command ran
+successfully."""
+
+import os
+import subprocess
+import sys
+
+devnull = open(os.devnull, 'wb')
+if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull):
+  print 1
+else:
+  print 0
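A quick way to exercise the wrapper, assuming it is on disk as
check_return_value.py (the wrapped commands are just examples):

    # 'true' exits 0, so the wrapper prints 1; 'false' makes it print 0.
    # The wrapper itself always exits 0, so check_output won't raise.
    import subprocess
    print subprocess.check_output(['python', 'check_return_value.py', 'true'])
    print subprocess.check_output(['python', 'check_return_value.py', 'false'])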
diff --git a/build/chrome_settings.gypi b/build/chrome_settings.gypi
new file mode 100644
index 0000000..e9c7535
--- /dev/null
+++ b/build/chrome_settings.gypi
@@ -0,0 +1,30 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains settings for ../chrome/chrome.gyp that other gyp files
+# also use.
+{
+  'variables': {
+    # TODO: remove this helper when we have loops in GYP
+    'apply_locales_cmd': ['python', '<(DEPTH)/build/apply_locales.py'],
+
+    'conditions': [
+      ['OS=="mac"', {
+        'conditions': [
+          ['branding=="Chrome"', {
+            'mac_bundle_id': 'com.google.Chrome',
+            'mac_creator': 'rimZ',
+            # The policy .grd file also needs the bundle id.
+            'grit_defines': ['-D', 'mac_bundle_id=com.google.Chrome'],
+          }, {  # else: branding!="Chrome"
+            'mac_bundle_id': 'org.chromium.Chromium',
+            'mac_creator': 'Cr24',
+            # The policy .grd file also needs the bundle id.
+            'grit_defines': ['-D', 'mac_bundle_id=org.chromium.Chromium'],
+          }],  # branding
+        ],  # conditions
+      }],  # OS=="mac"
+    ],  # conditions
+  },  # variables
+}
diff --git a/build/common.croc b/build/common.croc
new file mode 100644
index 0000000..fde7a8b
--- /dev/null
+++ b/build/common.croc
@@ -0,0 +1,127 @@
+# -*- python -*-
+# Crocodile config file for Chromium - settings common to all platforms
+#
+# This should be specified before the platform-specific config, for example:
+#       croc -c chrome_common.croc -c linux/chrome_linux.croc
+
+{
+  # List of root directories, applied in order
+  'roots' : [
+    # Sub-paths we specifically care about and want to call out
+    {
+      'root' : '_/src',
+      'altname' : 'CHROMIUM',
+    },
+  ],
+
+  # List of rules, applied in order
+  # Note that any 'include':0 rules here will be overridden by the 'include':1
+  # rules in the platform-specific configs.
+  'rules' : [
+    # Don't scan for executable lines in uninstrumented C++ header files
+    {
+      'regexp' : '.*\\.(h|hpp)$',
+      'add_if_missing' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '',
+      'group' : 'source',
+    },
+    {
+      'regexp' : '.*_(test|unittest|uitest|browsertest)\\.',
+      'group' : 'test',
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.(c|h)$',
+      'language' : 'C',
+    },
+    {
+      'regexp' : '.*\\.(cc|cpp|hpp)$',
+      'language' : 'C++',
+    },
+
+    # Files/paths to include.  Specify these before the excludes, since rules
+    # are in order.
+    {
+      'regexp' : '^CHROMIUM/(base|media|net|printing|remoting|chrome|content|webkit/glue|native_client)/',
+      'include' : 1,
+    },
+    # Don't include subversion or mercurial SCM dirs
+    {
+      'regexp' : '.*/(\\.svn|\\.hg)/',
+      'include' : 0,
+    },
+    # Don't include output dirs
+    {
+      'regexp' : '.*/(Debug|Release|out|xcodebuild)/',
+      'include' : 0,
+    },
+    # Don't include third-party source
+    {
+      'regexp' : '.*/third_party/',
+      'include' : 0,
+    },
+    # We don't run the V8 test suite, so we don't care about V8 coverage.
+    {
+      'regexp' : '.*/v8/',
+      'include' : 0,
+    },
+  ],
+
+  # Paths to add source from
+  'add_files' : [
+    'CHROMIUM'
+  ],
+
+  # Statistics to print
+  'print_stats' : [
+    {
+      'stat' : 'files_executable',
+      'format' : '*RESULT FilesKnown: files_executable= %d files',
+    },
+    {
+      'stat' : 'files_instrumented',
+      'format' : '*RESULT FilesInstrumented: files_instrumented= %d files',
+    },
+    {
+      'stat' : '100.0 * files_instrumented / files_executable',
+      'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g percent',
+    },
+    {
+      'stat' : 'lines_executable',
+      'format' : '*RESULT LinesKnown: lines_known= %d lines',
+    },
+    {
+      'stat' : 'lines_instrumented',
+      'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines',
+    },
+    {
+      'stat' : 'lines_covered',
+      'format' : '*RESULT LinesCoveredSource: lines_covered_source= %d lines',
+      'group' : 'source',
+    },
+    {
+      'stat' : 'lines_covered',
+      'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines',
+      'group' : 'test',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCovered: percent_covered= %g percent',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g percent',
+      'group' : 'source',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g percent',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/common.gypi b/build/common.gypi
new file mode 100644
index 0000000..163abae
--- /dev/null
+++ b/build/common.gypi
@@ -0,0 +1,5725 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# IMPORTANT:
+# Please don't directly include this file if you are building via
+# gyp_chromium, since gyp_chromium automatically forces its inclusion.
+{
+  # Variables expected to be overridden on the GYP command line (-D) or by
+  # ~/.gyp/include.gypi.
+  'variables': {
+    # Putting a variables dict inside another variables dict looks kind of
+    # weird.  This is done so that 'host_arch', 'chromeos', etc are defined as
+    # variables within the outer variables dict here.  This is necessary
+    # to get these variables defined for the conditions within this variables
+    # dict that operate on these variables.
+    'variables': {
+      'variables': {
+        'variables': {
+          'variables': {
+            # Whether we're building a ChromeOS build.
+            'chromeos%': 0,
+
+            # Whether we're building the cast (chromecast) shell
+            'chromecast%': 0,
+
+            # Whether or not we are using the Aura windowing framework.
+            'use_aura%': 0,
+
+            # Whether or not we are building the Ash shell.
+            'use_ash%': 0,
+
+            # Whether or not we are using CRAS, the ChromeOS Audio Server.
+            'use_cras%': 0,
+
+            # Use a raw surface abstraction.
+            'use_ozone%': 0,
+
+            # Configure the build for small devices. See crbug.com/318413
+            'embedded%': 0,
+
+            'conditions': [
+              # Compute the architecture that we're building on.
+              ['OS=="win" or OS=="mac" or OS=="ios"', {
+                'host_arch%': 'ia32',
+              }, {
+                'host_arch%': '<!pymod_do_main(detect_host_arch)',
+              }],
+            ],
+          },
+          # Copy conditionally-set variables out one scope.
+          'chromeos%': '<(chromeos)',
+          'chromecast%': '<(chromecast)',
+          'use_aura%': '<(use_aura)',
+          'use_ash%': '<(use_ash)',
+          'use_cras%': '<(use_cras)',
+          'use_ozone%': '<(use_ozone)',
+          'embedded%': '<(embedded)',
+          'host_arch%': '<(host_arch)',
+
+          # Whether we are using Views Toolkit
+          'toolkit_views%': 0,
+
+          # Use OpenSSL instead of NSS as the underlying SSL and crypto
+          # implementation. Certificate verification will in most cases be
+          # handled by the OS. If OpenSSL's struct X509 is used to represent
+          # certificates, use_openssl_certs must be set.
+          'use_openssl%': 0,
+
+          # Typedef X509Certificate::OSCertHandle to OpenSSL's struct X509*.
+          'use_openssl_certs%': 0,
+
+          # Disable viewport meta tag by default.
+          'enable_viewport%': 0,
+
+          # Enable HiDPI support.
+          'enable_hidpi%': 0,
+
+          # Override buildtype to select the desired build flavor.
+          # Dev - everyday build for development/testing
+          # Official - release build (generally implies additional processing)
+          # TODO(mmoss) Once 'buildtype' is fully supported (e.g. Windows gyp
+          # conversion is done), some of the things which are now controlled by
+          # 'branding', such as symbol generation, will need to be refactored
+          # based on 'buildtype' (i.e. we don't care about saving symbols for
+          # non-Official builds).
+          'buildtype%': 'Dev',
+
+          # Override branding to select the desired branding flavor.
+          'branding%': 'Chromium',
+
+          'conditions': [
+            # ChromeOS, Windows and Linux use Aura and Ash.
+            ['chromeos==1 or OS=="win" or OS=="linux"', {
+              'use_ash%': 1,
+              'use_aura%': 1,
+            }],
+
+            ['chromecast==1 and OS!="android"', {
+              'embedded%': 1,
+              'use_ozone%': 1,
+            }],
+
+            # Ozone uses Aura.
+            ['use_ozone==1', {
+              'use_aura%': 1,
+            }],
+
+            # Whether we're a traditional desktop unix.
+            ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris") and chromeos==0', {
+              'desktop_linux%': 1,
+            }, {
+              'desktop_linux%': 0,
+            }],
+
+            # Embedded implies ozone.
+            ['embedded==1', {
+              'use_ozone%': 1,
+            }],
+
+            ['OS=="android"', {
+              'target_arch%': 'arm',
+            }, {
+              # Default architecture we're building for is the architecture we're
+              # building on, and possibly sub-architecture (for iOS builds).
+              'target_arch%': '<(host_arch)',
+            }],
+          ],
+        },
+        # Copy conditionally-set variables out one scope.
+        'chromeos%': '<(chromeos)',
+        'chromecast%': '<(chromecast)',
+        'desktop_linux%': '<(desktop_linux)',
+        'use_aura%': '<(use_aura)',
+        'use_ash%': '<(use_ash)',
+        'use_cras%': '<(use_cras)',
+        'use_ozone%': '<(use_ozone)',
+        'embedded%': '<(embedded)',
+        'use_openssl%': '<(use_openssl)',
+        'use_openssl_certs%': '<(use_openssl_certs)',
+        'enable_viewport%': '<(enable_viewport)',
+        'enable_hidpi%': '<(enable_hidpi)',
+        'buildtype%': '<(buildtype)',
+        'branding%': '<(branding)',
+        'host_arch%': '<(host_arch)',
+        'target_arch%': '<(target_arch)',
+
+        'target_subarch%': '',
+
+        # This is set when building the Android WebView inside the Android
+        # build system, using the 'android' gyp backend. The WebView code is
+        # still built when this is unset, but builds using the normal chromium
+        # build system.
+        'android_webview_build%': 0,
+
+        # This is set when building the Android WebView in ninja for the
+        # telemetry bot.
+        'android_webview_telemetry_build%': 0,
+
+        # Set ARM architecture version.
+        'arm_version%': 7,
+
+        # Use aurax11 for clipboard implementation. This is true on linux_aura.
+        'use_clipboard_aurax11%': 0,
+
+        # goma settings.
+        # 1 to use goma.
+        # If no gomadir is set, it uses the default gomadir.
+        'use_goma%': 0,
+        'gomadir%': '',
+
+        # The system root for cross-compiles. Default: none.
+        'sysroot%': '',
+        'chroot_cmd%': '',
+
+        # The system libdir used for this ABI.
+        'system_libdir%': 'lib',
+
+        # Default MIPS arch variant. This is set in the conditions block
+        # below for MIPS targets.
+        'mips_arch_variant%': '',
+
+        'conditions': [
+          # Ash needs Aura.
+          ['use_aura==0', {
+            'use_ash%': 0,
+          }],
+
+          # Set default value of toolkit_views based on OS.
+          ['OS=="win" or chromeos==1 or use_aura==1', {
+            'toolkit_views%': 1,
+          }, {
+            'toolkit_views%': 0,
+          }],
+
+          # Embedded builds use aura without ash or views.
+          ['embedded==1', {
+            'use_aura%': 1,
+            'use_ash%': 0,
+            'toolkit_views%': 0,
+          }],
+
+          # Enable HiDPI on Mac OS, Chrome OS and Windows.
+          ['OS=="mac" or chromeos==1 or OS=="win"', {
+            'enable_hidpi%': 1,
+          }],
+
+          # Enable the OpenSSL backend on Mac OS.
+          ['OS=="mac"', {
+            'use_openssl%': 1,
+          }],
+
+          # Enable App Launcher everywhere but mobile.
+          ['OS!="ios" and OS!="android"', {
+            'enable_app_list%': 1,
+          }, {
+            'enable_app_list%': 0,
+          }],
+
+          ['use_aura==1 or (OS!="win" and OS!="mac" and OS!="ios" and OS!="android")', {
+            'use_default_render_theme%': 1,
+          }, {
+            'use_default_render_theme%': 0,
+          }],
+
+          ['use_ozone==1', {
+            'use_ozone_evdev%': 1,
+          }, {
+            'use_ozone_evdev%': 0,
+          }],
+
+          # Set default gomadir.
+          ['OS=="win"', {
+            'gomadir': 'c:\\goma\\goma-win',
+          }, {
+            'gomadir': '<!(/bin/echo -n ${HOME}/goma)',
+          }],
+
+          # Set the default "target_subarch" on iOS. Valid values are "arm32",
+          # "arm64" and "both" (meaning a fat binary).
+          #
+          # TODO(sdefresne): change the default from "arm32" to "both" for
+          # "target_subarch" once http://crbug.com/339477 is fixed.
+          #
+          # TODO(sdefresne): set the "target_arch" to "arm" once compilation
+          # of skia has been fixed for simulator. http://crbug.com/342377
+          ['OS=="ios"', {
+            'target_subarch%': 'arm32',
+          }],
+
+          # Set arch variants for MIPS platforms.
+          ['target_arch=="mips64el"', {
+            'conditions': [
+              ['OS=="android"', {
+                'mips_arch_variant%': 'r6',
+              }, {
+                'mips_arch_variant%': 'r2',
+              }],
+            ],
+          }],
+
+          ['target_arch=="mipsel"', {
+            'mips_arch_variant%': 'r1',
+          }],
+        ],
+      },
+
+      # Copy conditionally-set variables out one scope.
+      'chromeos%': '<(chromeos)',
+      'chromecast%': '<(chromecast)',
+      'host_arch%': '<(host_arch)',
+      'target_arch%': '<(target_arch)',
+      'target_subarch%': '<(target_subarch)',
+      'mips_arch_variant%': '<(mips_arch_variant)',
+      'toolkit_views%': '<(toolkit_views)',
+      'desktop_linux%': '<(desktop_linux)',
+      'use_aura%': '<(use_aura)',
+      'use_ash%': '<(use_ash)',
+      'use_cras%': '<(use_cras)',
+      'use_ozone%': '<(use_ozone)',
+      'use_ozone_evdev%': '<(use_ozone_evdev)',
+      'use_clipboard_aurax11%': '<(use_clipboard_aurax11)',
+      'embedded%': '<(embedded)',
+      'use_openssl%': '<(use_openssl)',
+      'use_openssl_certs%': '<(use_openssl_certs)',
+      'enable_viewport%': '<(enable_viewport)',
+      'enable_hidpi%': '<(enable_hidpi)',
+      'android_webview_build%': '<(android_webview_build)',
+      'android_webview_telemetry_build%': '<(android_webview_telemetry_build)',
+      'use_goma%': '<(use_goma)',
+      'gomadir%': '<(gomadir)',
+      'enable_app_list%': '<(enable_app_list)',
+      'use_default_render_theme%': '<(use_default_render_theme)',
+      'buildtype%': '<(buildtype)',
+      'branding%': '<(branding)',
+      'arm_version%': '<(arm_version)',
+      'sysroot%': '<(sysroot)',
+      'chroot_cmd%': '<(chroot_cmd)',
+      'system_libdir%': '<(system_libdir)',
+
+      # Set to 1 to enable fast builds. Set to 2 for even faster builds
+      # (it disables debug info for fastest compilation - only for use
+      # on compile-only bots).
+      'fastbuild%': 0,
+
+      # Set to 1 to not store any build metadata (this isn't working yet but
+      # this flag will help us to get there). See http://crbug.com/314403.
+      # TODO(sebmarchand): Update this comment once this flag guarantees that
+      #     there's no build metadata in the build artifacts.
+      'dont_embed_build_metadata%': 0,
+
+      # Set to 1 to force Visual C++ to use legacy debug information format /Z7.
+      # This is useful for parallel compilation tools which can't support /Zi.
+      # Only used on Windows.
+      'win_z7%' : 0,
+
+      # Set to 1 to enable dcheck in release.
+      'dcheck_always_on%': 0,
+
+      # Set to 1 to make a build that disables unshipped tracing events.
+      # Note: this setting is ignored if buildtype=="Official".
+      'tracing_like_official_build%': 0,
+
+      # Disable image loader component extension by default.
+      'image_loader_extension%': 0,
+
+      # Set NEON compilation flags.
+      'arm_neon%': 1,
+
+      # Detect NEON support at run-time.
+      'arm_neon_optional%': 0,
+
+      # Use libjpeg-turbo as the JPEG codec used by Chromium.
+      'use_libjpeg_turbo%': 1,
+
+      # Use system libjpeg. Note that the system's libjpeg will be used even
+      # if use_libjpeg_turbo is set.
+      'use_system_libjpeg%': 0,
+
+      # By default, component is set to static_library and it can be
+      # overridden by the GYP command line or by ~/.gyp/include.gypi.
+      'component%': 'static_library',
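+      # For example, a shared-library ("component") build can be selected on
+      # the GYP command line:
+      #   GYP_DEFINES='component=shared_library' gclient runhooks
+      # or, as a sketch, via ~/.gyp/include.gypi:
+      #   { 'variables': { 'component': 'shared_library' } }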
+
+      # Set to select the Title Case versions of strings in GRD files.
+      'use_titlecase_in_grd%': 0,
+
+      # Use translations provided by volunteers at launchpad.net.  This
+      # currently only works on Linux.
+      'use_third_party_translations%': 0,
+
+      # Remoting compilation is enabled by default. Set to 0 to disable.
+      'remoting%': 1,
+
+      # Configuration policy is enabled by default. Set to 0 to disable.
+      'configuration_policy%': 1,
+
+      # The safe_browsing variable controls the build-time configuration of
+      # the safe browsing feature. Safe browsing can be compiled at 3 different
+      # levels: 0 disables it, 1 enables it fully, and 2 enables only UI and
+      # reporting features without enabling phishing and malware detection.
+      # This is useful for integrating third-party phishing/malware detection
+      # with the existing safe browsing logic.
+      'safe_browsing%': 1,
+
+      # Web speech is enabled by default. Set to 0 to disable.
+      'enable_web_speech%': 1,
+
+      # Notifications are compiled in by default. Set to 0 to disable.
+      'notifications%' : 1,
+
+      # Use dsymutil to generate real .dSYM files on Mac. The default is 0 for
+      # regular builds and 1 for ASan builds.
+      'mac_want_real_dsym%': 'default',
+
+      # If this is set, the clang plugins used on the buildbot will be used.
+      # Run tools/clang/scripts/update.sh to make sure they are compiled.
+      # This causes 'clang_chrome_plugins_flags' to be set.
+      # Has no effect if 'clang' is not set as well.
+      'clang_use_chrome_plugins%': 1,
+
+      # Enable building with ASAN (Clang's -fsanitize=address option).
+      # -fsanitize=address only works with clang, but asan=1 implies clang=1
+      # See https://sites.google.com/a/chromium.org/dev/developers/testing/addresssanitizer
+      'asan%': 0,
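+      # As a sketch (output directory and target illustrative), an ASan build
+      # is typically configured and built with:
+      #   GYP_DEFINES='asan=1' gclient runhooks && ninja -C out/Release chrome
+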
+      # Enable coverage gathering instrumentation in ASan. This flag also
+      # controls coverage granularity (1 for function-level coverage, 2 for
+      # block-level coverage).
+      'asan_coverage%': 0,
+
+      # Enable Chromium overrides of the default configurations for various
+      # dynamic tools (like ASan).
+      'use_sanitizer_options%': 0,
+
+      # Enable building with SyzyAsan.
+      # See https://code.google.com/p/sawbuck/wiki/SyzyASanHowTo
+      'syzyasan%': 0,
+
+      # Enable building with LSan (Clang's -fsanitize=leak option).
+      # -fsanitize=leak only works with clang, but lsan=1 implies clang=1
+      # See https://sites.google.com/a/chromium.org/dev/developers/testing/leaksanitizer
+      'lsan%': 0,
+
+      # Enable building with TSan (Clang's -fsanitize=thread option).
+      # -fsanitize=thread only works with clang, but tsan=1 implies clang=1
+      # See http://clang.llvm.org/docs/ThreadSanitizer.html
+      'tsan%': 0,
+      'tsan_blacklist%': '<(PRODUCT_DIR)/../../tools/memory/tsan_v2/ignores.txt',
+
+      # Enable building with MSan (Clang's -fsanitize=memory option).
+      # MemorySanitizer only works with clang, but msan=1 implies clang=1
+      # See http://clang.llvm.org/docs/MemorySanitizer.html
+      'msan%': 0,
+      'msan_blacklist%': '<(PRODUCT_DIR)/../../tools/msan/blacklist.txt',
+      # Track where uninitialized memory originates from. From fastest to
+      # slowest: 0 - no tracking, 1 - track only the initial allocation site, 2
+      # - track the chain of stores leading from allocation site to use site.
+      'msan_track_origins%': 1,
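+      # A sketch of one possible MSan configuration (the combination is
+      # illustrative, not required):
+      #   GYP_DEFINES='msan=1 msan_track_origins=2 use_instrumented_libraries=1'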
+
+      # Enable building with UBSan (Clang's -fsanitize=undefined option).
+      # -fsanitize=undefined only works with clang, but ubsan=1 implies clang=1
+      # See http://clang.llvm.org/docs/UsersManual.html
+      'ubsan%': 0,
+
+      # Enable building with UBsan's vptr (Clang's -fsanitize=vptr option).
+      # -fsanitize=vptr only works with clang, but ubsan_vptr=1 implies clang=1
+      'ubsan_vptr%': 0,
+      'ubsan_vptr_blacklist%': '<(PRODUCT_DIR)/../../tools/ubsan_vptr/blacklist.txt',
+
+      # Use the dynamic libraries instrumented by one of the sanitizers
+      # instead of the standard system libraries.
+      'use_instrumented_libraries%': 0,
+
+      # Use libc++ (third_party/libc++ and third_party/libc++abi) instead of
+      # libstdc++ as the standard library. This is intended for use in
+      # instrumented builds.
+      'use_custom_libcxx%': 0,
+
+      # Use system libc++ instead of the default C++ library, usually libstdc++.
+      # This is intended for iOS builds only.
+      'use_system_libcxx%': 0,
+
+      # Use a modified version of Clang to intercept allocated types and sizes
+      # for allocated objects. clang_type_profiler=1 implies clang=1.
+      # See http://dev.chromium.org/developers/deep-memory-profiler/cpp-object-type-identifier
+      # TODO(dmikurube): Support mac.  See http://crbug.com/123758#c11
+      'clang_type_profiler%': 0,
+
+      # Set to true to instrument the code with function call logger.
+      # See src/third_party/cygprofile/cyg-profile.cc for details.
+      'order_profiling%': 0,
+
+      # Use the provided profiled order file when linking the Chrome image.
+      # This makes Chrome faster by making better use of the CPU cache when
+      # executing code. This is known as PGO (profile guided optimization).
+      # See https://sites.google.com/a/google.com/chrome-msk/dev/boot-speed-up-effort
+      'order_text_section%': '',
+
+      # Set to 1 to compile with the -fPIC cflag on Linux. This is a must for
+      # shared libraries on Linux x86-64 and ARM, plus ASLR.
+      'linux_fpic%': 1,
+
+      # Whether one-click signin is enabled or not.
+      'enable_one_click_signin%': 0,
+
+      # Whether to back up data before sync.
+      'enable_pre_sync_backup%': 0,
+
+      # Enable Chrome browser extensions
+      'enable_extensions%': 1,
+
+      # Enable Google Now.
+      'enable_google_now%': 1,
+
+      # Enable printing support and UI. This variable is used to configure
+      # which parts of printing will be built. 0 disables printing completely,
+      # 1 enables it fully, and 2 enables only the codepath to generate a
+      # Metafile (usually a PDF or EMF) and disables print preview, cloud
+      # print, UI, etc.
+      'enable_printing%': 1,
+
+      # Set the version of CLD.
+      #   0: Don't specify the version. This option is for the Finch testing.
+      #   1: Use only CLD1.
+      #   2: Use only CLD2.
+      'cld_version%': 2,
+
+      # For CLD2, the size of the tables that should be included in the build.
+      # Only evaluated if cld_version == 2 or if building the CLD2 dynamic data
+      # tool explicitly.
+      # See third_party/cld_2/cld_2.gyp for more information.
+      #   0: Small tables, lower accuracy
+      #   1: Medium tables, medium accuracy
+      #   2: Large tables, high accuracy
+      'cld2_table_size%': 2,
+
+      # The data acquisition mode for CLD2. Possible values are:
+      #   static:     CLD2 data is statically linked to the executable.
+      #   standalone: CLD2 data is provided in a standalone file that is
+      #               bundled with the executable.
+      #   component:  CLD2 data is provided as a Chrome "component" and is
+      #               downloaded via the component updater.
+      #
+      # For more information on switching the CLD2 data source, see:
+      #   https://sites.google.com/a/chromium.org/dev/developers/how-tos/compact-language-detector-cld-data-source-configuration
+      #
+      # This string will be exposed in chrome://translate-internals under the
+      # heading "CLD Data Source". This allows easy determination of which
+      # data source the browser was built with.
+      'cld2_data_source%': 'static',
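+      # For example (illustrative), the standalone data source would be
+      # selected with:
+      #   GYP_DEFINES='cld_version=2 cld2_data_source=standalone' gclient runhooks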
+
+      # Enable spell checker.
+      'enable_spellcheck%': 1,
+
+      # Webrtc compilation is enabled by default. Set to 0 to disable.
+      'enable_webrtc%': 1,
+
+      # Enables use of the session service, which is enabled by default.
+      # Support for disabling depends on the platform.
+      'enable_session_service%': 1,
+
+      # Enables theme support, which is enabled by default.  Support for
+      # disabling depends on the platform.
+      'enable_themes%': 1,
+
+      # Enables autofill dialog and associated features; disabled by default.
+      'enable_autofill_dialog%' : 0,
+
+      # Defaults Wallet integration in Autofill dialog to use production
+      # servers. Unofficial builds won't have the proper API keys.
+      'enable_prod_wallet_service%': 0,
+
+      # Enables support for background apps.
+      'enable_background%': 1,
+
+      # Enable the task manager by default.
+      'enable_task_manager%': 1,
+
+      # Enables used resource whitelist generation; disabled by default.
+      'enable_resource_whitelist_generation%': 0,
+
+      # Enable FILE support by default.
+      'disable_file_support%': 0,
+
+      # Enable FTP support by default.
+      'disable_ftp_support%': 0,
+
+      # Use native Android functions in place of ICU.  Not supported by most
+      # components.
+      'use_icu_alternatives_on_android%': 0,
+
+      # XInput2 multitouch support is enabled by default (use_xi2_mt=2).
+      # Setting it to zero disables XI2 MT. When XI2 MT is enabled, the value
+      # also defines the required minimum XI2 minor version. For example,
+      # use_xi2_mt=2 means XI2.2 or above is required.
+      'use_xi2_mt%': 2,
+
+      # Use of precompiled headers on Windows.
+      #
+      # This variable may be explicitly set to 1 (enabled) or 0
+      # (disabled) in ~/.gyp/include.gypi or via the GYP command line.
+      # This setting will override the default.
+      #
+      # See
+      # http://code.google.com/p/chromium/wiki/WindowsPrecompiledHeaders
+      # for details.
+      'chromium_win_pch%': 0,
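+      # For example, a sketch of forcing PCH off via ~/.gyp/include.gypi:
+      #   { 'variables': { 'chromium_win_pch': 0 } }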
+
+      # Clang stuff.
+      'make_clang_dir%': 'third_party/llvm-build/Release+Asserts',
+      # Set this to true when building with Clang.
+      # See http://code.google.com/p/chromium/wiki/Clang for details.
+      # If this is set, clang is used as both host and target compiler in
+      # cross-compile builds.
+      'clang%': 0,
+
+      # Enable plug-in installation by default.
+      'enable_plugin_installation%': 1,
+
+      # Specifies whether to use canvas_skia.cc in place of platform
+      # specific implementations of gfx::Canvas. Affects text drawing in the
+      # Chrome UI.
+      # TODO(asvitkine): Enable this on all platforms and delete this flag.
+      #                  http://crbug.com/105550
+      'use_canvas_skia%': 0,
+
+      # Set to "tsan", "memcheck", or "drmemory" to configure the build to work
+      # with one of those tools.
+      'build_for_tool%': '',
+
+      # If no directory is specified then a temporary directory will be used.
+      'test_isolation_outdir%': '',
+      # True if isolate should fail if the isolate files refer to files
+      # that are missing.
+      'test_isolation_fail_on_missing': 1,
+
+      'wix_path%': '<(DEPTH)/third_party/wix',
+
+      # Managed users are enabled by default.
+      'enable_managed_users%': 1,
+
+      # Platform natively supports discardable memory.
+      'native_discardable_memory%': 0,
+
+      # Platform sends memory pressure signals natively.
+      'native_memory_pressure_signals%': 0,
+
+      'spdy_proxy_auth_property%' : '',
+      'spdy_proxy_auth_value%' : '',
+      'enable_mdns%' : 0,
+      'enable_service_discovery%': 0,
+      'enable_wifi_bootstrapping%': 0,
+      'enable_hangout_services_extension%': 0,
+
+      # Enable the Syzygy optimization step.
+      'syzygy_optimize%': 0,
+
+      # Enable hole punching for the protected video.
+      'video_hole%': 0,
+
+      # Temporary hack to allow us to unify blink's definitions of load
+      # completion. blink uses a crazy set of constraints to determine load
+      # completion, but only actually requires them for layout tests. However,
+      # we need to maintain all the old behaviors while the plumbing is put in
+      # place on both sides of the repo boundary.
+      'enable_load_completion_hacks%': 1,
+
+      # Automatically select platforms under ozone. Turn this off to
+      # build only explicitly selected platforms.
+      'ozone_auto_platforms%': 1,
+
+      # If this is set, clang is used as the host compiler, but not as the
+      # target compiler. This is on by default.
+      'host_clang%': 1,
+
+      'conditions': [
+        # A flag for POSIX platforms
+        ['OS=="win"', {
+          'os_posix%': 0,
+        }, {
+          'os_posix%': 1,
+        }],
+
+        # A flag for BSD platforms
+        ['OS=="freebsd" or OS=="openbsd"', {
+          'os_bsd%': 1,
+        }, {
+          'os_bsd%': 0,
+        }],
+
+        # NSS usage.
+        ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris") and use_openssl==0', {
+          'use_nss%': 1,
+        }, {
+          'use_nss%': 0,
+        }],
+
+        # When OpenSSL is used for SSL and crypto on Unix-like systems, use
+        # OpenSSL's certificate definition.
+        ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris") and use_openssl==1', {
+          'use_openssl_certs%': 1,
+        }, {
+          'use_openssl_certs%': 0,
+        }],
+
+        # libudev usage.  This currently only affects the content layer.
+        ['OS=="linux" and embedded==0', {
+          'use_udev%': 1,
+        }, {
+          'use_udev%': 0,
+        }],
+
+        # Flags to use X11 on non-Mac POSIX platforms.
+        ['OS=="win" or OS=="mac" or OS=="ios" or OS=="android" or use_ozone==1', {
+          'use_x11%': 0,
+        }, {
+          'use_x11%': 1,
+        }],
+
+        # Flags to use glib.
+        ['OS=="win" or OS=="mac" or OS=="ios" or OS=="android" or use_ozone==1', {
+          'use_glib%': 0,
+        }, {
+          'use_glib%': 1,
+        }],
+
+        # Flags to use pango and cairo.
+        ['OS=="win" or OS=="mac" or OS=="ios" or OS=="android" or embedded==1', {
+          'use_pango%': 0,
+          'use_cairo%': 0,
+        }, {
+          'use_pango%': 1,
+          'use_cairo%': 1,
+        }],
+
+        # DBus usage.
+        ['OS=="linux" and embedded==0', {
+          'use_dbus%': 1,
+        }, {
+          'use_dbus%': 0,
+        }],
+
+        # We always use skia text rendering in Aura on Windows, since GDI
+        # doesn't agree with our BackingStore.
+        # TODO(beng): remove once skia text rendering is on by default.
+        ['use_aura==1 and OS=="win"', {
+          'enable_skia_text%': 1,
+        }],
+
+        # A flag to enable or disable our compile-time dependency
+        # on gnome-keyring. If that dependency is disabled, no gnome-keyring
+        # support will be available. This option is useful
+        # for Linux distributions and for Aura.
+        ['OS!="linux" or chromeos==1', {
+          'use_gnome_keyring%': 0,
+        }, {
+          'use_gnome_keyring%': 1,
+        }],
+
+        ['OS=="mac" or OS=="ios"', {
+          # Mac and iOS want Title Case strings
+          'use_titlecase_in_grd%': 1,
+        }],
+
+        # Enable the image loader extension on Chrome OS.
+        ['chromeos==1', {
+          'image_loader_extension%': 1,
+        }, {
+          'image_loader_extension%': 0,
+        }],
+
+        ['OS=="win" or OS=="mac" or (OS=="linux" and chromeos==0)', {
+          'enable_one_click_signin%': 1,
+          'enable_pre_sync_backup%': 1,
+        }],
+
+        ['OS=="android"', {
+          'enable_extensions%': 0,
+          'enable_google_now%': 0,
+          'cld_version%': 1,
+          'enable_spellcheck%': 0,
+          'enable_themes%': 0,
+          'remoting%': 0,
+          'arm_neon%': 0,
+          'arm_neon_optional%': 1,
+          'native_discardable_memory%': 1,
+          'native_memory_pressure_signals%': 1,
+          'enable_printing%': 2,
+          'enable_task_manager%': 0,
+          # Set to 1 once we have a notification system for Android.
+          # http://crbug.com/115320
+          'notifications%': 0,
+          'video_hole%': 1,
+        }],
+
+        # Android OS includes support for proprietary codecs regardless of
+        # building Chromium or Google Chrome. We also ship Google Chrome and
+        # Chromecast with proprietary codecs.
+        ['OS=="android" or branding=="Chrome" or chromecast==1', {
+          'proprietary_codecs%': 1,
+        }, {
+          'proprietary_codecs%': 0,
+        }],
+
+        ['OS=="mac" or OS=="ios"', {
+          'native_discardable_memory%': 1,
+          'native_memory_pressure_signals%': 1,
+        }],
+
+        # Enable autofill dialog for Android, Mac and Views-enabled platforms.
+        ['toolkit_views==1 or (OS=="android" and android_webview_build==0) or OS=="mac"', {
+          'enable_autofill_dialog%': 1,
+
+          'conditions': [
+            ['buildtype=="Official"', {
+              'enable_prod_wallet_service%': 1,
+            }],
+          ]
+        }],
+
+        ['OS=="android"', {
+          'enable_webrtc%': 1,
+        }],
+
+        ['OS=="ios"', {
+          'disable_ftp_support%': 1,
+          'enable_extensions%': 0,
+          'enable_google_now%': 0,
+          'cld_version%': 1,
+          'enable_printing%': 0,
+          'enable_session_service%': 0,
+          'enable_themes%': 0,
+          'enable_webrtc%': 0,
+          'notifications%': 0,
+          'remoting%': 0,
+          'safe_browsing%': 0,
+          'enable_managed_users%': 0,
+          'enable_task_manager%': 0,
+          'use_system_libcxx%': 1,
+          'support_pre_M6_history_database%': 0,
+        }],
+
+        # Use GPU accelerated cross process image transport by default on
+        # Linux builds with the Aura window manager.
+        ['use_aura==1 and OS=="linux"', {
+          'ui_compositor_image_transport%': 1,
+        }, {
+          'ui_compositor_image_transport%': 0,
+        }],
+
+        # Turn precompiled headers on by default.
+        ['OS=="win" and buildtype!="Official"', {
+          'chromium_win_pch%': 1
+        }],
+
+        ['chromeos==1 or OS=="android" or OS=="ios" or desktop_linux==1', {
+          'enable_plugin_installation%': 0,
+        }, {
+          'enable_plugin_installation%': 1,
+        }],
+
+        # Whether PPAPI is enabled.
+        ['OS=="android" or OS=="ios" or embedded==1', {
+          'enable_plugins%': 0,
+        }, {
+          'enable_plugins%': 1,
+        }],
+
+        # linux_use_bundled_gold: whether to use the gold linker binary checked
+        # into third_party/binutils.  Force this off via GYP_DEFINES when you
+        # are using a custom toolchain and need to control -B in ldflags.
+        # Do not use 32-bit gold on 32-bit hosts as it runs out of address
+        # space for component=static_library builds.
+        ['OS=="linux" and (target_arch=="x64" or target_arch=="arm")', {
+          'linux_use_bundled_gold%': 1,
+        }, {
+          'linux_use_bundled_gold%': 0,
+        }],
+
+        # linux_use_bundled_binutils: whether to use the binutils binaries
+        # checked into third_party/binutils.  These are not multi-arch so
+        # cannot be used except on x86 and x86-64 (the only two architectures
+        # which are currently checked in).  Force this off via GYP_DEFINES
+        # when you are using a custom toolchain and need to control -B in
+        # cflags.
+        ['OS=="linux" and (target_arch=="x64")', {
+          'linux_use_bundled_binutils%': 1,
+        }, {
+          'linux_use_bundled_binutils%': 0,
+        }],
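+        # For instance (illustrative), both can be forced off for a custom
+        # toolchain with:
+        #   GYP_DEFINES='linux_use_bundled_gold=0 linux_use_bundled_binutils=0'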
+
+        # linux_use_gold_flags: whether to use build flags that rely on gold.
+        # On by default for x64 Linux.
+        ['OS=="linux" and target_arch=="x64"', {
+          'linux_use_gold_flags%': 1,
+        }, {
+          'linux_use_gold_flags%': 0,
+        }],
+
+        # linux_use_debug_fission: whether to use split DWARF debug info
+        # files. This can reduce link time significantly, but is incompatible
+        # with some utilities such as icecc and ccache. Requires gold and
+        # gcc >= 4.8 or clang.
+        # http://gcc.gnu.org/wiki/DebugFission
+        ['OS=="linux" and target_arch=="x64"', {
+          'linux_use_debug_fission%': 1,
+        }, {
+          'linux_use_debug_fission%': 0,
+        }],
+
+        ['OS=="android" or OS=="ios"', {
+          'enable_captive_portal_detection%': 0,
+        }, {
+          'enable_captive_portal_detection%': 1,
+        }],
+
+        # Enable Skia UI text drawing incrementally on different platforms.
+        # http://crbug.com/105550
+        #
+        # On Aura, this allows per-tile painting to be used in the browser
+        # compositor.
+        ['OS!="android"', {
+          'use_canvas_skia%': 1,
+        }],
+
+        ['chromeos==1', {
+          # When building for Chrome OS we don't want Chromium to use
+          # libjpeg_turbo.
+          'use_libjpeg_turbo%': 0,
+        }],
+
+        # Do not enable the Settings App on ChromeOS.
+        ['enable_app_list==1 and chromeos==0', {
+          'enable_settings_app%': 1,
+        }, {
+          'enable_settings_app%': 0,
+        }],
+
+        ['OS=="linux" and target_arch=="arm" and chromeos==0', {
+          # Set some defaults for arm/linux chrome builds
+          'use_allocator%': 'none',
+          # sysroot needs to be an absolute path, otherwise it generates
+          # incorrect results when passed to pkg-config.
+          'sysroot%': '<!(cd <(DEPTH) && pwd -P)/arm-sysroot',
+        }], # OS=="linux" and target_arch=="arm" and chromeos==0
+
+        ['OS=="linux" and branding=="Chrome" and buildtype=="Official" and chromeos==0', {
+          'conditions': [
+            ['target_arch=="x64"', {
+              'sysroot%': '<!(cd <(DEPTH) && pwd -P)/chrome/installer/linux/debian_wheezy_amd64-sysroot',
+            }],
+            ['target_arch=="ia32"', {
+              'sysroot%': '<!(cd <(DEPTH) && pwd -P)/chrome/installer/linux/debian_wheezy_i386-sysroot',
+            }],
+          ],
+        }], # OS=="linux" and branding=="Chrome" and buildtype=="Official" and chromeos==0
+
+        ['OS=="linux" and target_arch=="mipsel"', {
+          'sysroot%': '<!(cd <(DEPTH) && pwd -P)/mipsel-sysroot/sysroot',
+          'CXX%': '<!(cd <(DEPTH) && pwd -P)/mipsel-sysroot/bin/mipsel-linux-gnu-gcc',
+        }],
+
+        # Whether test targets should be run, archived or just have their
+        # dependencies verified. All the test targets have the '_run' suffix,
+        # e.g. base_unittests_run runs the target base_unittests. The test
+        # target always calls tools/swarming_client/isolate.py. See the script's
+        # --help for more information. Meant to be overridden with GYP_DEFINES.
+        # TODO(maruel): Remove the conditions as more configurations are
+        # supported.
+        ['OS!="ios" and OS!="android" and chromeos==0', {
+          'test_isolation_mode%': 'check',
+        }, {
+          'test_isolation_mode%': 'noop',
+        }],
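+        # For instance, a sketch of running one isolated test target (output
+        # directory illustrative):
+        #   ninja -C out/Release base_unittests_run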
+        # Whether the Android build uses OpenMAX DL FFT.
+        ['OS=="android" and ((target_arch=="arm" and arm_version >= 7) or target_arch=="ia32" or target_arch=="x64" or target_arch=="arm64" or target_arch=="mipsel")', {
+          # Currently only supported on Android ARMv7+, ARM64, ia32, x64 and mipsel.
+          # When enabled, this will also enable WebAudio support on
+          # Android for these architectures.  Default is enabled.  Whether
+          # WebAudio is actually available depends on runtime settings
+          # and flags.
+          'use_openmax_dl_fft%': 1,
+        }, {
+          'use_openmax_dl_fft%': 0,
+        }],
+        ['OS=="win" or OS=="linux"', {
+            'enable_mdns%' : 1,
+        }],
+
+        # Turns on compiler optimizations in V8 in Debug build, except
+        # on android_clang, where we're hitting a weird linker error.
+        # TODO(dpranke): http://crbug.com/266155 .
+        ['OS=="android"', {
+          'v8_optimized_debug%': 1,
+        }, {
+          'v8_optimized_debug%': 2,
+        }],
+
+        # Disable various features by default on embedded.
+        ['embedded==1', {
+          'remoting%': 0,
+          'enable_printing%': 0,
+        }],
+
+        # By default, use ICU data file (icudtl.dat) on all platforms
+        # except when building Android WebView or Chromecast.
+        # TODO(jshin): Handle 'use_system_icu' on Linux (Chromium).
+        ['android_webview_build==0 and android_webview_telemetry_build==0 and chromecast==0', {
+          'icu_use_data_file_flag%' : 1,
+        }, {
+          'icu_use_data_file_flag%' : 0,
+        }],
+        ['OS=="win" or OS=="mac"', {
+            'enable_wifi_bootstrapping%' : 1,
+        }],
+
+        # Path to sas.dll, which provides the SendSAS function.
+        # http://msdn.microsoft.com/en-us/library/windows/desktop/dd979761(v=vs.85).aspx
+        ['target_arch=="x64"', {
+          'sas_dll_path%': '<(DEPTH)/third_party/platformsdk_win7/files/redist/amd64',
+        }, {
+          'sas_dll_path%': '<(DEPTH)/third_party/platformsdk_win7/files/redist/x86',
+        }],
+
+        # Turn on JNI generation optimizations on non-WebView builds.
+        ['OS=="android" and android_webview_build==0', {
+          'optimize_jni_generation%': 1,
+        }, {
+          'optimize_jni_generation%': 0,
+        }],
+      ],
+
+      # Set this to 1 to enable use of concatenated impulse responses
+      # for the HRTF panner in WebAudio.
+      'use_concatenated_impulse_responses': 1,
+
+      # You can set the variable 'use_official_google_api_keys' to 1
+      # to use the Google-internal file containing official API keys
+      # for Google Chrome even in a developer build.  Setting this
+      # variable explicitly to 1 will cause your build to fail if the
+      # internal file is missing.
+      #
+      # The variable is documented here, but not handled in this file;
+      # see //google_apis/determine_use_official_keys.gypi for the
+      # implementation.
+      #
+      # Set the variable to 0 to not use the internal file, even when
+      # it exists in your checkout.
+      #
+      # Leave it unset in your include.gypi to have the variable
+      # implicitly set to 1 if you have
+      # src/google_apis/internal/google_chrome_api_keys.h in your
+      # checkout, and implicitly set to 0 if not.
+      #
+      # Note that official builds always behave as if the variable
+      # was explicitly set to 1, i.e. they always use official keys,
+      # and will fail to build if the internal file is missing.
+      #
+      # NOTE: You MUST NOT explicitly set the variable to 2 in your
+      # include.gypi or by other means. Due to subtleties of GYP, this
+      # is not the same as leaving the variable unset, even though its
+      # default value in
+      # //google_apis/determine_use_official_keys.gypi is 2.
+
+      # Set these to bake the specified API keys and OAuth client
+      # IDs/secrets into your build.
+      #
+      # If you create a build without values baked in, you can instead
+      # set environment variables to provide the keys at runtime (see
+      # src/google_apis/google_api_keys.h for details).  Features that
+      # require server-side APIs may fail to work if no keys are
+      # provided.
+      #
+      # Note that if you are building an official build or if
+      # use_official_google_api_keys has been set to 1 (explicitly or
+      # implicitly), these values will be ignored and the official
+      # keys will be used instead.
+      'google_api_key%': '',
+      'google_default_client_id%': '',
+      'google_default_client_secret%': '',
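+      # Illustrative only (placeholder values, not real keys):
+      #   GYP_DEFINES='google_api_key=MY_KEY google_default_client_id=MY_ID
+      #       google_default_client_secret=MY_SECRET' gclient runhooks
+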
+      # Native Client is enabled by default.
+      'disable_nacl%': '0',
+
+      # Set to 1 to support old history files
+      'support_pre_M6_history_database%': '1',
+    },
+
+    # Copy conditionally-set variables out one scope.
+    'branding%': '<(branding)',
+    'buildtype%': '<(buildtype)',
+    'target_arch%': '<(target_arch)',
+    'target_subarch%': '<(target_subarch)',
+    'mips_arch_variant%': '<(mips_arch_variant)',
+    'host_arch%': '<(host_arch)',
+    'toolkit_views%': '<(toolkit_views)',
+    'ui_compositor_image_transport%': '<(ui_compositor_image_transport)',
+    'use_aura%': '<(use_aura)',
+    'use_ash%': '<(use_ash)',
+    'use_cras%': '<(use_cras)',
+    'use_openssl%': '<(use_openssl)',
+    'use_openssl_certs%': '<(use_openssl_certs)',
+    'use_nss%': '<(use_nss)',
+    'use_udev%': '<(use_udev)',
+    'os_bsd%': '<(os_bsd)',
+    'os_posix%': '<(os_posix)',
+    'use_dbus%': '<(use_dbus)',
+    'use_glib%': '<(use_glib)',
+    'use_pango%': '<(use_pango)',
+    'use_cairo%': '<(use_cairo)',
+    'use_ozone%': '<(use_ozone)',
+    'use_ozone_evdev%': '<(use_ozone_evdev)',
+    'use_clipboard_aurax11%': '<(use_clipboard_aurax11)',
+    'desktop_linux%': '<(desktop_linux)',
+    'use_x11%': '<(use_x11)',
+    'use_gnome_keyring%': '<(use_gnome_keyring)',
+    'linux_fpic%': '<(linux_fpic)',
+    'chromeos%': '<(chromeos)',
+    'chromecast%': '<(chromecast)',
+    'enable_viewport%': '<(enable_viewport)',
+    'enable_hidpi%': '<(enable_hidpi)',
+    'use_xi2_mt%':'<(use_xi2_mt)',
+    'image_loader_extension%': '<(image_loader_extension)',
+    'fastbuild%': '<(fastbuild)',
+    'dont_embed_build_metadata%': '<(dont_embed_build_metadata)',
+    'win_z7%': '<(win_z7)',
+    'dcheck_always_on%': '<(dcheck_always_on)',
+    'tracing_like_official_build%': '<(tracing_like_official_build)',
+    'arm_version%': '<(arm_version)',
+    'arm_neon%': '<(arm_neon)',
+    'arm_neon_optional%': '<(arm_neon_optional)',
+    'sysroot%': '<(sysroot)',
+    'chroot_cmd%': '<(chroot_cmd)',
+    'system_libdir%': '<(system_libdir)',
+    'component%': '<(component)',
+    'enable_resource_whitelist_generation%': '<(enable_resource_whitelist_generation)',
+    'use_titlecase_in_grd%': '<(use_titlecase_in_grd)',
+    'use_third_party_translations%': '<(use_third_party_translations)',
+    'remoting%': '<(remoting)',
+    'enable_one_click_signin%': '<(enable_one_click_signin)',
+    'enable_pre_sync_backup%': '<(enable_pre_sync_backup)',
+    'enable_webrtc%': '<(enable_webrtc)',
+    'chromium_win_pch%': '<(chromium_win_pch)',
+    'configuration_policy%': '<(configuration_policy)',
+    'safe_browsing%': '<(safe_browsing)',
+    'enable_web_speech%': '<(enable_web_speech)',
+    'notifications%': '<(notifications)',
+    'clang_use_chrome_plugins%': '<(clang_use_chrome_plugins)',
+    'mac_want_real_dsym%': '<(mac_want_real_dsym)',
+    'asan%': '<(asan)',
+    'asan_coverage%': '<(asan_coverage)',
+    'use_sanitizer_options%': '<(use_sanitizer_options)',
+    'syzyasan%': '<(syzyasan)',
+    'syzygy_optimize%': '<(syzygy_optimize)',
+    'lsan%': '<(lsan)',
+    'msan%': '<(msan)',
+    'msan_blacklist%': '<(msan_blacklist)',
+    'msan_track_origins%': '<(msan_track_origins)',
+    'tsan%': '<(tsan)',
+    'tsan_blacklist%': '<(tsan_blacklist)',
+    'ubsan%': '<(ubsan)',
+    'ubsan_vptr%': '<(ubsan_vptr)',
+    'ubsan_vptr_blacklist%': '<(ubsan_vptr_blacklist)',
+    'use_instrumented_libraries%': '<(use_instrumented_libraries)',
+    'use_custom_libcxx%': '<(use_custom_libcxx)',
+    'use_system_libcxx%': '<(use_system_libcxx)',
+    'clang_type_profiler%': '<(clang_type_profiler)',
+    'order_profiling%': '<(order_profiling)',
+    'order_text_section%': '<(order_text_section)',
+    'enable_extensions%': '<(enable_extensions)',
+    'enable_plugin_installation%': '<(enable_plugin_installation)',
+    'enable_plugins%': '<(enable_plugins)',
+    'enable_session_service%': '<(enable_session_service)',
+    'enable_themes%': '<(enable_themes)',
+    'enable_autofill_dialog%': '<(enable_autofill_dialog)',
+    'enable_prod_wallet_service%': '<(enable_prod_wallet_service)',
+    'enable_background%': '<(enable_background)',
+    'linux_use_bundled_gold%': '<(linux_use_bundled_gold)',
+    'linux_use_bundled_binutils%': '<(linux_use_bundled_binutils)',
+    'linux_use_gold_flags%': '<(linux_use_gold_flags)',
+    'linux_use_debug_fission%': '<(linux_use_debug_fission)',
+    'use_canvas_skia%': '<(use_canvas_skia)',
+    'test_isolation_mode%': '<(test_isolation_mode)',
+    'test_isolation_outdir%': '<(test_isolation_outdir)',
+    'test_isolation_fail_on_missing': '<(test_isolation_fail_on_missing)',
+    'enable_printing%': '<(enable_printing)',
+    'enable_spellcheck%': '<(enable_spellcheck)',
+    'enable_google_now%': '<(enable_google_now)',
+    'cld_version%': '<(cld_version)',
+    'cld2_table_size%': '<(cld2_table_size)',
+    'cld2_data_source%': '<(cld2_data_source)',
+    'enable_captive_portal_detection%': '<(enable_captive_portal_detection)',
+    'disable_file_support%': '<(disable_file_support)',
+    'disable_ftp_support%': '<(disable_ftp_support)',
+    'use_icu_alternatives_on_android%': '<(use_icu_alternatives_on_android)',
+    'enable_task_manager%': '<(enable_task_manager)',
+    'sas_dll_path%': '<(sas_dll_path)',
+    'wix_path%': '<(wix_path)',
+    'use_libjpeg_turbo%': '<(use_libjpeg_turbo)',
+    'use_system_libjpeg%': '<(use_system_libjpeg)',
+    'android_webview_build%': '<(android_webview_build)',
+    'android_webview_telemetry_build%': '<(android_webview_telemetry_build)',
+    'icu_use_data_file_flag%': '<(icu_use_data_file_flag)',
+    'gyp_managed_install%': 0,
+    'create_standalone_apk%': 1,
+    'enable_app_list%': '<(enable_app_list)',
+    'use_default_render_theme%': '<(use_default_render_theme)',
+    'enable_settings_app%': '<(enable_settings_app)',
+    'google_api_key%': '<(google_api_key)',
+    'google_default_client_id%': '<(google_default_client_id)',
+    'google_default_client_secret%': '<(google_default_client_secret)',
+    'enable_managed_users%': '<(enable_managed_users)',
+    'native_discardable_memory%': '<(native_discardable_memory)',
+    'native_memory_pressure_signals%': '<(native_memory_pressure_signals)',
+    'spdy_proxy_auth_property%': '<(spdy_proxy_auth_property)',
+    'spdy_proxy_auth_value%': '<(spdy_proxy_auth_value)',
+    'enable_mdns%' : '<(enable_mdns)',
+    'enable_service_discovery%' : '<(enable_service_discovery)',
+    'enable_wifi_bootstrapping%': '<(enable_wifi_bootstrapping)',
+    'enable_hangout_services_extension%' : '<(enable_hangout_services_extension)',
+    'v8_optimized_debug%': '<(v8_optimized_debug)',
+    'proprietary_codecs%': '<(proprietary_codecs)',
+    'use_goma%': '<(use_goma)',
+    'gomadir%': '<(gomadir)',
+    'video_hole%': '<(video_hole)',
+    'enable_load_completion_hacks%': '<(enable_load_completion_hacks)',
+    'support_pre_M6_history_database%': '<(support_pre_M6_history_database)',
+
+    # Whether or not we are building the Athena shell.
+    'use_athena%': '0',
+
+    # Use system protobuf instead of bundled one.
+    'use_system_protobuf%': 0,
+
+    # Use system yasm instead of bundled one.
+    'use_system_yasm%': 0,
+
+    # Use system ICU instead of bundled one.
+    'use_system_icu%' : 0,
+
+    # PIE is enabled by default; this is important for ASLR but we may need to
+    # be able to turn it off for various reasons.
+    'linux_disable_pie%': 0,
+
+    # The release channel that this build targets. This is used to restrict
+    # channel-specific build options, like which installer packages to create.
+    # The default is 'all', which does no channel-specific filtering.
+    'channel%': 'all',
+
+    # Override chromium_mac_pch and set it to 0 to suppress the use of
+    # precompiled headers on the Mac.  Prefix header injection may still be
+    # used, but prefix headers will not be precompiled.  This is useful when
+    # using distcc to distribute a build to compile slaves that don't
+    # share the same compiler executable as the system driving the compilation,
+    # because precompiled headers rely on pointers into a specific compiler
+    # executable's image.  Setting this to 0 is needed to use an experimental
+    # Linux-Mac cross compiler distcc farm.
+    'chromium_mac_pch%': 1,
+
+    # The default value for mac_strip in target_defaults. This cannot be
+    # set there, per the comment about variable% in a target_defaults.
+    'mac_strip_release%': 0,
+
+    # Set to 1 to enable java code coverage. Instruments classes during build
+    # to produce .ec files at runtime.
+    'emma_coverage%': 0,
+
+    # EMMA filter string consisting of a list of inclusion/exclusion patterns
+    # separated with whitespace and/or comma. Only has effect if
+    # 'emma_coverage=1'.
+    'emma_filter%': '',
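+    # An illustrative filter (patterns are examples only):
+    #   'emma_filter': 'org.chromium.*,-*Test*',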
+
+    # Set to 1 to enable running Android lint on java/class files.
+    'android_lint%': 1,
+
+    # Although base/allocator lets you select a heap library via an
+    # environment variable, the libcmt shim it uses sometimes gets in
+    # the way.  To disable it entirely and switch to the normal msvcrt, add
+    # e.g.
+    #  'win_use_allocator_shim': 0,
+    #  'win_release_RuntimeLibrary': 2
+    # to ~/.gyp/include.gypi, run gclient runhooks --force, and do a release
+    # build.
+    'win_use_allocator_shim%': 1, # 1 = shim allocator via libcmt; 0 = msvcrt
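+    # Put together, a sketch of the ~/.gyp/include.gypi snippet described
+    # above:
+    #   {
+    #     'variables': {
+    #       'win_use_allocator_shim': 0,
+    #       'win_release_RuntimeLibrary': 2,
+    #     },
+    #   }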
+
+    # TODO(bradnelson): eliminate this when possible.
+    # To allow local gyp files to prevent release.vsprops from being included.
+    # Yes(1) means include release.vsprops.
+    # Once all vsprops settings are migrated into gyp, this can go away.
+    'msvs_use_common_release%': 1,
+
+    # TODO(bradnelson): eliminate this when possible.
+    # To allow local gyp files to override additional linker options for msvs.
+    # Yes(1) means use the common linker options.
+    'msvs_use_common_linker_extras%': 1,
+
+    # TODO(sgk): eliminate this if possible.
+    # It would be nicer to support this via a setting in 'target_defaults'
+    # in chrome/app/locales/locales.gypi overriding the setting in the
+    # 'Debug' configuration in the 'target_defaults' dict below,
+    # but that doesn't work as we'd like.
+    'msvs_debug_link_incremental%': '2',
+
+    # Needed for some of the largest modules.
+    'msvs_debug_link_nonincremental%': '1',
+
+    # Turns on Use Library Dependency Inputs for linking chrome.dll on Windows
+    # to make incremental linking faster in debug builds.
+    'incremental_chrome_dll%': '0',
+
+    # Experimental setting to break chrome.dll into multiple pieces based on
+    # process type.
+    'chrome_multiple_dll%': '0',
+
+    # Experimental setting to optimize Chrome's DLLs with PGO.
+    'chrome_pgo_phase%': '0',
+
+    # Clang stuff.
+    'clang%': '<(clang)',
+    'host_clang%': '<(host_clang)',
+    'make_clang_dir%': '<(make_clang_dir)',
+
+    # Control which version of clang to use when building for iOS.  If set to
+    # '1', uses the version of clang that ships with Xcode.  If set to '0', uses
+    # the version of clang that ships with the Chromium source.  This variable
+    # is automatically set to '1' when using the Xcode generator.
+    'clang_xcode%': 0,
+
+    # These two variables can be set in GYP_DEFINES while running
+    # |gclient runhooks| to let clang run a plugin in every compilation.
+    # Only has an effect if 'clang=1' is in GYP_DEFINES as well.
+    # Example:
+    #     GYP_DEFINES='clang=1 clang_load=/abs/path/to/libPrintFunctionNames.dylib clang_add_plugin=print-fns' gclient runhooks
+
+    'clang_load%': '',
+    'clang_add_plugin%': '',
+
+    # Tell ld64 to write map files describing binary layout. Useful
+    # for looking at what contributes to binary size, e.g. with
+    # https://github.com/nico/bloat
+    'mac_write_linker_maps%': 0,
+
+    # The default type of gtest.
+    'gtest_target_type%': 'executable',
+
+    # Enable sampling based profiler.
+    # See http://google-perftools.googlecode.com/svn/trunk/doc/cpuprofile.html
+    'profiling%': '0',
+    # Profile without optimizing out stack frames when profiling==1.
+    'profiling_full_stack_frames%': '0',
+
+    # And if we want to dump symbols for Breakpad-enabled builds.
+    'linux_dump_symbols%': 0,
+    # And if we want to strip the binary after dumping symbols.
+    'linux_strip_binary%': 0,
+    # If we want stack unwind support for backtrace().
+    'debug_unwind_tables%': 1,
+    'release_unwind_tables%': 1,
+
+    # Override where to find binutils
+    'binutils_version%': 0,
+    'binutils_dir%': '',
+
+    # Enable TCMalloc.
+    # The default of 'use_allocator' is set to 'none' later if OS=='android'.
+    'use_allocator%': 'tcmalloc',
+
+    # Set to 1 to link against libgnome-keyring instead of using dlopen().
+    'linux_link_gnome_keyring%': 0,
+    # Set to 1 to link against gsettings APIs instead of using dlopen().
+    'linux_link_gsettings%': 0,
+
+    # Enable use of OpenMAX DL FFT routines.
+    'use_openmax_dl_fft%': '<(use_openmax_dl_fft)',
+
+    # Enable new NPDevice API.
+    'enable_new_npdevice_api%': 0,
+
+    # Enable EGLImage support in OpenMAX
+    'enable_eglimage%': 1,
+
+    # .gyp files or targets should set chromium_code to 1 if they build
+    # Chromium-specific code, as opposed to external code.  This variable is
+    # used to control such things as the set of warnings to enable, and
+    # whether warnings are treated as errors.
+    'chromium_code%': 0,
+
+    # Disable fatal linker warnings, similarly to how we make it possible
+    # to disable -Werror (e.g. for different toolchain versions).
+    'disable_fatal_linker_warnings%': 0,
+
+    'release_valgrind_build%': 0,
+
+    # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+    'enable_wexit_time_destructors%': 0,
+
+    # Build libpeerconnection as a static library by default.
+    'libpeer_target_type%': 'static_library',
+
+    # Set to 1 to compile with the OpenGL ES 2.0 conformance tests.
+    'internal_gles2_conform_tests%': 0,
+
+    # Set to 1 to compile with the Khronos GL-CTS conformance tests.
+    'internal_khronos_glcts_tests%': 0,
+
+    # Set to 1 to compile the filter fuzzer.
+    'internal_filter_fuzzer%': 0,
+
+    # NOTE: When these end up in the Mac bundle, we need to replace '-' with
+    # '_' so Cocoa is happy (http://crbug.com/20441).
+    'locales': [
+      'am', 'ar', 'bg', 'bn', 'ca', 'cs', 'da', 'de', 'el', 'en-GB',
+      'en-US', 'es-419', 'es', 'et', 'fa', 'fi', 'fil', 'fr', 'gu', 'he',
+      'hi', 'hr', 'hu', 'id', 'it', 'ja', 'kn', 'ko', 'lt', 'lv',
+      'ml', 'mr', 'ms', 'nb', 'nl', 'pl', 'pt-BR', 'pt-PT', 'ro', 'ru',
+      'sk', 'sl', 'sr', 'sv', 'sw', 'ta', 'te', 'th', 'tr', 'uk',
+      'vi', 'zh-CN', 'zh-TW',
+    ],
+
+    # Pseudo locales are special locales which are used for testing and
+    # debugging. They don't get copied to the final app. For more info,
+    # check out https://sites.google.com/a/chromium.org/dev/Home/fake-bidi
+    'pseudo_locales': [
+      'fake-bidi',
+    ],
+
+    'grit_defines': [],
+
+    # If debug_devtools is set to 1, JavaScript files for DevTools are
+    # stored as is and loaded from disk. Otherwise, a concatenated file
+    # is stored in resources.pak. It is still possible to load JS files
+    # from disk by passing --debug-devtools cmdline switch.
+    'debug_devtools%': 0,
+
+    # The Java Bridge is not compiled in by default.
+    'java_bridge%': 0,
+
+    # Code signing for iOS binaries.  The bots need to be able to disable this.
+    'chromium_ios_signing%': 1,
+
+    # This flag is only used when disable_nacl==0 and disables all those
+    # subcomponents which would require the installation of a native_client
+    # untrusted toolchain.
+    'disable_nacl_untrusted%': 0,
+
+    # Disable Dart by default.
+    'enable_dart%': 0,
+
+    # Copy out the setting of disable_nacl.
+    'disable_nacl%': '<(disable_nacl)',
+
+    # Portable Native Client is enabled by default.
+    'disable_pnacl%': 0,
+
+    # Whether to build the full debug version for the Debug configuration on
+    # Android. Compared to the full debug version, the default Debug
+    # configuration on Android has no full v8 debug and enables size
+    # optimization and linker gc sections, so that we can build a debug
+    # version with acceptable size and performance.
+    'android_full_debug%': 0,
+
+    # Sets the default version name and code for the Android app; by default
+    # we do a developer build.
+    'android_app_version_name%': 'Developer Build',
+    'android_app_version_code%': 0,
+
+    # Contains data about the attached devices for gyp_managed_install.
+    'build_device_config_path': '<(PRODUCT_DIR)/build_devices.cfg',
+
+    'sas_dll_exists': '<!pymod_do_main(dir_exists "<(sas_dll_path)")',
+    'wix_exists': '<!pymod_do_main(dir_exists "<(wix_path)")',
+
+    'windows_sdk_default_path': '<(DEPTH)/third_party/platformsdk_win8/files',
+    'directx_sdk_default_path': '<(DEPTH)/third_party/directxsdk/files',
+
+    # Whether we are using the rlz library or not.  Platforms like Android send
+    # rlz codes for searches but do not use the library.
+    'enable_rlz%': 0,
+
+    # Turns on the i18n support in V8.
+    'v8_enable_i18n_support': 1,
+
+    # Compile d8 for the host toolset.
+    'v8_toolset_for_d8': 'host',
+
+    # Use the chromium skia by default.
+    'use_system_skia%': '0',
+
+    # Use brlapi from brltty for braille display support.
+    'use_brlapi%': 0,
+
+    # Relative path to icu.gyp from this file.
+    'icu_gyp_path': '../third_party/icu/icu.gyp',
+
+    # IPC fuzzer is disabled by default.
+    'enable_ipc_fuzzer%': 0,
+
+    # Force disable libstdc++ debug mode.
+    'disable_glibcxx_debug%': 0,
+
+    # Set to 1 to compile with MSE support for MPEG2 TS
+    'enable_mpeg2ts_stream_parser%': 0,
+
+    # Support ChromeOS touchpad gestures with ozone.
+    'use_evdev_gestures%': 0,
+
+    # Default ozone platform (if no --ozone-platform flag).
+    'ozone_platform%': "",
+
+    # Ozone platforms to include in the build.
+    'ozone_platform_caca%': 0,
+    'ozone_platform_dri%': 0,
+    'ozone_platform_egltest%': 0,
+    'ozone_platform_gbm%': 0,
+    'ozone_platform_ozonex%': 0,
+    'ozone_platform_test%': 0,
+
+    # Chrome OS: whether to build ChromeVox from sources in the Chromium
+    # repository rather than using precompiled JavaScript in
+    # chrome/third_party/chromevox.  This is still experimental.
+    'use_migrated_chromevox%': 1,
+
+    'conditions': [
+      # Enable the Syzygy optimization step for the official builds.
+      ['OS=="win" and buildtype=="Official" and syzyasan!=1', {
+        'syzygy_optimize%': 1,
+      }, {
+        'syzygy_optimize%': 0,
+      }],
+      # Get the binutils version so we can enable debug fission where we can.
+      ['os_posix==1 and OS!="mac" and OS!="ios"', {
+        'conditions': [
+          # compiler_version doesn't work with clang
+          # TODO(mithro): Land https://codereview.chromium.org/199793014/ so
+          # compiler_version works with clang.
+          # TODO(glider): set clang to 1 earlier for ASan and TSan builds so
+          # that it takes effect here.
+          ['clang==0 and asan==0 and lsan==0 and tsan==0 and msan==0 and ubsan==0 and ubsan_vptr==0', {
+            'binutils_version%': '<!pymod_do_main(compiler_version target assembler)',
+          }],
+          # On Android we know the binutils version in the toolchain.
+          ['OS=="android"', {
+            'binutils_version%': 222,
+          }],
+          ['host_arch=="x64"', {
+            'binutils_dir%': 'third_party/binutils/Linux_x64/Release/bin',
+          }],
+          ['host_arch=="ia32"', {
+            'binutils_dir%': 'third_party/binutils/Linux_ia32/Release/bin',
+          }],
+          # Our version of binutils in third_party/binutils
+          ['linux_use_bundled_binutils==1', {
+            'binutils_version%': 224,
+          }],
+        ],
+      }, {
+        'binutils_version%': 0,
+      }],
+      # The version of GCC in use, set later for platforms that use GCC and
+      # have not explicitly chosen to build with clang. Currently, this means
+      # all platforms except Windows, Mac and iOS.
+      # TODO(glider): set clang to 1 earlier for ASan and TSan builds so that
+      # it takes effect here.
+      ['os_posix==1 and OS!="mac" and OS!="ios" and clang==0 and asan==0 and lsan==0 and tsan==0 and msan==0 and ubsan_vptr==0', {
+        'conditions': [
+          ['OS=="android" and android_webview_build==0', {
+            'host_gcc_version%': '<!pymod_do_main(compiler_version host compiler)',
+            # We directly set the gcc version since we know what we use.
+            'gcc_version%': 49,
+          }],
+          ['OS=="android" and android_webview_build==1', {
+            # Android WebView uses a hermetic clang toolchain for host builds.
+            'host_gcc_version%': 0,
+            # Android WebView uses the GCC toolchain from the Android build.
+            'gcc_version%': 48,
+          }],
+          ['OS!="android"', {
+            'host_gcc_version%': '<!pymod_do_main(compiler_version host compiler)',
+            'gcc_version%': '<!pymod_do_main(compiler_version target compiler)',
+          }],
+        ],
+      }, {
+        'host_gcc_version%': 0,
+        'gcc_version%': 0,
+      }],
+      ['OS=="win" and "<!pymod_do_main(dir_exists <(windows_sdk_default_path))"=="True"', {
+        'windows_sdk_path%': '<(windows_sdk_default_path)',
+      }, {
+        'windows_sdk_path%': 'C:/Program Files (x86)/Windows Kits/8.0',
+      }],
+      ['OS=="win" and "<!pymod_do_main(dir_exists <(directx_sdk_default_path))"=="True"', {
+        'directx_sdk_path%': '<(directx_sdk_default_path)',
+      }, {
+        'directx_sdk_path%': '$(DXSDK_DIR)',
+      }],
+      ['OS=="win"', {
+        'windows_driver_kit_path%': '$(WDK_DIR)',
+      }],
+      ['os_posix==1 and OS!="mac" and OS!="ios"', {
+        'conditions': [
+          ['target_arch=="mipsel" or target_arch=="mips64el"', {
+            'werror%': '',
+            'disable_nacl%': 1,
+            'nacl_untrusted_build%': 0,
+            'use_allocator%': 'none',
+          }],
+          ['OS=="linux" and target_arch=="mipsel"', {
+            'sysroot%': '<(sysroot)',
+            'CXX%': '<(CXX)',
+          }],
+          # All Chrome builds have breakpad symbols, but only process the
+          # symbols from official builds.
+          ['(branding=="Chrome" and buildtype=="Official")', {
+            'linux_dump_symbols%': 1,
+
+            # Omit unwind support in official release builds to save space. We
+            # can use breakpad for these builds.
+            'release_unwind_tables%': 0,
+
+            'conditions': [
+              # For official builds, use a 64-bit linker to avoid running out
+              # of address space. The buildbots should have a 64-bit kernel
+              # and a 64-bit libc installed.
+              ['host_arch=="ia32" and target_arch=="ia32"', {
+                'linux_use_bundled_gold%': '1',
+                'binutils_dir%': 'third_party/binutils/Linux_x64/Release/bin',
+              }],
+            ],
+          }],
+        ],
+      }],  # os_posix==1 and OS!="mac" and OS!="ios"
+      ['OS=="ios"', {
+        'disable_nacl%': 1,
+        'enable_background%': 0,
+        'icu_use_data_file_flag%': 1,
+        'enable_web_speech%': 0,
+        'use_system_libxml%': 1,
+        'use_system_sqlite%': 1,
+        'locales==': [
+          'ar', 'ca', 'cs', 'da', 'de', 'el', 'en-GB', 'en-US', 'es', 'es-MX',
+          'fi', 'fr', 'he', 'hi', 'hr', 'hu', 'id', 'it', 'ja', 'ko', 'ms',
+          'nb', 'nl', 'pl', 'pt', 'pt-PT', 'ro', 'ru', 'sk', 'sv', 'th', 'tr',
+          'uk', 'vi', 'zh-CN', 'zh-TW',
+        ],
+
+        # The Mac SDK is set for iOS builds and passed through to Mac
+        # sub-builds. This allows the Mac sub-build SDK in an iOS build to be
+        # overridden from the command line the same way it is for a Mac build.
+        'mac_sdk%': '<!(python <(DEPTH)/build/mac/find_sdk.py 10.6)',
+
+        # iOS SDK and deployment target support.  The |ios_sdk| value is left
+        # blank so that when it is set in the project files it will be the
+        # "current" iOS SDK.  Forcing a specific SDK even if it is "current"
+        # causes Xcode to spit out a warning for every single project file for
+        # not using the "current" SDK.
+        'ios_sdk%': '',
+        'ios_sdk_path%': '',
+        'ios_deployment_target%': '7.0',
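+        # Illustrative note: a specific SDK can still be forced from the
+        # command line, e.g. GYP_DEFINES="ios_sdk=iphonesimulator7.1"
+        # (hypothetical value), at the cost of the Xcode warning above.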
+
+        'conditions': [
+          # ios_product_name is set to the name of the .app bundle as it should
+          # appear on disk.
+          ['branding=="Chrome"', {
+            'ios_product_name%': 'Chrome',
+          }, { # else: branding!="Chrome"
+            'ios_product_name%': 'Chromium',
+          }],
+          ['branding=="Chrome" and buildtype=="Official"', {
+            'ios_breakpad%': 1,
+          }, { # else: branding!="Chrome" or buildtype!="Official"
+            'ios_breakpad%': 0,
+          }],
+        ],
+      }],  # OS=="ios"
+      ['OS=="android"', {
+        # Location of Android NDK.
+        'variables': {
+          'variables': {
+            # Unfortunately we have to use absolute paths to the SDK/NDK
+            # because they're passed to ant, which uses a different relative
+            # path than gyp does.
+            'android_ndk_root%': '<!(cd <(DEPTH) && pwd -P)/third_party/android_tools/ndk/',
+            'android_sdk_root%': '<!(cd <(DEPTH) && pwd -P)/third_party/android_tools/sdk/',
+            'android_host_arch%': '<!(uname -m)',
+            # Android API-level of the SDK used for compilation.
+            'android_sdk_version%': '20',
+            'android_sdk_build_tools_version%': '20.0.0',
+            'host_os%': "<!(uname -s | sed -e 's/Linux/linux/;s/Darwin/mac/')",
+          },
+          # Copy conditionally-set variables out one scope.
+          'android_ndk_root%': '<(android_ndk_root)',
+          'android_sdk_root%': '<(android_sdk_root)',
+          'android_sdk_version%': '<(android_sdk_version)',
+          'android_stlport_root': '<(android_ndk_root)/sources/cxx-stl/stlport',
+          'host_os%': '<(host_os)',
+
+          'android_sdk%': '<(android_sdk_root)/platforms/android-<(android_sdk_version)',
+          # Android SDK build tools (e.g. dx, aapt, aidl)
+          'android_sdk_tools%': '<(android_sdk_root)/build-tools/<(android_sdk_build_tools_version)',
+
+          # Android API level 14 is ICS (Android 4.0), which is the minimum
+          # platform requirement for Chrome on Android; we use it for native
+          # code compilation.
+          'conditions': [
+            ['target_arch == "ia32"', {
+              'android_app_abi%': 'x86',
+              'android_gdbserver%': '<(android_ndk_root)/prebuilt/android-x86/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-14/arch-x86',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_root)/toolchains/x86-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "x64"', {
+              'android_app_abi%': 'x86_64',
+              'android_gdbserver%': '<(android_ndk_root)/prebuilt/android-x86_64/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-L/arch-x86_64',
+              'android_ndk_lib_dir%': 'usr/lib64',
+              'android_toolchain%': '<(android_ndk_root)/toolchains/x86_64-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch=="arm"', {
+              'conditions': [
+                ['arm_version<7', {
+                  'android_app_abi%': 'armeabi',
+                }, {
+                  'android_app_abi%': 'armeabi-v7a',
+                }],
+              ],
+              'android_gdbserver%': '<(android_ndk_root)/prebuilt/android-arm/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-14/arch-arm',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_root)/toolchains/arm-linux-androideabi-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "arm64"', {
+              'android_app_abi%': 'arm64-v8a',
+              'android_gdbserver%': '<(android_ndk_root)/prebuilt/android-arm64/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-L/arch-arm64',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_root)/toolchains/aarch64-linux-android-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "mipsel"', {
+              'android_app_abi%': 'mips',
+              'android_gdbserver%': '<(android_ndk_root)/prebuilt/android-mips/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-14/arch-mips',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_root)/toolchains/mipsel-linux-android-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "mips64el"', {
+              'android_app_abi%': 'mips64',
+              'android_gdbserver%': '<(android_ndk_root)/prebuilt/android-mips64/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-L/arch-mips64',
+              'android_ndk_lib_dir%': 'usr/lib64',
+              'android_toolchain%': '<(android_ndk_root)/toolchains/mips64el-linux-android-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+              'gcc_version%': 49,
+            }],
+          ],
+        },
+        # Copy conditionally-set variables out one scope.
+        'android_app_abi%': '<(android_app_abi)',
+        'android_gdbserver%': '<(android_gdbserver)',
+        'android_ndk_root%': '<(android_ndk_root)',
+        'android_ndk_sysroot%': '<(android_ndk_sysroot)',
+        'android_sdk_root%': '<(android_sdk_root)',
+        'android_sdk_version%': '<(android_sdk_version)',
+        'android_toolchain%': '<(android_toolchain)',
+
+        'android_ndk_include': '<(android_ndk_sysroot)/usr/include',
+        'android_ndk_lib': '<(android_ndk_sysroot)/<(android_ndk_lib_dir)',
+        'android_sdk_tools%': '<(android_sdk_tools)',
+        'android_sdk%': '<(android_sdk)',
+        'android_sdk_jar%': '<(android_sdk)/android.jar',
+
+        'android_stlport_root': '<(android_stlport_root)',
+        'android_stlport_include': '<(android_stlport_root)/stlport',
+        'android_stlport_libs_dir': '<(android_stlport_root)/libs/<(android_app_abi)',
+        'host_os%': '<(host_os)',
+
+        # Location of the "objcopy" binary, used by both gyp and scripts.
+        'android_objcopy%' : '<!(/bin/echo -n <(android_toolchain)/*-objcopy)',
+
+        # Location of the "strip" binary, used by both gyp and scripts.
+        'android_strip%' : '<!(/bin/echo -n <(android_toolchain)/*-strip)',
+
+        # Location of the "readelf" binary.
+        'android_readelf%' : '<!(/bin/echo -n <(android_toolchain)/*-readelf)',
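+        # Illustrative note: the '/bin/echo -n' lines above rely on shell glob
+        # expansion, so for a hypothetical arm build android_readelf resolves
+        # to .../bin/arm-linux-androideabi-readelf inside the toolchain dir.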
+
+        # Determines whether we should optimize JNI generation at the cost of
+        # breaking assumptions in the build system (namely, that when inputs
+        # have changed the outputs should always change as well).  This is
+        # meant purely for developer builds, to avoid spurious re-linking of
+        # native files.
+        'optimize_jni_generation%': '<(optimize_jni_generation)',
+
+        # Android always uses OpenSSL.
+        'use_openssl%': 1,
+        'use_openssl_certs%': 1,
+
+        'proprietary_codecs%': '<(proprietary_codecs)',
+        'safe_browsing%': 1,
+        'enable_web_speech%': 0,
+        'java_bridge%': 1,
+        'build_ffmpegsumo%': 0,
+        'use_allocator%': 'none',
+
+        # Disable Native Client.
+        'disable_nacl%': 1,
+
+        # Android does not support background apps.
+        'enable_background%': 0,
+
+        # Sessions are stored separately on the Java side.
+        'enable_session_service%': 0,
+
+        'p2p_apis%' : 0,
+
+        'gtest_target_type%': 'shared_library',
+
+        # Uses system APIs for decoding audio and video.
+        'use_libffmpeg%': '0',
+
+        # When building as part of the Android system, use system libraries
+        # where possible to reduce ROM size.
+        'use_system_icu%': '<(android_webview_build)',
+        'use_system_stlport%': '<(android_webview_build)',
+
+        # Copy it out one scope.
+        'android_webview_build%': '<(android_webview_build)',
+
+        # Default android linker script for shared library exports.
+        'android_linker_script%': '<(SHARED_INTERMEDIATE_DIR)/android_exports.lst',
+      }],  # OS=="android"
+      ['embedded==1', {
+        'use_system_fontconfig%': 0,
+      }, {
+        'use_system_fontconfig%': 1,
+      }],
+      ['chromecast==1', {
+        'enable_mpeg2ts_stream_parser%': 1,
+        'ffmpeg_branding%': 'Chrome',
+        'ozone_platform_ozonex%': 1,
+        'use_playready%': 0,
+        'conditions': [
+          ['target_arch=="arm"', {
+            'arm_arch%': '',
+            'arm_tune%': 'cortex-a9',
+            'arm_thumb%': 1,
+            'video_hole%': 1,
+          }],
+        ],
+      }],
+      ['android_webview_build==1', {
+        # When building the WebView in the Android tree, jarjar will remap all
+        # the class names, so the JNI generator needs to know this.
+        'jni_generator_jarjar_file': '../android_webview/build/jarjar-rules.txt',
+      }],
+      ['OS=="linux" and target_arch!="mipsel"', {
+        # TODO(thakis): This is here to measure perf for a while.
+        'clang%': 1,
+      }],  # OS=="mac"
+      ['OS=="mac"', {
+        'conditions': [
+          # All Chrome builds have breakpad symbols, but only process the
+          # symbols from official builds.
+          ['(branding=="Chrome" and buildtype=="Official")', {
+            'mac_strip_release%': 1,
+          }],
+        ],
+      }],  # OS=="mac"
+      ['OS=="mac" or OS=="ios"', {
+        'clang%': 1,
+
+        'variables': {
+          # Mac OS X SDK and deployment target support.  The SDK identifies
+          # the version of the system headers that will be used, and
+          # corresponds to the MAC_OS_X_VERSION_MAX_ALLOWED compile-time
+          # macro.  "Maximum allowed" refers to the operating system version
+          # whose APIs are available in the headers.  The deployment target
+          # identifies the minimum system version that the built products are
+          # expected to function on.  It corresponds to the
+          # MAC_OS_X_VERSION_MIN_REQUIRED compile-time macro.  To ensure these
+          # macros are available, #include <AvailabilityMacros.h>.  Additional
+          # documentation on these macros is available at
+          # http://developer.apple.com/mac/library/technotes/tn2002/tn2064.html#SECTION3
+          # Chrome normally builds with the Mac OS X 10.6 SDK and sets the
+          # deployment target to 10.6.  Other projects, such as O3D, may
+          # override these defaults.
+
+          # Normally, mac_sdk_min is used to find an SDK that Xcode knows
+          # about that is at least the specified version. In official builds,
+          # the SDK must match mac_sdk_min exactly. If the SDK is installed
+          # someplace that Xcode doesn't know about, set mac_sdk_path to the
+          # path to the SDK; when set to a non-empty string, SDK detection
+          # based on mac_sdk_min will be bypassed entirely.
+          'mac_sdk_min%': '10.6',
+          'mac_sdk_path%': '',
+
+          'mac_deployment_target%': '10.6',
+        },
+
+        'mac_sdk_min': '<(mac_sdk_min)',
+        'mac_sdk_path': '<(mac_sdk_path)',
+        'mac_deployment_target': '<(mac_deployment_target)',
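+        # Illustrative note: an SDK installed someplace Xcode doesn't know
+        # about can be used via e.g.
+        #   GYP_DEFINES="mac_sdk_path=/opt/MacOSX10.6.sdk"
+        # (hypothetical path), bypassing the find_sdk.py detection.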
+
+        # Compile in Breakpad support by default so that it can be
+        # tested, even if it is not enabled by default at runtime.
+        'mac_breakpad_compiled_in%': 1,
+        'conditions': [
+          # mac_product_name is set to the name of the .app bundle as it should
+          # appear on disk.  This duplicates data from
+          # chrome/app/theme/chromium/BRANDING and
+          # chrome/app/theme/google_chrome/BRANDING, but is necessary to get
+          # these names into the build system.
+          ['branding=="Chrome"', {
+            'mac_product_name%': 'Google Chrome',
+          }, { # else: branding!="Chrome"
+            'mac_product_name%': 'Chromium',
+          }],
+
+          ['branding=="Chrome" and buildtype=="Official"', {
+            'mac_sdk%': '<!(python <(DEPTH)/build/mac/find_sdk.py --verify <(mac_sdk_min) --sdk_path=<(mac_sdk_path))',
+            # Enable uploading crash dumps.
+            'mac_breakpad_uploads%': 1,
+            # Enable dumping symbols at build time for use by Mac Breakpad.
+            'mac_breakpad%': 1,
+            # Enable Keystone auto-update support.
+            'mac_keystone%': 1,
+          }, { # else: branding!="Chrome" or buildtype!="Official"
+            'mac_sdk%': '<!(python <(DEPTH)/build/mac/find_sdk.py <(mac_sdk_min))',
+            'mac_breakpad_uploads%': 0,
+            'mac_breakpad%': 0,
+            'mac_keystone%': 0,
+          }],
+        ],
+      }],  # OS=="mac" or OS=="ios"
+      ['OS=="win"', {
+        'conditions': [
+          # This is the architecture convention used in WinSDK paths.
+          ['target_arch=="ia32"', {
+            'winsdk_arch%': 'x86',
+          }, {
+            'winsdk_arch%': '<(target_arch)',
+          }],
+          ['component=="shared_library"', {
+            'win_use_allocator_shim%': 0,
+          }, {
+            # Turn on the multiple-DLL build by default on Windows for
+            # static_library builds.
+            'chrome_multiple_dll%': 1,
+          }],
+          ['asan==1', {
+            'win_use_allocator_shim%': 0,
+          }],
+          ['component=="shared_library" and "<(GENERATOR)"=="ninja"', {
+            # Only enabled by default for ninja because it's buggy in VS.
+            # Not enabled for component=static_library because some targets
+            # are too large and the toolchain fails due to the size of the
+            # .obj files.
+            'incremental_chrome_dll%': 1,
+          }],
+          # Don't do incremental linking for large modules on 32-bit or when
+          # component=static_library as the toolchain fails due to the size of
+          # the .ilk files.
+          ['MSVS_OS_BITS==32 or component=="static_library"', {
+            'msvs_large_module_debug_link_mode%': '1',  # No
+          }, {
+            'msvs_large_module_debug_link_mode%': '2',  # Yes
+          }],
+          ['MSVS_VERSION=="2013e"', {
+            'msvs_express%': 1,
+            'secure_atl%': 0,
+          }, {
+            'msvs_express%': 0,
+            'secure_atl%': 1,
+          }],
+        ],
+        'nacl_win64_defines': [
+          # This flag is used to minimize dependencies when building
+          # Native Client loader for 64-bit Windows.
+          'NACL_WIN64',
+        ],
+      }],
+
+      ['os_posix==1 and chromeos==0 and OS!="android" and OS!="ios" and embedded==0', {
+        'use_cups%': 1,
+      }, {
+        'use_cups%': 0,
+      }],
+
+      ['enable_plugins==1 and (OS=="linux" or OS=="mac" or OS=="win")', {
+        'enable_pepper_cdms%': 1,
+      }, {
+        'enable_pepper_cdms%': 0,
+      }],
+
+      ['OS=="android"', {
+        'enable_browser_cdms%': 1,
+      }, {
+        'enable_browser_cdms%': 0,
+      }],
+
+      # Native Client glibc toolchain is enabled
+      # by default except on arm, mips and mips64.
+      ['target_arch=="arm" or target_arch=="mipsel" or target_arch=="mips64el"', {
+        'disable_glibc%': 1,
+      }, {
+        'disable_glibc%': 0,
+      }],
+
+      # Set the relative path from this file to the GYP file of the JPEG
+      # library used by Chromium.
+      ['use_system_libjpeg==1 or use_libjpeg_turbo==0', {
+        # Configuration for using the system libjpeg is here.
+        'libjpeg_gyp_path': '../third_party/libjpeg/libjpeg.gyp',
+      }, {
+        'libjpeg_gyp_path': '../third_party/libjpeg_turbo/libjpeg.gyp',
+      }],
+
+      # Options controlling the use of GConf (the classic GNOME configuration
+      # system) and GIO, which contains GSettings (the new GNOME config system).
+      ['chromeos==1 or embedded==1', {
+        'use_gconf%': 0,
+        'use_gio%': 0,
+      }, {
+        'use_gconf%': 1,
+        'use_gio%': 1,
+      }],
+
+      # Set up -D and -E flags passed into grit.
+      ['branding=="Chrome"', {
+        # TODO(mmoss) The .grd files look for _google_chrome, but for
+        # consistency they should look for google_chrome_build like C++.
+        'grit_defines': ['-D', '_google_chrome',
+                         '-E', 'CHROMIUM_BUILD=google_chrome'],
+      }, {
+        'grit_defines': ['-D', '_chromium',
+                         '-E', 'CHROMIUM_BUILD=chromium'],
+      }],
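+      # Illustrative note: grit_defines are passed through to every grit
+      # invocation, so a hypothetical Chromium-branded run includes roughly
+      #   ... -D _chromium -E CHROMIUM_BUILD=chromium ...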
+      ['chromeos==1', {
+        'grit_defines': ['-D', 'chromeos', '-D', 'scale_factors=2x'],
+      }],
+      ['desktop_linux==1', {
+        'grit_defines': ['-D', 'desktop_linux'],
+      }],
+      ['toolkit_views==1', {
+        'grit_defines': ['-D', 'toolkit_views'],
+      }],
+      ['use_aura==1', {
+        'grit_defines': ['-D', 'use_aura'],
+      }],
+      ['use_ash==1', {
+        'grit_defines': ['-D', 'use_ash'],
+      }],
+      ['use_nss==1', {
+        'grit_defines': ['-D', 'use_nss'],
+      }],
+      ['use_ozone==1', {
+        'grit_defines': ['-D', 'use_ozone'],
+      }],
+      ['image_loader_extension==1', {
+        'grit_defines': ['-D', 'image_loader_extension'],
+      }],
+      ['remoting==1', {
+        'grit_defines': ['-D', 'remoting'],
+      }],
+      ['use_titlecase_in_grd==1', {
+        'grit_defines': ['-D', 'use_titlecase'],
+      }],
+      ['use_third_party_translations==1', {
+        'grit_defines': ['-D', 'use_third_party_translations'],
+        'locales': [
+          'ast', 'bs', 'ca@valencia', 'en-AU', 'eo', 'eu', 'gl', 'hy', 'ia',
+          'ka', 'ku', 'kw', 'ms', 'ug'
+        ],
+      }],
+      ['OS=="android"', {
+        'grit_defines': ['-t', 'android',
+                         '-E', 'ANDROID_JAVA_TAGGED_ONLY=true'],
+      }],
+      ['OS=="mac" or OS=="ios"', {
+        'grit_defines': ['-D', 'scale_factors=2x'],
+      }],
+      ['OS == "ios"', {
+        'grit_defines': [
+          '-t', 'ios',
+          # iOS uses a whitelist to filter resources.
+          '-w', '<(DEPTH)/build/ios/grit_whitelist.txt'
+        ],
+
+        # Enable host builds when generating with ninja-ios.
+        'conditions': [
+          ['"<(GENERATOR)"=="ninja"', {
+            'host_os%': "mac",
+          }],
+
+          # TODO(sdefresne): Remove the target_subarch check once Apple has
+          # upstreamed the support for "arm64". http://crbug.com/341453
+          ['target_subarch!="arm32" or "<(GENERATOR)"=="xcode"', {
+            'clang_xcode%': 1,
+          }],
+        ],
+      }],
+      ['enable_extensions==1', {
+        'grit_defines': ['-D', 'enable_extensions'],
+      }],
+      ['enable_plugins!=0', {
+        'grit_defines': ['-D', 'enable_plugins'],
+      }],
+      ['enable_printing!=0', {
+        'grit_defines': ['-D', 'enable_printing'],
+      }],
+      ['enable_printing==1', {
+        'grit_defines': ['-D', 'enable_full_printing'],
+      }],
+      ['enable_themes==1', {
+        'grit_defines': ['-D', 'enable_themes'],
+      }],
+      ['enable_app_list==1', {
+        'grit_defines': ['-D', 'enable_app_list'],
+      }],
+      ['enable_settings_app==1', {
+        'grit_defines': ['-D', 'enable_settings_app'],
+      }],
+      ['enable_google_now==1', {
+        'grit_defines': ['-D', 'enable_google_now'],
+      }],
+      ['use_concatenated_impulse_responses==1', {
+        'grit_defines': ['-D', 'use_concatenated_impulse_responses'],
+      }],
+      ['enable_webrtc==1', {
+        'grit_defines': ['-D', 'enable_webrtc'],
+      }],
+      ['enable_hangout_services_extension==1', {
+        'grit_defines': ['-D', 'enable_hangout_services_extension'],
+      }],
+      ['enable_task_manager==1', {
+        'grit_defines': ['-D', 'enable_task_manager'],
+      }],
+      ['notifications==1', {
+        'grit_defines': ['-D', 'enable_notifications'],
+      }],
+      ['enable_wifi_bootstrapping==1', {
+        'grit_defines': ['-D', 'enable_wifi_bootstrapping'],
+      }],
+      ['enable_resource_whitelist_generation==1 and OS!="win"', {
+        'grit_rc_header_format': ['-h', '#define {textual_id} _Pragma("whitelisted_resource_{numeric_id}") {numeric_id}'],
+      }],
+      ['enable_resource_whitelist_generation==1 and OS=="win"', {
+        'grit_rc_header_format': ['-h', '#define {textual_id} __pragma(message("whitelisted_resource_{numeric_id}")) {numeric_id}'],
+      }],
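+      # Illustrative note: with the Windows format above, a hypothetical
+      # resource IDS_FOO with numeric id 123 would be emitted as
+      #   #define IDS_FOO __pragma(message("whitelisted_resource_123")) 123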
+      ['enable_mdns==1 or OS=="mac"', {
+        'grit_defines': ['-D', 'enable_service_discovery'],
+        'enable_service_discovery%': 1
+      }],
+      ['clang_use_chrome_plugins==1 and OS!="win"', {
+        'clang_chrome_plugins_flags': [
+          '<!@(<(DEPTH)/tools/clang/scripts/plugin_flags.sh)'
+        ],
+      }],
+      ['asan==1 or msan==1 or lsan==1 or tsan==1', {
+        'clang%': 1,
+        'use_allocator%': 'none',
+        'use_sanitizer_options%': 1,
+      }],
+      ['asan==1 and OS=="linux" and chromeos==0', {
+        'use_custom_libcxx%': 1,
+      }],
+      ['ubsan==1', {
+        'clang%': 1,
+      }],
+      ['ubsan_vptr==1', {
+        'clang%': 1,
+      }],
+      ['asan==1 and OS=="mac"', {
+        # TODO(glider): we do not strip ASan binaries until the dynamic ASan
+        # runtime is fully adopted. See http://crbug.com/242503.
+        'mac_strip_release': 0,
+      }],
+      ['tsan==1', {
+        'use_custom_libcxx%': 1,
+      }],
+      ['msan==1', {
+        # Use a just-built, MSan-instrumented libc++ instead of the system-wide
+        # libstdc++. This is required to avoid false positive reports whenever
+        # the C++ standard library is used.
+        'use_custom_libcxx%': 1,
+        # Running the V8-generated code on an ARM simulator is a powerful hack
+        # that allows the tool to see the memory accesses from JITted code.
+        # Without this flag, JS code causes false positive reports from MSan.
+        'v8_target_arch': 'arm64',
+      }],
+
+      ['OS=="linux" and clang_type_profiler==1', {
+        'clang%': 1,
+        'clang_use_chrome_plugins%': 0,
+        'conditions': [
+          ['host_arch=="x64"', {
+            'make_clang_dir%': 'third_party/llvm-allocated-type/Linux_x64',
+          }],
+          ['host_arch=="ia32"', {
+            # 32-bit Clang is unsupported.  It may not build.  Put your 32-bit
+            # Clang in this directory at your own risk if needed for some
+            # purpose (e.g. to compare 32-bit and 64-bit behavior like memory
+            # usage).  Any failure by this compiler should not close the tree.
+            'make_clang_dir%': 'third_party/llvm-allocated-type/Linux_ia32',
+          }],
+        ],
+      }],
+
+      ['OS=="win"', {
+        # The Clang plugins don't currently work on Windows.
+        # TODO(hans): One day, this will work. (crbug.com/82385)
+        'clang_use_chrome_plugins%': 0,
+      }],
+
+      # On valgrind bots, override the optimizer settings so we don't inline too
+      # much and make the stacks harder to figure out.
+      #
+      # TODO(rnk): Kill off variables that no one else uses and just implement
+      # them under a build_for_tool== condition.
+      ['build_for_tool=="memcheck" or build_for_tool=="tsan"', {
+        # gcc flags
+        'mac_debug_optimization': '1',
+        'mac_release_optimization': '1',
+        'release_optimize': '1',
+        'no_gc_sections': 1,
+        'debug_extra_cflags': '-g -fno-inline -fno-omit-frame-pointer '
+                              '-fno-builtin -fno-optimize-sibling-calls',
+        'release_extra_cflags': '-g -fno-inline -fno-omit-frame-pointer '
+                                '-fno-builtin -fno-optimize-sibling-calls',
+
+        # MSVS flags for TSan on Pin and Windows.
+        'win_debug_RuntimeChecks': '0',
+        'win_debug_disable_iterator_debugging': '1',
+        'win_debug_Optimization': '1',
+        'win_debug_InlineFunctionExpansion': '0',
+        'win_release_InlineFunctionExpansion': '0',
+        'win_release_OmitFramePointers': '0',
+
+        'use_allocator': 'tcmalloc',
+        'release_valgrind_build': 1,
+        'werror': '',
+        'component': 'static_library',
+        'use_system_zlib': 0,
+      }],
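+      # Illustrative note: a hypothetical valgrind bot picks these overrides
+      # up by configuring with GYP_DEFINES="build_for_tool=memcheck" before
+      # running gyp.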
+
+      # Build tweaks for DrMemory.
+      # TODO(rnk): Combine with tsan config to share the builder.
+      # http://crbug.com/108155
+      ['build_for_tool=="drmemory"', {
+        # These runtime checks force initialization of stack vars which blocks
+        # DrMemory's uninit detection.
+        'win_debug_RuntimeChecks': '0',
+        # Iterator debugging is slow.
+        'win_debug_disable_iterator_debugging': '1',
+        # Try to disable optimizations that mess up stacks in a release build.
+        # DrM-i#1054 (http://code.google.com/p/drmemory/issues/detail?id=1054)
+        # /O2 and /Ob0 (disable inline) cannot be used together because of a
+        # compiler bug, so we use /Ob1 instead.
+        'win_release_InlineFunctionExpansion': '1',
+        'win_release_OmitFramePointers': '0',
+        # Ditto for debug, to support bumping win_debug_Optimization.
+        'win_debug_InlineFunctionExpansion': 0,
+        'win_debug_OmitFramePointers': 0,
+        # Keep the code under #ifndef NVALGRIND.
+        'release_valgrind_build': 1,
+      }],
+
+      # Enable RLZ on Win, Mac, iOS and ChromeOS.
+      ['branding=="Chrome" and (OS=="win" or OS=="mac" or OS=="ios" or chromeos==1)', {
+        'enable_rlz%': 1,
+      }],
+
+      # Set default compiler flags depending on ARM version.
+      ['arm_version==6 and android_webview_build==0', {
+        'arm_arch%': 'armv6',
+        'arm_tune%': '',
+        'arm_fpu%': 'vfp',
+        'arm_float_abi%': 'softfp',
+        'arm_thumb%': 0,
+      }],
+      ['arm_version==7 and android_webview_build==0', {
+        'arm_arch%': 'armv7-a',
+        'arm_tune%': 'generic-armv7-a',
+        'conditions': [
+          ['arm_neon==1', {
+            'arm_fpu%': 'neon',
+          }, {
+            'arm_fpu%': 'vfpv3-d16',
+          }],
+        ],
+        # Change the default to hard once the armhf transition is complete.
+        'arm_float_abi%': 'softfp',
+        'arm_thumb%': 1,
+      }],
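+      # Illustrative note: assuming the usual mapping of arm_* variables to
+      # -m compiler flags elsewhere in the build, an armv7+neon configuration
+      # compiles with roughly:
+      #   -march=armv7-a -mtune=generic-armv7-a -mfpu=neon -mfloat-abi=softfp -mthumb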
+
+      ['android_webview_build==1', {
+        # The WebView build gets its cpu-specific flags from the Android build system.
+        'arm_arch%': '',
+        'arm_tune%': '',
+        'arm_fpu%': '',
+        'arm_float_abi%': '',
+        'arm_thumb%': 0,
+      }],
+
+      # Enable brlapi by default for chromeos.
+      [ 'chromeos==1', {
+        'use_brlapi%': 1,
+      }],
+
+      ['use_ozone==1 and ozone_auto_platforms==1', {
+        # Use test as the default platform.
+        'ozone_platform%': 'test',
+
+        # Build all platforms whose deps are in install-build-deps.sh.
+        # Only these platforms will be compile-tested by buildbots.
+        'ozone_platform_dri%': 1,
+        'ozone_platform_test%': 1,
+        'ozone_platform_egltest%': 1,
+      }],
+
+      ['desktop_linux==1 and use_aura==1 and use_x11==1', {
+        'use_clipboard_aurax11%': 1,
+      }],
+
+      ['OS=="win" and use_goma==1', {
+        # goma doesn't support pch yet.
+        'chromium_win_pch': 0,
+        # goma doesn't support PDB yet, so require win_z7=1 or fastbuild=1.
+        'conditions': [
+          ['win_z7==0 and fastbuild==0', {
+            'fastbuild': 1,
+          }],
+        ],
+      }],
+
+      ['OS=="win" and (clang==1 or asan==1)', {
+        'chromium_win_pch': 0,
+      }],
+
+      ['host_clang==1', {
+        'host_cc': '<(make_clang_dir)/bin/clang',
+        'host_cxx': '<(make_clang_dir)/bin/clang++',
+      }, {
+        'host_cc': '<!(which gcc)',
+        'host_cxx': '<!(which g++)',
+      }],
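+      # Illustrative note: with a hypothetical make_clang_dir of
+      # third_party/llvm-build/Release+Asserts, host_cc resolves to
+      # third_party/llvm-build/Release+Asserts/bin/clang.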
+
+      # The seccomp-bpf sandbox is only supported on five architectures
+      # currently.
+      # Do not disable seccomp_bpf anywhere without talking to
+      # security@chromium.org!
+      ['((OS=="linux" or OS=="android") and '
+           '(target_arch=="ia32" or target_arch=="x64" or '
+             'target_arch=="arm" or target_arch=="mipsel" or '
+             'target_arch=="arm64"))', {
+         'use_seccomp_bpf%': 1,
+      }, {
+         'use_seccomp_bpf%': 0,
+      }],
+    ],
+
+    # Older history files use fts2 instead of fts3.
+    'sqlite_enable_fts2%': '<(support_pre_M6_history_database)',
+
+    # The path to the ANGLE library.
+    'angle_path': '<(DEPTH)/third_party/angle',
+
+    # List of default apps to install in new profiles.  The first list contains
+    # the source files as found in svn.  The second list, used only for Linux,
+    # contains the destination location for each of the files.  When a crx
+    # is added or removed from the list, the chrome/browser/resources/
+    # default_apps/external_extensions.json file must also be updated.
+    'default_apps_list': [
+      'browser/resources/default_apps/external_extensions.json',
+      'browser/resources/default_apps/gmail.crx',
+      'browser/resources/default_apps/search.crx',
+      'browser/resources/default_apps/youtube.crx',
+      'browser/resources/default_apps/drive.crx',
+      'browser/resources/default_apps/docs.crx',
+    ],
+    'default_apps_list_linux_dest': [
+      '<(PRODUCT_DIR)/default_apps/external_extensions.json',
+      '<(PRODUCT_DIR)/default_apps/gmail.crx',
+      '<(PRODUCT_DIR)/default_apps/search.crx',
+      '<(PRODUCT_DIR)/default_apps/youtube.crx',
+      '<(PRODUCT_DIR)/default_apps/drive.crx',
+      '<(PRODUCT_DIR)/default_apps/docs.crx',
+    ],
+
+    # Whether to allow building of the GPU-related isolates.
+    'archive_gpu_tests%': 0,
+
+    # Whether to allow building of chromoting-related isolates.
+    'archive_chromoting_tests%': 0,
+  },
+  'target_defaults': {
+    'variables': {
+      # The condition that operates on chromium_code is in a target_conditions
+      # section, and will not have access to the default fallback value of
+      # chromium_code at the top of this file, or to the chromium_code
+      # variable placed at the root variables scope of .gyp files, because
+      # those variables are not set at target scope.  As a workaround,
+      # if chromium_code is not set at target scope, define it in target scope
+      # to contain whatever value it has during early variable expansion.
+      # That's enough to make it available during target conditional
+      # processing.
+      'chromium_code%': '<(chromium_code)',
+
+      'component%': '<(component)',
+
+      'chromecast%': '<(chromecast)',
+
+      # See http://msdn.microsoft.com/en-us/library/aa652360(VS.71).aspx
+      'win_release_Optimization%': '2', # 2 = /O2 (maximize speed)
+      'win_debug_Optimization%': '0',   # 0 = /Od
+
+      # See http://msdn.microsoft.com/en-us/library/2kxx5t2c(v=vs.80).aspx
+      # Tri-state: blank is default, 1 on, 0 off
+      'win_release_OmitFramePointers%': '0',
+      # Tri-state: blank is default, 1 on, 0 off
+      'win_debug_OmitFramePointers%': '',
+
+      # See http://msdn.microsoft.com/en-us/library/8wtf2dfz(VS.71).aspx
+      'win_debug_RuntimeChecks%': '3',    # 3 = all checks enabled, 0 = off
+
+      # See http://msdn.microsoft.com/en-us/library/47238hez(VS.71).aspx
+      'win_debug_InlineFunctionExpansion%': '',    # empty = default, 0 = off
+      'win_release_InlineFunctionExpansion%': '2', # 1 = only __inline, 2 = max
+
+      # VS inserts quite a lot of extra checks into algorithms like
+      # std::partial_sort in Debug builds, which makes them O(N^2)
+      # instead of O(N*logN). This is particularly slow under memory
+      # tools like ThreadSanitizer, so we want to be able to disable it.
+      # See http://msdn.microsoft.com/en-us/library/aa985982(v=VS.80).aspx
+      'win_debug_disable_iterator_debugging%': '0',
+
+      # An application manifest fragment to declare compatibility settings for
+      # 'executable' targets. Ignored for other target types.
+      'win_exe_compatibility_manifest%':
+          '<(DEPTH)\\build\\win\\compatibility.manifest',
+
+      'release_extra_cflags%': '',
+      'debug_extra_cflags%': '',
+
+      'release_valgrind_build%': '<(release_valgrind_build)',
+
+      # The non-qualified versions are widely assumed to be *nix-only.
+      'win_release_extra_cflags%': '',
+      'win_debug_extra_cflags%': '',
+
+      # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+      'enable_wexit_time_destructors%': '<(enable_wexit_time_destructors)',
+
+      # Only used by the Windows build for now.  Can be used to build into a
+      # different output directory, e.g., a build_dir_prefix of VS2010_ would
+      # output files in src/build/VS2010_{Debug,Release}.
+      'build_dir_prefix%': '',
+
+      # Targets are by default not nacl untrusted code.
+      'nacl_untrusted_build%': 0,
+
+      'pnacl_compile_flags': [
+        # pnacl uses the clang compiler so we need to suppress all the
+        # same warnings as we do for clang.
+        # TODO(sbc): Remove these if/when they are removed from the clang
+        # build.
+        '-Wno-unused-function',
+        '-Wno-char-subscripts',
+        '-Wno-c++11-extensions',
+        '-Wno-unnamed-type-template-args',
+      ],
+
+      'conditions': [
+        ['OS=="win" and component=="shared_library"', {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '2', # 2 = /MD (nondebug DLL)
+          'win_debug_RuntimeLibrary%': '3',   # 3 = /MDd (debug DLL)
+        }, {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '0', # 0 = /MT (nondebug static)
+          'win_debug_RuntimeLibrary%': '1',   # 1 = /MTd (debug static)
+        }],
+        ['OS=="ios"', {
+          # See http://gcc.gnu.org/onlinedocs/gcc-4.4.2/gcc/Optimize-Options.html
+          'mac_release_optimization%': 's', # Use -Os unless overridden
+          'mac_debug_optimization%': '0',   # Use -O0 unless overridden
+        }, {
+          # See http://gcc.gnu.org/onlinedocs/gcc-4.4.2/gcc/Optimize-Options.html
+          'mac_release_optimization%': '3', # Use -O3 unless overridden
+          'mac_debug_optimization%': '0',   # Use -O0 unless overridden
+        }],
+        ['OS=="android"', {
+          'host_os%': '<(host_os)',  # See comment above chromium_code.
+        }],
+      ],
+      'clang_warning_flags': [
+        '-Wheader-hygiene',
+
+        # Don't die on dtoa code that uses a char as an array index.
+        # This is required solely for base/third_party/dmg_fp/dtoa.cc.
+        '-Wno-char-subscripts',
+
+        # TODO(thakis): This used to be implied by -Wno-unused-function,
+        # which we no longer use. Check if it makes sense to remove
+        # this as well. http://crbug.com/316352
+        '-Wno-unneeded-internal-declaration',
+
+        # Warns on switches on enums that cover all enum values but
+        # also contain a default: branch. Chrome is full of that.
+        '-Wno-covered-switch-default',
+
+        # Warns when a const char[] is converted to bool.
+        '-Wstring-conversion',
+
+        # C++11-related flags:
+
+        # This warns on using ints as initializers for floats in
+        # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
+        # which happens in several places in chrome code. Not sure if
+        # this is worth fixing.
+        '-Wno-c++11-narrowing',
+
+        # Clang considers the `register` keyword as deprecated, but e.g.
+        # code generated by flex (used in angle) contains that keyword.
+        # http://crbug.com/255186
+        '-Wno-deprecated-register',
+
+        # TODO(hans): Clean this up. Or disable with finer granularity.
+        '-Wno-unused-local-typedef',
+      ],
+    },
+    'includes': [ 'set_clang_warning_flags.gypi', ],
+    'defines': [
+      # Don't use deprecated V8 APIs anywhere.
+      'V8_DEPRECATION_WARNINGS',
+    ],
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ],
+    'conditions': [
+      ['(OS=="mac" or OS=="ios") and asan==1', {
+        'dependencies': [
+          '<(DEPTH)/build/mac/asan.gyp:asan_dynamic_runtime',
+        ],
+      }],
+      ['OS=="win" and asan==1 and component=="shared_library"', {
+        'dependencies': [
+          '<(DEPTH)/build/win/asan.gyp:asan_dynamic_runtime',
+        ],
+      }],
+      ['OS=="linux" and use_allocator!="none" and clang_type_profiler==1', {
+        'cflags_cc!': ['-fno-rtti'],
+        'cflags_cc+': [
+          '-frtti',
+          '-gline-tables-only',
+          '-fintercept-allocation-functions',
+        ],
+        'defines': ['TYPE_PROFILING'],
+        'dependencies': [
+          '<(DEPTH)/base/allocator/allocator.gyp:type_profiler',
+        ],
+      }],
+      ['branding=="Chrome"', {
+        'defines': ['GOOGLE_CHROME_BUILD'],
+      }, {  # else: branding!="Chrome"
+        'defines': ['CHROMIUM_BUILD'],
+      }],
+      ['OS=="mac" and component=="shared_library"', {
+        'xcode_settings': {
+          'DYLIB_INSTALL_NAME_BASE': '@rpath',
+          'LD_RUNPATH_SEARCH_PATHS': [
+            # For unbundled binaries.
+            '@loader_path/.',
+            # For bundled binaries, to get back from Binary.app/Contents/MacOS.
+            '@loader_path/../../..',
+          ],
+        },
+      }],
+      ['clang==1 and OS!="win"', {
+        # This is here so that all files get recompiled after a clang roll and
+        # when turning clang on or off.
+        # (defines are passed via the command line, and build systems rebuild
+        # things when their command line changes). Nothing should ever read
+        # this define.
+        'defines': ['CR_CLANG_REVISION=<!(<(DEPTH)/tools/clang/scripts/update.sh --print-revision)'],
+      }],
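+      # Illustrative note: the define expands at gyp time to something like
+      # CR_CLANG_REVISION=123456 (hypothetical revision), so a clang roll
+      # changes every compile command line and triggers a full rebuild.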
+      ['enable_rlz==1', {
+        'defines': ['ENABLE_RLZ'],
+      }],
+      ['component=="shared_library"', {
+        'defines': ['COMPONENT_BUILD'],
+      }],
+      ['toolkit_views==1', {
+        'defines': ['TOOLKIT_VIEWS=1'],
+      }],
+      ['ui_compositor_image_transport==1', {
+        'defines': ['UI_COMPOSITOR_IMAGE_TRANSPORT'],
+      }],
+      ['use_aura==1', {
+        'defines': ['USE_AURA=1'],
+      }],
+      ['use_ash==1', {
+        'defines': ['USE_ASH=1'],
+      }],
+      ['use_pango==1', {
+        'defines': ['USE_PANGO=1'],
+      }],
+      ['use_cairo==1', {
+        'defines': ['USE_CAIRO=1'],
+      }],
+      ['use_cras==1', {
+        'defines': ['USE_CRAS=1'],
+      }],
+      ['use_glib==1', {
+        'defines': ['USE_GLIB=1'],
+      }],
+      ['use_ozone==1', {
+        'defines': ['USE_OZONE=1'],
+      }],
+      ['use_default_render_theme==1', {
+        'defines': ['USE_DEFAULT_RENDER_THEME=1'],
+      }],
+      ['use_libjpeg_turbo==1', {
+        'defines': ['USE_LIBJPEG_TURBO=1'],
+      }],
+      ['use_x11==1', {
+        'defines': ['USE_X11=1'],
+      }],
+      ['use_clipboard_aurax11==1', {
+        'defines': ['USE_CLIPBOARD_AURAX11=1'],
+      }],
+      ['enable_one_click_signin==1', {
+        'defines': ['ENABLE_ONE_CLICK_SIGNIN'],
+      }],
+      ['enable_pre_sync_backup==1', {
+        'defines': ['ENABLE_PRE_SYNC_BACKUP'],
+      }],
+      ['use_xi2_mt!=0 and use_x11==1', {
+        'defines': ['USE_XI2_MT=<(use_xi2_mt)'],
+      }],
+      ['image_loader_extension==1', {
+        'defines': ['IMAGE_LOADER_EXTENSION=1'],
+      }],
+      ['profiling==1', {
+        'defines': ['ENABLE_PROFILING=1'],
+      }],
+      ['remoting==1', {
+        'defines': ['ENABLE_REMOTING=1'],
+      }],
+      ['enable_webrtc==1', {
+        'defines': ['ENABLE_WEBRTC=1'],
+      }],
+      ['proprietary_codecs==1', {
+        'defines': ['USE_PROPRIETARY_CODECS'],
+        'conditions': [
+          ['enable_mpeg2ts_stream_parser==1', {
+            'defines': ['ENABLE_MPEG2TS_STREAM_PARSER'],
+          }],
+        ],
+      }],
+      ['enable_viewport==1', {
+        'defines': ['ENABLE_VIEWPORT'],
+      }],
+      ['enable_pepper_cdms==1', {
+        'defines': ['ENABLE_PEPPER_CDMS'],
+      }],
+      ['enable_browser_cdms==1', {
+        'defines': ['ENABLE_BROWSER_CDMS'],
+      }],
+      ['configuration_policy==1', {
+        'defines': ['ENABLE_CONFIGURATION_POLICY'],
+      }],
+      ['notifications==1', {
+        'defines': ['ENABLE_NOTIFICATIONS'],
+      }],
+      ['enable_hidpi==1', {
+        'defines': ['ENABLE_HIDPI=1'],
+      }],
+      ['native_discardable_memory==1', {
+        'defines': ['DISCARDABLE_MEMORY_ALWAYS_SUPPORTED_NATIVELY'],
+      }],
+      ['native_memory_pressure_signals==1', {
+        'defines': ['SYSTEM_NATIVELY_SIGNALS_MEMORY_PRESSURE'],
+      }],
+      ['use_udev==1', {
+        'defines': ['USE_UDEV'],
+      }],
+      ['fastbuild!=0', {
+        'xcode_settings': {
+          'GCC_GENERATE_DEBUGGING_SYMBOLS': 'NO',
+        },
+        'conditions': [
+          ['clang==1 and asan==0 and msan==0 and tsan==0 and ubsan_vptr==0', {
+            # Clang creates chubby debug information, which makes linking very
+            # slow. For now, don't create debug information with clang.  See
+            # http://crbug.com/70000
+            'conditions': [
+              ['OS=="linux"', {
+                'variables': {
+                  'debug_extra_cflags': '-g0',
+                },
+              }],
+              # Android builds symbols in release by default; disable them.
+              ['OS=="android"', {
+                'variables': {
+                  'debug_extra_cflags': '-g0',
+                  'release_extra_cflags': '-g0',
+                },
+              }],
+            ],
+          }, { # else clang!=1
+            'conditions': [
+              ['OS=="win" and fastbuild==2', {
+                # Completely disable debug information.
+                'msvs_settings': {
+                  'VCLinkerTool': {
+                    'GenerateDebugInformation': 'false',
+                  },
+                  'VCCLCompilerTool': {
+                    'DebugInformationFormat': '0',
+                  },
+                },
+              }],
+              ['OS=="win" and fastbuild==1', {
+                'msvs_settings': {
+                  'VCLinkerTool': {
+                    # This tells the linker to generate .pdbs, so that
+                    # we can get meaningful stack traces.
+                    'GenerateDebugInformation': 'true',
+                  },
+                  'VCCLCompilerTool': {
+                    # No debug info to be generated by compiler.
+                    'DebugInformationFormat': '0',
+                  },
+                },
+              }],
+              ['OS=="linux" and fastbuild==2', {
+                'variables': {
+                  'debug_extra_cflags': '-g0',
+                },
+              }],
+              ['OS=="linux" and fastbuild==1', {
+                'variables': {
+                  'debug_extra_cflags': '-g1',
+                },
+              }],
+              ['OS=="android" and fastbuild==2', {
+                'variables': {
+                  'debug_extra_cflags': '-g0',
+                  'release_extra_cflags': '-g0',
+                },
+              }],
+              ['OS=="android" and fastbuild==1', {
+                'variables': {
+                  'debug_extra_cflags': '-g1',
+                  'release_extra_cflags': '-g1',
+                },
+              }],
+            ],
+          }], # clang!=1
+        ],
+      }],  # fastbuild!=0
+      ['dont_embed_build_metadata==1', {
+        'defines': [
+          'DONT_EMBED_BUILD_METADATA',
+        ],
+      }],  # dont_embed_build_metadata==1
+      ['dcheck_always_on!=0', {
+        'defines': ['DCHECK_ALWAYS_ON=1'],
+      }],  # dcheck_always_on!=0
+      ['tracing_like_official_build!=0', {
+        'defines': ['TRACING_IS_OFFICIAL_BUILD=1'],
+      }],  # tracing_like_official_build!=0
+      ['win_use_allocator_shim==0', {
+        'conditions': [
+          ['OS=="win"', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+        ],
+      }],
+      ['enable_eglimage==1', {
+        'defines': [
+          'ENABLE_EGLIMAGE=1',
+        ],
+      }],
+      ['asan==1', {
+        'defines': [
+          'ADDRESS_SANITIZER',
+          'MEMORY_TOOL_REPLACES_ALLOCATOR',
+          'MEMORY_SANITIZER_INITIAL_SIZE',
+        ],
+      }],
+      ['syzyasan==1', {
+        # SyzyAsan needs /PROFILE turned on to produce appropriate pdbs.
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'Profile': 'true',
+          },
+        },
+        'defines': [
+            'SYZYASAN',
+            'MEMORY_TOOL_REPLACES_ALLOCATOR',
+            'MEMORY_SANITIZER_INITIAL_SIZE',
+        ],
+      }],
+      ['OS=="win"', {
+        'defines': [
+          '__STD_C',
+          '_CRT_SECURE_NO_DEPRECATE',
+          '_SCL_SECURE_NO_DEPRECATE',
+          # This define is required to pull in the new Win8 interfaces from
+          # system headers like ShObjIdl.h.
+          'NTDDI_VERSION=0x06020000',
+          # This is required for ATL to use XP-safe versions of its functions.
+          '_USING_V110_SDK71_',
+        ],
+        'include_dirs': [
+          '<(DEPTH)/third_party/wtl/include',
+        ],
+        'conditions': [
+          ['win_z7!=0', {
+            'msvs_settings': {
+              # Generate debug info when win_z7=1, even if fastbuild=1 (which
+              # would otherwise set GenerateDebugInformation to false).
+              'VCLinkerTool': {
+                'GenerateDebugInformation': 'true',
+              },
+              'VCCLCompilerTool': {
+                'DebugInformationFormat': '1',
+              }
+            }
+          }],
+        ],  # win_z7!=0
+      }],  # OS==win
+      ['chromecast==1', {
+        'defines': [
+          'LOG_DISABLED=0',
+        ],
+        'conditions': [
+          ['target_arch=="arm"', {
+            'defines': [
+              # TODO(lcwu): Work around an error when building Chromium
+              # with gcc-4.5.3 (e.g. v8/src/platform-linux.cc). Remove
+              # this define once the toolchain is updated.
+              # See crbug.com/388933.
+              '__SOFTFP',
+            ],
+          }],
+          ['use_playready==1', {
+            'defines': [
+              'PLAYREADY_CDM_AVAILABLE',
+            ],
+          }],
+        ],
+      }],
+      ['enable_task_manager==1', {
+        'defines': [
+          'ENABLE_TASK_MANAGER=1',
+        ],
+      }],
+      ['enable_extensions==1', {
+        'defines': [
+          'ENABLE_EXTENSIONS=1',
+        ],
+      }],
+      ['OS=="win" and branding=="Chrome"', {
+        'defines': ['ENABLE_SWIFTSHADER'],
+      }],
+      ['enable_dart==1', {
+        'defines': ['WEBKIT_USING_DART=1'],
+      }],
+      ['enable_plugin_installation==1', {
+        'defines': ['ENABLE_PLUGIN_INSTALLATION=1'],
+      }],
+      ['enable_plugins==1', {
+        'defines': ['ENABLE_PLUGINS=1'],
+      }],
+      ['enable_session_service==1', {
+        'defines': ['ENABLE_SESSION_SERVICE=1'],
+      }],
+      ['enable_themes==1', {
+        'defines': ['ENABLE_THEMES=1'],
+      }],
+      ['enable_autofill_dialog==1', {
+        'defines': ['ENABLE_AUTOFILL_DIALOG=1'],
+      }],
+      ['enable_prod_wallet_service==1', {
+        'defines': ['ENABLE_PROD_WALLET_SERVICE=1'],
+      }],
+      ['enable_background==1', {
+        'defines': ['ENABLE_BACKGROUND=1'],
+      }],
+      ['enable_google_now==1', {
+        'defines': ['ENABLE_GOOGLE_NOW=1'],
+      }],
+      ['cld_version!=0', {
+        'defines': ['CLD_VERSION=<(cld_version)'],
+      }],
+      ['cld_version==2', {
+        # This is used to populate the "CLD Data Source" field in:
+        # chrome://translate-internals
+        'defines': ['CLD2_DATA_SOURCE=<(cld2_data_source)'],
+      }],
+      ['enable_printing==1', {
+        'defines': ['ENABLE_FULL_PRINTING=1', 'ENABLE_PRINTING=1'],
+      }],
+      ['enable_printing==2', {
+        'defines': ['ENABLE_PRINTING=1'],
+      }],
+      ['enable_spellcheck==1', {
+        'defines': ['ENABLE_SPELLCHECK=1'],
+      }],
+      ['enable_captive_portal_detection==1', {
+        'defines': ['ENABLE_CAPTIVE_PORTAL_DETECTION=1'],
+      }],
+      ['enable_app_list==1', {
+        'defines': ['ENABLE_APP_LIST=1'],
+      }],
+      ['enable_settings_app==1', {
+        'defines': ['ENABLE_SETTINGS_APP=1'],
+      }],
+      ['disable_file_support==1', {
+        'defines': ['DISABLE_FILE_SUPPORT=1'],
+      }],
+      ['disable_ftp_support==1', {
+        'defines': ['DISABLE_FTP_SUPPORT=1'],
+      }],
+      ['use_icu_alternatives_on_android==1', {
+        'defines': ['USE_ICU_ALTERNATIVES_ON_ANDROID=1'],
+      }],
+      ['enable_managed_users==1', {
+        'defines': ['ENABLE_MANAGED_USERS=1'],
+      }],
+      ['spdy_proxy_auth_property != ""', {
+        'defines': ['SPDY_PROXY_AUTH_PROPERTY="<(spdy_proxy_auth_property)"'],
+      }],
+      ['spdy_proxy_auth_value != ""', {
+        'defines': ['SPDY_PROXY_AUTH_VALUE="<(spdy_proxy_auth_value)"'],
+      }],
+      ['enable_mdns==1', {
+        'defines': ['ENABLE_MDNS=1'],
+      }],
+      ['enable_service_discovery==1', {
+        'defines' : [ 'ENABLE_SERVICE_DISCOVERY=1' ],
+      }],
+      ['enable_wifi_bootstrapping==1', {
+        'defines' : [ 'ENABLE_WIFI_BOOTSTRAPPING=1' ],
+      }],
+      ['enable_hangout_services_extension==1', {
+        'defines': ['ENABLE_HANGOUT_SERVICES_EXTENSION=1'],
+      }],
+      ['enable_ipc_fuzzer==1', {
+        'defines': ['ENABLE_IPC_FUZZER=1'],
+      }],
+      ['video_hole==1', {
+        'defines': ['VIDEO_HOLE=1'],
+      }],
+      ['enable_load_completion_hacks==1', {
+        'defines': ['ENABLE_LOAD_COMPLETION_HACKS=1'],
+      }],
+    ],  # conditions for 'target_defaults'
+    'target_conditions': [
+      ['<(use_openssl)==1', {
+        'defines': ['USE_OPENSSL=1'],
+      }],
+      ['<(use_openssl_certs)==1', {
+        'defines': ['USE_OPENSSL_CERTS=1'],
+      }],
+      ['>(nacl_untrusted_build)==1', {
+        'defines': [
+          'USE_OPENSSL=1',
+          'USE_OPENSSL_CERTS=1',
+        ],
+      }],
+      ['<(use_nss)==1 and >(nacl_untrusted_build)==0', {
+        'defines': ['USE_NSS=1'],
+      }],
+      ['<(chromeos)==1 and >(nacl_untrusted_build)==0', {
+        'defines': ['OS_CHROMEOS=1'],
+      }],
+      ['enable_wexit_time_destructors==1 and OS!="win"', {
+        # TODO: Enable on Windows too, http://crbug.com/404525
+        'variables': { 'clang_warning_flags': ['-Wexit-time-destructors']},
+      }],
+      ['chromium_code==0', {
+        'conditions': [
+          [ 'os_posix==1 and OS!="mac" and OS!="ios"', {
+            # We don't want to get warnings from third-party code,
+            # so remove any existing warning-enabling flags like -Wall.
+            'cflags!': [
+              '-Wall',
+              '-Wextra',
+            ],
+            'cflags_cc': [
+              # Don't warn about hash_map in third-party code.
+              '-Wno-deprecated',
+            ],
+            'cflags': [
+              # Don't warn about printf format problems.
+              # This is off by default in gcc but on in Ubuntu's gcc(!).
+              '-Wno-format',
+            ],
+            'cflags_cc!': [
+              # Necessary because llvm.org/PR10448 is WONTFIX (crbug.com/90453).
+              '-Wsign-compare',
+            ]
+          }],
+          # TODO: Fix all warnings on chromeos too.
+          [ 'os_posix==1 and OS!="mac" and OS!="ios" and (clang!=1 or chromeos==1)', {
+            'cflags!': [
+              '-Werror',
+            ],
+          }],
+          [ 'os_posix==1 and os_bsd!=1 and OS!="mac" and OS!="android"', {
+            'cflags': [
+              # Don't warn about ignoring the return value from e.g. close().
+              # This is off by default in some gccs but on by default in others.
+              # BSD systems do not support this option, since they are usually
+              # using gcc 4.2.1, which does not have this flag yet.
+              '-Wno-unused-result',
+            ],
+          }],
+          [ 'OS=="win"', {
+            'defines': [
+              '_CRT_SECURE_NO_DEPRECATE',
+              '_CRT_NONSTDC_NO_WARNINGS',
+              '_CRT_NONSTDC_NO_DEPRECATE',
+              '_SCL_SECURE_NO_DEPRECATE',
+            ],
+            'msvs_disabled_warnings': [4800],
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'WarningLevel': '3',
+                'WarnAsError': 'true',
+                'Detect64BitPortabilityProblems': 'false',
+              },
+            },
+            'conditions': [
+              ['buildtype=="Official"', {
+                'msvs_settings': {
+                  'VCCLCompilerTool': { 'WarnAsError': 'false' },
+                }
+              }],
+              ['clang==1', {
+                'msvs_settings': {
+                  'VCCLCompilerTool': { 'WarnAsError': 'false' },
+                }
+              }],
+              [ 'component=="shared_library"', {
+                # TODO(darin): Unfortunately, some third_party code depends
+                # on base.
+                'msvs_disabled_warnings': [
+                  4251,  # class 'std::xx' needs to have dll-interface.
+                ],
+              }],
+            ],
+          }],
+
+          [ 'OS=="mac" or OS=="ios"', {
+            'xcode_settings': {
+              'WARNING_CFLAGS!': ['-Wall', '-Wextra'],
+            },
+            'conditions': [
+              ['buildtype=="Official"', {
+                'xcode_settings': {
+                  'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO',    # -Werror
+                },
+              }],
+            ],
+          }],
+          [ 'OS=="ios"', {
+            'xcode_settings': {
+              # TODO(ios): Fix remaining warnings in third-party code, then
+              # remove this; the Mac cleanup didn't get everything that's
+              # flagged in an iOS build.
+              'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO',
+              'RUN_CLANG_STATIC_ANALYZER': 'NO',
+              # Several internal ios directories generate numerous warnings for
+              # -Wobjc-missing-property-synthesis.
+              'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
+            },
+          }],
+        ],
+      }, {
+        'includes': [
+           # Rules for excluding e.g. foo_win.cc from the build on non-Windows.
+          'filename_rules.gypi',
+        ],
+        # In Chromium code, we define __STDC_foo_MACROS in order to get the
+        # C99 macros on Mac and Linux.
+        'defines': [
+          '__STDC_CONSTANT_MACROS',
+          '__STDC_FORMAT_MACROS',
+        ],
+        'conditions': [
+          ['OS=="win"', {
+            # Turn on warnings for signed/unsigned mismatch in chromium code.
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': ['/we4389'],
+              },
+            },
+          }],
+          ['OS=="win" and component=="shared_library"', {
+            'msvs_disabled_warnings': [
+              4251,  # class 'std::xx' needs to have dll-interface.
+            ],
+          }],
+        ],
+      }],
+    ],  # target_conditions for 'target_defaults'
+    'default_configuration': 'Debug',
+    'configurations': {
+      # VCLinkerTool LinkIncremental values below:
+      #   0 == default
+      #   1 == /INCREMENTAL:NO
+      #   2 == /INCREMENTAL
+      # Debug links incremental, Release does not.
+      #
+      # Abstract base configurations to cover common attributes.
+      #
+      'Common_Base': {
+        'abstract': 1,
+        'msvs_configuration_attributes': {
+          'OutputDirectory': '<(DEPTH)\\build\\<(build_dir_prefix)$(ConfigurationName)',
+          'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
+          'CharacterSet': '1',
+        },
+        # Add the default import libs.
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'AdditionalDependencies': [
+              'kernel32.lib',
+              'gdi32.lib',
+              'winspool.lib',
+              'comdlg32.lib',
+              'advapi32.lib',
+              'shell32.lib',
+              'ole32.lib',
+              'oleaut32.lib',
+              'user32.lib',
+              'uuid.lib',
+              'odbc32.lib',
+              'odbccp32.lib',
+              'delayimp.lib',
+              'credui.lib',
+              'netapi32.lib',
+            ],
+          },
+        },
+      },
+      'x86_Base': {
+        'abstract': 1,
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'MinimumRequiredVersion': '5.01',  # XP.
+            'TargetMachine': '1',
+          },
+          'VCLibrarianTool': {
+            'TargetMachine': '1',
+          },
+        },
+        'msvs_configuration_platform': 'Win32',
+      },
+      'x64_Base': {
+        'abstract': 1,
+        'msvs_configuration_platform': 'x64',
+        'msvs_settings': {
+          'VCLinkerTool': {
+            # Make sure to understand http://crbug.com/361720 if you want to
+            # increase this.
+            'MinimumRequiredVersion': '5.02',  # Server 2003.
+            'TargetMachine': '17', # x64
+            'AdditionalLibraryDirectories!':
+              ['<(windows_sdk_path)/Lib/win8/um/x86'],
+            'AdditionalLibraryDirectories':
+              ['<(windows_sdk_path)/Lib/win8/um/x64'],
+            # olepro32.lib doesn't exist in the x64 SDK; oleaut32 should be
+            # used in any case.
+            'IgnoreDefaultLibraryNames': [ 'olepro32.lib' ],
+          },
+          'VCLibrarianTool': {
+            'AdditionalLibraryDirectories!':
+              ['<(windows_sdk_path)/Lib/win8/um/x86'],
+            'AdditionalLibraryDirectories':
+              ['<(windows_sdk_path)/Lib/win8/um/x64'],
+            'TargetMachine': '17', # x64
+          },
+        },
+      },
+      'Debug_Base': {
+        'abstract': 1,
+        'defines': [
+          'DYNAMIC_ANNOTATIONS_ENABLED=1',
+          'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+        ],
+        'xcode_settings': {
+          'GCC_OPTIMIZATION_LEVEL': '<(mac_debug_optimization)',
+          'OTHER_CFLAGS': [
+            '<@(debug_extra_cflags)',
+          ],
+        },
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'Optimization': '<(win_debug_Optimization)',
+            'PreprocessorDefinitions': ['_DEBUG'],
+            'BasicRuntimeChecks': '<(win_debug_RuntimeChecks)',
+            'RuntimeLibrary': '<(win_debug_RuntimeLibrary)',
+            'conditions': [
+              # According to MSVS, InlineFunctionExpansion=0 means
+              # "default inlining", not "/Ob0".
+              # Thus, we have to handle InlineFunctionExpansion==0 separately.
+              ['win_debug_InlineFunctionExpansion==0', {
+                'AdditionalOptions': ['/Ob0'],
+              }],
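+              # For reference (an assumption based on the usual MSVS flag
+              # mapping): '1' corresponds to /Ob1 and '2' to /Ob2, so only
+              # the "disabled" case needs the explicit /Ob0 workaround above.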
+              ['win_debug_InlineFunctionExpansion!=""', {
+                'InlineFunctionExpansion':
+                  '<(win_debug_InlineFunctionExpansion)',
+              }],
+              ['win_debug_disable_iterator_debugging==1', {
+                'PreprocessorDefinitions': ['_HAS_ITERATOR_DEBUGGING=0'],
+              }],
+
+              # if win_debug_OmitFramePointers is blank, leave as default
+              ['win_debug_OmitFramePointers==1', {
+                'OmitFramePointers': 'true',
+              }],
+              ['win_debug_OmitFramePointers==0', {
+                'OmitFramePointers': 'false',
+                # The above is not sufficient (http://crbug.com/106711): it
+                # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+                # perform FPO regardless, so we must explicitly disable.
+                # We still want the false setting above to avoid having
+                # "/Oy /Oy-" and warnings about overriding.
+                'AdditionalOptions': ['/Oy-'],
+              }],
+            ],
+            'AdditionalOptions': [ '<@(win_debug_extra_cflags)', ],
+          },
+          'VCLinkerTool': {
+            'LinkIncremental': '<(msvs_debug_link_incremental)',
+            # ASLR makes debugging with windbg difficult because Chrome.exe and
+            # Chrome.dll share the same base name. As a result, windbg will
+            # name the Chrome.dll module like chrome_<base address>, where
+            # <base address> typically changes with each launch. This in turn
+            # means that breakpoints in Chrome.dll don't stick from one launch
+            # to the next. For this reason, we turn ASLR off in debug builds.
+            # Note that this is a three-way bool, where 0 means to pick up
+            # the default setting, 1 is off and 2 is on.
+            'RandomizedBaseAddress': 1,
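+            # Illustrative gyp sketch (not a setting made here): a target
+            # that wants ASLR even in debug builds could override this with
+            #   'VCLinkerTool': { 'RandomizedBaseAddress': 2 },  # on, see above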
+          },
+          'VCResourceCompilerTool': {
+            'PreprocessorDefinitions': ['_DEBUG'],
+          },
+        },
+        'conditions': [
+          ['OS=="linux" or OS=="android"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '<@(debug_extra_cflags)',
+                ],
+              }],
+            ],
+          }],
+          ['OS=="linux" and target_arch!="ia32" and disable_glibcxx_debug==0', {
+            # Enable libstdc++ debugging facilities to help catch problems
+            # early, see http://crbug.com/65151 .
+            # TODO(phajdan.jr): Should we enable this for all of POSIX?
+            'defines': ['_GLIBCXX_DEBUG=1',],
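+            # For example (illustrative C++, not part of this file), with
+            # _GLIBCXX_DEBUG an out-of-range access such as
+            #   std::vector<int> v(2); v[5] = 1;
+            # aborts with a diagnostic instead of silently corrupting memory.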
+          }],
+          ['release_valgrind_build==0', {
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fstack-protector-all',  # Implies -fstack-protector
+              ],
+            },
+          }],
+          ['clang==1', {
+            'cflags': [
+              # Allow comparing the address of references and 'this' against 0
+              # in debug builds. Technically, these can never be null in
+              # well-defined C/C++ and Clang can optimize such checks away in
+              # release builds, but they may be used in asserts in debug builds.
+              '-Wno-undefined-bool-conversion',
+              '-Wno-tautological-undefined-compare',
+            ],
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-Wno-undefined-bool-conversion',
+                '-Wno-tautological-undefined-compare',
+              ],
+            },
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '-Wno-undefined-bool-conversion',
+                  '-Wno-tautological-undefined-compare',
+                ],
+              },
+            },
+          }],
+        ],
+      },
+      'Release_Base': {
+        'abstract': 1,
+        'defines': [
+          'NDEBUG',
+        ],
+        'xcode_settings': {
+          'DEAD_CODE_STRIPPING': 'YES',  # -Wl,-dead_strip
+          'GCC_OPTIMIZATION_LEVEL': '<(mac_release_optimization)',
+          'OTHER_CFLAGS': [ '<@(release_extra_cflags)', ],
+        },
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'RuntimeLibrary': '<(win_release_RuntimeLibrary)',
+            'conditions': [
+              # In official builds, each target will self-select
+              # an optimization level.
+              ['buildtype!="Official"', {
+                  'Optimization': '<(win_release_Optimization)',
+                },
+              ],
+              # According to MSVS, InlineFunctionExpansion=0 means
+              # "default inlining", not "/Ob0".
+              # Thus, we have to handle InlineFunctionExpansion==0 separately.
+              ['win_release_InlineFunctionExpansion==0', {
+                'AdditionalOptions': ['/Ob0'],
+              }],
+              ['win_release_InlineFunctionExpansion!=""', {
+                'InlineFunctionExpansion':
+                  '<(win_release_InlineFunctionExpansion)',
+              }],
+
+              # if win_release_OmitFramePointers is blank, leave as default
+              ['win_release_OmitFramePointers==1', {
+                'OmitFramePointers': 'true',
+              }],
+              ['win_release_OmitFramePointers==0', {
+                'OmitFramePointers': 'false',
+                # The above is not sufficient (http://crbug.com/106711): it
+                # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+                # perform FPO regardless, so we must explicitly disable.
+                # We still want the false setting above to avoid having
+                # "/Oy /Oy-" and warnings about overriding.
+                'AdditionalOptions': ['/Oy-'],
+              }],
+            ],
+            'AdditionalOptions': [
+                '/d2Zi+',  # Improve debugging of Release builds.
+                '/Zc:inline',  # Remove unreferenced COMDAT (faster links).
+                '<@(win_release_extra_cflags)',
+            ],
+          },
+          'VCLinkerTool': {
+            # LinkIncremental is a tri-state boolean, where 0 means default
+            # (i.e., inherit from parent solution), 1 means false, and
+            # 2 means true.
+            'LinkIncremental': '1',
+            # This corresponds to the /PROFILE flag which ensures the PDB
+            # file contains FIXUP information (growing the PDB file by about
+            # 5%) but does not otherwise alter the output binary. This
+            # information is used by the Syzygy optimization tool when
+            # decomposing the release image.
+            'Profile': 'true',
+          },
+        },
+        'conditions': [
+          ['msvs_use_common_release', {
+            'includes': ['release.gypi'],
+          }],
+          ['release_valgrind_build==0 and tsan==0', {
+            'defines': [
+              'NVALGRIND',
+              'DYNAMIC_ANNOTATIONS_ENABLED=0',
+            ],
+          }, {
+            'defines': [
+              'MEMORY_TOOL_REPLACES_ALLOCATOR',
+              'MEMORY_SANITIZER_INITIAL_SIZE',
+              'DYNAMIC_ANNOTATIONS_ENABLED=1',
+              'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+            ],
+          }],
+          ['win_use_allocator_shim==0', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+          # _FORTIFY_SOURCE isn't really supported by Clang now, see
+          # http://llvm.org/bugs/show_bug.cgi?id=16821.
+          # It seems to work fine with Ubuntu 12 headers though, so use it
+          # in official builds.
+          ['os_posix==1 and (asan!=1 and msan!=1 and tsan!=1 and lsan!=1 and ubsan!=1) and (OS!="linux" or clang!=1 or buildtype=="Official")', {
+            'target_conditions': [
+              ['chromium_code==1', {
+                # Non-chromium code is not guaranteed to compile cleanly
+                # with _FORTIFY_SOURCE. Also, fortified build may fail
+                # when optimizations are disabled, so only do that for Release
+                # build.
+                'defines': [
+                  '_FORTIFY_SOURCE=2',
+                ],
+              }],
+            ],
+          }],
+          ['OS=="linux" or OS=="android"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '<@(release_extra_cflags)',
+                ],
+                'conditions': [
+                  ['enable_resource_whitelist_generation==1', {
+                    'cflags': [
+                      '-Wunknown-pragmas -Wno-error=unknown-pragmas',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['OS=="ios"', {
+            'defines': [
+              'NS_BLOCK_ASSERTIONS=1',
+            ],
+          }],
+        ],
+      },
+      #
+      # Concrete configurations
+      #
+      'Debug': {
+        'inherit_from': ['Common_Base', 'x86_Base', 'Debug_Base'],
+      },
+      'Release': {
+        'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'],
+      },
+      'conditions': [
+        [ 'OS=="ios"', {
+          'Profile': {
+            'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'],
+            'target_conditions': [
+              [ '_type=="executable"', {
+                # To get a real .dSYM bundle produced by dsymutil, set the
+                # debug information format to dwarf-with-dsym.  Since
+                # strip_from_xcode will not be used, set Xcode to do the
+                # stripping as well.
+                'xcode_settings': {
+                  'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+                  'DEPLOYMENT_POSTPROCESSING': 'YES',
+                  'STRIP_INSTALLED_PRODUCT': 'YES',
+                },
+              }],
+            ],
+          },
+        }],
+        [ 'OS=="win"', {
+          # TODO(bradnelson): add a gyp mechanism to make this more graceful.
+          'Debug_x64': {
+            'inherit_from': ['Common_Base', 'x64_Base', 'Debug_Base'],
+          },
+          'Release_x64': {
+            'inherit_from': ['Common_Base', 'x64_Base', 'Release_Base'],
+          },
+        }],
+      ],
+    },
+  },
+  'conditions': [
+    ['os_posix==1', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,-z,now',
+          '-Wl,-z,relro',
+        ],
+        # TODO(glider): enable the default options on other systems.
+        'conditions': [
+          ['use_sanitizer_options==1 and ((OS=="linux" and (chromeos==0 or target_arch!="ia32")) or OS=="mac")', {
+            'dependencies': [
+              '<(DEPTH)/build/sanitizers/sanitizers.gyp:sanitizer_options',
+            ],
+          }],
+        ],
+      },
+    }],
+    # TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
+    ['os_posix==1 and disable_fatal_linker_warnings==0 and use_evdev_gestures==0 and (chromeos==0 or target_arch!="arm")', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,--fatal-warnings',
+        ],
+      },
+    }],
+    ['os_posix==1 and chromeos==0', {
+      # Chrome OS enables -fstack-protector-strong via its build wrapper,
+      # and we want to avoid overriding this, so stack-protector is only
+      # enabled when not building on Chrome OS.
+      # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc
+      # supports it.
+      'target_defaults': {
+        'cflags': [
+          '-fstack-protector',
+          '--param=ssp-buffer-size=4',
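+          # (--param=ssp-buffer-size=4 lowers the buffer-size threshold at
+          # which gcc instruments a function with the stack protector; the
+          # upstream gcc default is 8.)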
+        ],
+      },
+    }],
+    ['os_posix==1 and OS!="mac" and OS!="ios"', {
+      'target_defaults': {
+        # Enable -Werror by default, but put it in a variable so it can
+        # be disabled in ~/.gyp/include.gypi on the valgrind builders.
+        'variables': {
+          'werror%': '-Werror',
+          'libraries_for_target%': '',
+        },
+        'defines': [
+          '_FILE_OFFSET_BITS=64',
+        ],
+        'cflags': [
+          '<(werror)',  # See note above about the werror variable.
+          '-pthread',
+          '-fno-strict-aliasing',  # See http://crbug.com/32204
+          '-Wall',
+          # TODO(evan): turn this back on once all the builds work.
+          # '-Wextra',
+          # Don't warn about unused function params.  We use those everywhere.
+          '-Wno-unused-parameter',
+          # Don't warn about the "struct foo f = {0};" initialization pattern.
+          '-Wno-missing-field-initializers',
+          # Don't export any symbols (for example, to plugins we dlopen()).
+          # Note: this is *required* to make some plugins work.
+          '-fvisibility=hidden',
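+          # With -fvisibility=hidden, anything that must remain visible to
+          # dlopen()'d code has to opt back in explicitly; illustrative C,
+          # not from this file:
+          #   __attribute__((visibility("default"))) void entry_point(void);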
+          '-pipe',
+        ],
+        'cflags_cc': [
+          '-fno-exceptions',
+          '-fno-rtti',
+          '-fno-threadsafe-statics',
+          # Make inline functions have hidden visibility by default.
+          # Surprisingly, not covered by -fvisibility=hidden.
+          '-fvisibility-inlines-hidden',
+          # GCC turns on -Wsign-compare for C++ under -Wall, but clang doesn't,
+          # so we specify it explicitly.  (llvm.org/PR10448, crbug.com/90453)
+          '-Wsign-compare',
+        ],
+        'ldflags': [
+          '-pthread', '-Wl,-z,noexecstack',
+        ],
+        'libraries': [
+          '<(libraries_for_target)',
+        ],
+        'configurations': {
+          'Debug_Base': {
+            'variables': {
+              'debug_optimize%': '0',
+            },
+            'defines': [
+              '_DEBUG',
+            ],
+            'cflags': [
+              '-O>(debug_optimize)',
+              '-g',
+              '-gdwarf-4',
+            ],
+            'conditions': [
+              ['OS=="android"', {
+                'ldflags': [
+                  # Warn in case of text relocations.
+                  '-Wl,--warn-shared-textrel',
+                ],
+              }],
+              ['OS=="android" and android_full_debug==0', {
+                # Some configurations are copied from Release_Base to reduce
+                # the binary size.
+                'variables': {
+                  'debug_optimize%': 's',
+                },
+                'cflags': [
+                  '-fdata-sections',
+                  '-ffunction-sections',
+                ],
+                'ldflags': [
+                  '-Wl,-O1',
+                  '-Wl,--as-needed',
+                ],
+              }],
+              ['OS=="android" and android_full_debug==0 and target_arch!="arm64"', {
+                # We don't omit frame pointers on arm64 since they are required
+                # to correctly unwind stackframes which contain system library
+                # function frames (crbug.com/391706).
+                'cflags': [
+                  '-fomit-frame-pointer',
+                ],
+              }],
+              ['OS=="linux" and target_arch=="ia32"', {
+                'ldflags': [
+                  '-Wl,--no-as-needed',
+                ],
+              }],
+              ['debug_unwind_tables==1', {
+                'cflags': ['-funwind-tables'],
+              }, {
+                'cflags': ['-fno-unwind-tables', '-fno-asynchronous-unwind-tables'],
+              }],
+              # TODO(mostynb): shuffle clang/gcc_version/binutils_version
+              # definitions in to the right scope to use them when setting
+              # linux_use_debug_fission, so it can be used here alone.
+              ['linux_use_debug_fission==1 and linux_use_gold_flags==1 and (clang==1 or gcc_version>=48) and binutils_version>=223', {
+                'cflags': ['-gsplit-dwarf'],
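+                # (-gsplit-dwarf moves most debug info into side .dwo files
+                # so the linker no longer has to copy it; that is what makes
+                # links faster.)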
+              }],
+            ],
+          },
+          'Release_Base': {
+            'variables': {
+              'release_optimize%': '2',
+              # Binaries become big and gold is unable to perform GC
+              # and remove unused sections for some of the test targets
+              # on 32-bit platforms.
+              # (This is currently observed only on chromeos valgrind bots.)
+              # The following flag disables the --gc-sections linker
+              # option for these bots.
+              'no_gc_sections%': 0,
+
+              # TODO(bradnelson): reexamine how this is done if we change the
+              # expansion of configurations
+              'release_valgrind_build%': 0,
+            },
+            'cflags': [
+              '-O<(release_optimize)',
+              # Don't emit the GCC version ident directives, they just end up
+              # in the .comment section taking up binary size.
+              '-fno-ident',
+              # Put data and code in their own sections, so that unused symbols
+              # can be removed at link time with --gc-sections.
+              '-fdata-sections',
+              '-ffunction-sections',
+            ],
+            'ldflags': [
+              # Specifically tell the linker to perform optimizations.
+              # See http://lwn.net/Articles/192624/ .
+              '-Wl,-O1',
+              '-Wl,--as-needed',
+            ],
+            'conditions': [
+              ['no_gc_sections==0', {
+                'ldflags': [
+                  '-Wl,--gc-sections',
+                ],
+              }],
+              ['OS=="android" and target_arch!="arm64"', {
+                # We don't omit frame pointers on arm64 since they are required
+                # to correctly unwind stackframes which contain system library
+                # function frames (crbug.com/391706).
+                'cflags': [
+                  '-fomit-frame-pointer',
+                ]
+              }],
+              ['OS=="android"', {
+                'variables': {
+                  'release_optimize%': 's',
+                },
+                'ldflags': [
+                  # Warn in case of text relocations.
+                  '-Wl,--warn-shared-textrel',
+                ],
+              }],
+              ['profiling==1', {
+                'cflags': [
+                  '-fno-omit-frame-pointer',
+                  '-g',
+                ],
+                'conditions': [
+                  ['profiling_full_stack_frames==1', {
+                    'cflags': [
+                      '-fno-inline',
+                      '-fno-optimize-sibling-calls',
+                    ],
+                  }],
+                ],
+              }],
+              ['release_unwind_tables==1', {
+                'cflags': ['-funwind-tables'],
+              }, {
+                'cflags': ['-fno-unwind-tables', '-fno-asynchronous-unwind-tables'],
+              }],
+            ],
+          },
+        },
+        'conditions': [
+          ['target_arch=="ia32"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'asflags': [
+                  # Needed so that libs with .s files (e.g. libicudata.a)
+                  # are compatible with the general 32-bit-ness.
+                  '-32',
+                ],
+                # All floating-point computations on x87 happen in 80-bit
+                # precision.  Because the C and C++ language standards allow
+                # the compiler to keep the floating-point values in higher
+                # precision than what's specified in the source and doing so
+                # is more efficient than constantly rounding up to 64-bit or
+                # 32-bit precision as specified in the source, the compiler,
+                # especially in the optimized mode, tries very hard to keep
+                # values in the x87 floating-point stack (in 80-bit precision)
+                # as long as possible. This has an important side effect: the
+                # real value used in a computation may change depending on
+                # how the compiler did the optimization - that is, the value
+                # kept in 80-bit is different from the value rounded down to
+                # 64-bit or 32-bit. There are compiler options to make this
+                # behavior consistent (e.g. -ffloat-store would keep all
+                # floating-point values in memory, thus forcing them to be
+                # rounded to their original precision) but they carry a
+                # significant runtime performance penalty.
+                #
+                # -mfpmath=sse -msse2 makes the compiler use SSE instructions
+                # which keep floating-point values in SSE registers in its
+                # native precision (32-bit for single precision, and 64-bit
+                # for double precision values). This means the floating-point
+                # value used during computation does not change depending on
+                # how the compiler optimized the code, since the value is
+                # always kept in its specified precision.
+                #
+                # Refer to http://crbug.com/348761 for rationale behind SSE2
+                # being a minimum requirement for 32-bit Linux builds and
+                # http://crbug.com/313032 for an example where this has "bit"
+                # us in the past.
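+                #
+                # A minimal sketch of the pitfall (illustrative C, not code
+                # that is built here): under x87,
+                #   volatile double r = a / b;  // stored, rounded to 64 bits
+                #   assert(r == a / b);         // may fail: the rhs can stay
+                #                               // in an 80-bit register
+                # With -mfpmath=sse, both sides are computed in 64 bits and
+                # the assert holds.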
+                'cflags': [
+                  '-msse2',
+                  '-mfpmath=sse',
+                  '-mmmx',  # Allows mmintrin.h for MMX intrinsics.
+                  '-m32',
+                ],
+                'ldflags': [
+                  '-m32',
+                ],
+                'conditions': [
+                  # Use gold linker for Android ia32 target.
+                  ['OS=="android"', {
+                    'ldflags': [
+                      '-fuse-ld=gold',
+                    ],
+                  }],
+                  # Installed packages have started cropping up with
+                  # different headers between the 32-bit and 64-bit
+                  # versions, so we have to paper over those differences
+                  # and make sure a 32-bit-on-64-bit build picks up the
+                  # right files.
+                  # For Android builds, use NDK headers instead of host
+                  # headers.
+                  ['host_arch!="ia32" and OS!="android"', {
+                    'include_dirs+': [
+                      '/usr/include32',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="x64"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  # Use gold linker for Android x64 target.
+                  ['OS=="android"', {
+                    'ldflags': [
+                      '-fuse-ld=gold',
+                    ],
+                  }],
+                ],
+                'cflags': [
+                  '-m64',
+                  '-march=x86-64',
+                ],
+                'ldflags': [
+                  '-m64',
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="arm"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['clang==0', {
+                    'cflags_cc': [
+                      # The codesourcery arm-2009q3 toolchain warns that the ABI
+                      # has changed whenever it encounters a varargs function. This
+                      # silences those warnings, as they are not helpful and
+                      # clutter legitimate warnings.
+                      '-Wno-abi',
+                    ],
+                  }],
+                  ['clang==1 and arm_arch!="" and OS!="android"', {
+                    'cflags': [
+                      '-target arm-linux-gnueabihf',
+                    ],
+                    'ldflags': [
+                      '-target arm-linux-gnueabihf',
+                    ],
+                  }],
+                  ['arm_arch!=""', {
+                    'cflags': [
+                      '-march=<(arm_arch)',
+                    ],
+                  }],
+                  ['clang==1 and OS!="android"', {
+                    'cflags': [
+                      # We need to disable clang's builtin assembler as it can't
+                      # handle several asm files, crbug.com/124610
+                      '-no-integrated-as',
+                    ],
+                  }],
+                  ['arm_tune!=""', {
+                    'cflags': [
+                      '-mtune=<(arm_tune)',
+                    ],
+                  }],
+                  ['arm_fpu!=""', {
+                    'cflags': [
+                      '-mfpu=<(arm_fpu)',
+                    ],
+                  }],
+                  ['arm_float_abi!=""', {
+                    'cflags': [
+                      '-mfloat-abi=<(arm_float_abi)',
+                    ],
+                  }],
+                  ['arm_thumb==1', {
+                    'cflags': [
+                      '-mthumb',
+                    ],
+                  }],
+                  ['OS=="android"', {
+                    # Most of the following flags are derived from what Android
+                    # uses by default when building for arm, reference for which
+                    # can be found in the following file in the Android NDK:
+                    # toolchains/arm-linux-androideabi-4.9/setup.mk
+                    'cflags': [
+                      # The tree-sra optimization (scalar replacement for
+                      # aggregates enabling subsequent optimizations) leads to
+                      # invalid code generation when using the Android NDK's
+                      # compiler (r5-r7). This can be verified using
+                      # webkit_unit_tests' WTF.Checked_int8_t test.
+                      '-fno-tree-sra',
+                      # The following option is disabled to improve binary
+                      # size and performance in gcc 4.9.
+                      '-fno-caller-saves',
+                      '-Wno-psabi',
+                    ],
+                    # Android now supports .relro sections properly.
+                    # NOTE: While these flags enable the generation of .relro
+                    # sections, the generated libraries can still be loaded on
+                    # older Android platform versions.
+                    'ldflags': [
+                        '-Wl,-z,relro',
+                        '-Wl,-z,now',
+                        '-fuse-ld=gold',
+                    ],
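+                    # One way to confirm a built library got a RELRO segment
+                    # (illustrative shell, hypothetical file name):
+                    #   readelf -l libfoo.so | grep GNU_RELRO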
+                    'conditions': [
+                      ['gcc_version==48 and clang==0', {
+                        'cflags': [
+                          # The following 5 options are disabled to save on
+                          # binary size in GCC 4.8.
+                          '-fno-partial-inlining',
+                          '-fno-early-inlining',
+                          '-fno-tree-copy-prop',
+                          '-fno-tree-loop-optimize',
+                          '-fno-move-loop-invariants',
+                        ],
+                      }],
+                      ['arm_thumb==1', {
+                        'cflags': [ '-mthumb-interwork' ],
+                      }],
+                      ['profiling==1', {
+                        'cflags': [
+                          # Thumb code with frame pointer makes chrome crash
+                          # early.
+                          '-marm',
+                          '-mapcs-frame', # Required by -fno-omit-frame-pointer.
+                          # The perf report sometimes incorrectly attributes
+                          # code from tail calls.
+                          '-fno-optimize-sibling-calls',
+                        ],
+                        'cflags!': [
+                          '-fomit-frame-pointer',
+                        ],
+                      }],
+                      ['clang==1', {
+                        'cflags!': [
+                          # Clang does not support the following options.
+                          '-mthumb-interwork',
+                          '-finline-limit=64',
+                          '-fno-tree-sra',
+                          '-fno-caller-saves',
+                          '-Wno-psabi',
+                        ],
+                        'cflags': [
+                          # TODO(hans) Enable integrated-as (crbug.com/124610).
+                          '-no-integrated-as',
+                          '-B<(android_toolchain)',  # Else /usr/bin/as gets picked up.
+                        ],
+                        'ldflags': [
+                          # Let clang find the ld.gold in the NDK.
+                          '--gcc-toolchain=<(android_toolchain)/..',
+                        ],
+                      }],
+                      ['asan==1', {
+                        'cflags': [
+                          '-marm', # Required for frame pointer based stack traces.
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['chromecast==1', {
+                    'cflags': [
+                      # We set arm_arch to "" so that the -march compiler
+                      # option is not set.  Otherwise a gcc bug would
+                      # complain about it conflicting with '-mcpu=cortex-a9'.
+                      # The '-march=armv7-a' flag is redundant anyway because
+                      # it is enabled by default when we built the toolchain,
+                      # and '-mcpu=cortex-a9' should be sufficient.
+                      '-mcpu=cortex-a9',
+                      '-funwind-tables',
+                      # Breakpad requires symbols with debugging information
+                      '-g',
+                    ],
+                    'ldflags': [
+                      # We want to statically link libstdc++/libgcc_s.
+                      '-static-libstdc++',
+                      '-static-libgcc',
+                    ],
+                    'cflags!': [
+                      # Some components in Chromium (e.g. v8, skia, ffmpeg)
+                      # define their own cflags for arm builds that could
+                      # conflict with the flags we set here (e.g.
+                      # '-mcpu=cortex-a9'). Remove these flags explicitly.
+                      '-march=armv7-a',
+                      '-mtune=cortex-a8',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="arm64"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['OS=="android"', {
+                    'cflags!': [
+                       '-fstack-protector',  # stack protector is always enabled on arm64.
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="mipsel"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['android_webview_build==0 and mips_arch_variant=="r6"', {
+                    'cflags': ['-mips32r6', '-Wa,-mips32r6'],
+                    'conditions': [
+                      ['OS=="android"', {
+                        'ldflags': ['-mips32r6', '-Wl,-melf32ltsmip',],
+                      }],
+                    ],
+                  }],
+                  ['android_webview_build==0 and mips_arch_variant=="r2"', {
+                    'cflags': ['-mips32r2', '-Wa,-mips32r2'],
+                  }],
+                  ['android_webview_build==0 and mips_arch_variant=="r1"', {
+                    'cflags': ['-mips32', '-Wa,-mips32'],
+                  }],
+                ],
+                'ldflags': [
+                  '-Wl,--no-keep-memory'
+                ],
+                'cflags_cc': [
+                  '-Wno-uninitialized',
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="mips64el"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['android_webview_build==0 and mips_arch_variant=="r6"', {
+                    'cflags': ['-mips64r6', '-Wa,-mips64r6'],
+                    'ldflags': [ '-mips64r6' ],
+                  }],
+                  ['android_webview_build==0 and mips_arch_variant=="r2"', {
+                    'cflags': ['-mips64r2', '-Wa,-mips64r2'],
+                    'ldflags': [ '-mips64r2' ],
+                  }],
+                ],
+                'cflags_cc': [
+                  '-Wno-uninitialized',
+                ],
+              }],
+            ],
+          }],
+          ['linux_fpic==1', {
+            'cflags': [
+              '-fPIC',
+            ],
+            'ldflags': [
+              '-fPIC',
+            ],
+          }],
+          ['sysroot!=""', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '--sysroot=<(sysroot)',
+                ],
+                'ldflags': [
+                  '--sysroot=<(sysroot)',
+                  '<!(<(DEPTH)/build/linux/sysroot_ld_path.sh <(sysroot))',
+                ],
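+                # (The <!(...) form runs the command at gyp time and splices
+                # its stdout into this list; the script is assumed to emit
+                # the linker search-path flags for the sysroot.)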
+              }]]
+          }],
+          ['clang==1', {
+            'cflags': [
+              # TODO(thakis): Remove, http://crbug.com/263960
+              '-Wno-reserved-user-defined-literal',
+            ],
+            'cflags_cc': [
+              # See the comment in the Mac section for what it takes to move
+              # this to -std=c++11.
+              '-std=gnu++11',
+            ],
+          }],
+          ['clang==0 and host_clang==1', {
+            'target_conditions': [
+              ['_toolset=="host"', {
+                'cflags_cc': [ '-std=gnu++11', ],
+              }],
+            ],
+          }],
+          ['clang==1 and clang_use_chrome_plugins==1', {
+            'cflags': [
+              '<@(clang_chrome_plugins_flags)',
+            ],
+          }],
+          ['clang==1 and clang_load!=""', {
+            'cflags': [
+              '-Xclang', '-load', '-Xclang', '<(clang_load)',
+            ],
+          }],
+          ['clang==1 and clang_add_plugin!=""', {
+            'cflags': [
+              '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+            ],
+          }],
+          ['clang==1 and target_arch=="ia32"', {
+            'cflags': [
+              # Else building libyuv gives clang's register allocator issues,
+              # see llvm.org/PR15798 / crbug.com/233709
+              '-momit-leaf-frame-pointer',
+            ],
+          }],
+          ['clang==1 and "<(GENERATOR)"=="ninja"', {
+            'cflags': [
+              # See http://crbug.com/110262
+              '-fcolor-diagnostics',
+            ],
+          }],
+          # Common options for AddressSanitizer, LeakSanitizer,
+          # ThreadSanitizer and MemorySanitizer.
+          ['asan==1 or lsan==1 or tsan==1 or msan==1 or ubsan==1 or ubsan_vptr==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fno-omit-frame-pointer',
+                  '-gline-tables-only',
+                ],
+                'cflags!': [
+                  '-fomit-frame-pointer',
+                ],
+              }],
+            ],
+          }],
+          ['asan==1 or lsan==1 or tsan==1 or msan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'ldflags!': [
+                  # Functions interposed by the sanitizers can make ld think
+                  # that some libraries aren't needed when they actually are,
+                  # http://crbug.com/234010. As workaround, disable --as-needed.
+                  '-Wl,--as-needed',
+                ],
+                'defines': [
+                  'MEMORY_TOOL_REPLACES_ALLOCATOR',
+                  'MEMORY_SANITIZER_INITIAL_SIZE',
+                ],
+              }],
+            ],
+          }],
+          ['asan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=address',
+                ],
+                'ldflags': [
+                  '-fsanitize=address',
+                ],
+              }],
+            ],
+            'conditions': [
+              ['OS=="mac"', {
+                'cflags': [
+                  '-mllvm -asan-globals=0',  # http://crbug.com/352073
+                ],
+              }],
+            ],
+          }],
+          ['ubsan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=undefined',
+                  # -fsanitize=vptr is incompatible with -fno-rtti.
+                  '-fno-sanitize=vptr',
+                ],
+                'ldflags': [
+                  '-fsanitize=undefined',
+                  # -fsanitize=vptr is incompatible with -fno-rtti.
+                  '-fno-sanitize=vptr',
+                ],
+              }],
+            ],
+          }],
+          ['ubsan_vptr==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=vptr',
+                  '-fsanitize-blacklist=<(ubsan_vptr_blacklist)',
+                ],
+                'cflags_cc!': [
+                  '-fno-rtti',
+                ],
+                'cflags!': [
+                  '-fno-rtti',
+                ],
+                'ldflags': [
+                  '-fsanitize=vptr',
+                ],
+                'defines': [
+                  'UNDEFINED_SANITIZER',
+                ],
+              }],
+            ],
+          }],
+          ['asan_coverage!=0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-mllvm -asan-coverage=<(asan_coverage)',
+                ],
+              }],
+            ],
+          }],
+          ['lsan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=leak',
+                ],
+                'ldflags': [
+                  '-fsanitize=leak',
+                ],
+                'defines': [
+                  'LEAK_SANITIZER',
+                  'WTF_USE_LEAK_SANITIZER=1',
+                ],
+              }],
+            ],
+          }],
+          ['tsan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=thread',
+                  '-fPIC',
+                  '-fsanitize-blacklist=<(tsan_blacklist)',
+                ],
+                'ldflags': [
+                  '-fsanitize=thread',
+                ],
+                'defines': [
+                  'THREAD_SANITIZER',
+                  'DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1',
+                  'WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1',
+                ],
+                'target_conditions': [
+                  ['_type=="executable"', {
+                    'ldflags': [
+                      '-pie',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['msan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=memory',
+                  '-fsanitize-memory-track-origins=<(msan_track_origins)',
+                  '-fPIC',
+                  '-fsanitize-blacklist=<(msan_blacklist)',
+                ],
+                'ldflags': [
+                  '-fsanitize=memory',
+                ],
+                'defines': [
+                  'MEMORY_SANITIZER',
+                ],
+                'target_conditions': [
+                  ['_type=="executable"', {
+                    'ldflags': [
+                      '-pie',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['use_instrumented_libraries==1', {
+            'dependencies': [
+              '<(DEPTH)/third_party/instrumented_libraries/instrumented_libraries.gyp:instrumented_libraries',
+            ],
+          }],
+          ['use_custom_libcxx==1', {
+            'dependencies': [
+              '<(DEPTH)/third_party/libc++/libc++.gyp:libcxx_proxy',
+            ],
+          }],
+          ['order_profiling!=0 and (chromeos==1 or OS=="linux" or OS=="android")', {
+            'target_conditions': [
+              # crazy_linker has an upstream gyp file we can't edit, and we
+              # don't want to instrument it.
+              ['_toolset=="target" and _target_name!="crazy_linker"', {
+                'cflags': [
+                  '-finstrument-functions',
+                  # Allow mmx intrinsics to inline, so that the
+                  # compiler can expand the intrinsics.
+                  '-finstrument-functions-exclude-file-list=mmintrin.h',
+                ],
+              }],
+              ['_toolset=="target" and OS=="android"', {
+                'cflags': [
+                  # Avoids errors with current NDK:
+                  # "third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/include/arm_neon.h:3426:3: error: argument must be a constant"
+                  '-finstrument-functions-exclude-file-list=arm_neon.h,SaturatedArithmeticARM.h',
+                ],
+              }],
+            ],
+          }],
+          ['linux_dump_symbols==1', {
+            'cflags': [ '-g' ],
+            'conditions': [
+              ['OS=="linux" and host_arch=="ia32" and linux_use_bundled_gold==0', {
+                'target_conditions': [
+                  ['_toolset=="target"', {
+                    'ldflags': [
+                      # Attempt to use less memory to prevent the linker from
+                      # running out of address space. Considering installing a
+                      # 64-bit kernel and switching to a 64-bit linker.
+                      '-Wl,--no-keep-memory',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['use_allocator!="tcmalloc"', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+          ['linux_use_gold_flags==1', {
+            # Newer gccs and clangs support -fuse-ld; use the flag to force
+            # gold selection.
+            # gcc -- http://gcc.gnu.org/onlinedocs/gcc-4.8.0/gcc/Optimize-Options.html
+            'ldflags': [ '-fuse-ld=gold', ],
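+            # Illustrative sanity check (shell, not part of the build):
+            #   ld.gold --version   # the linker that -fuse-ld=gold selects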
+
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'ldflags': [
+                  # Experimentation found that using four linking threads
+                  # saved ~20% of link time.
+                  # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
+                  # Only apply this to the target linker, since the host
+                  # linker might not be gold, but isn't used much anyway.
+                  # TODO(raymes): Disable threading because gold is frequently
+                  # crashing on the bots: crbug.com/161942.
+                  # '-Wl,--threads',
+                  # '-Wl,--thread-count=4',
+                ],
+              }],
+            ],
+            'conditions': [
+              ['release_valgrind_build==0', {
+                'target_conditions': [
+                  ['_toolset=="target"', {
+                    'ldflags': [
+                      '-Wl,--icf=safe',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['linux_use_bundled_binutils==1', {
+            'cflags': [
+              '-B<!(cd <(DEPTH) && pwd -P)/<(binutils_dir)',
+            ],
+          }],
+          ['linux_use_bundled_gold==1', {
+            # Put our binutils, which contains gold, in the search path. We
+            # pass the path to gold to the compiler. gyp leaves unspecified
+            # what the
+            # cwd is when running the compiler, so the normal gyp path-munging
+            # fails us. This hack gets the right path.
+            'ldflags': [
+              '-B<!(cd <(DEPTH) && pwd -P)/<(binutils_dir)',
+            ],
+          }],
+          # Some binutils 2.23 releases may or may not have new dtags enabled,
+          # but they are all compatible with --disable-new-dtags,
+          # because the new dynamic tags are not created by default.
+          ['binutils_version>=223', {
+            # Newer binutils don't set DT_RPATH unless you disable "new" dtags
+            # and the new DT_RUNPATH doesn't work without --no-as-needed flag.
+            # FIXME(mithro): Figure out the --as-needed/--no-as-needed flags
+            # inside this file to allow usage of --no-as-needed and removal of
+            # this flag.
+            'ldflags': [
+              '-Wl,--disable-new-dtags',
+            ],
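+            # To see which tag a binary actually ended up with (illustrative
+            # shell, hypothetical path):
+            #   readelf -d out/Release/chrome | grep -E 'RPATH|RUNPATH'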
+          }],
+          ['gcc_version>=47 and clang==0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags_cc': [
+                  '-std=gnu++11',
+                  # See comment for -Wno-c++11-narrowing.
+                  '-Wno-narrowing',
+                  # TODO(thakis): Remove, http://crbug.com/263960
+                  '-Wno-literal-suffix',
+                ],
+              }],
+            ],
+          }],
+          ['host_gcc_version>=47 and clang==0 and host_clang==0', {
+            'target_conditions': [
+              ['_toolset=="host"', {
+                'cflags_cc': [
+                  '-std=gnu++11',
+                  # See comment for -Wno-c++11-narrowing.
+                  '-Wno-narrowing',
+                  # TODO(thakis): Remove, http://crbug.com/263960
+                  '-Wno-literal-suffix',
+                ],
+              }],
+            ],
+          }],
+        ],
+      },
+    }],
+    # FreeBSD-specific options; note that most FreeBSD options are set above,
+    # with Linux.
+    ['OS=="freebsd"', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,--no-keep-memory',
+        ],
+      },
+    }],
+    # Android-specific options; note that most are set above with Linux.
+    ['OS=="android"', {
+      'variables': {
+        # This is a unique identifier for a given build. It's used for
+        # identifying various build artifacts corresponding to a particular
+        # build of chrome (e.g. where to find archived symbols).
+        'chrome_build_id%': '',
+        'conditions': [
+          # Use the shared stlport library when the system one is used.
+          # Figure this out early since it needs symbols from libgcc.a, so it
+          # has to be before that in the set of libraries.
+          ['use_system_stlport==1', {
+            'android_stlport_library': 'stlport',
+          }, {
+            'conditions': [
+              ['component=="shared_library"', {
+                  'android_stlport_library': 'stlport_shared',
+              }, {
+                  'android_stlport_library': 'stlport_static',
+              }],
+            ],
+          }],
+        ],
+
+        # Placing this variable here prevents us from forking libvpx, which
+        # is used by remoting.  Remoting is off, so it needn't be built,
+        # and forking its deps seems like overkill.
+        # But this variable needs to be defined for gyp to run properly.
+        # A proper solution would be an OS=="android" conditional
+        # in third_party/libvpx/libvpx.gyp to define it.
+        'libvpx_path': 'lib/linux/arm',
+      },
+      'target_defaults': {
+        'variables': {
+          'release_extra_cflags%': '',
+          'conditions': [
+            # If we're using the components build, append "cr" to all shared
+            # libraries to avoid naming collisions with android system library
+            # versions with the same name (e.g. skia, icu).
+            ['component=="shared_library"', {
+              'android_product_extension': 'cr.so',
+            }, {
+              'android_product_extension': 'so',
+            } ],
+          ],
+        },
+        'target_conditions': [
+          ['_type=="shared_library"', {
+            'product_extension': '<(android_product_extension)',
+          }],
+
+          # Settings for building device targets using Android's toolchain.
+          # These are based on the setup.mk file from the Android NDK.
+          #
+          # The NDK Android executable link step looks as follows:
+          #  $LDFLAGS
+          #  $(TARGET_CRTBEGIN_DYNAMIC_O)  <-- crtbegin.o
+          #  $(PRIVATE_OBJECTS)            <-- The .o that we built
+          #  $(PRIVATE_STATIC_LIBRARIES)   <-- The .a that we built
+          #  $(TARGET_LIBGCC)              <-- libgcc.a
+          #  $(PRIVATE_SHARED_LIBRARIES)   <-- The .so that we built
+          #  $(PRIVATE_LDLIBS)             <-- System .so
+          #  $(TARGET_CRTEND_O)            <-- crtend.o
+          #
+          # For now the above are approximated for executables by adding
+          # crtbegin.o to the end of the ldflags and 'crtend.o' to the end
+          # of 'libraries'.
+          #
+          # The NDK Android shared library link step looks as follows:
+          #  $LDFLAGS
+          #  $(PRIVATE_OBJECTS)            <-- The .o that we built
+          #  -Wl,--whole-archive
+          #  $(PRIVATE_WHOLE_STATIC_LIBRARIES)
+          #  -Wl,--no-whole-archive
+          #  $(PRIVATE_STATIC_LIBRARIES)   <-- The .a that we built
+          #  $(TARGET_LIBGCC)              <-- libgcc.a
+          #  $(PRIVATE_SHARED_LIBRARIES)   <-- The .so that we built
+          #  $(PRIVATE_LDLIBS)             <-- System .so
+          #
+          # For now, assume that whole static libraries are not needed.
+          #
+          # For both executables and shared libraries, add the proper
+          # libgcc.a to the start of libraries which puts it in the
+          # proper spot after .o and .a files get linked in.
+          #
+          # TODO: The proper thing to do longer-term would be proper gyp
+          # support for a custom link command line.
+          ['_toolset=="target"', {
+            'cflags!': [
+              '-pthread',  # Not supported by Android toolchain.
+            ],
+            'cflags': [
+              '-ffunction-sections',
+              '-funwind-tables',
+              '-g',
+              '-fstack-protector',
+              '-fno-short-enums',
+              '-finline-limit=64',
+              '-Wa,--noexecstack',
+              '<@(release_extra_cflags)',
+            ],
+            'defines': [
+              'ANDROID',
+              '__GNU_SOURCE=1',  # Necessary for clone()
+              'USE_STLPORT=1',
+              '_STLP_USE_PTR_SPECIALIZATIONS=1',
+              'CHROME_BUILD_ID="<(chrome_build_id)"',
+            ],
+            'ldflags!': [
+              '-pthread',  # Not supported by Android toolchain.
+            ],
+            'ldflags': [
+              '-nostdlib',
+              '-Wl,--no-undefined',
+            ],
+            'libraries': [
+              '-l<(android_stlport_library)',
+              # Manually link the libgcc.a that the cross compiler uses.
+              '<!(<(android_toolchain)/*-gcc -print-libgcc-file-name)',
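+              # (gcc's -print-libgcc-file-name prints the full path of the
+              # libgcc.a matching the current flags; <!(...) captures that
+              # output at gyp time.)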
+              '-lc',
+              '-ldl',
+              '-lm',
+            ],
+            'conditions': [
+              ['component=="static_library"', {
+                'ldflags': [
+                  '-Wl,--exclude-libs=ALL',
+                ],
+              }],
+              ['clang==1', {
+                'cflags': [
+                  # Work around incompatibilities between bionic and clang
+                  # headers.
+                  '-D__compiler_offsetof=__builtin_offsetof',
+                  '-Dnan=__builtin_nan',
+                ],
+                'conditions': [
+                  ['target_arch=="arm"', {
+                    'cflags': [
+                      '-target arm-linux-androideabi',
+                    ],
+                    'ldflags': [
+                      '-target arm-linux-androideabi',
+                    ],
+                  }],
+                  ['target_arch=="ia32"', {
+                    'cflags': [
+                      '-target x86-linux-androideabi',
+                    ],
+                    'ldflags': [
+                      '-target x86-linux-androideabi',
+                    ],
+                  }],
+                  # Place holder for x64 support, not tested.
+                  # TODO: Enable clang support for Android x64. http://crbug.com/346626
+                  ['target_arch=="x64"', {
+                    'cflags': [
+                      '-target x86_64-linux-androideabi',
+                    ],
+                    'ldflags': [
+                      '-target x86_64-linux-androideabi',
+                    ],
+                  }],
+                ],
+              }],
+              ['asan==1', {
+                'cflags': [
+                  # Android build relies on -Wl,--gc-sections removing
+                  # unreachable code. ASan instrumentation for globals inhibits
+                  # this and results in a library with unresolvable relocations.
+                  # TODO(eugenis): find a way to reenable this.
+                  '-mllvm -asan-globals=0',
+                ],
+              }],
+              ['android_webview_build==0', {
+                'defines': [
+                  # The NDK has these things, but doesn't define the constants
+                  # to say that it does. Define them here instead.
+                  'HAVE_SYS_UIO_H',
+                ],
+                'cflags': [
+                  '--sysroot=<(android_ndk_sysroot)',
+                ],
+                'ldflags': [
+                  '--sysroot=<(android_ndk_sysroot)',
+                ],
+              }],
+              ['android_webview_build==1', {
+                'include_dirs': [
+                  # OpenAL headers from the Android tree.
+                  '<(android_src)/frameworks/wilhelm/include',
+                ],
+                'cflags': [
+                  # Android predefines this as 1; undefine it here so Chromium
+                  # can redefine it later to be 2 for chromium code and unset
+                  # for third party code. This works because cflags are added
+                  # before defines.
+                  '-U_FORTIFY_SOURCE',
+                  # Disable any additional warnings enabled by the Android
+                  # build system with which chromium does not build cleanly
+                  # (when treating warnings as errors).
+                  # Things that are part of -Wextra:
+                  '-Wno-extra', # Enabled by -Wextra, but no specific flag
+                  '-Wno-ignored-qualifiers',
+                  '-Wno-type-limits',
+                  '-Wno-unused-but-set-variable',
+                ],
+                'cflags_cc': [
+                  # Other things unrelated to -Wextra:
+                  '-Wno-non-virtual-dtor',
+                  '-Wno-sign-promo',
+                ],
+              }],
+              ['android_webview_build==1', {
+                'target_conditions': [
+                  ['chromium_code==0', {
+                    'cflags': [
+                      # There is a class of warnings that:
+                      #  1) Android always enables and also treats as errors
+                      #  2) Chromium ignores in third party code
+                      # So we disable those warnings when building with the
+                      # Android build system.
+                      '-Wno-address',
+                      '-Wno-format-security',
+                      '-Wno-return-type',
+                      '-Wno-sequence-point',
+                    ],
+                    'cflags_cc': [
+                      '-Wno-non-virtual-dtor',
+                    ],
+                  }],
+                ],
+              }],
+              ['target_arch == "arm"', {
+                'ldflags': [
+                  # Enable identical code folding to reduce size.
+                  '-Wl,--icf=safe',
+                ],
+              }],
+              # NOTE: The stlport header include paths below are specified in
+              # cflags rather than include_dirs because they need to come
+              # after include_dirs. Think of them like system headers, but
+              # don't use '-isystem' because the arm-linux-androideabi-4.4.3
+              # toolchain (circa Gingerbread) will exhibit strange errors.
+              # The include ordering here is important; change with caution.
+              ['use_system_stlport==1', {
+                'cflags': [
+                  # For libstdc++/include, which is used by stlport.
+                  '-I<(android_src)/bionic',
+                  '-I<(android_src)/external/stlport/stlport',
+                ],
+              }, { # else: use_system_stlport!=1
+                'cflags': [
+                  '-isystem<(android_stlport_include)',
+                ],
+                'ldflags': [
+                  '-L<(android_stlport_libs_dir)',
+                ],
+              }],
+              ['target_arch=="ia32"', {
+                # The x86 toolchain currently has problems with stack-protector.
+                'cflags!': [
+                  '-fstack-protector',
+                ],
+                'cflags': [
+                  '-fno-stack-protector',
+                ],
+              }],
+            ],
+            'target_conditions': [
+              ['_type=="executable"', {
+                # Force android tools to export the "main" symbol so they can be
+                # loaded on ICS using the run_pie wrapper. See crbug.com/373219.
+                # TODO(primiano): remove the -fvisibility and -rdynamic flags
+                # below when ICS support is dropped.
+                'cflags': [
+                  '-fPIE',
+                  '-fvisibility=default',
+                ],
+                'ldflags': [
+                  '-Bdynamic',
+                  '-Wl,--gc-sections',
+                  '-Wl,-z,nocopyreloc',
+                  '-pie',
+                  '-rdynamic',
+                  # crtbegin_dynamic.o should be the last item in ldflags.
+                  '<(android_ndk_lib)/crtbegin_dynamic.o',
+                ],
+                'libraries': [
+                  # crtend_android.o needs to be the last item in libraries.
+                  # Do not add any libraries after this!
+                  '<(android_ndk_lib)/crtend_android.o',
+                ],
+              }],
+              ['_type=="shared_library" or _type=="loadable_module"', {
+                'ldflags!': [
+                  '-Wl,--exclude-libs=ALL',
+                ],
+                'ldflags': [
+                  '-Wl,-shared,-Bsymbolic',
+                ],
+                'conditions': [
+                  ['android_webview_build==0', {
+                    'ldflags': [
+                      # crtbegin_so.o should be the last item in ldflags.
+                      '<(android_ndk_lib)/crtbegin_so.o',
+                    ],
+                    'libraries': [
+                      # crtend_so.o needs to be the last item in libraries.
+                      # Do not add any libraries after this!
+                      '<(android_ndk_lib)/crtend_so.o',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          # Settings for building host targets using the system toolchain.
+          ['_toolset=="host"', {
+            'cflags!': [
+              # Due to issues in the Clang build system, using ASan on 32-bit
+              # binaries on an x86_64 host is problematic.
+              # TODO(eugenis): re-enable.
+              '-fsanitize=address',
+            ],
+            'ldflags!': [
+              '-fsanitize=address',
+              '-Wl,-z,noexecstack',
+              '-Wl,--gc-sections',
+              '-Wl,-O1',
+              '-Wl,--as-needed',
+              '-Wl,--warn-shared-textrel',
+              '-Wl,--fatal-warnings',
+            ],
+          }],
+          # Settings for building host targets on mac.
+          ['_toolset=="host" and host_os=="mac"', {
+            'ldflags!': [
+              '-Wl,-z,now',
+              '-Wl,-z,relro',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['OS=="solaris"', {
+      'cflags!': ['-fvisibility=hidden'],
+      'cflags_cc!': ['-fvisibility-inlines-hidden'],
+    }],
+    ['OS=="mac" or OS=="ios"', {
+      'target_defaults': {
+        'mac_bundle': 0,
+        'xcode_settings': {
+          'ALWAYS_SEARCH_USER_PATHS': 'NO',
+          # Don't link in libarclite_macosx.a, see http://crbug.com/156530.
+          'CLANG_LINK_OBJC_RUNTIME': 'NO',          # -fno-objc-link-runtime
+          'COPY_PHASE_STRIP': 'NO',
+          'GCC_C_LANGUAGE_STANDARD': 'c99',         # -std=c99
+          'GCC_CW_ASM_SYNTAX': 'NO',                # No -fasm-blocks
+          'GCC_ENABLE_CPP_EXCEPTIONS': 'NO',        # -fno-exceptions
+          'GCC_ENABLE_CPP_RTTI': 'NO',              # -fno-rtti
+          'GCC_ENABLE_PASCAL_STRINGS': 'NO',        # No -mpascal-strings
+          # GCC_INLINES_ARE_PRIVATE_EXTERN maps to -fvisibility-inlines-hidden
+          'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES',
+          'GCC_OBJC_CALL_CXX_CDTORS': 'YES',        # -fobjc-call-cxx-cdtors
+          'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES',      # -fvisibility=hidden
+          'GCC_THREADSAFE_STATICS': 'NO',           # -fno-threadsafe-statics
+          'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES',    # -Werror
+          'GCC_VERSION': '4.2',
+          'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES',  # -Wnewline-eof
+          'USE_HEADERMAP': 'NO',
+          'WARNING_CFLAGS': [
+            '-Wall',
+            '-Wendif-labels',
+            '-Wextra',
+            # Don't warn about unused function parameters.
+            '-Wno-unused-parameter',
+            # Don't warn about the "struct foo f = {0};" initialization
+            # pattern.
+            '-Wno-missing-field-initializers',
+          ],
+          'conditions': [
+            ['chromium_mac_pch', {'GCC_PRECOMPILE_PREFIX_HEADER': 'YES'},
+                                 {'GCC_PRECOMPILE_PREFIX_HEADER': 'NO'}
+            ],
+            # Note that the prebuilt Clang binaries should not be used for iOS
+            # development except for ASan builds.
+            ['clang==1', {
+              # gnu++11 instead of c++11 is needed because some code uses
+              # typeof() (a GNU extension).
+              # TODO(thakis): Eventually switch this to c++11 instead of
+              # gnu++11 (once typeof can be removed, which is blocked on c++11
+              # being available everywhere).
+              'CLANG_CXX_LANGUAGE_STANDARD': 'gnu++11',  # -std=gnu++11
+              # Warn if automatic synthesis is triggered with
+              # the -Wobjc-missing-property-synthesis flag.
+              'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'YES',
+              'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+              'WARNING_CFLAGS': [
+                # This warns on selectors from Cocoa headers (-length, -set).
+                # cfe-dev is currently discussing the merits of this warning.
+                # TODO(thakis): Reevaluate what to do with this, based on the
+                # cfe-dev discussion.
+                '-Wno-selector-type-mismatch',
+              ],
+              'conditions': [
+                ['clang_xcode==0', {
+                  'CC': '$(SOURCE_ROOT)/<(clang_dir)/clang',
+                  'LDPLUSPLUS': '$(SOURCE_ROOT)/<(clang_dir)/clang++',
+                }],
+              ],
+            }],
+            ['clang==1 and clang_xcode==0 and clang_use_chrome_plugins==1', {
+              'OTHER_CFLAGS': [
+                '<@(clang_chrome_plugins_flags)',
+              ],
+            }],
+            ['clang==1 and clang_xcode==0 and clang_load!=""', {
+              'OTHER_CFLAGS': [
+                '-Xclang', '-load', '-Xclang', '<(clang_load)',
+              ],
+            }],
+            ['clang==1 and clang_xcode==0 and clang_add_plugin!=""', {
+              'OTHER_CFLAGS': [
+                '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+              ],
+            }],
+            ['clang==1 and "<(GENERATOR)"=="ninja"', {
+              'OTHER_CFLAGS': [
+                # See http://crbug.com/110262
+                '-fcolor-diagnostics',
+              ],
+            }],
+            ['OS=="ios" and target_subarch!="arm32" and \
+              "<(GENERATOR)"=="xcode"', {
+              'OTHER_CFLAGS': [
+                # TODO(ios): when building Chrome for iOS on a 64-bit platform
+                # with Xcode, the -Wshorten-64-to-32 warning is automatically
+                # enabled. This causes failures when compiling protobuf code,
+                # so disable the warning. http://crbug.com/359107
+                '-Wno-shorten-64-to-32',
+              ],
+            }],
+          ],
+        },
+        'conditions': [
+          ['clang==1', {
+            'variables': {
+              'clang_dir': '../third_party/llvm-build/Release+Asserts/bin',
+            },
+          }],
+          ['asan==1', {
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fsanitize=address',
+                '-mllvm -asan-globals=0',  # http://crbug.com/352073
+                '-gline-tables-only',
+              ],
+            },
+          }],
+          ['asan_coverage!=0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-mllvm -asan-coverage=<(asan_coverage)',
+                ],
+              }],
+            ],
+          }],
+        ],
+        'target_conditions': [
+          ['_type!="static_library"', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-search_paths_first']},
+            'conditions': [
+              ['asan==1', {
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-fsanitize=address',
+                  ],
+                },
+              }],
+              ['mac_write_linker_maps==1', {
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-Wl,-map,>(_target_name).map',
+                  ],
+                },
+              }],
+            ],
+          }],
+          ['_mac_bundle', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
+            'target_conditions': [
+              ['_type=="executable"', {
+                'conditions': [
+                  ['asan==1', {
+                    'postbuilds': [
+                      {
+                        'variables': {
+                          # Define copy_asan_dylib_path in a variable ending in
+                          # _path so that gyp understands it's a path and
+                          # performs proper relativization during dict merging.
+                          'copy_asan_dylib_path':
+                            'mac/copy_asan_runtime_dylib.sh',
+                        },
+                        'postbuild_name': 'Copy ASan runtime dylib',
+                        'action': [
+                          '<(copy_asan_dylib_path)',
+                        ],
+                      },
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="mac" or OS=="ios"
+    ['OS=="mac"', {
+      'target_defaults': {
+        'defines': [
+          # Prevent Mac OS X AssertMacros.h from defining macros that collide
+          # with common names, like 'check', 'require', and 'verify'.
+          # (Included by system header. Also exists on iOS but not included.)
+          # http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h
+          '__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE=0',
+        ],
+        'variables': {
+          # These should end with %, but there seems to be a bug with % in
+          # variables that are intended to be set to different values in
+          # different targets, like these.
+          'mac_pie': 1,        # Most executables can be position-independent.
+          # Strip debugging symbols from the target.
+          'mac_strip': '<(mac_strip_release)',
+          'conditions': [
+            ['asan==1', {
+              'conditions': [
+                ['mac_want_real_dsym=="default"', {
+                  'mac_real_dsym': 1,
+                }, {
+                  'mac_real_dsym': '<(mac_want_real_dsym)'
+                }],
+              ],
+            }, {
+              'conditions': [
+                ['mac_want_real_dsym=="default"', {
+                  'mac_real_dsym': 0, # Fake .dSYMs are fine in most cases.
+                }, {
+                  'mac_real_dsym': '<(mac_want_real_dsym)'
+                }],
+              ],
+            }],
+          ],
+        },
+        'xcode_settings': {
+          'GCC_DYNAMIC_NO_PIC': 'NO',               # No -mdynamic-no-pic
+                                                    # (Equivalent to -fPIC)
+          # MACOSX_DEPLOYMENT_TARGET maps to -mmacosx-version-min
+          'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)',
+          # Keep pch files below xcodebuild/.
+          'SHARED_PRECOMPS_DIR': '$(CONFIGURATION_BUILD_DIR)/SharedPrecompiledHeaders',
+          'OTHER_CFLAGS': [
+            # Someday this can be replaced by a 'GCC_STRICT_ALIASING': 'NO'
+            # xcode_setting, but not until all downstream projects' mac bots are
+            # using xcode >= 4.6, because that's when the default value of the
+            # flag in the compiler switched.  Pre-4.6, the value 'NO' for that
+            # setting is a no-op as far as xcode is concerned, but the compiler
+            # behaves differently based on whether -fno-strict-aliasing is
+            # specified or not.
+            '-fno-strict-aliasing',  # See http://crbug.com/32204.
+          ],
+        },
+        'target_conditions': [
+          ['_type=="executable"', {
+            'postbuilds': [
+              {
+                # Arranges for data (heap) pages to be protected against
+                # code execution when running on Mac OS X 10.7 ("Lion"), and
+                # ensures that the position-independent executable (PIE) bit
+                # is set for ASLR when running on Mac OS X 10.5 ("Leopard").
+                'variables': {
+                  # Define change_mach_o_flags in a variable ending in _path
+                  # so that GYP understands it's a path and performs proper
+                  # relativization during dict merging.
+                  'change_mach_o_flags_path':
+                      'mac/change_mach_o_flags_from_xcode.sh',
+                  'change_mach_o_flags_options%': [
+                  ],
+                  'target_conditions': [
+                    ['mac_pie==0 or release_valgrind_build==1', {
+                      # Don't enable PIE if it's unwanted. It's unwanted if
+                      # the target specifies mac_pie=0 or if building for
+                      # Valgrind, because Valgrind doesn't understand slide.
+                      # See the similar mac_pie/release_valgrind_build check
+                      # below.
+                      'change_mach_o_flags_options': [
+                        '--no-pie',
+                      ],
+                    }],
+                  ],
+                },
+                'postbuild_name': 'Change Mach-O Flags',
+                'action': [
+                  '<(change_mach_o_flags_path)',
+                  '>@(change_mach_o_flags_options)',
+                ],
+              },
+            ],
+            'conditions': [
+              ['asan==1', {
+                'variables': {
+                 'asan_saves_file': 'asan.saves',
+                },
+                'xcode_settings': {
+                  'CHROMIUM_STRIP_SAVE_FILE': '<(asan_saves_file)',
+                },
+              }],
+            ],
+            'target_conditions': [
+              ['mac_pie==1 and release_valgrind_build==0', {
+                # Turn on position-independence (ASLR) for executables. When
+                # PIE is on for the Chrome executables, the framework will
+                # also be subject to ASLR.
+                # Don't do this when building for Valgrind, because Valgrind
+                # doesn't understand slide. TODO: Make Valgrind on Mac OS X
+                # understand slide, and get rid of the Valgrind check.
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-Wl,-pie',  # Position-independent executable (MH_PIE)
+                  ],
+                },
+              }],
+            ],
+          }],
+          ['(_type=="executable" or _type=="shared_library" or \
+             _type=="loadable_module") and mac_strip!=0', {
+            'target_conditions': [
+              ['mac_real_dsym == 1', {
+                # To get a real .dSYM bundle produced by dsymutil, set the
+                # debug information format to dwarf-with-dsym.  Since
+                # strip_from_xcode will not be used, set Xcode to do the
+                # stripping as well.
+                'configurations': {
+                  'Release_Base': {
+                    'xcode_settings': {
+                      'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+                      'DEPLOYMENT_POSTPROCESSING': 'YES',
+                      'STRIP_INSTALLED_PRODUCT': 'YES',
+                      'target_conditions': [
+                        ['_type=="shared_library" or _type=="loadable_module"', {
+                          # The Xcode default is to strip debugging symbols
+                          # only (-S).  Local symbols should be stripped as
+                          # well, which will be handled by -x.  Xcode will
+                          # continue to insert -S when stripping even when
+                          # additional flags are added with STRIPFLAGS.
+                          'STRIPFLAGS': '-x',
+                        }],  # _type=="shared_library" or _type=="loadable_module"
+                        ['_type=="executable"', {
+                          'conditions': [
+                            ['asan==1', {
+                              'STRIPFLAGS': '-s $(CHROMIUM_STRIP_SAVE_FILE)',
+                            }]
+                          ],
+                        }],  # _type=="executable" and asan==1
+                      ],  # target_conditions
+                    },  # xcode_settings
+                },  # configuration "Release_Base"
+                },  # configurations
+              }, {  # mac_real_dsym != 1
+                # To get a fast fake .dSYM bundle, use a post-build step to
+                # produce the .dSYM and strip the executable.  strip_from_xcode
+                # only operates in the Release configuration.
+                'postbuilds': [
+                  {
+                    'variables': {
+                      # Define strip_from_xcode in a variable ending in _path
+                      # so that gyp understands it's a path and performs proper
+                      # relativization during dict merging.
+                      'strip_from_xcode_path': 'mac/strip_from_xcode',
+                    },
+                    'postbuild_name': 'Strip If Needed',
+                    'action': ['<(strip_from_xcode_path)'],
+                  },
+                ],  # postbuilds
+              }],  # mac_real_dsym
+            ],  # target_conditions
+          }],  # (_type=="executable" or _type=="shared_library" or
+               #  _type=="loadable_module") and mac_strip!=0
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="mac"
+    ['OS=="ios"', {
+      'target_defaults': {
+        'xcode_settings' : {
+          # TODO(stuartmorgan): switch to c++0x (see TODOs in the clang
+          # section above).
+          'CLANG_CXX_LANGUAGE_STANDARD': 'gnu++0x',
+
+          'conditions': [
+            # Older Xcodes do not support -Wno-deprecated-register, so pass an
+            # additional flag to suppress the "unknown compiler option" error.
+            # Restrict this flag to builds that are either compiling with Xcode
+            # or compiling with Xcode's Clang.  This will allow Ninja builds to
+            # continue failing on unknown compiler options.
+            # TODO(rohitrao): This flag is temporary and should be removed as
+            # soon as the iOS bots are updated to use Xcode 5.1.
+            ['clang_xcode==1', {
+              'WARNING_CFLAGS': [
+                '-Wno-unknown-warning-option',
+              ],
+            }],
+
+            # Limit the valid architectures depending on "target_subarch".
+            # This needs to include not only the "arm" architectures but also
+            # the "x86" ones (they are used when building for the simulator).
+            ['target_subarch=="arm32"', {
+              'VALID_ARCHS': ['armv7', 'i386'],
+            }],
+            ['target_subarch=="arm64"', {
+              'VALID_ARCHS': ['arm64', 'x86_64'],
+            }],
+            ['target_subarch=="both"', {
+              'VALID_ARCHS': ['arm64', 'armv7', 'x86_64', 'i386'],
+            }],
+            ['use_system_libcxx==1', {
+              'target_conditions': [
+                # Only use libc++ when building the target for iOS, not when
+                # building tools for the host (OS X), since the Mac build
+                # targets the OS X 10.6 SDK, which does not support libc++.
+                ['_toolset=="target"', {
+                  'CLANG_CXX_LIBRARY': 'libc++',  # -stdlib=libc++
+                }]
+              ],
+            }, {
+              # The default for deployment target of 7.0+ is libc++, so force
+              # the old behavior unless libc++ is enabled.
+              'CLANG_CXX_LIBRARY': 'libstdc++',  # -stdlib=libstdc++
+            }],
+          ],
+        },
+        'target_conditions': [
+          ['_toolset=="host"', {
+            'xcode_settings': {
+              'SDKROOT': 'macosx<(mac_sdk)',  # -isysroot
+              'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)',
+              'VALID_ARCHS': [
+                'x86_64',
+              ],
+              'ARCHS': [
+                'x86_64',
+              ],
+            },
+          }],
+          ['_toolset=="target"', {
+            'xcode_settings': {
+              # This section should be for overriding host settings. But,
+              # since we can't negate the iphone deployment target above, we
+              # instead set it here for target only.
+              'IPHONEOS_DEPLOYMENT_TARGET': '<(ios_deployment_target)',
+              'ARCHS': ['$(ARCHS_STANDARD_INCLUDING_64_BIT)'],
+            },
+          }],
+          ['_type=="executable"', {
+            'configurations': {
+              'Release_Base': {
+                'xcode_settings': {
+                  'DEPLOYMENT_POSTPROCESSING': 'YES',
+                  'STRIP_INSTALLED_PRODUCT': 'YES',
+                },
+              },
+              'Debug_Base': {
+                'xcode_settings': {
+                  # Remove dSYM to reduce build time.
+                  'DEBUG_INFORMATION_FORMAT': 'dwarf',
+                },
+              },
+            },
+            'xcode_settings': {
+              'conditions': [
+                ['chromium_ios_signing', {
+                  # The iOS SDK wants everything built for device to be signed.
+                  'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+                }, {
+                  'CODE_SIGNING_REQUIRED': 'NO',
+                  'CODE_SIGN_IDENTITY[sdk=iphoneos*]': '',
+                }],
+              ],
+            },
+          }],
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="ios"
+    ['OS=="win"', {
+      'target_defaults': {
+        'defines': [
+          '_WIN32_WINNT=0x0602',
+          'WINVER=0x0602',
+          'WIN32',
+          '_WINDOWS',
+          'NOMINMAX',
+          'PSAPI_VERSION=1',
+          '_CRT_RAND_S',
+          'CERT_CHAIN_PARA_HAS_EXTRA_FIELDS',
+          'WIN32_LEAN_AND_MEAN',
+          '_ATL_NO_OPENGL',
+          # _HAS_EXCEPTIONS must match ExceptionHandling in msvs_settings.
+          '_HAS_EXCEPTIONS=0',
+        ],
+        'conditions': [
+          ['buildtype=="Official"', {
+              # In official builds, targets can self-select an optimization
+              # level by defining a variable named 'optimize', and setting it
+              # to one of
+              # - "size", optimizes for minimal code size - the default.
+              # - "speed", optimizes for speed over code size.
+              # - "max", whole program optimization and link-time code
+              #   generation. This is very expensive and should be used
+              #   sparingly.
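+              #
+              # For example, a target would opt in from its own .gyp file
+              # (hypothetical target name) with:
+              #   {
+              #     'target_name': 'my_hot_library',
+              #     'variables': { 'optimize': 'max' },
+              #     ...
+              #   }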
+              'variables': {
+                'optimize%': 'size',
+              },
+              'msvs_settings': {
+                'VCLinkerTool': {
+                  # Set /LTCG for the official builds.
+                  'LinkTimeCodeGeneration': '1',
+                },
+              },
+              'target_conditions': [
+                ['optimize=="size"', {
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 1, optimizeMinSpace, Minimize Size (/O1)
+                        'Optimization': '1',
+                        # 2, favorSize - Favor small code (/Os)
+                        'FavorSizeOrSpeed': '2',
+                      },
+                    },
+                  },
+                ],
+                ['optimize=="speed"', {
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 2, optimizeMaxSpeed, Maximize Speed (/O2)
+                        'Optimization': '2',
+                        # 1, favorSpeed - Favor fast code (/Ot)
+                        'FavorSizeOrSpeed': '1',
+                      },
+                    },
+                  },
+                ],
+                ['optimize=="max"', {
+                    # Disable Warning 4702 ("Unreachable code") for the WPO/PGO
+                    # builds. Probably anything that this would catch that
+                    # wouldn't be caught in a normal build isn't going to
+                    # actually be a bug, so the incremental value of C4702 for
+                    # PGO builds is likely very small.
+                    'msvs_disabled_warnings': [
+                      4702
+                    ],
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 2, optimizeMaxSpeed, Maximize Speed (/O2)
+                        'Optimization': '2',
+                        # 1, favorSpeed - Favor fast code (/Ot)
+                        'FavorSizeOrSpeed': '1',
+                        # This implies link time code generation.
+                        'WholeProgramOptimization': 'true',
+                      },
+                    },
+                  },
+                ],
+              ],
+            },
+          ],
+          ['secure_atl', {
+            'defines': [
+              '_SECURE_ATL',
+            ],
+          }],
+          ['msvs_express', {
+            'configurations': {
+              'x86_Base': {
+                'msvs_settings': {
+                  'VCLinkerTool': {
+                    'AdditionalLibraryDirectories':
+                      ['<(windows_driver_kit_path)/lib/ATL/i386'],
+                  },
+                  'VCLibrarianTool': {
+                    'AdditionalLibraryDirectories':
+                      ['<(windows_driver_kit_path)/lib/ATL/i386'],
+                  },
+                },
+              },
+              'x64_Base': {
+                'msvs_settings': {
+                  'VCLibrarianTool': {
+                    'AdditionalLibraryDirectories':
+                      ['<(windows_driver_kit_path)/lib/ATL/amd64'],
+                  },
+                  'VCLinkerTool': {
+                    'AdditionalLibraryDirectories':
+                      ['<(windows_driver_kit_path)/lib/ATL/amd64'],
+                  },
+                },
+              },
+            },
+            # https://code.google.com/p/chromium/issues/detail?id=372451#c20
+            # Warning 4702 ("Unreachable code") should be re-enabled once
+            # Express users are updated to VS2013 Update 2.
+            'msvs_disabled_warnings': [
+              4702
+            ],
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions!': [
+                    '/Zc:inline',  # Not supported on non-updated Express.
+                ],
+              },
+              'VCLinkerTool': {
+                # Explicitly required when using the ATL with express
+                'AdditionalDependencies': ['atlthunk.lib'],
+
+                # ATL 8.0 included in WDK 7.1 makes the linker generate
+                # almost eight hundred LNK4254 and LNK4078 warnings:
+                #   - warning LNK4254: section 'ATL' (50000040) merged into
+                #     '.rdata' (40000040) with different attributes
+                #   - warning LNK4078: multiple 'ATL' sections found with
+                #     different attributes
+                'AdditionalOptions': ['/ignore:4254', '/ignore:4078'],
+              },
+            },
+            'msvs_system_include_dirs': [
+              '<(windows_driver_kit_path)/inc/atl71',
+              '<(windows_driver_kit_path)/inc/mfc42',
+            ],
+          }],
+        ],
+        'msvs_system_include_dirs': [
+          '<(windows_sdk_path)/Include/shared',
+          '<(windows_sdk_path)/Include/um',
+          '<(windows_sdk_path)/Include/winrt',
+          '$(VSInstallDir)/VC/atlmfc/include',
+        ],
+        'msvs_cygwin_shell': 0,
+        'msvs_disabled_warnings': [
+          # C4127: conditional expression is constant
+          # This warning can in theory catch dead code and other problems, but
+          # triggers in far too many desirable cases where the conditional
+          # expression is either set by macros or corresponds to some legitimate
+          # compile-time constant expression (due to constant template args,
+          # conditionals comparing the sizes of different types, etc.).  Some of
+          # these can be worked around, but it's not worth it.
+          4127,
+
+          # C4351: new behavior: elements of array 'array' will be default
+          #        initialized
+          # This is a silly "warning" that basically just alerts you that the
+          # compiler is going to actually follow the language spec like it's
+          # supposed to, instead of not following it like old buggy versions
+          # did.  There's absolutely no reason to turn this on.
+          4351,
+
+          # C4355: 'this': used in base member initializer list
+          # It's commonly useful to pass |this| to objects in a class'
+          # initializer list.  While this warning can catch real bugs, most of
+          # the time the constructors in question don't attempt to call methods
+          # on the passed-in pointer (until later), and annotating every legit
+          # usage of this is simply more hassle than the warning is worth.
+          4355,
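+          # (For instance, the C++ pattern "Foo::Foo() : bar_(this) {}" is a
+          # legitimate use that trips C4355; names here are illustrative.)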
+
+          # C4503: 'identifier': decorated name length exceeded, name was
+          #        truncated
+          # This only means that some long error messages might have truncated
+          # identifiers in the presence of lots of templates.  It has no effect
+          # on program correctness and there's no real reason to waste time
+          # trying to prevent it.
+          4503,
+
+          # C4611: interaction between 'function' and C++ object destruction is
+          #        non-portable
+          # This warning is unavoidable when using e.g. setjmp/longjmp.  MSDN
+          # suggests using exceptions instead of setjmp/longjmp for C++, but
+          # Chromium code compiles without exception support.  We therefore have
+          # to use setjmp/longjmp for e.g. JPEG decode error handling, which
+          # means we have to turn off this warning (and be careful about how
+          # object destruction happens in such cases).
+          4611,
+
+          # TODO(maruel): These warnings are level 4. They will be slowly
+          # removed as code is fixed.
+          4100, # Unreferenced formal parameter
+          4121, # Alignment of a member was sensitive to packing
+          4189, # Local variable is initialized but not referenced
+          4244, # Conversion from 'type1' to 'type2', possible loss of data
+          4481, # Nonstandard extension used: override specifier 'keyword'
+          4505, # Unreferenced local function has been removed
+          4510, # Default constructor could not be generated
+          4512, # Assignment operator could not be generated
+          4610, # Object can never be instantiated
+        ],
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'AdditionalOptions': ['/MP'],
+            'MinimalRebuild': 'false',
+            'BufferSecurityCheck': 'true',
+            'EnableFunctionLevelLinking': 'true',
+            'RuntimeTypeInfo': 'false',
+            'WarningLevel': '4',
+            'WarnAsError': 'true',
+            'DebugInformationFormat': '3',
+            # ExceptionHandling must match _HAS_EXCEPTIONS above.
+            'ExceptionHandling': '0',
+          },
+          'VCLibrarianTool': {
+            'AdditionalOptions': ['/ignore:4221'],
+            'AdditionalLibraryDirectories': [
+              '<(windows_sdk_path)/Lib/win8/um/x86',
+            ],
+          },
+          'VCLinkerTool': {
+            'AdditionalDependencies': [
+              'wininet.lib',
+              'dnsapi.lib',
+              'version.lib',
+              'msimg32.lib',
+              'ws2_32.lib',
+              'usp10.lib',
+              'psapi.lib',
+              'dbghelp.lib',
+              'winmm.lib',
+              'shlwapi.lib',
+            ],
+            'AdditionalLibraryDirectories': [
+              '<(windows_sdk_path)/Lib/win8/um/x86',
+            ],
+            'GenerateDebugInformation': 'true',
+            'MapFileName': '$(OutDir)\\$(TargetName).map',
+            'ImportLibrary': '$(OutDir)\\lib\\$(TargetName).lib',
+            'FixedBaseAddress': '1',
+            # SubSystem values:
+            #   0 == not set
+            #   1 == /SUBSYSTEM:CONSOLE
+            #   2 == /SUBSYSTEM:WINDOWS
+            # Most of the executables we'll ever create are tests
+            # and utilities with console output.
+            'SubSystem': '1',
+          },
+          'VCMIDLTool': {
+            'GenerateStublessProxies': 'true',
+            'TypeLibraryName': '$(InputName).tlb',
+            'OutputDirectory': '$(IntDir)',
+            'HeaderFileName': '$(InputName).h',
+            'DLLDataFileName': '$(InputName).dlldata.c',
+            'InterfaceIdentifierFileName': '$(InputName)_i.c',
+            'ProxyFileName': '$(InputName)_p.c',
+          },
+          'VCResourceCompilerTool': {
+            'Culture' : '1033',
+            'AdditionalIncludeDirectories': [
+              '<(DEPTH)',
+              '<(SHARED_INTERMEDIATE_DIR)',
+            ],
+          },
+          'target_conditions': [
+            ['_type=="executable"', {
+              'VCManifestTool': {
+                'EmbedManifest': 'true',
+              },
+            }],
+            ['_type=="executable" and ">(win_exe_compatibility_manifest)"!=""', {
+              'VCManifestTool': {
+                'AdditionalManifestFiles': [
+                  '>(win_exe_compatibility_manifest)',
+                ],
+              },
+            }],
+          ],
+          'conditions': [
+            ['clang==1', {
+              # Building with Clang on Windows is a work in progress and very
+              # experimental. See crbug.com/82385.
+              'VCCLCompilerTool': {
+                'WarnAsError': 'false',
+                'RuntimeTypeInfo': 'false',
+                'AdditionalOptions': [
+                  '-fmsc-version=1800',
+                  '/fallback',
+
+                  # Many files use intrinsics without including this header.
+                  # TODO(hans): Fix those files, or move this to sub-GYPs.
+                  '/FIIntrin.h',
+
+                  # TODO(hans): Make this list shorter eventually.
+                  '-Qunused-arguments',
+                  '-Wno-c++11-compat-deprecated-writable-strings',
+                  '-Wno-deprecated-declarations',
+                  '-Wno-empty-body',
+                  '-Wno-enum-conversion',
+                  '-Wno-extra-tokens',
+                  '-Wno-ignored-attributes',
+                  '-Wno-incompatible-pointer-types',
+                  '-Wno-int-to-void-pointer-cast',
+                  '-Wno-invalid-noreturn',
+                  '-Wno-logical-op-parentheses',
+                  '-Wno-microsoft',
+                  '-Wno-missing-braces',
+                  '-Wno-missing-declarations',
+                  '-Wno-msvc-include',
+                  '-Wno-null-dereference',
+                  '-Wno-overloaded-virtual',
+                  '-Wno-parentheses',
+                  '-Wno-pointer-sign',
+                  '-Wno-reorder',
+                  '-Wno-return-type-c-linkage',
+                  '-Wno-self-assign',
+                  '-Wno-sometimes-uninitialized',
+                  '-Wno-switch',
+                  '-Wno-tautological-compare',
+                  '-Wno-unknown-pragmas',
+                  '-Wno-unsequenced',
+                  '-Wno-unused-function',
+                  '-Wno-unused-private-field',
+                  '-Wno-unused-value',
+                  '-Wno-unused-variable',
+                  '-Wno-unused-local-typedef',  # http://crbug.com/411648
+                ],
+              },
+            }],
+            ['asan==1', {
+              # ASan on Windows is a work in progress and very experimental.
+              # See crbug.com/345874.
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '-fsanitize=address',
+                ],
+                'AdditionalIncludeDirectories': [
+                  # MSVC needs to be able to find the sanitizer headers when
+                  # invoked via /fallback. This is critical for using macros
+                  # like ASAN_UNPOISON_MEMORY_REGION in files where we fall
+                  # back.
+                  '<(DEPTH)/<(make_clang_dir)/lib/clang/3.6.0/include_sanitizer',
+                ],
+              },
+              'VCLinkerTool': {
+                'AdditionalLibraryDirectories': [
+                  # TODO(hans): If make_clang_dir is absolute, this breaks.
+                  '<(DEPTH)/<(make_clang_dir)/lib/clang/3.6.0/lib/windows',
+                ],
+              },
+              'target_conditions': [
+                ['component=="shared_library"', {
+                  'VCLinkerTool': {
+                    'AdditionalDependencies': [
+                       'clang_rt.asan_dynamic-i386.lib',
+                       'clang_rt.asan_dynamic_runtime_thunk-i386.lib',
+                    ],
+                  },
+                }],
+                ['_type=="executable" and component=="static_library"', {
+                  'VCLinkerTool': {
+                    'AdditionalDependencies': [
+                       'clang_rt.asan-i386.lib',
+                    ],
+                  },
+                }],
+                ['(_type=="shared_library" or _type=="loadable_module") and component=="static_library"', {
+                  'VCLinkerTool': {
+                    'AdditionalDependencies': [
+                       'clang_rt.asan_dll_thunk-i386.lib',
+                    ],
+                  },
+                }],
+              ],
+            }],
+          ],
+        },
+      },
+    }],
+    ['disable_nacl==1', {
+      'target_defaults': {
+        'defines': [
+          'DISABLE_NACL',
+        ],
+      },
+    }],
+    ['OS=="win" and msvs_use_common_linker_extras', {
+      'target_defaults': {
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'DelayLoadDLLs': [
+              'dbghelp.dll',
+              'dwmapi.dll',
+              'shell32.dll',
+              'uxtheme.dll',
+            ],
+          },
+        },
+        'configurations': {
+          'x86_Base': {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'AdditionalOptions': [
+                  '/safeseh',
+                  '/dynamicbase',
+                  '/ignore:4199',
+                  '/ignore:4221',
+                  '/nxcompat',
+                ],
+                'conditions': [
+                  ['syzyasan==0', {
+                    'AdditionalOptions': ['/largeaddressaware'],
+                  }],
+                ],
+              },
+            },
+          },
+          'x64_Base': {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'AdditionalOptions': [
+                  # safeseh is not compatible with x64
+                  '/dynamicbase',
+                  '/ignore:4199',
+                  '/ignore:4221',
+                  '/nxcompat',
+                ],
+              },
+            },
+          },
+        },
+      },
+    }],
+    ['enable_new_npdevice_api==1', {
+      'target_defaults': {
+        'defines': [
+          'ENABLE_NEW_NPDEVICE_API',
+        ],
+      },
+    }],
+    # Don't warn about the "typedef 'foo' locally defined but not used"
+    # for gcc 4.8 and higher.
+    # TODO: remove this flag once all builds work. See crbug.com/227506
+    ['gcc_version>=48 and clang==0', {
+      'target_defaults': {
+        'cflags': [
+          '-Wno-unused-local-typedefs',
+        ],
+      },
+    }],
+    ['gcc_version>=48 and clang==0 and host_clang==1', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="host"', { 'cflags!': [ '-Wno-unused-local-typedefs' ]}],
+        ],
+      },
+    }],
+    # We need a special case to handle the android webview build on mac because
+    # the host gcc there doesn't accept this flag, but the target gcc may
+    # require it.
+    ['gcc_version>=48 and android_webview_build==1 and host_os=="mac"', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="host"', {
+            'cflags!': [
+              '-Wno-unused-local-typedefs',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['clang==1 and ((OS!="mac" and OS!="ios") or clang_xcode==0) '
+        'and OS!="win"', {
+      'make_global_settings': [
+        ['CC', '<(make_clang_dir)/bin/clang'],
+        ['CXX', '<(make_clang_dir)/bin/clang++'],
+        ['CC.host', '$(CC)'],
+        ['CXX.host', '$(CXX)'],
+      ],
+    }],
+    ['clang==1 and OS=="win"', {
+      'make_global_settings': [
+        # On Windows, gyp's ninja generator only looks at CC.
+        ['CC', '<(make_clang_dir)/bin/clang-cl'],
+      ],
+    }],
+    ['OS=="android" and clang==0', {
+      # Hardcode the compiler names in the Makefile so that
+      # it won't depend on the environment at make time.
+      'make_global_settings': [
+        ['CC', '<!(/bin/echo -n <(android_toolchain)/*-gcc)'],
+        ['CXX', '<!(/bin/echo -n <(android_toolchain)/*-g++)'],
+        ['CC.host', '<(host_cc)'],
+        ['CXX.host', '<(host_cxx)'],
+      ],
+    }],
+    ['OS=="linux" and target_arch=="mipsel"', {
+      'make_global_settings': [
+        ['CC', '<(sysroot)/../bin/mipsel-linux-gnu-gcc'],
+        ['CXX', '<(sysroot)/../bin/mipsel-linux-gnu-g++'],
+        ['CC.host', '<(host_cc)'],
+        ['CXX.host', '<(host_cxx)'],
+      ],
+    }],
+    ['OS=="linux" and target_arch=="arm" and host_arch!="arm" and chromeos==0 and clang==0', {
+      # Set default ARM cross compiling on linux.  These can be overridden
+      # using CC/CXX/etc environment variables.
+      'make_global_settings': [
+        ['CC', '<!(which arm-linux-gnueabihf-gcc)'],
+        ['CXX', '<!(which arm-linux-gnueabihf-g++)'],
+        ['CC.host', '<(host_cc)'],
+        ['CXX.host', '<(host_cxx)'],
+      ],
+    }],
+
+    # TODO(yyanagisawa): support GENERATOR==make.
+    #  The make generator doesn't support CC_wrapper without CC
+    #  in make_global_settings yet.
+    ['use_goma==1 and ("<(GENERATOR)"=="ninja" or clang==1)', {
+      'make_global_settings': [
+       ['CC_wrapper', '<(gomadir)/gomacc'],
+       ['CXX_wrapper', '<(gomadir)/gomacc'],
+       ['CC.host_wrapper', '<(gomadir)/gomacc'],
+       ['CXX.host_wrapper', '<(gomadir)/gomacc'],
+      ],
+    }],
+  ],
+  'xcode_settings': {
+    # DON'T ADD ANYTHING NEW TO THIS BLOCK UNLESS YOU REALLY REALLY NEED IT!
+    # This block adds *project-wide* configuration settings to each project
+    # file.  It's almost always wrong to put things here.  Specify your
+    # custom xcode_settings in target_defaults to add them to targets instead.
+
+    'conditions': [
+      # In an Xcode Project Info window, the "Base SDK for All Configurations"
+      # setting sets the SDK on a project-wide basis. In order to get the
+      # configured SDK to show properly in the Xcode UI, SDKROOT must be set
+      # here at the project level.
+      ['OS=="mac"', {
+        'conditions': [
+          ['mac_sdk_path==""', {
+            'SDKROOT': 'macosx<(mac_sdk)',  # -isysroot
+          }, {
+            'SDKROOT': '<(mac_sdk_path)',  # -isysroot
+          }],
+        ],
+      }],
+      ['OS=="ios"', {
+        'conditions': [
+          ['ios_sdk_path==""', {
+            'conditions': [
+              # TODO(justincohen): Ninja only supports simulator for now.
+              ['"<(GENERATOR)"=="xcode"', {
+                'SDKROOT': 'iphoneos<(ios_sdk)',  # -isysroot
+              }, {
+                'SDKROOT': 'iphonesimulator<(ios_sdk)',  # -isysroot
+              }],
+            ],
+          }, {
+            'SDKROOT': '<(ios_sdk_path)',  # -isysroot
+          }],
+        ],
+      }],
+      ['OS=="ios"', {
+        # Target both iPhone and iPad.
+        'TARGETED_DEVICE_FAMILY': '1,2',
+      }, {  # OS!="ios"
+        'conditions': [
+          ['target_arch=="x64"', {
+            'ARCHS': [
+              'x86_64'
+            ],
+          }],
+          ['target_arch=="ia32"', {
+            'ARCHS': [
+              'i386'
+            ],
+          }],
+        ],
+      }],
+    ],
+
+    # The Xcode generator will look for an xcode_settings section at the root
+    # of each dict and use it to apply settings on a file-wide basis.  Most
+    # settings should not be here; they should be in target-specific
+    # xcode_settings sections, or better yet, should use non-Xcode-specific
+    # settings in target dicts.  SYMROOT is a special case, because many other
+    # Xcode variables depend on it, including variables such as
+    # PROJECT_DERIVED_FILE_DIR.  When a source group corresponding to something
+    # like PROJECT_DERIVED_FILE_DIR is added to a project, in order for the
+    # files to appear (when present) in the UI as actual files and not as
+    # red "missing file" proxies, the correct path to PROJECT_DERIVED_FILE_DIR,
+    # and therefore SYMROOT, needs to be set at the project level.
+    'SYMROOT': '<(DEPTH)/xcodebuild',
+  },
+}
diff --git a/build/common_untrusted.gypi b/build/common_untrusted.gypi
new file mode 100644
index 0000000..bcc3686
--- /dev/null
+++ b/build/common_untrusted.gypi
@@ -0,0 +1,40 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This GYP file should be included for every target in Chromium that is built
+# using the NaCl toolchain.
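+#
+# For example, a NaCl target would pull this in with (path relative to the
+# including .gyp file; illustrative):
+#
+#   'includes': [ '../build/common_untrusted.gypi' ],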
+{
+  'includes': [
+    '../native_client/build/untrusted.gypi',
+  ],
+  'target_defaults': {
+    'conditions': [
+      # TODO(bradnelson): Drop this once the nacl side does the same.
+      ['target_arch=="x64"', {
+        'variables': {
+          'enable_x86_32': 0,
+        },
+      }],
+      ['target_arch=="ia32" and OS!="win"', {
+        'variables': {
+          'enable_x86_64': 0,
+        },
+      }],
+      ['target_arch=="arm"', {
+        'variables': {
+          'clang': 1,
+        },
+        'defines': [
+          # Needed by build/build_config.h processor architecture detection.
+          '__ARMEL__',
+          # Needed by base/third_party/nspr/prtime.cc.
+          '__arm__',
+          # Disable Valgrind. The assembly code it generates causes the build
+          # to fail.
+          'NVALGRIND',
+        ],
+      }],
+    ],
+  },
+}
diff --git a/build/compiled_action.gni b/build/compiled_action.gni
new file mode 100644
index 0000000..b750af0
--- /dev/null
+++ b/build/compiled_action.gni
@@ -0,0 +1,171 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file introduces two related templates that act like action and
+# action_foreach but, instead of running a Python script, they compile a
+# given tool in the host toolchain and run that (either once or over the list
+# of inputs, depending on the variant).
+#
+# Parameters
+#
+#   tool (required)
+#       [label] Label of the tool to run. This should be an executable, and
+#       this label should not include a toolchain (anything in parens). The
+#       host compile of this tool will be used.
+#
+#   outputs (required)
+#       [list of files] Like the outputs of action (if using "compiled_action",
+#       this would be just the list of outputs), or action_foreach (if using
+#       "compiled_action_foreach", this would contain source expansions mapping
+#       input to output files).
+#
+#   args (required)
+#       [list of strings] Same meaning as action/action_foreach.
+#
+#   inputs (optional)
+#       Files the binary takes as input. The step will be re-run whenever any
+#       of these change. If inputs is empty, the step will run only when the
+#       binary itself changes.
+#
+#   visibility
+#   deps   (both optional)
+#       Same meaning as action/action_foreach.
+#
+#
+# Example of usage:
+#
+#   compiled_action("run_my_tool") {
+#     tool = "//tools/something:mytool"
+#     outputs = [
+#       "$target_gen_dir/mysource.cc",
+#       "$target_gen_dir/mysource.h",
+#     ]
+#
+#     # The tool takes this input.
+#     inputs = [ "my_input_file.idl" ]
+#
+#     # In this case, the tool takes as arguments the input file and the output
+#     # build dir (both relative to the "cd" that the script will be run in)
+#     # and will produce the output files listed above.
+#     args = [
+#       rebase_path("my_input_file.idl", root_build_dir),
+#       "--output-dir", rebase_path(target_gen_dir, root_build_dir),
+#     ]
+#   }
+#
+# You would typically declare your tool like this:
+#   if (host_toolchain == current_toolchain) {
+#     executable("mytool") {
+#       ...
+#     }
+#   }
+# The if statement around the executable is optional. It says "I only care
+# about this target in the host toolchain". Usually this is what you want,
+# and it avoids unnecessarily compiling your tool for the target platform.
+# But if you need a target build of your tool as well, just leave off the if
+# statement.
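+#
+# For completeness, a compiled_action_foreach use might look like this
+# (target and file names are illustrative, not real ones):
+#
+#   compiled_action_foreach("run_my_tool_on_idl") {
+#     tool = "//tools/something:mytool"
+#     sources = [ "a.idl", "b.idl" ]
+#     outputs = [ "$target_gen_dir/{{source_name_part}}.cc" ]
+#     args = [
+#       "{{source}}",
+#       "--output-dir", rebase_path(target_gen_dir, root_build_dir),
+#     ]
+#   }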
+
+if (build_os == "win") {
+  _host_executable_suffix = ".exe"
+} else {
+  _host_executable_suffix = ""
+}
+
+template("compiled_action") {
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  assert(!defined(invoker.sources),
+         "compiled_action doesn't take a sources arg. Use inputs instead.")
+
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/gn_run_binary.py"
+
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs
+    } else {
+      inputs = []
+    }
+    outputs = invoker.outputs
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
+    host_executable = get_label_info(host_tool, "root_out_dir") + "/" +
+      get_label_info(host_tool, "name") + _host_executable_suffix
+
+    # Add the executable itself as an input.
+    inputs += [ host_executable ]
+
+    deps = [ host_tool ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [
+      rebase_path(host_executable, root_build_dir)
+    ] + invoker.args
+  }
+}
+
+template("compiled_action_foreach") {
+  assert(defined(invoker.sources), "sources must be defined for $target_name")
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  action_foreach(target_name) {
+    # Forward the invoker's visibility if requested.
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/gn_run_binary.py"
+    sources = invoker.sources
+
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs
+    } else {
+      inputs = []
+    }
+    outputs = invoker.outputs
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
+    host_executable = get_label_info(host_tool, "root_out_dir") + "/" +
+      get_label_info(host_tool, "name") + _host_executable_suffix
+
+    # Add the executable itself as an input.
+    inputs += [ host_executable ]
+
+    deps = [ host_tool ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [
+      rebase_path(host_executable, root_build_dir)
+    ] + invoker.args
+  }
+}
diff --git a/build/compiler_version.py b/build/compiler_version.py
new file mode 100755
index 0000000..05faf54
--- /dev/null
+++ b/build/compiler_version.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compiler version checking tool for gcc
+
+Print gcc version as XY if you are running gcc X.Y.*.
+This is used to tweak build flags for gcc 4.4.
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+
+compiler_version_cache = {}  # Map from (compiler, tool) -> version.
+
+
+def Usage(program_name):
+  print '%s MODE TOOL' % os.path.basename(program_name)
+  print 'MODE: host or target.'
+  print 'TOOL: assembler or compiler or linker.'
+  return 1
+
+
+def ParseArgs(args):
+  if len(args) != 2:
+    raise Exception('Invalid number of arguments')
+  mode = args[0]
+  tool = args[1]
+  if mode not in ('host', 'target'):
+    raise Exception('Invalid mode: %s' % mode)
+  if tool not in ('assembler', 'compiler', 'linker'):
+    raise Exception('Invalid tool: %s' % tool)
+  return mode, tool
+
+
+def GetEnvironFallback(var_list, default):
+  """Look up an environment variable from a possible list of variable names."""
+  for var in var_list:
+    if var in os.environ:
+      return os.environ[var]
+  return default
+
+
+def GetVersion(compiler, tool):
+  tool_output = tool_error = None
+  cache_key = (compiler, tool)
+  cached_version = compiler_version_cache.get(cache_key)
+  if cached_version:
+    return cached_version
+  try:
+    # Note that compiler could be something tricky like "distcc g++".
+    if tool == "compiler":
+      compiler = compiler + " -dumpversion"
+      # 4.6
+      version_re = re.compile(r"(\d+)\.(\d+)")
+    elif tool == "assembler":
+      compiler = compiler + " -Xassembler --version -x assembler -c /dev/null"
+      # Unmodified: GNU assembler (GNU Binutils) 2.24
+      # Ubuntu: GNU assembler (GNU Binutils for Ubuntu) 2.22
+      # Fedora: GNU assembler version 2.23.2
+      version_re = re.compile(r"^GNU [^ ]+ .* (\d+).(\d+).*?$", re.M)
+    elif tool == "linker":
+      compiler = compiler + " -Xlinker --version"
+      # Using BFD linker
+      # Unmodified: GNU ld (GNU Binutils) 2.24
+      # Ubuntu: GNU ld (GNU Binutils for Ubuntu) 2.22
+      # Fedora: GNU ld version 2.23.2
+      # Using Gold linker
+      # Unmodified: GNU gold (GNU Binutils 2.24) 1.11
+      # Ubuntu: GNU gold (GNU Binutils for Ubuntu 2.22) 1.11
+      # Fedora: GNU gold (version 2.23.2) 1.11
+      version_re = re.compile(r"^GNU [^ ]+ .* (\d+).(\d+).*?$", re.M)
+    else:
+      raise Exception("Unknown tool %s" % tool)
+
+    # Force the locale to C otherwise the version string could be localized
+    # making regex matching fail.
+    env = os.environ.copy()
+    env["LC_ALL"] = "C"
+    pipe = subprocess.Popen(compiler, shell=True, env=env,
+                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    tool_output, tool_error = pipe.communicate()
+    if pipe.returncode:
+      raise subprocess.CalledProcessError(pipe.returncode, compiler)
+
+    parsed_output = version_re.match(tool_output)
+    result = parsed_output.group(1) + parsed_output.group(2)
+    compiler_version_cache[cache_key] = result
+    return result
+  except Exception, e:
+    if tool_error:
+      sys.stderr.write(tool_error)
+    print >> sys.stderr, "compiler_version.py failed to execute:", compiler
+    print >> sys.stderr, e
+    return ""
+
+
+def main(args):
+  try:
+    (mode, tool) = ParseArgs(args[1:])
+  except Exception, e:
+    sys.stderr.write(e.message + '\n\n')
+    return Usage(args[0])
+
+  ret_code, result = ExtractVersion(mode, tool)
+  if ret_code == 0:
+    print result
+  return ret_code
+
+
+def DoMain(args):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  (mode, tool) = ParseArgs(args)
+  ret_code, result = ExtractVersion(mode, tool)
+  if ret_code == 0:
+    return result
+  raise Exception("Failed to extract compiler version for args: %s" % args)
+
+
+def ExtractVersion(mode, tool):
+  # Check if various CXX environment variables exist and use them if they
+  # exist. The preference and fallback order closely approximates
+  # GenerateOutputForConfig() in GYP's ninja generator; the main difference is
+  # that GYP's make_global_settings is not supported.
+  environments = ['CXX_target', 'CXX']
+  if mode == 'host':
+    environments = ['CXX_host'] + environments
+  compiler = GetEnvironFallback(environments, 'c++')
+
+  if compiler:
+    compiler_version = GetVersion(compiler, tool)
+    if compiler_version != "":
+      return (0, compiler_version)
+  return (1, None)
+
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv))
diff --git a/build/config/BUILD.gn b/build/config/BUILD.gn
new file mode 100644
index 0000000..45b61cf
--- /dev/null
+++ b/build/config/BUILD.gn
@@ -0,0 +1,297 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/allocator.gni")
+import("//build/config/crypto.gni")
+import("//build/config/features.gni")
+import("//build/config/ui.gni")
+
+declare_args() {
+  # When set, turns off the iterator debugging and related checks that are
+  # normally enabled in Debug builds. These are generally useful for catching
+  # bugs but in some cases may cause conflicts or excessive slowness.
+  disable_iterator_debugging = false
+
+  # Set to true to not store any build metadata (this isn't working yet but
+  # this flag will help us to get there). See http://crbug.com/314403.
+  # TODO(sebmarchand): Update this comment once this flag guarantees that
+  #     there's no build metadata in the build artifacts.
+  dont_embed_build_metadata = false
+}
+
+# TODO(brettw) Most of these should be removed. Instead of global feature
+# flags, we should have more modular flags that apply only to a target and its
+# dependents. For example, depending on the "x11" meta-target should define
+# USE_X11 for all dependents so that everything that could use X11 gets the
+# define, but anything that doesn't depend on X11 doesn't see it.
+#
+# For now we define these globally to match the current GYP build.
+config("feature_flags") {
+  # TODO(brettw) most of these need to be parameterized.
+  defines = [
+      "CHROMIUM_BUILD",
+      "ENABLE_EGLIMAGE=1",
+      "ENABLE_BACKGROUND=1",
+      "V8_DEPRECATION_WARNINGS",  # Don't use deprecated V8 APIs anywhere.
+  ]
+
+  if (cld_version > 0) {
+    defines += [ "CLD_VERSION=$cld_version" ]
+  }
+  if (enable_mdns) {
+    defines += [ "ENABLE_MDNS=1" ]
+  }
+  if (enable_notifications) {
+    defines += [ "ENABLE_NOTIFICATIONS" ]
+  }
+  if (enable_pepper_cdms) {
+    # TODO(brettw) should probably be "=1"
+    defines += [ "ENABLE_PEPPER_CDMS" ]
+  }
+  if (enable_browser_cdms) {
+    # TODO(brettw) should probably be "=1"
+    defines += [ "ENABLE_BROWSER_CDMS" ]
+  }
+  if (enable_plugins) {
+    defines += [ "ENABLE_PLUGINS=1" ]
+  }
+  if (printing_mode > 0) {
+    defines += [ "ENABLE_PRINTING=1" ]
+    if (printing_mode < 2) {
+      defines += [ "ENABLE_FULL_PRINTING=1" ]
+    }
+  }
+  if (enable_spellcheck) {
+    defines += [ "ENABLE_SPELLCHECK=1" ]
+  }
+  if (dont_embed_build_metadata) {
+    defines += [ "DONT_EMBED_BUILD_METADATA" ]
+  }
+  if (use_udev) {
+    # TODO(brettw) should probably be "=1".
+    defines += [ "USE_UDEV" ]
+  }
+  if (toolkit_views) {
+    defines += [ "TOOLKIT_VIEWS=1" ]
+  }
+  if (ui_compositor_image_transport) {
+    # TODO(brettw) should probably be "=1".
+    defines += [ "UI_COMPOSITOR_IMAGE_TRANSPORT" ]
+  }
+  if (use_ash) {
+    defines += [ "USE_ASH=1" ]
+  }
+  if (use_aura) {
+    defines += [ "USE_AURA=1" ]
+  }
+  if (use_cairo) {
+    defines += [ "USE_CAIRO=1" ]
+  }
+  if (use_clipboard_aurax11) {
+    defines += [ "USE_CLIPBOARD_AURAX11=1" ]
+  }
+  if (use_default_render_theme) {
+    defines += [ "USE_DEFAULT_RENDER_THEME=1" ]
+  }
+  if (use_glib) {
+    defines += [ "USE_GLIB=1" ]
+  }
+  if (use_openssl) {
+    defines += [ "USE_OPENSSL=1" ]
+    if (use_openssl_certs) {
+      defines += [ "USE_OPENSSL_CERTS=1" ]
+    }
+  } else if (use_nss_certs) {
+    # USE_NSS really means "use nss for certificate validation and storage"
+    # (like USE_OPENSSL_CERTS) and not "we're linking to NSS." It might be nice
+    # to rename this but we're hoping to transition away from NSS.
+    defines += [ "USE_NSS=1" ]
+  }
+  if (use_ozone) {
+    defines += [ "USE_OZONE=1" ]
+  }
+  if (use_x11) {
+    defines += [ "USE_X11=1" ]
+    if (use_xi2_mt > 0) {
+      defines += [ "USE_XI2_MT=$use_xi2_mt" ]
+    }
+  }
+  if (use_allocator != "tcmalloc") {
+    defines += [ "NO_TCMALLOC" ]
+  }
+  if (enable_webrtc) {
+    defines += [ "ENABLE_WEBRTC=1" ]
+  }
+  if (disable_ftp_support) {
+    defines += [ "DISABLE_FTP_SUPPORT=1" ]
+  }
+  if (!enable_nacl) {
+    defines += [ "DISABLE_NACL" ]
+  }
+  if (enable_extensions) {
+    defines += [ "ENABLE_EXTENSIONS=1" ]
+  }
+  if (enable_configuration_policy) {
+    defines += [ "ENABLE_CONFIGURATION_POLICY" ]
+  }
+  if (enable_task_manager) {
+    defines += [ "ENABLE_TASK_MANAGER=1" ]
+  }
+  if (enable_themes) {
+    defines += [ "ENABLE_THEMES=1" ]
+  }
+  if (enable_captive_portal_detection) {
+    defines += [ "ENABLE_CAPTIVE_PORTAL_DETECTION=1" ]
+  }
+  if (enable_session_service) {
+    defines += [ "ENABLE_SESSION_SERVICE=1" ]
+  }
+  if (enable_rlz) {
+    defines += [ "ENABLE_RLZ" ]
+  }
+  if (enable_plugin_installation) {
+    defines += [ "ENABLE_PLUGIN_INSTALLATION=1" ]
+  }
+  if (enable_app_list) {
+    defines += [ "ENABLE_APP_LIST=1" ]
+  }
+  if (enable_settings_app) {
+    defines += [ "ENABLE_SETTINGS_APP=1" ]
+  }
+  if (enable_managed_users) {
+    defines += [ "ENABLE_MANAGED_USERS=1" ]
+  }
+  if (enable_service_discovery) {
+    defines += [ "ENABLE_SERVICE_DISCOVERY=1" ]
+  }
+  if (enable_autofill_dialog) {
+    defines += [ "ENABLE_AUTOFILL_DIALOG=1" ]
+  }
+  if (enable_wifi_bootstrapping) {
+    defines += [ "ENABLE_WIFI_BOOTSTRAPPING=1" ]
+  }
+  if (enable_image_loader_extension) {
+    defines += [ "IMAGE_LOADER_EXTENSION=1" ]
+  }
+  if (enable_remoting) {
+    defines += [ "ENABLE_REMOTING=1" ]
+  }
+  if (enable_google_now) {
+    defines += [ "ENABLE_GOOGLE_NOW=1" ]
+  }
+  if (enable_one_click_signin) {
+    defines += [ "ENABLE_ONE_CLICK_SIGNIN" ]
+  }
+  if (use_athena) {
+    defines += [ "USE_ATHENA=1" ]
+  }
+  if (enable_hidpi) {
+    defines += [ "ENABLE_HIDPI=1" ]
+  }
+}
+
+# Debug/release ----------------------------------------------------------------
+
+config("debug") {
+  defines = [
+    "_DEBUG",
+    "DYNAMIC_ANNOTATIONS_ENABLED=1",
+    "WTF_USE_DYNAMIC_ANNOTATIONS=1",
+  ]
+
+  if (is_win) {
+    if (disable_iterator_debugging) {
+      # Iterator debugging is enabled by the compiler on debug builds, and we
+      # have to tell it to turn it off.
+      defines += [ "_HAS_ITERATOR_DEBUGGING=0" ]
+    }
+  } else if (is_linux && !is_android && cpu_arch == "x64" &&
+             !disable_iterator_debugging) {
+    # Enable libstdc++ debugging facilities to help catch problems early, see
+    # http://crbug.com/65151 .
+    # TODO(phajdan.jr): Should we enable this for all of POSIX?
+    defines += [ "_GLIBCXX_DEBUG=1" ]
+  }
+}
+
+config("release") {
+  defines = [
+    "NDEBUG",
+  ]
+}
+
+# Default libraries ------------------------------------------------------------
+
+# This config defines the default libraries applied to all targets.
+config("default_libs") {
+  if (is_win) {
+    # TODO(brettw) this list of defaults should probably be smaller, and
+    # instead the targets that use the less common ones (e.g. wininet or
+    # winspool) should include those explicitly.
+    libs = [
+      "advapi32.lib",
+      "comdlg32.lib",
+      "dbghelp.lib",
+      "delayimp.lib",
+      "dnsapi.lib",
+      "gdi32.lib",
+      "kernel32.lib",
+      "msimg32.lib",
+      "odbc32.lib",
+      "odbccp32.lib",
+      "ole32.lib",
+      "oleaut32.lib",
+      "psapi.lib",
+      "shell32.lib",
+      "shlwapi.lib",
+      "user32.lib",
+      "usp10.lib",
+      "uuid.lib",
+      "version.lib",
+      "wininet.lib",
+      "winmm.lib",
+      "winspool.lib",
+      "ws2_32.lib",
+      # Please don't add more stuff here. We should actually be making this
+      # list smaller, since all common things should be covered. If you need
+      # some extra libraries, please just add a libs = [ "foo.lib" ] to your
+      # target that needs it.
+    ]
+  } else if (is_android) {
+    # Android uses -nostdlib so we need to add even libc here.
+    libs = [
+      # TODO(brettw) write a version of this, hopefully we can express this
+      # without forking out to GCC just to get the library name. The android
+      # toolchain directory should probably be extracted into a .gni file that
+      # this file and the android toolchain .gn file can share.
+      #   # Manually link the libgcc.a that the cross compiler uses.
+      #   '<!(<(android_toolchain)/*-gcc -print-libgcc-file-name)',
+      "c",
+      "dl",
+      "m"
+    ]
+  } else if (is_mac) {
+    libs = [
+      "AppKit.framework",
+      "ApplicationServices.framework",
+      "Carbon.framework",
+      "CoreFoundation.framework",
+      "Foundation.framework",
+      "IOKit.framework",
+      "Security.framework",
+    ]
+  } else if (is_ios) {
+    libs = [
+      "CoreFoundation.framework",
+      "CoreGraphics.framework",
+      "CoreText.framework",
+      "Foundation.framework",
+      "UIKit.framework",
+    ]
+  } else if (is_linux) {
+    libs = [
+      "dl",
+    ]
+  }
+}
diff --git a/build/config/BUILDCONFIG.gn b/build/config/BUILDCONFIG.gn
new file mode 100644
index 0000000..7994dd7
--- /dev/null
+++ b/build/config/BUILDCONFIG.gn
@@ -0,0 +1,692 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================================================
+# BUILD FLAGS
+# =============================================================================
+#
+# This block lists input arguments to the build, along with their default
+# values. GN requires listing them explicitly so it can validate input and have
+# a central place to manage the build flags.
+#
+# If a value is specified on the command line, it will overwrite the defaults
+# given here, otherwise the default will be injected into the root scope.
+#
+# KEEP IN ALPHABETICAL ORDER and write a good description for everything.
+# Use "is_*" names for intrinsic platform descriptions and build modes, and
+# "use_*" names for optional features libraries, and configurations.
+declare_args() {
+  # How many symbols to include in the build. This affects the performance of
+  # the build since the symbols are large and dealing with them is slow.
+  #   2 means regular build with symbols.
+  #   1 means minimal symbols, usually enough for backtraces only.
+  #   0 means no symbols.
+  #   -1 means auto-set (off in release, regular in debug).
+  symbol_level = -1
+
+  # Component build.
+  is_component_build = false
+
+  # Debug build.
+  is_debug = true
+
+  # Set to true when compiling with the Clang compiler. Typically this is used
+  # to configure warnings.
+  is_clang = (os == "mac" || os == "ios" || os == "linux")
+
+  # Forces a 64-bit build on Windows. Does nothing on other platforms. Normally
+  # we build 32-bit on Windows regardless of the current host OS bit depth.
+  # Setting this flag will override this logic and generate 64-bit toolchains.
+  #
+  # Normally this would get set automatically when you specify a target using
+  # the 64-bit toolchain. You can also set this on the command line to convert
+  # the default toolchain to 64-bit.
+  force_win64 = false
+
+  # Selects the desired build flavor. Official builds get additional
+  # processing to prepare for release. Normally you will want to develop and
+  # test with this flag off.
+  is_official_build = false
+
+  # Select the desired branding flavor. False means normal Chromium branding,
+  # true means official Google Chrome branding (requires extra Google-internal
+  # resources).
+  is_chrome_branded = false
+
+  # Compile for Address Sanitizer to find memory bugs.
+  is_asan = false
+
+  # Compile for Leak Sanitizer to find leaks.
+  is_lsan = false
+
+  # Compile for Memory Sanitizer to find uninitialized reads.
+  is_msan = false
+
+  # Compile for Thread Sanitizer to find threading bugs.
+  is_tsan = false
+
+  if (os == "chromeos") {
+    # Allows the target toolchain to be injected as arguments. This is needed
+    # to support the CrOS build system which supports per-build-configuration
+    # toolchains.
+    cros_use_custom_toolchain = false
+  }
+}
+
+# =============================================================================
+# OS DEFINITIONS
+# =============================================================================
+#
+# We set these various is_FOO booleans for convenience in writing OS-based
+# conditions.
+#
+# - is_android, is_chromeos, is_ios, and is_win should be obvious.
+# - is_mac is set only for desktop Mac. It is not set on iOS.
+# - is_posix is true for mac and any Unix-like system (basically everything
+#   except Windows).
+# - is_linux is true for desktop Linux and ChromeOS, but not Android (which is
+#   generally too different despite being based on the Linux kernel).
+#
+# Do not add more is_* variants here for random lesser-used Unix systems like
+# aix or one of the BSDs. If you need to check these, just check the os value
+# directly.
+
+if (os == "win") {
+  is_android = false
+  is_chromeos = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = false
+  is_win = true
+} else if (os == "mac") {
+  is_android = false
+  is_chromeos = false
+  is_ios = false
+  is_linux = false
+  is_mac = true
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (os == "android") {
+  is_android = true
+  is_chromeos = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (os == "chromeos") {
+  is_android = false
+  is_chromeos = true
+  is_ios = false
+  is_linux = true
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (os == "nacl") {
+  # os == "nacl" will be passed by the nacl toolchain definition. It is not
+  # set by default or on the command line. We treat it as a Posix variant.
+  is_android = false
+  is_chromeos = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = true
+  is_posix = true
+  is_win = false
+} else if (os == "ios") {
+  is_android = false
+  is_chromeos = false
+  is_ios = true
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (os == "linux") {
+  is_android = false
+  is_chromeos = false
+  is_ios = false
+  is_linux = true
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+}
+
+is_desktop_linux = is_linux && !is_chromeos
+
+# =============================================================================
+# CPU ARCHITECTURE
+# =============================================================================
+
+if (is_win) {
+  # Always use 32-bit on Windows, even when compiling on a 64-bit host OS,
+  # unless the override flag is specified.
+  if (force_win64) {
+    cpu_arch = "x64"
+  } else {
+    cpu_arch = "x86"
+  }
+}
+
+# =============================================================================
+# SOURCES FILTERS
+# =============================================================================
+#
+# These patterns filter out platform-specific files when assigning to the
+# sources variable. The magic variable |sources_assignment_filter| is applied
+# to each assignment or appending to the sources variable and matches are
+# automatically removed.
+#
+# Note that the patterns are NOT regular expressions. Only "*" and "\b" (path
+# boundary = end of string or slash) are supported, and the entire string
+# must match the pattern (so you need "*.cc" to match all .cc files, for
+# example).
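+#
+# For example, "*\bwin/*" matches "foo/win/bar.cc" (the "win" component is
+# preceded by a path boundary) but not "foo/window/bar.cc".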
+
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+sources_assignment_filter = []
+if (!is_posix) {
+  sources_assignment_filter += [
+    "*_posix.h",
+    "*_posix.cc",
+    "*_posix_unittest.h",
+    "*_posix_unittest.cc",
+    "*\bposix/*",
+  ]
+}
+if (!is_win) {
+  sources_assignment_filter += [
+    "*_win.cc",
+    "*_win.h",
+    "*_win_unittest.cc",
+    "*\bwin/*",
+    "*.rc",
+  ]
+}
+if (!is_mac) {
+  sources_assignment_filter += [
+    "*_mac.h",
+    "*_mac.cc",
+    "*_mac.mm",
+    "*_mac_unittest.h",
+    "*_mac_unittest.cc",
+    "*_mac_unittest.mm",
+    "*\bmac/*",
+    "*_cocoa.h",
+    "*_cocoa.cc",
+    "*_cocoa.mm",
+    "*_cocoa_unittest.h",
+    "*_cocoa_unittest.cc",
+    "*_cocoa_unittest.mm",
+    "*\bcocoa/*",
+  ]
+}
+if (!is_ios) {
+  sources_assignment_filter += [
+    "*_ios.h",
+    "*_ios.cc",
+    "*_ios.mm",
+    "*_ios_unittest.h",
+    "*_ios_unittest.cc",
+    "*_ios_unittest.mm",
+    "*\bios/*",
+  ]
+}
+if (!is_mac && !is_ios) {
+  sources_assignment_filter += [
+    "*.mm",
+  ]
+}
+if (!is_linux) {
+  sources_assignment_filter += [
+    "*_linux.h",
+    "*_linux.cc",
+    "*_linux_unittest.h",
+    "*_linux_unittest.cc",
+    "*\blinux/*",
+  ]
+}
+if (!is_android) {
+  sources_assignment_filter += [
+    "*_android.h",
+    "*_android.cc",
+    "*_android_unittest.h",
+    "*_android_unittest.cc",
+    "*\bandroid/*",
+  ]
+}
+if (!is_chromeos) {
+  sources_assignment_filter += [
+    "*_chromeos.h",
+    "*_chromeos.cc",
+    "*_chromeos_unittest.h",
+    "*_chromeos_unittest.cc",
+    "*\bchromeos/*",
+  ]
+}
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+
+# Actually save this list.
+#
+# These patterns are executed for every file in the source tree of every run.
+# Therefore, adding more patterns slows down the build for everybody. We should
+# only add automatic patterns for configurations affecting hundreds of files
+# across many projects in the tree.
+#
+# Therefore, we only add rules to this list corresponding to platforms on the
+# Chromium waterfall. This is not for non-officially-supported platforms
+# (FreeBSD, etc.), toolkits (X11, GTK, etc.), or features. For these cases,
+# write a conditional in the target to remove the file(s) from the list when
+# your platform/toolkit/feature doesn't apply.
+set_sources_assignment_filter(sources_assignment_filter)
+
+# =============================================================================
+# BUILD OPTIONS
+# =============================================================================
+
+# These Sanitizers all imply using the Clang compiler. On Windows they either
+# don't work or work differently.
+if (!is_clang && (is_asan || is_lsan || is_tsan || is_msan)) {
+  is_clang = true
+}
+
+# =============================================================================
+# TARGET DEFAULTS
+# =============================================================================
+#
+# Set up the default configuration for every build target of the given type.
+# The values configured here will be automatically set on the scope of the
+# corresponding target. Target definitions can add or remove to the settings
+# here as needed.
+
+# Holds all configs used for making native executables and libraries, to avoid
+# duplication in each target below.
+_native_compiler_configs = [
+  "//build/config:feature_flags",
+
+  "//build/config/compiler:compiler",
+  "//build/config/compiler:compiler_arm_fpu",
+  "//build/config/compiler:chromium_code",
+  "//build/config/compiler:default_include_dirs",
+  "//build/config/compiler:default_warnings",
+  "//build/config/compiler:no_rtti",
+  "//build/config/compiler:runtime_library",
+]
+if (is_win) {
+  _native_compiler_configs += [
+    "//build/config/win:lean_and_mean",
+    "//build/config/win:nominmax",
+    "//build/config/win:sdk",
+    "//build/config/win:unicode",
+    "//build/config/win:winver",
+  ]
+}
+if (is_posix) {
+  _native_compiler_configs += [
+    "//build/config/gcc:no_exceptions",
+    "//build/config/gcc:symbol_visibility_hidden",
+  ]
+}
+
+if (is_linux) {
+  _native_compiler_configs += [ "//build/config/linux:sdk", ]
+} else if (is_mac) {
+  _native_compiler_configs += [ "//build/config/mac:sdk", ]
+} else if (is_ios) {
+  _native_compiler_configs += [ "//build/config/ios:sdk", ]
+} else if (is_android) {
+  _native_compiler_configs += [ "//build/config/android:sdk", ]
+}
+
+if (is_clang) {
+  _native_compiler_configs += [
+    "//build/config/clang:find_bad_constructs",
+    "//build/config/clang:extra_warnings",
+  ]
+}
+
+# Optimizations and debug checking.
+if (is_debug) {
+  _native_compiler_configs += [ "//build/config:debug" ]
+  _default_optimization_config = "//build/config/compiler:no_optimize"
+} else {
+  _native_compiler_configs += [ "//build/config:release" ]
+  _default_optimization_config = "//build/config/compiler:optimize"
+}
+_native_compiler_configs += [ _default_optimization_config ]
+
+# If it wasn't manually set, set to an appropriate default.
+if (symbol_level == -1) {
+  # Linux is slowed by having symbols as part of the target binary, whereas
+  # Mac and Windows have them separate, so in Release Linux, default them off.
+  if (is_debug || !is_linux) {
+    symbol_level = 2
+  } else {
+    symbol_level = 0
+  }
+}
+
+# Symbol setup.
+if (symbol_level == 2) {
+  _default_symbols_config = "//build/config/compiler:symbols"
+} else if (symbol_level == 1) {
+  _default_symbols_config = "//build/config/compiler:minimal_symbols"
+} else if (symbol_level == 0) {
+  _default_symbols_config = "//build/config/compiler:no_symbols"
+} else {
+  assert(false, "Bad value for symbol_level.")
+}
+_native_compiler_configs += [ _default_symbols_config ]
+
+# Windows linker setup for EXEs and DLLs.
+if (is_win) {
+  if (is_debug) {
+    _default_incremental_linking_config =
+      "//build/config/win:incremental_linking"
+  } else {
+    _default_incremental_linking_config =
+      "//build/config/win:no_incremental_linking"
+  }
+  _windows_linker_configs = [
+    _default_incremental_linking_config,
+    "//build/config/win:sdk_link",
+    "//build/config/win:common_linker_setup",
+    # Default to console-mode apps. Most of our targets are tests and such
+    # that shouldn't use the windows subsystem.
+    "//build/config/win:console",
+  ]
+}
+
+# Executable defaults.
+_executable_configs = _native_compiler_configs + [
+  "//build/config:default_libs",
+]
+if (is_win) {
+  _executable_configs += _windows_linker_configs
+} else if (is_mac) {
+  _executable_configs += [
+    "//build/config/mac:mac_dynamic_flags",
+    "//build/config/mac:mac_executable_flags" ]
+} else if (is_linux || is_android) {
+  _executable_configs += [ "//build/config/gcc:executable_ldconfig" ]
+}
+set_defaults("executable") {
+  configs = _executable_configs
+}
+
+# Static library defaults.
+set_defaults("static_library") {
+  configs = _native_compiler_configs
+}
+
+# Shared library defaults (also for components in component mode).
+_shared_library_configs = _native_compiler_configs + [
+  "//build/config:default_libs",
+]
+if (is_win) {
+  _shared_library_configs += _windows_linker_configs
+} else if (is_mac) {
+  _shared_library_configs += [ "//build/config/mac:mac_dynamic_flags" ]
+}
+set_defaults("shared_library") {
+  configs = _shared_library_configs
+}
+if (is_component_build) {
+  set_defaults("component") {
+    configs = _shared_library_configs
+  }
+}
+
+# Source set defaults (also for components in non-component mode).
+set_defaults("source_set") {
+  configs = _native_compiler_configs
+}
+if (!is_component_build) {
+  set_defaults("component") {
+    configs = _native_compiler_configs
+  }
+}
+
+# Test defaults.
+set_defaults("test") {
+  if (is_android) {
+    configs = _shared_library_configs
+  } else {
+    configs = _executable_configs
+  }
+}
+
+
+# ==============================================================================
+# TOOLCHAIN SETUP
+# ==============================================================================
+#
+# Here we set the default toolchain, as well as the variable host_toolchain
+# which will identify the toolchain corresponding to the local system when
+# doing cross-compiles. When not cross-compiling, this will be the same as the
+# default toolchain.
+
+if (is_win) {
+  # TODO(brettw) name the toolchains the same as cpu_arch as with Linux below
+  # to eliminate these conditionals.
+  if (build_cpu_arch == "x64") {
+    host_toolchain = "//build/toolchain/win:64"
+  } else if (build_cpu_arch == "x86") {
+    host_toolchain = "//build/toolchain/win:32"
+  }
+
+  if (cpu_arch == "x64") {
+    set_default_toolchain("//build/toolchain/win:64")
+  } else if (cpu_arch == "x86") {
+    set_default_toolchain("//build/toolchain/win:32")
+  }
+} else if (is_android) {
+  # Use clang for the x86/64 Linux host builds.
+  if (build_cpu_arch == "x86" || build_cpu_arch == "x64") {
+    host_toolchain = "//build/toolchain/linux:clang_$build_cpu_arch"
+  } else {
+    host_toolchain = "//build/toolchain/linux:$build_cpu_arch"
+  }
+  set_default_toolchain("//build/toolchain/android:$cpu_arch")
+} else if (is_linux) {
+  if (is_clang) {
+    host_toolchain = "//build/toolchain/linux:clang_$build_cpu_arch"
+    set_default_toolchain("//build/toolchain/linux:clang_$cpu_arch")
+  } else {
+    host_toolchain = "//build/toolchain/linux:$build_cpu_arch"
+    set_default_toolchain("//build/toolchain/linux:$cpu_arch")
+  }
+  if (is_chromeos && cros_use_custom_toolchain) {
+    set_default_toolchain("//build/toolchain/cros:target")
+  }
+} else if (is_mac) {
+  host_toolchain = "//build/toolchain/mac:clang"
+  set_default_toolchain(host_toolchain)
+} else if (is_ios) {
+  host_toolchain = "//build/toolchain/mac:host_clang"
+  set_default_toolchain("//build/toolchain/mac:clang")
+}
+
+# ==============================================================================
+# COMPONENT SETUP
+# ==============================================================================
+
+# TODO(brettw) erase this once the built-in "component" function is removed.
+if (is_component_build) {
+  component_mode = "shared_library"
+} else {
+  component_mode = "source_set"
+}
+
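+# A component is declared like the underlying target type it wraps. A minimal
+# illustrative example:
+#
+#   component("my_component") {
+#     sources = [ "my_component.cc" ]
+#   }
+#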
+template("component") {
+  if (is_component_build) {
+    shared_library(target_name) {
+      # Configs will always be defined since we set_defaults for a component
+      # above. We want to use those rather than whatever came with the nested
+      # shared/static library inside the component.
+      configs = []  # Prevent list overwriting warning.
+      configs = invoker.configs
+
+      # The sources assignment filter will have already been applied when the
+      # code was originally executed. We don't want to apply it again, since
+      # the original target may have overridden it for some assignments.
+      set_sources_assignment_filter([])
+
+      if (defined(invoker.all_dependent_configs)) { all_dependent_configs = invoker.all_dependent_configs }
+      if (defined(invoker.allow_circular_includes_from)) { allow_circular_includes_from = invoker.allow_circular_includes_from }
+      if (defined(invoker.cflags)) { cflags = invoker.cflags }
+      if (defined(invoker.cflags_c)) { cflags_c = invoker.cflags_c }
+      if (defined(invoker.cflags_cc)) { cflags_cc = invoker.cflags_cc }
+      if (defined(invoker.cflags_objc)) { cflags_objc = invoker.cflags_objc }
+      if (defined(invoker.cflags_objcc)) { cflags_objcc = invoker.cflags_objcc }
+      if (defined(invoker.check_includes)) { check_includes = invoker.check_includes }
+      if (defined(invoker.data)) { data = invoker.data }
+      if (defined(invoker.datadeps)) { datadeps = invoker.datadeps }
+      if (defined(invoker.defines)) { defines = invoker.defines }
+      if (defined(invoker.deps)) { deps = invoker.deps }
+      if (defined(invoker.direct_dependent_configs)) { direct_dependent_configs = invoker.direct_dependent_configs }
+      if (defined(invoker.forward_dependent_configs_from)) { forward_dependent_configs_from = invoker.forward_dependent_configs_from }
+      if (defined(invoker.include_dirs)) { include_dirs = invoker.include_dirs }
+      if (defined(invoker.ldflags)) { ldflags = invoker.ldflags }
+      if (defined(invoker.lib_dirs)) { lib_dirs = invoker.lib_dirs }
+      if (defined(invoker.libs)) { libs = invoker.libs }
+      if (defined(invoker.output_extension)) { output_extension = invoker.output_extension }
+      if (defined(invoker.output_name)) { output_name = invoker.output_name }
+      if (defined(invoker.public)) { public = invoker.public }
+      if (defined(invoker.public_configs)) { public_configs = invoker.public_configs }
+      if (defined(invoker.public_deps)) { public_deps = invoker.public_deps }
+      if (defined(invoker.sources)) { sources = invoker.sources }
+      if (defined(invoker.testonly)) { testonly = invoker.testonly }
+      if (defined(invoker.visibility)) { visibility = invoker.visibility }
+    }
+  } else {
+    source_set(target_name) {
+      # See above.
+      configs = []  # Prevent list overwriting warning.
+      configs = invoker.configs
+
+      # See above call.
+      set_sources_assignment_filter([])
+
+      if (defined(invoker.all_dependent_configs)) { all_dependent_configs = invoker.all_dependent_configs }
+      if (defined(invoker.allow_circular_includes_from)) { allow_circular_includes_from = invoker.allow_circular_includes_from }
+      if (defined(invoker.cflags)) { cflags = invoker.cflags }
+      if (defined(invoker.cflags_c)) { cflags_c = invoker.cflags_c }
+      if (defined(invoker.cflags_cc)) { cflags_cc = invoker.cflags_cc }
+      if (defined(invoker.cflags_objc)) { cflags_objc = invoker.cflags_objc }
+      if (defined(invoker.cflags_objcc)) { cflags_objcc = invoker.cflags_objcc }
+      if (defined(invoker.check_includes)) { check_includes = invoker.check_includes }
+      if (defined(invoker.data)) { data = invoker.data }
+      if (defined(invoker.datadeps)) { datadeps = invoker.datadeps }
+      if (defined(invoker.defines)) { defines = invoker.defines }
+      if (defined(invoker.deps)) { deps = invoker.deps }
+      if (defined(invoker.direct_dependent_configs)) { direct_dependent_configs = invoker.direct_dependent_configs }
+      if (defined(invoker.forward_dependent_configs_from)) { forward_dependent_configs_from = invoker.forward_dependent_configs_from }
+      if (defined(invoker.include_dirs)) { include_dirs = invoker.include_dirs }
+      if (defined(invoker.ldflags)) { ldflags = invoker.ldflags }
+      if (defined(invoker.lib_dirs)) { lib_dirs = invoker.lib_dirs }
+      if (defined(invoker.libs)) { libs = invoker.libs }
+      if (defined(invoker.output_extension)) { output_extension = invoker.output_extension }
+      if (defined(invoker.output_name)) { output_name = invoker.output_name }
+      if (defined(invoker.public)) { public = invoker.public }
+      if (defined(invoker.public_configs)) { public_configs = invoker.public_configs }
+      if (defined(invoker.public_deps)) { public_deps = invoker.public_deps }
+      if (defined(invoker.sources)) { sources = invoker.sources }
+      if (defined(invoker.testonly)) { testonly = invoker.testonly }
+      if (defined(invoker.visibility)) { visibility = invoker.visibility }
+    }
+  }
+}
+
+# ==============================================================================
+# TEST SETUP
+# ==============================================================================
+
+# Define a test as an executable (or shared_library on Android) with the
+# "testonly" flag set.
+template("test") {
+  if (is_android) {
+    shared_library(target_name) {
+      # Configs will always be defined since we set_defaults for "test"
+      # above. We want to use those rather than whatever came with the nested
+      # shared library inside the template.
+      configs = []  # Prevent list overwriting warning.
+      configs = invoker.configs
+
+      # See above call.
+      set_sources_assignment_filter([])
+
+      testonly = true
+
+      if (defined(invoker.all_dependent_configs)) { all_dependent_configs = invoker.all_dependent_configs }
+      if (defined(invoker.allow_circular_includes_from)) { allow_circular_includes_from = invoker.allow_circular_includes_from }
+      if (defined(invoker.cflags)) { cflags = invoker.cflags }
+      if (defined(invoker.cflags_c)) { cflags_c = invoker.cflags_c }
+      if (defined(invoker.cflags_cc)) { cflags_cc = invoker.cflags_cc }
+      if (defined(invoker.cflags_objc)) { cflags_objc = invoker.cflags_objc }
+      if (defined(invoker.cflags_objcc)) { cflags_objcc = invoker.cflags_objcc }
+      if (defined(invoker.check_includes)) { check_includes = invoker.check_includes }
+      if (defined(invoker.data)) { data = invoker.data }
+      if (defined(invoker.datadeps)) { datadeps = invoker.datadeps }
+      if (defined(invoker.defines)) { defines = invoker.defines }
+      if (defined(invoker.deps)) { deps = invoker.deps }
+      if (defined(invoker.direct_dependent_configs)) { direct_dependent_configs = invoker.direct_dependent_configs }
+      if (defined(invoker.forward_dependent_configs_from)) { forward_dependent_configs_from = invoker.forward_dependent_configs_from }
+      if (defined(invoker.include_dirs)) { include_dirs = invoker.include_dirs }
+      if (defined(invoker.ldflags)) { ldflags = invoker.ldflags }
+      if (defined(invoker.lib_dirs)) { lib_dirs = invoker.lib_dirs }
+      if (defined(invoker.libs)) { libs = invoker.libs }
+      if (defined(invoker.output_extension)) { output_extension = invoker.output_extension }
+      if (defined(invoker.output_name)) { output_name = invoker.output_name }
+      if (defined(invoker.public)) { public = invoker.public }
+      if (defined(invoker.public_configs)) { public_configs = invoker.public_configs }
+      if (defined(invoker.public_deps)) { public_deps = invoker.public_deps }
+      if (defined(invoker.sources)) { sources = invoker.sources }
+      if (defined(invoker.visibility)) { visibility = invoker.visibility }
+    }
+  } else {
+    executable(target_name) {
+      # See above.
+      configs = []  # Prevent list overwriting warning.
+      configs = invoker.configs
+
+      # See above call.
+      set_sources_assignment_filter([])
+
+      testonly = true
+
+      if (defined(invoker.all_dependent_configs)) { all_dependent_configs = invoker.all_dependent_configs }
+      if (defined(invoker.allow_circular_includes_from)) { allow_circular_includes_from = invoker.allow_circular_includes_from }
+      if (defined(invoker.cflags)) { cflags = invoker.cflags }
+      if (defined(invoker.cflags_c)) { cflags_c = invoker.cflags_c }
+      if (defined(invoker.cflags_cc)) { cflags_cc = invoker.cflags_cc }
+      if (defined(invoker.cflags_objc)) { cflags_objc = invoker.cflags_objc }
+      if (defined(invoker.cflags_objcc)) { cflags_objcc = invoker.cflags_objcc }
+      if (defined(invoker.check_includes)) { check_includes = invoker.check_includes }
+      if (defined(invoker.data)) { data = invoker.data }
+      if (defined(invoker.datadeps)) { datadeps = invoker.datadeps }
+      if (defined(invoker.defines)) { defines = invoker.defines }
+      if (defined(invoker.deps)) { deps = invoker.deps }
+      if (defined(invoker.direct_dependent_configs)) { direct_dependent_configs = invoker.direct_dependent_configs }
+      if (defined(invoker.forward_dependent_configs_from)) { forward_dependent_configs_from = invoker.forward_dependent_configs_from }
+      if (defined(invoker.include_dirs)) { include_dirs = invoker.include_dirs }
+      if (defined(invoker.ldflags)) { ldflags = invoker.ldflags }
+      if (defined(invoker.lib_dirs)) { lib_dirs = invoker.lib_dirs }
+      if (defined(invoker.libs)) { libs = invoker.libs }
+      if (defined(invoker.output_extension)) { output_extension = invoker.output_extension }
+      if (defined(invoker.output_name)) { output_name = invoker.output_name }
+      if (defined(invoker.public)) { public = invoker.public }
+      if (defined(invoker.public_configs)) { public_configs = invoker.public_configs }
+      if (defined(invoker.public_deps)) { public_deps = invoker.public_deps }
+      if (defined(invoker.sources)) { sources = invoker.sources }
+      if (defined(invoker.visibility)) { visibility = invoker.visibility }
+    }
+  }
+}
diff --git a/build/config/OWNERS b/build/config/OWNERS
new file mode 100644
index 0000000..9b79b9a
--- /dev/null
+++ b/build/config/OWNERS
@@ -0,0 +1,2 @@
+set noparent
+brettw@chromium.org
diff --git a/build/config/allocator.gni b/build/config/allocator.gni
new file mode 100644
index 0000000..3c4fe90
--- /dev/null
+++ b/build/config/allocator.gni
@@ -0,0 +1,14 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_android || cpu_arch == "mipsel" || is_mac) {
+  _default_allocator = "none"
+} else {
+  _default_allocator = "tcmalloc"
+}
+
+declare_args() {
+  # Memory allocator to use. Set to "none" to use default allocator.
+  use_allocator = _default_allocator
+}
diff --git a/build/config/android/BUILD.gn b/build/config/android/BUILD.gn
new file mode 100644
index 0000000..185db59
--- /dev/null
+++ b/build/config/android/BUILD.gn
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/sysroot.gni")
+
+config("sdk") {
+  if (sysroot != "") {
+    cflags = [ "--sysroot=" + sysroot ]
+    ldflags = [ "--sysroot=" + sysroot ]
+
+    # Need to get some linker flags out of the sysroot.
+    sysroot_ld_path = rebase_path("//build/config/linux/sysroot_ld_path.py")
+    ldflags += [ exec_script(sysroot_ld_path,
+        [ rebase_path("//build/linux/sysroot_ld_path.sh"), sysroot ],
+        "value")
+    ]
+  }
+}
diff --git a/build/config/android/OWNERS b/build/config/android/OWNERS
new file mode 100644
index 0000000..3759e93
--- /dev/null
+++ b/build/config/android/OWNERS
@@ -0,0 +1 @@
+cjhopman@chromium.org
diff --git a/build/config/android/config.gni b/build/config/android/config.gni
new file mode 100644
index 0000000..7603280
--- /dev/null
+++ b/build/config/android/config.gni
@@ -0,0 +1,152 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains common system config stuff for the Android build.
+
+if (is_android) {
+  declare_args() {
+    # Absolute directory containing the Android source code.
+    android_src = ""
+
+    android_sdk_root = "//third_party/android_tools/sdk"
+    android_sdk_version = "20"
+
+    # This is set when building the Android WebView inside the Android build
+    # system, using the 'android' gyp backend. The WebView code is still built
+    # when this is unset, but builds using the normal chromium build system.
+    is_android_webview_build = false
+
+    android_default_keystore_path = "//build/android/ant/chromium-debug.keystore"
+    android_default_keystore_name = "chromiumdebugkey"
+    android_default_keystore_password = "chromium"
+
+    # This is a unique identifier for a given build. It's used for
+    # identifying various build artifacts corresponding to a particular build of
+    # chrome (e.g. where to find archived symbols).
+    android_chrome_build_id = "\"\""
+  }
+
+  if (is_android_webview_build) {
+    assert(android_src != "",
+           "You must specify android_src for an Android WebView build.")
+  }
+
+
+  # Host stuff -----------------------------------------------------------------
+
+  # Defines the name the Android build gives to the current host CPU
+  # architecture, which is different from the names GN uses.
+  if (build_cpu_arch == "x64") {
+    android_host_arch = "x86_64"
+  } else if (build_cpu_arch == "x86") {
+    android_host_arch = "x86"
+  } else {
+    assert(false, "Need Android toolchain support for your build CPU arch.")
+  }
+
+  # Defines the name the Android build gives to the current host operating
+  # system, which is different from the names GN uses.
+  if (build_os == "linux") {
+    android_host_os = "linux"
+  } else {
+    assert(false, "Need Android toolchain support for your build OS.")
+  }
+
+  # Directories and files ------------------------------------------------------
+  #
+  # We define many of the dir strings here for each output architecture
+  # (rather than just the current one) since these are needed by the Android
+  # toolchain file to define toolchains for all possible targets in one pass.
+
+  android_sdk = "${android_sdk_root}/platforms/android-${android_sdk_version}"
+
+  # Path to the Android NDK and SDK.
+  android_ndk_root = "//third_party/android_tools/ndk"
+
+  android_sdk = "${android_sdk_root}/platforms/android-${android_sdk_version}"
+
+  android_sdk_tools = "${android_sdk_root}/tools"
+  android_sdk_build_tools = "${android_sdk_root}/build-tools/20.0.0"
+
+  # Path to the SDK's android.jar
+  android_sdk_jar = "$android_sdk/android.jar"
+
+  zipalign_path = "$android_sdk_build_tools/zipalign"
+
+  # Subdirectories inside android_ndk_root that contain the sysroot for the
+  # associated platform.
+  _android_api_level = 14
+  x86_android_sysroot_subdir = "platforms/android-${_android_api_level}/arch-x86"
+  arm_android_sysroot_subdir = "platforms/android-${_android_api_level}/arch-arm"
+  mips_android_sysroot_subdir = "platforms/android-${_android_api_level}/arch-mips"
+
+  # Toolchain root directory for each build. The actual binaries are inside
+  # a "bin" directory inside of these.
+  _android_toolchain_version = "4.9"
+  x86_android_toolchain_root = "$android_ndk_root/toolchains/x86-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  arm_android_toolchain_root = "$android_ndk_root/toolchains/arm-linux-androideabi-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  mips_android_toolchain_root = "$android_ndk_root/toolchains/mipsel-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+
+  # Location of libgcc. This is only needed for the current GN toolchain, so we
+  # only need to define the current one, rather than one for every platform
+  # like the toolchain roots.
+  if (cpu_arch == "x86") {
+    android_prebuilt_arch = "android-x86"
+    _binary_prefix = "i686-linux-android"
+    android_toolchain_root = "$x86_android_toolchain_root"
+    android_libgcc_file =
+      "$android_toolchain_root/lib/gcc/i686-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else if (cpu_arch == "arm") {
+    android_prebuilt_arch = "android-arm"
+    _binary_prefix = "arm-linux-androideabi"
+    android_toolchain_root = "$arm_android_toolchain_root"
+    android_libgcc_file =
+      "$android_toolchain_root/lib/gcc/arm-linux-androideabi/${_android_toolchain_version}/libgcc.a"
+  } else if (cpu_arch == "mipsel") {
+    android_prebuilt_arch = "android-mips"
+    _binary_prefix = "mipsel-linux-android"
+    android_toolchain_root = "$mips_android_toolchain_root"
+    android_libgcc_file =
+      "$android_toolchain_root/lib/gcc/mipsel-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else {
+    assert(false, "Need android libgcc support for your target arch.")
+  }
+
+  android_readelf = "$android_toolchain_root/bin/$_binary_prefix-readelf"
+  android_gdbserver = "$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver"
+
+  # stlport stuff --------------------------------------------------------------
+
+  use_system_stlport = is_android_webview_build
+
+  if (use_system_stlport) {
+    android_stlport_library = "stlport"
+  } else if (component_mode == "shared_library") {
+    android_stlport_library = "stlport_shared"
+  } else {
+    android_stlport_library = "stlport_static"
+  }
+
+  # ABI ------------------------------------------------------------------------
+
+  if (cpu_arch == "x86") {
+    android_app_abi = "x86"
+  } else if (cpu_arch == "arm") {
+    import("//build/config/arm.gni")
+    if (arm_version < 7) {
+      android_app_abi = "armeabi"
+    } else {
+      android_app_abi = "armeabi-v7a"
+    }
+  } else if (cpu_arch == "mipsel") {
+    android_app_abi = "mips"
+  } else {
+    assert(false, "Unknown Android ABI: " + cpu_arch)
+  }
+} else {
+  if (!defined(is_android_webview_build)) {
+    is_android_webview_build = false
+  }
+  use_system_stlport = false
+}
diff --git a/build/config/android/internal_rules.gni b/build/config/android/internal_rules.gni
new file mode 100644
index 0000000..42d3b3a
--- /dev/null
+++ b/build/config/android/internal_rules.gni
@@ -0,0 +1,739 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+
+assert(is_android)
+
+
+rebased_android_sdk = rebase_path(android_sdk, root_build_dir)
+rebased_android_sdk_root = rebase_path(android_sdk_root, root_build_dir)
+rebased_android_sdk_build_tools = rebase_path(android_sdk_build_tools, root_build_dir)
+
+android_sdk_jar = "$android_sdk/android.jar"
+rebased_android_sdk_jar = rebase_path(android_sdk_jar, root_build_dir)
+
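+# Runs Android lint (via //build/android/gyp/lint.py) over the given Java
+# files, using the checked-in suppressions file. Illustrative usage (paths
+# are hypothetical):
+#
+#   android_lint("foo_lint") {
+#     jar_path = "$target_gen_dir/foo.jar"
+#     android_manifest = "java/AndroidManifest.xml"
+#     java_files = [ "java/src/org/example/Foo.java" ]
+#   }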
+template("android_lint") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  jar_path = invoker.jar_path
+  android_manifest = invoker.android_manifest
+  java_files = invoker.java_files
+  base_path = "$target_gen_dir/$target_name"
+
+  action(target_name) {
+    script = "//build/android/gyp/lint.py"
+    result_path = base_path + "/result.xml"
+    config_path = base_path + "/config.xml"
+    suppressions_file = "//build/android/lint/suppressions.xml"
+    inputs = [
+      suppressions_file,
+      android_manifest,
+      jar_path,
+    ] + java_files
+
+    outputs = [
+      config_path,
+      result_path
+    ]
+
+    rebased_java_files = rebase_path(java_files, root_build_dir)
+
+    args = [
+      "--lint-path=$rebased_android_sdk_root/tools/lint",
+      "--config-path", rebase_path(suppressions_file, root_build_dir),
+      "--manifest-path", rebase_path(android_manifest, root_build_dir),
+      "--product-dir=.",
+      "--jar-path", rebase_path(jar_path, root_build_dir),
+      "--processed-config-path", rebase_path(config_path, root_build_dir),
+      "--result-path", rebase_path(result_path, root_build_dir),
+      "--java-files=$rebased_java_files",
+      "--enable",
+    ]
+  }
+}
+
+
+# Write the target's .build_config file. This is a JSON file that contains a
+# dictionary of information about how to build this target (things that
+# require knowledge about this target's dependencies and cannot be calculated
+# at gn-time). There is a special syntax to add a value in that dictionary to
+# an action/action_foreach's args:
+#   --python-arg=@FileArg($rebased_build_config_path:key0:key1)
+# At runtime, such an arg will be replaced by the value in the build_config.
+# See build/android/gyp/write_build_config.py and
+# build/android/gyp/util/build_utils.py:ExpandFileArgs
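+#
+# For example (the dictionary keys here are illustrative only):
+#   args = [ "--input-jar=@FileArg($rebased_build_config:javac:jar_path)" ]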
+template("write_build_config") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.type))
+  assert(defined(invoker.build_config))
+
+  type = invoker.type
+  build_config = invoker.build_config
+
+  assert(type == "android_apk" || type == "android_library" || type == "android_resources")
+
+  action(target_name) {
+    script = "//build/android/gyp/write_build_config.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = []
+
+    deps = []
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    outputs = [
+      depfile,
+      build_config
+    ]
+
+    possible_deps_configs = []
+    foreach(d, deps) {
+      dep_gen_dir = get_label_info(d, "target_gen_dir")
+      dep_name = get_label_info(d, "name")
+      possible_deps_configs += [ "$dep_gen_dir/$dep_name.build_config" ]
+    }
+    rebase_possible_deps_configs = rebase_path(possible_deps_configs)
+
+    args = [
+      "--type", type,
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--possible-deps-configs=$rebase_possible_deps_configs",
+      "--build-config", rebase_path(build_config, root_build_dir),
+    ]
+
+    if (type == "android_library" || type == "android_apk") {
+      args += [
+        "--jar-path", rebase_path(invoker.jar_path, root_build_dir),
+        "--dex-path", rebase_path(invoker.dex_path, root_build_dir),
+      ]
+    }
+
+    if (type == "android_resources" || type == "android_apk") {
+      assert(defined(invoker.resources_zip))
+      args += [
+        "--resources-zip", rebase_path(invoker.resources_zip, root_build_dir),
+      ]
+      if (defined(invoker.android_manifest)) {
+        inputs += [
+          invoker.android_manifest
+        ]
+        args += [
+          "--android-manifest", rebase_path(invoker.android_manifest, root_build_dir),
+        ]
+      }
+      if (defined(invoker.custom_package)) {
+        args += [
+          "--package-name", invoker.custom_package
+        ]
+      }
+    }
+
+    if (type == "android_apk") {
+      if (defined(invoker.native_libs)) {
+        rebased_native_libs = rebase_path(invoker.native_libs, root_build_dir)
+        rebased_android_readelf = rebase_path(android_readelf, root_build_dir)
+        args += [
+          "--native-libs=$rebased_native_libs",
+          "--readelf-path=$rebased_android_readelf",
+        ]
+      }
+    }
+
+    if (defined(invoker.srcjar)) {
+      args += [
+        "--srcjar", rebase_path(invoker.srcjar, root_build_dir)
+      ]
+    }
+  }
+}
+
+
+# Creates a zip archive of the inputs.
+# If base_dir is provided, the archive paths will be relative to it.
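+#
+# Illustrative usage (paths are hypothetical):
+#
+#   zip("foo_zip") {
+#     inputs = [ "$target_gen_dir/a.txt", "$target_gen_dir/b.txt" ]
+#     output = "$target_gen_dir/foo.zip"
+#     base_dir = target_gen_dir
+#   }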
+template("zip") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.inputs))
+  assert(defined(invoker.output))
+
+  rebase_inputs = rebase_path(invoker.inputs, root_build_dir)
+  rebase_output = rebase_path(invoker.output, root_build_dir)
+  action(target_name) {
+    script = "//build/android/gn/zip.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = invoker.inputs
+    outputs = [
+      depfile,
+      invoker.output
+    ]
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--inputs=$rebase_inputs",
+      "--output=$rebase_output",
+    ]
+    if (defined(invoker.base_dir)) {
+      args += [
+        "--base-dir", rebase_path(invoker.base_dir, root_build_dir)
+      ]
+    }
+  }
+}
+
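+# Creates a single .dex file from the given jar/class inputs by running
+# //build/android/gyp/dex.py.
+#
+# Example (an illustrative sketch; the target name and paths are hypothetical):
+#   dex("foo_dex") {
+#     sources = [ "$target_gen_dir/foo.jar" ]
+#     output = "$target_gen_dir/foo.dex.jar"
+#   }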
+template("dex") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.sources))
+  assert(defined(invoker.output))
+  action(target_name) {
+    script = "//build/android/gyp/dex.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    sources = invoker.sources
+    outputs = [depfile, invoker.output]
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs
+    }
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+
+    rebased_output = rebase_path(invoker.output, root_build_dir)
+
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--android-sdk-tools", rebased_android_sdk_build_tools,
+      "--dex-path", rebased_output,
+    ]
+
+    if (defined(invoker.no_locals) && invoker.no_locals) {
+      args += [
+        "--no-locals=1"
+      ]
+    }
+
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+
+    args += rebase_path(invoker.sources, root_build_dir)
+  }
+}
+
+# Packages resources, assets, dex, and native libraries into an apk. Signs and
+# zipaligns the apk.
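+#
+# Example (an illustrative sketch; all paths and keystore values are
+# hypothetical):
+#   create_apk("foo_create_apk") {
+#     apk_path = "$root_build_dir/apks/Foo.apk"
+#     android_manifest = "AndroidManifest.xml"
+#     base_path = "$target_gen_dir/foo"
+#     resources_zip = "$target_gen_dir/foo.resources.zip"
+#     dex_path = "$target_gen_dir/classes.dex"
+#     keystore_path = android_default_keystore_path
+#     keystore_name = android_default_keystore_name
+#     keystore_password = android_default_keystore_password
+#   }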
+template("create_apk") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  _android_manifest = invoker.android_manifest
+  _base_path = invoker.base_path
+  _final_apk_path = invoker.apk_path
+  _resources_zip = invoker.resources_zip
+  _dex_path = invoker.dex_path
+  _keystore_path = invoker.keystore_path
+  _keystore_name = invoker.keystore_name
+  _keystore_password = invoker.keystore_password
+
+  _deps = []
+  if (defined(invoker.deps)) {
+    _deps = invoker.deps
+  }
+
+  _native_libs_dir = "//build/android/empty/res"
+  if (defined(invoker.native_libs_dir)) {
+    _native_libs_dir = invoker.native_libs_dir
+  }
+
+  _asset_location = "//build/android/empty/res"
+  if (defined(invoker.asset_location)) {
+    _asset_location = invoker.asset_location
+  }
+
+  _version_code = "0"
+  _version_name = "Developer Build"
+
+  _base_apk_path = _base_path + ".apk_intermediates"
+
+  _resource_packaged_apk_path = _base_apk_path + ".ap_"
+  _packaged_apk_path = _base_apk_path + ".unfinished.apk"
+
+  _configuration_name = "Release"
+  if (is_debug) {
+    _configuration_name = "Debug"
+  }
+
+  action("${target_name}__package_resources") {
+    deps = _deps
+
+    script = "//build/android/gyp/package_resources.py"
+    depfile = "${target_gen_dir}/${target_name}.d"
+    source_prereqs = [
+      _android_manifest,
+      _resources_zip,
+    ]
+    outputs = [depfile, _resource_packaged_apk_path]
+
+    _rebased_resources_zips = [rebase_path(_resources_zip, root_build_dir)]
+    args = [
+        "--depfile", rebase_path(depfile, root_build_dir),
+        "--android-sdk", rebased_android_sdk,
+        "--android-sdk-tools", rebased_android_sdk_build_tools,
+
+        "--configuration-name=$_configuration_name",
+
+        "--android-manifest", rebase_path(_android_manifest, root_build_dir),
+        "--version-code", _version_code,
+        "--version-name", _version_name,
+
+        "--asset-dir", rebase_path(_asset_location, root_build_dir),
+        "--resource-zips=$_rebased_resources_zips",
+
+        "--apk-path", rebase_path(_resource_packaged_apk_path, root_build_dir),
+      ]
+  }
+
+  action("${target_name}__package") {
+    script = "//build/android/gyp/ant.py"
+    _ant_script = "//build/android/ant/apk-package.xml"
+
+    depfile = "$target_gen_dir/$target_name.d"
+
+    source_prereqs = [
+      _dex_path,
+      _resource_packaged_apk_path,
+      _ant_script
+    ]
+
+    outputs = [
+      depfile,
+      _packaged_apk_path,
+    ]
+
+    _rebased_emma_jar = ""
+    _rebased_resource_packaged_apk_path = rebase_path(
+        _resource_packaged_apk_path, root_build_dir)
+    _rebased_packaged_apk_path = rebase_path(_packaged_apk_path, root_build_dir)
+    _rebased_native_libs_dir = rebase_path(_native_libs_dir, root_build_dir)
+    _rebased_dex_path = rebase_path(_dex_path, root_build_dir)
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--",
+      "-quiet",
+      "-DANDROID_SDK_ROOT=$rebased_android_sdk_root",
+      "-DANDROID_SDK_TOOLS=$rebased_android_sdk_build_tools",
+      "-DRESOURCE_PACKAGED_APK_NAME=$_rebased_resource_packaged_apk_path",
+      "-DCONFIGURATION_NAME=$_configuration_name",
+      "-DNATIVE_LIBS_DIR=$_rebased_native_libs_dir",
+      "-DOUT_DIR=",
+      "-DUNSIGNED_APK_PATH=$_rebased_packaged_apk_path",
+      "-DEMMA_INSTRUMENT=0",
+      "-DEMMA_DEVICE_JAR=$_rebased_emma_jar",
+      "-DDEX_FILE_PATH=$_rebased_dex_path",
+
+      "-Dbasedir=.",
+      "-buildfile", rebase_path(_ant_script, root_build_dir)
+    ]
+  }
+
+  action("${target_name}__finalize") {
+    script = "//build/android/gyp/finalize_apk.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    sources = [_packaged_apk_path]
+    source_prereqs = [_keystore_path]
+    outputs = [depfile, _final_apk_path]
+
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--zipalign-path", rebase_path(zipalign_path, root_build_dir),
+      "--unsigned-apk-path", rebase_path(_packaged_apk_path, root_build_dir),
+      "--final-apk-path", rebase_path(_final_apk_path, root_build_dir),
+      "--key-path", rebase_path(_keystore_path, root_build_dir),
+      "--key-name", _keystore_name,
+      "--key-passwd", _keystore_password,
+    ]
+  }
+
+  group(target_name) {
+    deps = [":${target_name}__finalize"]
+  }
+}
+
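+# Prepares a prebuilt jar: optionally runs proguard preprocessing over it and
+# generates a .TOC file for the result (via //build/android/gyp/jar_toc.py).
+#
+# Example (an illustrative sketch; the target name and paths are hypothetical):
+#   java_prebuilt("foo_process_jar") {
+#     build_config = "$target_gen_dir/foo.build_config"
+#     input_jar_path = "libs/foo.jar"
+#     output_jar_path = "$target_gen_dir/foo.jar"
+#   }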
+template("java_prebuilt") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  _input_jar_path = invoker.input_jar_path
+  _output_jar_path = invoker.output_jar_path
+  _jar_toc_path = _output_jar_path + ".TOC"
+
+  assert(invoker.build_config != "")
+
+  if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+    _proguard_jar_path = "$android_sdk_root/tools/proguard/lib/proguard.jar"
+    _proguard_config_path = invoker.proguard_config
+    _build_config = invoker.build_config
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+    action("${target_name}__proguard_process") {
+      script = "//build/android/gyp/proguard.py"
+      inputs = [
+        android_sdk_jar,
+        _proguard_jar_path,
+        _build_config,
+        _input_jar_path,
+        _proguard_config_path,
+      ]
+      depfile = "${target_gen_dir}/${target_name}.d"
+      outputs = [
+        depfile,
+        _output_jar_path,
+      ]
+      args = [
+        "--depfile", rebase_path(depfile, root_build_dir),
+        "--proguard-path", rebase_path(_proguard_jar_path, root_build_dir),
+        "--input-path", rebase_path(_input_jar_path, root_build_dir),
+        "--output-path", rebase_path(_output_jar_path, root_build_dir),
+        "--proguard-config", rebase_path(_proguard_config_path, root_build_dir),
+        "--classpath", rebased_android_sdk_jar,
+        "--classpath=@FileArg($_rebased_build_config:javac:classpath)",
+      ]
+    }
+  } else {
+    copy("${target_name}__copy_jar") {
+      sources = [_input_jar_path]
+      outputs = [_output_jar_path]
+    }
+  }
+
+  action("${target_name}__jar_toc") {
+    script = "//build/android/gyp/jar_toc.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      _jar_toc_path,
+      _jar_toc_path + ".md5.stamp"
+    ]
+    inputs = [ _output_jar_path ]
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--jar-path", rebase_path(_output_jar_path, root_build_dir),
+      "--toc-path", rebase_path(_jar_toc_path, root_build_dir),
+    ]
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__jar_toc"
+    ]
+  }
+}
+
+# Compiles and jars a set of java files.
+#
+# Outputs:
+#  $jar_path.jar
+#  $jar_path.jar.TOC
+#
+# Variables
+#   java_files: List of .java files to compile.
+#   java_deps: List of java dependencies. These should all have a .jar output
+#     at "${target_gen_dir}/${target_name}.jar.
+#   chromium_code: If true, enable extra warnings.
+#   srcjar_deps: List of srcjar dependencies. The .java files contained in the
+#     dependencies srcjar outputs will be compiled and added to the output jar.
+#   jar_path: Use this to explicitly set the output jar path. Defaults to
+#     "${target_gen_dir}/${target_name}.jar.
+template("java_library") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.java_files))
+  assert(defined(invoker.build_config))
+  assert(defined(invoker.jar_path))
+
+  _java_files = invoker.java_files
+  _final_jar_path = invoker.jar_path
+  _intermediate_jar_path = "$target_gen_dir/$target_name.initial.jar"
+
+  _build_config = invoker.build_config
+
+  _jar_excluded_patterns = []
+  if (defined(invoker.jar_excluded_patterns)) {
+    _jar_excluded_patterns += invoker.jar_excluded_patterns
+  }
+
+  _chromium_code = false
+  if (defined(invoker.chromium_code)) {
+    _chromium_code = invoker.chromium_code
+  }
+
+  _srcjar_deps = []
+  if (defined(invoker.srcjar_deps)) {
+    _srcjar_deps += invoker.srcjar_deps
+  }
+
+  _java_srcjars = []
+  foreach(dep, _srcjar_deps) {
+    _dep_gen_dir = get_label_info(dep, "target_gen_dir")
+    _dep_name = get_label_info(dep, "name")
+    _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+  }
+  # Mark srcjar_deps as used.
+  assert(_srcjar_deps == [] || true)
+
+  _system_jars = [ android_sdk_jar ]
+  action("${target_name}__javac") {
+    script = "//build/android/gyp/javac.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      _intermediate_jar_path,
+      _intermediate_jar_path + ".md5.stamp"
+    ]
+    sources = _java_files + _java_srcjars
+    inputs = _system_jars + [ _build_config ]
+
+    _rebased_system_jars = rebase_path(_system_jars, root_build_dir)
+    _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir)
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+    _rebased_depfile = rebase_path(depfile, root_build_dir)
+    _rebased_jar_path = rebase_path(_intermediate_jar_path, root_build_dir)
+    args = [
+      "--depfile=$_rebased_depfile",
+      "--classpath=$_rebased_system_jars",
+      "--classpath=@FileArg($_rebased_build_config:javac:classpath)",
+      "--jar-path=$_rebased_jar_path",
+      "--java-srcjars=$_rebased_java_srcjars",
+      "--java-srcjars=@FileArg($_rebased_build_config:javac:srcjars)",
+      "--jar-excluded-classes=$_jar_excluded_patterns",
+    ]
+    if (_chromium_code) {
+      args += [ "--chromium-code" ]
+    }
+
+    args += rebase_path(_java_files, root_build_dir)
+  }
+
+  java_prebuilt("${target_name}__finish") {
+    build_config = _build_config
+    input_jar_path = _intermediate_jar_path
+    output_jar_path = _final_jar_path
+    if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+      proguard_preprocess = invoker.proguard_preprocess
+      proguard_config = invoker.proguard_config
+    }
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__javac",
+      ":${target_name}__finish",
+    ]
+  }
+}
+
+
+# This adds Android-specific parts to the java_library template.
+#
+# Runs Android lint against the compiled java files.
+# Dexes the output jar for inclusion in an APK.
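+#
+# Example (an illustrative sketch; the target name and paths are hypothetical):
+#   android_java_library("foo_java") {
+#     java_files = [ "java/src/org/chromium/foo/Foo.java" ]
+#     build_config = "$target_gen_dir/foo.build_config"
+#     jar_path = "$target_gen_dir/foo.jar"
+#     dex_path = "$target_gen_dir/foo.dex.jar"
+#   }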
+template("android_java_library") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.java_files) || defined(invoker.DEPRECATED_java_in_dir))
+  assert(defined(invoker.build_config))
+  assert(defined(invoker.jar_path))
+  assert(defined(invoker.dex_path))
+
+  _srcjar_deps = []
+  if (defined(invoker.srcjar_deps)) {
+    _srcjar_deps = invoker.srcjar_deps
+  }
+
+  _java_files = []
+  if (defined(invoker.java_files)) {
+    _java_files = invoker.java_files
+  } else {
+    _java_files_build_rel = exec_script(
+        "//build/android/gyp/find.py",
+        [
+          "--pattern",
+          "*.java",
+          rebase_path(invoker.DEPRECATED_java_in_dir, root_build_dir)
+        ],
+        "list lines"
+        )
+    _java_files = rebase_path(_java_files_build_rel, ".", root_build_dir)
+  }
+  assert(_java_files != [] || _srcjar_deps != [])
+
+  _jar_path = invoker.jar_path
+  _dex_path = invoker.dex_path
+
+  _android_manifest = "//build/android/AndroidManifest.xml"
+  if (defined(invoker.android_manifest)) {
+    _android_manifest = invoker.android_manifest
+  }
+  assert(_android_manifest != "")
+
+  _final_deps = []
+  _final_datadeps = []
+
+  java_library("${target_name}__java_library") {
+    jar_path = _jar_path
+    if (defined(invoker.jar_excluded_patterns)) {
+      jar_excluded_patterns = invoker.jar_excluded_patterns
+    }
+    build_config = invoker.build_config
+    java_files = _java_files
+    srcjar_deps = _srcjar_deps
+
+    if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+      proguard_preprocess = invoker.proguard_preprocess
+      proguard_config = invoker.proguard_config
+    }
+
+    if (defined(invoker.dist_jar_path)) {
+      dist_jar_path = invoker.dist_jar_path
+    }
+  }
+
+  if (defined(invoker.chromium_code) && invoker.chromium_code) {
+    _final_datadeps += [ ":${target_name}__lint" ]
+    android_lint("${target_name}__lint") {
+      android_manifest = _android_manifest
+      jar_path = _jar_path
+      java_files = _java_files
+    }
+  }
+
+  dex("${target_name}__dex") {
+    sources = [_jar_path]
+    output = _dex_path
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__java_library",
+      ":${target_name}__dex",
+    ] + _final_deps + _final_datadeps
+  }
+}
+
+# Runs process_resources.py
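+#
+# Example (an illustrative sketch; the target name and paths are hypothetical):
+#   process_resources("foo_process_resources") {
+#     android_manifest = "AndroidManifest.xml"
+#     resource_dirs = [ "res" ]
+#     build_config = "$target_gen_dir/foo.build_config"
+#     zip_path = "$target_gen_dir/foo.resources.zip"
+#     srcjar_path = "$target_gen_dir/foo.srcjar"
+#   }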
+template("process_resources") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  zip_path = invoker.zip_path
+  srcjar_path = invoker.srcjar_path
+  build_config = invoker.build_config
+  resource_dirs = invoker.resource_dirs
+  android_manifest = invoker.android_manifest
+
+  non_constant_id = true
+  if (defined(invoker.generate_constant_ids) && invoker.generate_constant_ids) {
+    non_constant_id = false
+  }
+
+  action(target_name) {
+    script = "//build/android/gyp/process_resources.py"
+
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      zip_path,
+      srcjar_path,
+    ]
+
+    sources_build_rel = exec_script(
+        "//build/android/gyp/find.py",
+        rebase_path(resource_dirs, root_build_dir),
+        "list lines"
+        )
+    sources = rebase_path(sources_build_rel, ".", root_build_dir)
+
+    source_prereqs = [
+      build_config,
+      android_manifest,
+    ]
+
+    rebase_resource_dirs = rebase_path(resource_dirs, root_build_dir)
+    rebase_build_config = rebase_path(build_config, root_build_dir)
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--android-sdk", rebase_path(android_sdk, root_build_dir),
+      "--android-sdk-tools", rebase_path(android_sdk_build_tools, root_build_dir),
+      "--android-manifest", rebase_path(android_manifest, root_build_dir),
+
+      "--resource-dirs=$rebase_resource_dirs",
+      "--srcjar-out", rebase_path(srcjar_path, root_build_dir),
+      "--resource-zip-out", rebase_path(zip_path, root_build_dir),
+
+      "--dependencies-res-zips=@FileArg($rebase_build_config:resources:dependency_zips)",
+      "--extra-res-packages=@FileArg($rebase_build_config:resources:extra_package_names)",
+    ]
+
+    if (non_constant_id) {
+      args += [ "--non-constant-id" ]
+    }
+
+    if (defined(invoker.custom_package)) {
+      args += [
+        "--custom-package", invoker.custom_package,
+      ]
+    }
+
+    if (defined(invoker.v14_verify_only) && invoker.v14_verify_only) {
+      args += ["--v14-verify-only"]
+    }
+
+    if (defined(invoker.all_resources_zip_path)) {
+      all_resources_zip = invoker.all_resources_zip_path
+      outputs += [ all_resources_zip ]
+      args += [
+        "--all-resources-zip-out", rebase_path(all_resources_zip, root_build_dir)
+      ]
+    }
+
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+  }
+}
+
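+# Copies files into a destination directory, optionally clearing it first.
+# Wraps //build/android/gyp/copy_ex.py; extra arguments (e.g. @FileArg-based
+# --files flags, as used by invocations later in this file) can be passed via
+# the args variable.
+#
+# Example (an illustrative sketch; the target name and paths are hypothetical):
+#   copy_ex("copy_foo_libs") {
+#     clear_dir = true
+#     dest = "$root_build_dir/foo_libs"
+#     sources = [ "$root_build_dir/libfoo.so" ]
+#   }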
+template("copy_ex") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  action(target_name) {
+    script = "//build/android/gyp/copy_ex.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+    ]
+    sources = []
+    if (defined(invoker.sources)) {
+      sources += invoker.sources
+    }
+
+    inputs = []
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--dest", rebase_path(invoker.dest, root_build_dir),
+    ]
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+    rebased_sources = rebase_path(sources, root_build_dir)
+    args += [ "--files=$rebased_sources" ]
+
+    if (defined(invoker.clear_dir) && invoker.clear_dir) {
+      args += ["--clear"]
+    }
+  }
+}
diff --git a/build/config/android/rules.gni b/build/config/android/rules.gni
new file mode 100644
index 0000000..4b5d62b
--- /dev/null
+++ b/build/config/android/rules.gni
@@ -0,0 +1,1145 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/android/internal_rules.gni")
+import("//tools/grit/grit_rule.gni")
+
+assert(is_android)
+
+
+# Declare a jni target
+#
+# This target generates the native jni bindings for a set of .java files.
+#
+# See base/android/jni_generator/jni_generator.py for more info about the
+# format of generating JNI bindings.
+#
+# Variables
+#   sources: list of .java files to generate jni for
+#   jni_package: subdirectory path for generated bindings
+#
+# Example
+#   generate_jni("foo_jni") {
+#     sources = [
+#       "android/java/src/org/chromium/foo/Foo.java",
+#       "android/java/src/org/chromium/foo/FooUtil.java",
+#     ]
+#     jni_package = "foo"
+#   }
+template("generate_jni") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.sources))
+  assert(defined(invoker.jni_package))
+  jni_package = invoker.jni_package
+  base_output_dir = "${root_gen_dir}/${target_name}/${jni_package}"
+  jni_output_dir = "${base_output_dir}/jni"
+
+  jni_generator_include = "//base/android/jni_generator/jni_generator_helper.h"
+
+  foreach_target_name = "${target_name}__jni_gen"
+  action_foreach(foreach_target_name) {
+    script = "//base/android/jni_generator/jni_generator.py"
+    depfile = "$target_gen_dir/$target_name.{{source_name_part}}.d"
+    sources = invoker.sources
+    inputs = [ jni_generator_include ]
+    outputs = [
+      depfile,
+      "${jni_output_dir}/{{source_name_part}}_jni.h"
+    ]
+
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--input_file={{source}}",
+      "--optimize_generation=1",
+      "--ptr_type=long",
+      "--output_dir", rebase_path(jni_output_dir, root_build_dir),
+      "--includes", rebase_path(jni_generator_include, "//"),
+    ]
+    if (defined(invoker.jni_generator_jarjar_file)) {
+      args += [
+        "--jarjar", rebase_path(jni_generator_jarjar_file, root_build_dir),
+      ]
+    }
+  }
+
+  config("jni_includes_${target_name}") {
+    include_dirs = [ base_output_dir ]
+  }
+
+  group(target_name) {
+    deps = [ ":$foreach_target_name" ]
+    public_configs = [ ":jni_includes_${target_name}" ]
+
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+  }
+}
+
+
+# Declare a jni target for a prebuilt jar
+#
+# This target generates the native jni bindings for a set of classes in a .jar.
+#
+# See base/android/jni_generator/jni_generator.py for more info about the
+# format of generating JNI bindings.
+#
+# Variables
+#   classes: list of .class files in the jar to generate jni for. These should
+#     include the full path to the .class file.
+#   jni_package: subdirectory path for generated bindings
+#   jar_file: the path to the .jar. If not provided, defaults to the SDK's
+#     android.jar
+#
+#   deps, public_deps: As normal
+#
+# Example
+#   generate_jar_jni("foo_jni") {
+#     classes = [
+#       "android/view/Foo.class",
+#     ]
+#     jni_package = "foo"
+#   }
+template("generate_jar_jni") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.classes))
+  assert(defined(invoker.jni_package))
+
+  if (defined(invoker.jar_file)) {
+    jar_file = invoker.jar_file
+  } else {
+    jar_file = android_sdk_jar
+  }
+
+  jni_package = invoker.jni_package
+  base_output_dir = "${root_gen_dir}/${target_name}/${jni_package}"
+  jni_output_dir = "${base_output_dir}/jni"
+
+  jni_generator_include = "//base/android/jni_generator/jni_generator_helper.h"
+
+  # TODO(cjhopman): make jni_generator.py support generating jni for multiple
+  # .class files from a .jar.
+  jni_actions = []
+  foreach(class, invoker.classes) {
+    _classname_list = []
+    _classname_list = process_file_template(
+        [class], "{{source_name_part}}")
+    classname = _classname_list[0]
+    jni_target_name = "${target_name}__jni_${classname}"
+    jni_actions += [ ":$jni_target_name" ]
+    action(jni_target_name) {
+      depfile = "$target_gen_dir/$target_name.d"
+      script = "//base/android/jni_generator/jni_generator.py"
+      sources = [
+        jni_generator_include,
+        jar_file,
+      ]
+      outputs = [
+        depfile,
+        "${jni_output_dir}/${classname}_jni.h"
+      ]
+
+      args = [
+        "--depfile", rebase_path(depfile, root_build_dir),
+        "--jar_file", rebase_path(jar_file, root_build_dir),
+        "--input_file", class,
+        "--optimize_generation=1",
+        "--ptr_type=long",
+        "--output_dir", rebase_path(jni_output_dir, root_build_dir),
+        "--includes", rebase_path(jni_generator_include, root_build_dir),
+      ]
+    }
+  }
+
+  config("jni_includes_${target_name}") {
+    include_dirs = [ base_output_dir ]
+  }
+
+  group(target_name) {
+    deps = jni_actions
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+    public_configs = [ ":jni_includes_${target_name}" ]
+  }
+}
+
+
+# Declare a target for c-preprocessor-generated java files
+#
+# This target generates java files using the host C pre-processor. Each file in
+# sources will be compiled using the C pre-processor. If include_path is
+# specified, it will be passed (via --include-path) to the pre-processor.
+#
+# This target will create a single .srcjar. Adding this target to an
+# android_library target's srcjar_deps will make the generated java files be
+# included in that library's final outputs.
+#
+# Variables
+#   sources: list of files to be processed by the C pre-processor. For each
+#     file in sources, there will be one .java file in the final .srcjar. For a
+#     file named FooBar.template, a java file will be created with name
+#     FooBar.java.
+#   inputs: additional compile-time dependencies. Any files
+#     `#include`-ed in the templates should be listed here.
+#   package_name: this will be the subdirectory for each .java file in the
+#     .srcjar.
+#
+# Example
+#   java_cpp_template("foo_generated_enum") {
+#     sources = [
+#       "android/java/templates/Foo.template",
+#     ]
+#     inputs = [
+#       "android/java/templates/native_foo_header.h",
+#     ]
+#
+#     package_name = "org/chromium/base/library_loader"
+#     include_path = "android/java/templates"
+#   }
+template("java_cpp_template") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.sources))
+  package_name = invoker.package_name + ""
+
+  if (defined(invoker.include_path)) {
+    include_path = invoker.include_path + ""
+  } else {
+    include_path = "//"
+  }
+
+  action_foreach("${target_name}__apply_gcc") {
+    script = "//build/android/gyp/gcc_preprocess.py"
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs + []
+    }
+    depfile = "${target_gen_dir}/${target_name}_{{source_name_part}}.d"
+
+    sources = invoker.sources
+
+    gen_dir = "${target_gen_dir}/${target_name}/java_cpp_template/${package_name}"
+    gcc_template_output_pattern = "${gen_dir}/{{source_name_part}}.java"
+
+    outputs = [
+      depfile,
+      gcc_template_output_pattern
+    ]
+
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--include-path", rebase_path(include_path, root_build_dir),
+      "--output", rebase_path(gen_dir, root_build_dir) + "/{{source_name_part}}.java",
+      "--template={{source}}",
+    ]
+
+    if (defined(invoker.defines)) {
+      foreach(def, invoker.defines) {
+        args += ["--defines", def]
+      }
+    }
+  }
+
+  apply_gcc_outputs = get_target_outputs(":${target_name}__apply_gcc")
+  base_gen_dir = get_label_info(":${target_name}__apply_gcc", "target_gen_dir")
+
+  srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+  zip("${target_name}__zip_srcjar") {
+    inputs = apply_gcc_outputs
+    output = srcjar_path
+    base_dir = base_gen_dir
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__zip_srcjar"
+    ]
+  }
+}
+
+# Declare a target for generating Java classes from C++ enums.
+#
+# This target generates Java files from C++ enums using a script.
+#
+# This target will create a single .srcjar. Adding this target to an
+# android_library target's srcjar_deps will make the generated java files be
+# included in that library's final outputs.
+#
+# Variables
+#   sources: list of files to be processed by the script. For each annotated
+#     enum contained in the sources files the script will generate a .java
+#     file with the same name as the name of the enum.
+#
+#   outputs: list of outputs, relative to the output_dir. These paths are
+#     verified at build time by the script. To get the list programmatically run:
+#       python build/android/gyp/java_cpp_enum.py --output_dir=. \
+#         --print_output_only path/to/header/file.h
+#
+# Example
+#   java_cpp_enum("foo_generated_enum") {
+#     sources = [
+#       "src/native_foo_header.h",
+#     ]
+#     outputs = [
+#       "org/chromium/FooEnum.java",
+#     ]
+#   }
+template("java_cpp_enum") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.sources))
+  assert(defined(invoker.outputs))
+
+  action("${target_name}__generate_enum") {
+    sources = invoker.sources
+    script = "//build/android/gyp/java_cpp_enum.py"
+    gen_dir = "${target_gen_dir}/${target_name}/enums"
+    outputs = get_path_info(
+        rebase_path(invoker.outputs, ".", gen_dir), "abspath")
+
+    args = [
+      "--output_dir", rebase_path(gen_dir, root_build_dir),
+    ]
+    foreach(output, rebase_path(outputs, root_build_dir)) {
+      args += ["--assert_file", output]
+    }
+    args += rebase_path(invoker.sources, root_build_dir)
+  }
+
+  generate_enum_outputs = get_target_outputs(":${target_name}__generate_enum")
+  base_gen_dir = get_label_info(":${target_name}__generate_enum",
+                                "target_gen_dir")
+
+  srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+  zip("${target_name}__zip_srcjar") {
+    inputs = generate_enum_outputs
+    output = srcjar_path
+    base_dir = base_gen_dir
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__zip_srcjar"
+    ]
+  }
+}
+
+
+# Declare an Android resources target
+#
+# This creates a resources zip file that will be used when building an Android
+# library or apk and included into a final apk.
+#
+# To include these resources in a library/apk, this target should be listed in
+# the library's deps. A library/apk will also include any resources used by its
+# own dependencies.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Any Android resources
+#     listed in deps will be included by libraries/apks that depend on this
+#     target.
+#   resource_dirs: List of directories containing resources for this target.
+#   android_manifest: AndroidManifest.xml for this target. Defaults to
+#     //build/android/AndroidManifest.xml.
+#   custom_package: java package for generated .java files.
+#   v14_verify_only: If true, don't generate v14/v17 resources and just verify
+#     that the resources are v14-compliant (see
+#     build/android/gyp/generate_v14_compatible_resources.py). Defaults to
+#     false.
+#
+# Example
+#   android_resources("foo_resources") {
+#     deps = [":foo_strings_grd"]
+#     resource_dirs = ["res"]
+#     custom_package = "org.chromium.foo"
+#   }
+template("android_resources") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.resource_dirs))
+  assert(defined(invoker.android_manifest) || defined(invoker.custom_package))
+
+  base_path = "$target_gen_dir/$target_name"
+  zip_path = base_path + ".resources.zip"
+  srcjar_path = base_path + ".srcjar"
+  build_config = base_path + ".build_config"
+
+  write_build_config("${target_name}__build_config") {
+    type = "android_resources"
+    resources_zip = zip_path
+    srcjar = srcjar_path
+    if (defined(invoker.deps)) { deps = invoker.deps }
+    if (defined(invoker.android_manifest)) { android_manifest = invoker.android_manifest }
+    if (defined(invoker.custom_package)) { custom_package = invoker.custom_package }
+  }
+
+  android_manifest = "//build/android/AndroidManifest.xml"
+  if (defined(invoker.android_manifest)) {
+    android_manifest = invoker.android_manifest
+  }
+
+  process_resources("${target_name}__process_resources") {
+    resource_dirs = invoker.resource_dirs
+    if (defined(invoker.custom_package)) {
+      custom_package = invoker.custom_package
+    }
+
+    if (defined(invoker.v14_verify_only)) {
+      v14_verify_only = invoker.v14_verify_only
+    }
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__build_config",
+      ":${target_name}__process_resources",
+    ]
+  }
+}
+
+
+# Declare a target that generates localized strings.xml from a .grd file.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the strings.xml will be included with that target.
+#
+# Variables
+#   deps: Specifies the dependencies of this target.
+#   grd_file: Path to the .grd file to generate strings.xml from.
+#   outputs: Expected grit outputs (see grit rule).
+#
+# Example
+#  java_strings_grd("foo_strings_grd") {
+#    grd_file = "foo_strings.grd"
+#  }
+template("java_strings_grd") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  base_path = "$target_gen_dir/$target_name"
+  resources_zip = base_path + ".resources.zip"
+  build_config = base_path + ".build_config"
+
+  write_build_config("${target_name}__build_config") {
+    type = "android_resources"
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+
+  # Put grit files into this subdirectory of target_gen_dir.
+  extra_output_path = target_name + "_grit_output"
+
+  grit_target_name = "${target_name}__grit"
+  grit_output_dir = "$target_gen_dir/$extra_output_path"
+  grit(grit_target_name) {
+    grit_flags = [
+      "-E", "ANDROID_JAVA_TAGGED_ONLY=false",
+    ]
+    output_dir = grit_output_dir
+    resource_ids = ""
+    source = invoker.grd_file
+    outputs = invoker.outputs
+  }
+
+  # This needs to get outputs from grit's internal target, not the final
+  # source_set.
+  generate_strings_outputs = get_target_outputs(":${grit_target_name}_grit")
+
+  zip("${target_name}__zip") {
+    base_dir = grit_output_dir
+    inputs = generate_strings_outputs
+    output = resources_zip
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__build_config",
+      ":${target_name}__zip",
+    ]
+  }
+}
+
+
+# Declare an Android library target
+#
+# This target creates an Android library containing java code and Android
+# resources.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be added to the javac classpath. Android resources in dependencies
+#     will be used when building this library.
+#   java_files: List of .java files included in this library.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#     will be added to java_files and be included in this library.
+#   chromium_code: If true, extra analysis warning/errors will be enabled.
+#   jar_excluded_patterns: List of patterns of .class files to exclude from the
+#     final jar.
+#   proguard_preprocess: If true, proguard preprocessing will be run. This can
+#     be used to remove unwanted parts of the library.
+#   proguard_config: Path to the proguard config for preprocessing.
+#
+#   DEPRECATED_java_in_dir: Directory containing java files. All .java files in
+#     this directory will be included in the library. This is only supported to
+#     ease the gyp->gn conversion and will be removed in the future.
+#
+# Example
+#   android_library("foo_java") {
+#     java_files = [
+#       "android/org/chromium/foo/Foo.java",
+#       "android/org/chromium/foo/FooInterface.java",
+#       "android/org/chromium/foo/FooService.java",
+#     ]
+#     deps = [
+#       ":bar_java"
+#     ]
+#     srcjar_deps = [
+#       ":foo_generated_enum"
+#     ]
+#     jar_excluded_patterns = [
+#       "*/FooService.class", "*/FooService##*.class"
+#     ]
+#   }
+template("android_library") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.java_files) || defined(invoker.DEPRECATED_java_in_dir))
+  _base_path = "$target_gen_dir/$target_name"
+  _build_config = _base_path + ".build_config"
+  _jar_path = _base_path + ".jar"
+  _dex_path = _base_path + ".dex.jar"
+
+  write_build_config("${target_name}__build_config") {
+    type = "android_library"
+
+    deps = []
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    build_config = _build_config
+    jar_path = _jar_path
+    dex_path = _dex_path
+  }
+
+  _chromium_code = true
+  if (defined(invoker.chromium_code)) {
+    _chromium_code = invoker.chromium_code
+  }
+
+  android_java_library(target_name) {
+    chromium_code = _chromium_code
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    } else {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    }
+    build_config = _build_config
+    jar_path = _jar_path
+    dex_path = _dex_path
+
+    if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+      proguard_preprocess = true
+      proguard_config = invoker.proguard_config
+    }
+
+    jar_excluded_patterns = [
+      "*/R.class", "*/R##*.class",
+      "*/Manifest.class", "*/Manifest##*.class",
+    ]
+    if (defined(invoker.jar_excluded_patterns)) {
+      jar_excluded_patterns += invoker.jar_excluded_patterns
+    }
+
+    if (defined(invoker.srcjar_deps)) {
+      srcjar_deps = invoker.srcjar_deps
+    }
+  }
+}
+
+
+# Declare an Android library target for a prebuilt jar
+#
+# This target creates an Android library containing java code and Android
+# resources.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be added to the javac classpath. Android resources in dependencies
+#     will be used when building this library.
+#   jar_path: Path to the prebuilt jar.
+#   proguard_preprocess: If true, proguard preprocessing will be run. This can
+#     be used to remove unwanted parts of the library.
+#   proguard_config: Path to the proguard config for preprocessing.
+#
+# Example
+#   android_java_prebuilt("foo_java") {
+#     jar_path = "foo.jar"
+#     deps = [
+#       ":foo_resources",
+#       ":bar_java"
+#     ]
+#   }
+template("android_java_prebuilt") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.jar_path))
+  _base_path = "${target_gen_dir}/$target_name"
+  _jar_path = _base_path + ".jar"
+  _dex_path = _base_path + ".dex.jar"
+  _build_config = _base_path + ".build_config"
+
+  write_build_config("${target_name}__build_config") {
+    type = "android_library"
+
+    deps = []
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    build_config = _build_config
+    jar_path = _jar_path
+    dex_path = _dex_path
+  }
+
+  java_prebuilt("${target_name}__process_jar") {
+    if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+      proguard_preprocess = true
+      proguard_config = invoker.proguard_config
+    }
+
+    build_config = _build_config
+    input_jar_path = invoker.jar_path
+    output_jar_path = _jar_path
+  }
+
+  dex("${target_name}__dex") {
+    sources = [_jar_path]
+    output = _dex_path
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__dex",
+    ]
+  }
+}
+
+
+# Declare an Android apk target
+#
+# This target creates an Android APK containing java code, resources, assets,
+# and (possibly) native libraries.
+#
+# Variables
+#   android_manifest: Path to AndroidManifest.xml.
+#   datadeps: List of dependencies needed at runtime. These will be built but
+#     won't change the generated .apk in any way (in fact they may be built
+#     after the .apk is).
+#   deps: List of dependencies. All Android java resources and libraries in the
+#     "transitive closure" of these dependencies will be included in the apk.
+#     Note: this "transitive closure" actually only includes such targets if
+#     they are depended on through android_library or android_resources targets
+#     (and so not through builtin targets like 'action', 'group', etc.).
+#   java_files: List of .java files to include in the apk.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#      will be added to java_files and be included in this apk.
+#   apk_name: Name for final apk.
+#   final_apk_path: Path to final built apk. Default is
+#     $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
+#   native_libs: List of paths to native libraries to include in this apk. If
+#     these libraries depend on other shared_library targets, those
+#     dependencies will also be included in the apk.
+#   testonly: Marks this target as "test-only".
+#
+#   DEPRECATED_java_in_dir: Directory containing java files. All .java files in
+#     this directory will be included in the library. This is only supported to
+#     ease the gyp->gn conversion and will be removed in the future.
+#
+# Example
+#   android_apk("foo_apk") {
+#     android_manifest = "AndroidManifest.xml"
+#     java_files = [
+#       "android/org/chromium/foo/FooApplication.java",
+#       "android/org/chromium/foo/FooActivity.java",
+#     ]
+#     deps = [
+#       ":foo_support_java"
+#       ":foo_resources"
+#     ]
+#     srcjar_deps = [
+#       ":foo_generated_enum"
+#     ]
+#     native_libs = [
+#       native_lib_path
+#     ]
+#   }
+template("android_apk") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  assert(defined(invoker.final_apk_path) || defined(invoker.apk_name))
+  gen_dir = "$target_gen_dir/$target_name"
+  base_path = "$gen_dir/$target_name"
+  build_config = "$base_path.build_config"
+  resources_zip_path = "$base_path.resources.zip"
+  all_resources_zip_path = "$base_path.resources.all.zip"
+  jar_path = "$base_path.jar"
+  final_dex_path = "$gen_dir/classes.dex"
+  _template_name = target_name
+  _final_apk_path = ""
+  if (defined(invoker.final_apk_path)) {
+    _final_apk_path = invoker.final_apk_path
+  } else if (defined(invoker.apk_name)) {
+    _final_apk_path = "$root_build_dir/apks/" + invoker.apk_name + ".apk"
+  }
+  _dist_jar_path_list = process_file_template(
+      [ _final_apk_path ],
+      "$root_build_dir/test.lib.java/{{source_name_part}}.jar"
+      )
+  _dist_jar_path = _dist_jar_path_list[0]
+
+  _native_libs = []
+  if (defined(invoker.native_libs)) {
+    _native_libs = invoker.native_libs
+    _native_libs_dir = base_path + "/libs"
+  }
+
+  _keystore_path = android_default_keystore_path
+  _keystore_name = android_default_keystore_name
+  _keystore_password = android_default_keystore_password
+
+  if (defined(invoker.keystore_path)) {
+    _keystore_path = invoker.keystore_path
+    _keystore_name = invoker.keystore_name
+    _keystore_password = invoker.keystore_password
+  }
+
+  _srcjar_deps = []
+  if (defined(invoker.srcjar_deps)) {
+    _srcjar_deps += invoker.srcjar_deps
+  }
+
+  _rebased_build_config = rebase_path(build_config, root_build_dir)
+
+  write_build_config("${_template_name}__build_config") {
+    type = "android_apk"
+    dex_path = final_dex_path
+    resources_zip = resources_zip_path
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+
+    native_libs = _native_libs
+  }
+
+  final_deps = []
+
+  final_deps += [":${_template_name}__process_resources"]
+  process_resources("${_template_name}__process_resources") {
+    srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+    android_manifest = invoker.android_manifest
+    resource_dirs = ["//build/android/ant/empty/res"]
+    zip_path = resources_zip_path
+    generate_constant_ids = true
+  }
+  _srcjar_deps += [":${_template_name}__process_resources"]
+
+  if (_native_libs != []) {
+    _use_chromium_linker = false
+    _enable_chromium_linker_tests = false
+    _load_library_from_apk = false
+    _native_lib_version_name = ""
+
+    java_cpp_template("${_template_name}__native_libraries_java") {
+      package_name = "org/chromium/base/library_loader"
+      sources = [
+        "//base/android/java/templates/NativeLibraries.template",
+      ]
+      inputs = [
+        build_config,
+      ]
+
+      defines = [
+        "NATIVE_LIBRARIES_LIST=" +
+          "@FileArg($_rebased_build_config:native:java_libraries_list)",
+        "NATIVE_LIBRARIES_VERSION_NUMBER=\"$_native_lib_version_name\"",
+      ]
+      if (_use_chromium_linker) {
+        defines += ["ENABLED_CHROMIUM_LINKER"]
+      }
+      if (_load_library_from_apk) {
+        defines += ["ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE"]
+      }
+      if (_enable_chromium_linker_tests) {
+        defines += ["ENABLE_CHROMIUM_LINKER_TESTS"]
+      }
+    }
+    _srcjar_deps += [ ":${_template_name}__native_libraries_java" ]
+  }
+
+  final_deps += [ ":${_template_name}__java" ]
+  android_java_library("${_template_name}__java") {
+    android_manifest = invoker.android_manifest
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    } else if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    } else {
+      java_files = []
+    }
+    srcjar_deps = _srcjar_deps
+    dex_path = base_path + ".dex.jar"
+  }
+
+  if (_dist_jar_path != "") {
+    # TODO(cjhopman): This is only ever needed to calculate the list of tests to
+    # run. See build/android/pylib/instrumentation/test_jar.py. We should be
+    # able to just do that calculation at build time instead.
+    action("${_template_name}__create_dist_jar") {
+      script = "//build/android/gyp/create_dist_jar.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      inputs = [ build_config ]
+      outputs = [
+        depfile,
+        _dist_jar_path,
+      ]
+      args = [
+        "--depfile", rebase_path(depfile, root_build_dir),
+        "--output", rebase_path(_dist_jar_path, root_build_dir),
+        "--inputs=@FileArg($_rebased_build_config:dist_jar:dependency_jars)",
+      ]
+      inputs += [ jar_path ]
+      _rebased_jar_path = rebase_path([ jar_path ], root_build_dir)
+      args += [
+        "--inputs=$_rebased_jar_path",
+      ]
+    }
+  }
+
+  final_deps += [":${_template_name}__final_dex"]
+  dex("${_template_name}__final_dex") {
+    deps = [ ":${_template_name}__java" ]
+    sources = [ jar_path ]
+    inputs = [ build_config ]
+    output = final_dex_path
+    dex_arg_key = "${_rebased_build_config}:apk_dex:dependency_dex_files"
+    args = [ "--inputs=@FileArg($dex_arg_key)" ]
+  }
+
+  if (_native_libs != []) {
+    copy_ex("${_template_name}__prepare_native") {
+      clear_dir = true
+      inputs = [
+        build_config
+      ]
+      dest = "$_native_libs_dir/$android_app_abi"
+      args = [
+        "--files=@FileArg(${_rebased_build_config}:native:libraries)",
+      ]
+      if (is_debug) {
+        rebased_gdbserver = rebase_path(android_gdbserver, root_build_dir)
+        args += [
+          "--files=[\"$rebased_gdbserver\"]"
+        ]
+      }
+    }
+  }
+
+  final_deps += [":${_template_name}__create"]
+  create_apk("${_template_name}__create") {
+    apk_path = _final_apk_path
+    android_manifest = invoker.android_manifest
+    resources_zip = all_resources_zip_path
+    dex_path = final_dex_path
+
+    if (defined(invoker.asset_location)) {
+      asset_location = invoker.asset_location
+    }
+
+    keystore_name = _keystore_name
+    keystore_path = _keystore_path
+    keystore_password = _keystore_password
+
+    if (_native_libs != []) {
+      native_libs_dir = _native_libs_dir
+      deps = [":${_template_name}__prepare_native"]
+    }
+  }
+
+  group(target_name) {
+    deps = final_deps
+    if (defined(invoker.datadeps)) {
+      # TODO(cjhopman): Fix this when group datadeps works.
+      deps += invoker.datadeps
+    }
+  }
+}
+
+
+# Declare an Android gtest apk
+#
+# This target creates an Android apk for running gtest-based unittests.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. These will be passed to
+#     the underlying android_apk invocation and should include the java and
+#     resource dependencies of the apk.
+#   unittests_dep: This should be the label of the gtest native target. This
+#     target must be defined previously in the same file.
+#   unittests_binary: The name of the binary produced by the unittests_dep
+#     target, relative to the root build directory. If unspecified, it assumes
+#     the name of the unittests_dep target (which will be correct unless that
+#     target specifies an "output_name").
+#     TODO(brettw) make this automatic by allowing get_target_outputs to
+#     support executables.
+#
+# Example
+#   unittest_apk("foo_unittests_apk") {
+#     deps = [ ":foo_java", ":foo_resources" ]
+#     unittests_dep = ":foo_unittests"
+#   }
+template("unittest_apk") {
+  testonly = true
+
+  assert(defined(invoker.unittests_dep), "Need unittests_dep for $target_name")
+
+  test_suite_name = get_label_info(invoker.unittests_dep, "name")
+
+  if (defined(invoker.unittests_binary)) {
+    unittests_binary = root_out_dir + "/" + invoker.unittests_binary
+  } else {
+    unittests_binary = root_out_dir + "/lib.stripped/lib" + test_suite_name + ".so"
+  }
+
+  android_apk(target_name) {
+    _apk_name = test_suite_name
+    final_apk_path = "$root_build_dir/${_apk_name}_apk/${_apk_name}-debug.apk"
+    java_files = [
+      "//testing/android/java/src/org/chromium/native_test/ChromeNativeTestActivity.java"
+    ]
+    android_manifest = "//testing/android/java/AndroidManifest.xml"
+    unittests_outputs = [ unittests_binary ]
+    native_libs = [unittests_outputs[0]]
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    datadeps = [
+      "//tools/android/md5sum",
+    ]
+  }
+}
+
+# Generate .java files from .aidl files.
+#
+# This target will store the .java files in a srcjar and should be included in
+# an android_library or android_apk's srcjar_deps.
+#
+# Variables
+#   sources: Paths to .aidl files to compile.
+#   import_include: Path to directory containing .java files imported by the
+#     .aidl files.
+#   interface_file: Preprocessed aidl file to import.
+#
+# Example
+#   android_aidl("foo_aidl") {
+#     import_include = "java/src"
+#     sources = [
+#       "java/src/com/foo/bar/FooBarService.aidl",
+#       "java/src/com/foo/bar/FooBarServiceCallback.aidl",
+#     ]
+#   }
+template("android_aidl") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+  aidl_path = "${android_sdk_build_tools}/aidl"
+  framework_aidl = "$android_sdk/framework.aidl"
+
+  action(target_name) {
+    script = "//build/android/gyp/aidl.py"
+    sources = invoker.sources
+
+    imports = [ framework_aidl ]
+    if (defined(invoker.interface_file)) {
+      assert(invoker.interface_file != "")
+      imports += [ invoker.interface_file ]
+    }
+
+    inputs = [
+      aidl_path,
+    ] + imports
+
+    depfile = "${target_gen_dir}/${target_name}.d"
+    outputs = [
+      depfile,
+      srcjar_path
+    ]
+    rebased_imports = rebase_path(imports, root_build_dir)
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--aidl-path", rebase_path(aidl_path, root_build_dir),
+      "--imports=$rebased_imports",
+      "--srcjar", rebase_path(srcjar_path, root_build_dir),
+    ]
+    if (defined(invoker.import_include) && invoker.import_include != "") {
+      # TODO(cjhopman): aidl supports creating a depfile. We should be able to
+      # switch to constructing a depfile for the overall action from that
+      # instead of having all the .java files in the include paths as inputs.
+      rebased_import_includes = rebase_path(
+          [invoker.import_include], root_build_dir)
+      args += [ "--includes=$rebased_import_includes" ]
+
+      _java_files_build_rel = exec_script(
+          "//build/android/gyp/find.py",
+          rebase_path([invoker.import_include], root_build_dir),
+          "list lines"
+          )
+      _java_files = rebase_path(_java_files_build_rel, ".", root_build_dir)
+      inputs += _java_files
+    }
+    args += rebase_path(sources, root_build_dir)
+  }
+}
+
+# Creates a dist directory for a native executable.
+#
+# Running a native executable on a device requires all the shared library
+# dependencies of that executable. To make it easier to install and run such an
+# executable, this will create a directory containing the native exe and all
+# its library dependencies.
+#
+# Note: It's usually better to package things as an APK than as a native
+# executable.
+#
+# Variables
+#   dist_dir: Directory for the exe and libraries. Everything in this directory
+#     will be deleted before copying in the exe and libraries.
+#   binary: Path to (stripped) executable.
+#
+# Example
+#   create_native_executable_dist("foo_dist") {
+#     dist_dir = "$root_build_dir/foo_dist"
+#     binary = "$root_build_dir/exe.stripped/foo"
+#   }
+template("create_native_executable_dist") {
+  if (defined(invoker.testonly)) { testonly = invoker.testonly }
+
+  dist_dir = invoker.dist_dir
+  binary = invoker.binary
+  final_deps = []
+  template_name = target_name
+
+  libraries_list = "${target_gen_dir}/${template_name}_library_dependencies.list"
+
+  # TODO(gyp)
+  #'dependencies': [
+  #'<(DEPTH)/build/android/setup.gyp:copy_system_libraries',
+  #],
+
+  stripped_libraries_dir = "$root_build_dir/lib.stripped"
+  final_deps += [ ":${template_name}__find_library_dependencies" ]
+  action("${template_name}__find_library_dependencies") {
+    script = "//build/android/gyp/write_ordered_libraries.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = [
+      binary,
+      android_readelf,
+    ]
+    outputs = [
+      depfile,
+      libraries_list,
+    ]
+    rebased_binaries = rebase_path([ binary ], root_build_dir)
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--input-libraries=$rebased_binaries",
+      "--libraries-dir", rebase_path(stripped_libraries_dir, root_build_dir),
+      "--output", rebase_path(libraries_list, root_build_dir),
+      "--readelf", rebase_path(android_readelf, root_build_dir),
+    ]
+  }
+
+  final_deps += [ ":${template_name}__copy_libraries_and_exe" ]
+  copy_ex("${template_name}__copy_libraries_and_exe") {
+    clear_dir = true
+    inputs = [
+      binary,
+      libraries_list
+    ]
+    dest = dist_dir
+    rebased_binaries_list = rebase_path([ binary ], root_build_dir)
+    rebased_libraries_list = rebase_path(libraries_list, root_build_dir)
+    args = [
+      "--files=$rebased_binaries_list",
+      "--files=@FileArg($rebased_libraries_list:libraries)",
+    ]
+  }
+
+  group(target_name) {
+    deps = final_deps
+  }
+}
+
+
+# Compile a protocol buffer to java.
+#
+# This generates java files from protocol buffers and creates an Android library
+# containing the classes.
+#
+# Variables
+#   sources: Paths to .proto files to compile.
+#   proto_path: Root directory of .proto files.
+#
+# Example:
+#  proto_java_library("foo_proto_java") {
+#    proto_path = [ "src/foo" ]
+#    sources = [ "$proto_path/foo.proto" ]
+#  }
+template("proto_java_library") {
+  _protoc_dep = "//third_party/android_protobuf:android_protoc($host_toolchain)"
+  _protoc_out_dir = get_label_info(_protoc_dep, "root_out_dir")
+  _protoc_bin = "$_protoc_out_dir/android_protoc"
+  _proto_path = invoker.proto_path
+
+  _template_name = target_name
+
+  action("${_template_name}__protoc_java") {
+    srcjar_path = "$target_gen_dir/$target_name.srcjar"
+    script = "//build/protoc_java.py"
+    deps = [
+      _protoc_dep
+    ]
+    sources = invoker.sources
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      srcjar_path,
+    ]
+    args = [
+      "--depfile", rebase_path(depfile, root_build_dir),
+      "--protoc", rebase_path(_protoc_bin, root_build_dir),
+      "--proto-path", rebase_path(_proto_path, root_build_dir),
+      "--srcjar", rebase_path(srcjar_path, root_build_dir),
+    ] + rebase_path(sources, root_build_dir)
+  }
+
+  android_library(target_name) {
+    java_files = []
+    srcjar_deps = [ ":${_template_name}__protoc_java" ]
+    deps = [
+      "//third_party/android_protobuf:protobuf_nano_javalib",
+    ]
+  }
+}
diff --git a/build/config/arm.gni b/build/config/arm.gni
new file mode 100644
index 0000000..59de668
--- /dev/null
+++ b/build/config/arm.gni
@@ -0,0 +1,66 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (cpu_arch == "arm") {
+  declare_args() {
+    # Version of the ARM processor when compiling on ARM. Ignored on non-ARM
+    # platforms.
+    arm_version = 7
+
+    # The ARM floating point mode. This is either the string "hard", "soft", or
+    # "softfp". An empty string means to use the default one for the
+    # arm_version.
+    arm_float_abi = ""
+
+    # The ARM variant-specific tuning mode. This will be a string like "armv6"
+    # or "cortex-a15". An empty string means to use the default for the
+    # arm_version.
+    arm_tune = ""
+
+    # Whether to use the neon FPU instruction set or not.
+    arm_use_neon = true
+  }
+
+  assert(arm_float_abi == "" ||
+         arm_float_abi == "hard" ||
+         arm_float_abi == "soft" ||
+         arm_float_abi == "softfp")
+
+  if (is_android) {
+    arm_use_neon = false
+  }
+  arm_optionally_use_neon = true
+
+  if (arm_version == 6) {
+    arm_arch = "armv6"
+    if (arm_tune != "") {
+      arm_tune = ""
+    }
+    if (arm_float_abi == "") {
+      arm_float_abi = "softfp"
+    }
+    arm_fpu = "vfp"
+    # Thumb is a reduced instruction set available on some ARM processors
+    # that gives increased code density.
+    arm_use_thumb = false
+
+  } else if (arm_version == 7) {
+    arm_arch = "armv7-a"
+    if (arm_tune == "") {
+      arm_tune = "generic-armv7-a"
+    }
+
+    if (arm_float_abi == "") {
+      arm_float_abi = "softfp"
+    }
+
+    arm_use_thumb = true
+
+    if (arm_use_neon) {
+      arm_fpu = "neon"
+    } else {
+      arm_fpu = "vfpv3-d16"
+    }
+  }
+}
diff --git a/build/config/clang/BUILD.gn b/build/config/clang/BUILD.gn
new file mode 100644
index 0000000..00fb9e0
--- /dev/null
+++ b/build/config/clang/BUILD.gn
@@ -0,0 +1,40 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("clang.gni")
+
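+# The plugin is loaded by forwarding arguments to the Clang frontend via
+# -Xclang. The resulting compiler invocation roughly contains (illustrative,
+# Linux paths):
+#   -Xclang -load -Xclang .../libFindBadConstructs.so
+#   -Xclang -add-plugin -Xclang find-bad-constructs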
+config("find_bad_constructs") {
+  if (clang_use_chrome_plugins) {
+    cflags = [
+      "-Xclang", "-load",
+      "-Xclang",
+    ]
+
+    if (is_mac || is_ios) {
+      cflags += [ rebase_path(
+        "//third_party/llvm-build/Release+Asserts/lib/libFindBadConstructs.dylib",
+        root_build_dir) ]
+    } else if (is_linux) {
+      cflags += [ rebase_path(
+        "//third_party/llvm-build/Release+Asserts/lib/libFindBadConstructs.so",
+        root_build_dir) ]
+    }
+
+    cflags += [
+      "-Xclang", "-add-plugin",
+      "-Xclang", "find-bad-constructs",
+    ]
+  }
+}
+
+# Enables some extra Clang-specific warnings. Some third-party code won't
+# compile with these, so such targets may want to remove this config.
+config("extra_warnings") {
+  cflags = [
+    "-Wheader-hygiene",
+
+    # Warns when a const char[] is converted to bool.
+    "-Wstring-conversion",
+  ]
+}
diff --git a/build/config/clang/clang.gni b/build/config/clang/clang.gni
new file mode 100644
index 0000000..3b2d76f
--- /dev/null
+++ b/build/config/clang/clang.gni
@@ -0,0 +1,9 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Indicates if the build should use the Chrome-specific plugins for enforcing
+  # coding guidelines, etc. Only used when compiling with Clang.
+  clang_use_chrome_plugins = is_clang
+}
diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn
new file mode 100644
index 0000000..42c254c
--- /dev/null
+++ b/build/config/compiler/BUILD.gn
@@ -0,0 +1,946 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+if (cpu_arch == "arm") {
+  import("//build/config/arm.gni")
+}
+if (is_posix) {
+  import("//build/config/gcc/gcc_version.gni")
+}
+
+declare_args() {
+  # Normally, Android builds are lightly optimized, even for debug builds, to
+  # keep binary size down. Setting this flag to true disables such
+  # optimization.
+
+  # Whether to use the binary binutils checked into third_party/binutils.
+  # These are not multi-arch so cannot be used except on x86 and x86-64 (the
+  # only two architectures that are currently checked in). Turn this off when
+  # you are using a custom toolchain and need to control -B in cflags.
+  linux_use_bundled_binutils = is_linux && cpu_arch == "x64"
+}
+
+use_gold = is_linux && cpu_arch == "x64"
+
+if (!is_win) {
+  # linux_use_debug_fission: whether to use split DWARF debug info
+  # files. This can reduce link time significantly, but is incompatible
+  # with some utilities such as icecc and ccache. Requires gold and
+  # gcc >= 4.8 or clang.
+  # http://gcc.gnu.org/wiki/DebugFission
+  use_debug_fission = use_gold && linux_use_bundled_binutils
+}
+
+# default_include_dirs ---------------------------------------------------------
+#
+# This is a separate config so that third_party code (which would not use the
+# source root and might have conflicting versions of some headers) can remove
+# this and specify their own include paths.
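+#
+# For example, such a target could do (hypothetical target):
+#   configs -= [ "//build/config/compiler:default_include_dirs" ]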
+config("default_include_dirs") {
+  include_dirs = [
+    "//",
+    root_gen_dir,
+  ]
+}
+
+# compiler ---------------------------------------------------------------------
+#
+# Base compiler configuration.
+#
+# See also "runtime_library" below for related stuff and a discussion about
+# where stuff should go. Put warning-related stuff in the "warnings" config.
+
+config("compiler") {
+  cflags = []
+  cflags_c = []
+  cflags_cc = []
+  ldflags = []
+  defines = []
+
+  # In general, Windows is totally different, but all the other builds share
+  # some common GCC configuration. This section sets up Windows and the common
+  # GCC flags, and then we handle the other non-Windows platforms specifically
+  # below.
+  if (is_win) {
+    # Windows compiler flags setup.
+    # -----------------------------
+    cflags += [
+      "/Gy",  # Enable function-level linking.
+      "/GS",  # Enable buffer security checking.
+      "/FS",  # Preserve previous PDB behavior.
+    ]
+    if (is_component_build) {
+      cflags += [
+        "/EHsc",  # Assume C functions can't throw exceptions and don't catch
+                  # structured exceptions (only C++ ones).
+      ]
+    }
+  } else {
+    # Common GCC compiler flags setup.
+    # --------------------------------
+    cflags += [
+      "-fno-strict-aliasing",  # See http://crbug.com/32204
+    ]
+    cflags_cc += [
+      "-fno-threadsafe-statics",
+      # Not exporting C++ inline functions can generally be applied anywhere
+      # so we do so here. Normal function visibility is controlled by
+      # //build/config/gcc:symbol_visibility_hidden.
+      "-fvisibility-inlines-hidden",
+    ]
+
+    # Stack protection.
+    if (is_mac) {
+      cflags += [ "-fstack-protector-all" ]
+    } else if (is_linux) {
+      cflags += [ "-fstack-protector", "--param=ssp-buffer-size=4" ]
+    }
+
+    # Linker warnings.
+    if (!(is_chromeos && cpu_arch == "arm") && !is_mac) {
+      # TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
+      ldflags += [ "-Wl,--fatal-warnings" ]
+    }
+  }
+
+  if (is_clang && is_debug) {
+    # Allow comparing the address of references and 'this' against 0
+    # in debug builds. Technically, these can never be null in
+    # well-defined C/C++ and Clang can optimize such checks away in
+    # release builds, but they may be used in asserts in debug builds.
+    cflags_cc += [
+      "-Wno-undefined-bool-conversion",
+      "-Wno-tautological-undefined-compare",
+    ]
+  }
+
+  if (is_clang && !is_win) {
+    # This is here so that all files get recompiled after a clang roll and
+    # when turning clang on or off. (Defines are passed via the command line,
+    # and build systems rebuild things when their command line changes.)
+    # Nothing should ever read this define.
+    defines += [
+      "CR_CLANG_REVISION=" +
+          exec_script(
+              "//tools/clang/scripts/posix-print-revision.py", [], "value")
+    ]
+  }
+
+  # Mac-specific compiler flags setup.
+  # ----------------------------------
+  if (is_mac || is_ios) {
+    # These flags are shared between the C compiler and linker.
+    common_mac_flags = []
+
+    # CPU architecture.
+    if (cpu_arch == "x64") {
+      common_mac_flags += [ "-arch", "x86_64" ]
+    } else if (cpu_arch == "x86") {
+      common_mac_flags += [ "-arch", "i386" ]
+    }
+
+    cflags += common_mac_flags
+
+    # Without this, the constructors and destructors of a C++ object inside
+    # an Objective C struct won't be called, which is very bad.
+    cflags_objcc = [ "-fobjc-call-cxx-cdtors", ]
+
+    cflags_c += [ "-std=c99" ]
+    cflags_cc += [ "-std=gnu++11" ]
+
+    ldflags += common_mac_flags
+  } else if (is_posix) {
+    # Non-Mac Posix compiler flags setup.
+    # -----------------------------------
+    if (gcc_version >= 48) {
+      cflags_cc += [
+        "-std=gnu++11",
+      ]
+    }
+
+    # CPU architecture. We may or may not be doing a cross compile now, so for
+    # simplicity we always explicitly set the architecture.
+    if (cpu_arch == "x64") {
+      cflags += [ "-m64", "-march=x86-64", ]
+      ldflags += [ "-m64" ]
+    } else if (cpu_arch == "x86") {
+      cflags += [ "-m32" ]
+      ldflags += [ "-m32" ]
+    } else if (cpu_arch == "arm") {
+      # Don't set the compiler flags for the WebView build. These will come
+      # from the Android build system.
+      if (!is_android_webview_build) {
+        cflags += [
+          "-march=$arm_arch",
+          "-mfloat-abi=$arm_float_abi",
+        ]
+        if (arm_tune != "") {
+          cflags += [ "-mtune=$arm_tune" ]
+        }
+        if (arm_use_thumb) {
+          cflags += [ "-mthumb" ]
+          if (is_android && !is_clang) {  # Clang doesn't support this option.
+            cflags += [ "-mthumb-interwork" ]
+          }
+        }
+        if (!is_clang) {
+          # Clang doesn't support these flags.
+          cflags += [
+            # The tree-sra optimization (scalar replacement for
+            # aggregates enabling subsequent optimizations) leads to
+            # invalid code generation when using the Android NDK's
+            # compiler (r5-r7). This can be verified using
+            # webkit_unit_tests' WTF.Checked_int8_t test.
+            "-fno-tree-sra",
+            # The following option is disabled to improve binary
+            # size and performance in gcc 4.9.
+            "-fno-caller-saves",
+          ]
+        }
+      }
+    }
+
+    defines += [ "_FILE_OFFSET_BITS=64" ]
+
+    # Omit unwind support in official builds to save space. We can use breakpad
+    # for these builds.
+    if (is_chrome_branded && is_official_build) {
+      cflags += [
+        "-fno-unwind-tables",
+        "-fno-asynchronous-unwind-tables",
+      ]
+    } else {
+      cflags += [ "-funwind-tables" ]
+    }
+  }
+
+  # Linux/Android common flags setup.
+  # ---------------------------------
+  if (is_linux || is_android) {
+    cflags += [
+      "-fPIC",
+      "-pipe",  # Use pipes for communicating between sub-processes. Faster.
+    ]
+
+    ldflags += [
+      "-fPIC",
+      "-Wl,-z,noexecstack",
+      "-Wl,-z,now",
+      "-Wl,-z,relro",
+    ]
+  }
+
+  # Linux-specific compiler flags setup.
+  # ------------------------------------
+  if (is_linux) {
+    cflags += [ "-pthread" ]
+    ldflags += [
+      "-pthread",
+    ]
+  }
+  if (use_gold) {
+    # Use gold for linking on 64-bit Linux only (on 32-bit it runs out of
+    # address space, and it doesn't support cross-compiling).
+    gold_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+                            root_build_dir)
+    ldflags += [
+      "-B$gold_path",
+
+      # Newer gccs and clangs support -fuse-ld, use the flag to force gold
+      # selection.
+      # gcc -- http://gcc.gnu.org/onlinedocs/gcc-4.8.0/gcc/Optimize-Options.html
+      "-fuse-ld=gold",
+
+      # There seems to be a conflict between --icf and -pie in gold which can
+      # generate crashy binaries. As a security measure, -pie takes
+      # precedence for now.
+      # TODO(brettw) common.gypi has this only for target toolset.
+      #"-Wl,--icf=safe",
+      "-Wl,--icf=none",
+
+      # Experimentation found that using four linking threads
+      # saved ~20% of link time.
+      # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
+      # Only apply this to the target linker, since the host
+      # linker might not be gold, but isn't used much anyway.
+      # TODO(raymes): Disable threading because gold is frequently
+      # crashing on the bots: crbug.com/161942.
+      #"-Wl,--threads",
+      #"-Wl,--thread-count=4",
+    ]
+  }
+
+  if (linux_use_bundled_binutils) {
+    binutils_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+                                root_build_dir)
+    cflags += [ "-B$binutils_path" ]
+  }
+
+  # Clang-specific compiler flags setup.
+  # ------------------------------------
+  if (is_clang) {
+    cflags += [
+      "-fcolor-diagnostics",
+    ]
+    cflags_cc += [
+      "-std=gnu++11",
+    ]
+  }
+
+  # Android-specific flags setup.
+  # -----------------------------
+  if (is_android) {
+    cflags += [
+      "-ffunction-sections",
+      "-funwind-tables",
+      "-fno-short-enums",
+    ]
+    if (!is_clang) {
+      # Clang doesn't support these flags.
+      cflags += [
+        "-finline-limit=64",
+      ]
+    }
+    if (is_android_webview_build) {
+      # Android predefines this as 1; undefine it here so Chromium can redefine
+      # it later to be 2 for chromium code and unset for third party code. This
+      # works because cflags are added before defines.
+      # TODO(brettw) the above comment seems incorrect. We specify defines
+      # before cflags on our compiler command lines.
+      cflags += [ "-U_FORTIFY_SOURCE" ]
+    }
+
+    if (is_asan) {
+      # Android build relies on -Wl,--gc-sections removing unreachable code.
+      # ASan instrumentation for globals inhibits this and results in a library
+      # with unresolvable relocations.
+      # TODO(eugenis): find a way to reenable this.
+      cflags += [ "-mllvm -asan-globals=0" ]
+    }
+
+    defines += [ "ANDROID" ]
+    if (!is_android_webview_build) {
+      # The NDK has these things, but doesn't define the constants
+      # to say that it does. Define them here instead.
+      defines += [ "HAVE_SYS_UIO_H" ]
+    }
+
+    # Use gold for Android for most CPU architectures.
+    if (cpu_arch == "x86" || cpu_arch == "x64" || cpu_arch == "arm") {
+      ldflags += [ "-fuse-ld=gold" ]
+      if (is_clang) {
+        # Let clang find the ld.gold in the NDK.
+        ldflags += [ "--gcc-toolchain=" + rebase_path(android_toolchain_root,
+                                                      root_build_dir) ]
+      }
+    }
+
+    ldflags += [
+      "-Wl,--no-undefined",
+      # Don't export symbols from statically linked libraries.
+      "-Wl,--exclude-libs=ALL",
+    ]
+    if (cpu_arch == "arm") {
+      ldflags += [
+        # Enable identical code folding to reduce size.
+        "-Wl,--icf=safe",
+      ]
+    }
+
+    if (is_clang) {
+      if (cpu_arch == "arm") {
+        cflags += [
+          "-target arm-linux-androideabi",
+        ]
+        ldflags += [ "-target arm-linux-androideabi" ]
+      } else if (cpu_arch == "x86") {
+        cflags += [ "-target x86-linux-androideabi" ]
+        ldflags += [ "-target x86-linux-androideabi" ]
+      }
+    }
+  }
+}
+
+config("compiler_arm_fpu") {
+  if (cpu_arch == "arm" && !is_android_webview_build) {
+    cflags = [
+      "-mfpu=$arm_fpu",
+    ]
+  }
+}
+
+# runtime_library -------------------------------------------------------------
+#
+# Sets the runtime library and associated options.
+#
+# How do you determine what should go in here vs. "compiler" above? Consider
+# whether a target might choose to use a different runtime library (ignore for
+# a moment whether this is possible or reasonable on your system). If such a
+# target would want to change or remove your option, put it in the
+# runtime_library config. If a target wants the option regardless, put it in
+# the compiler config.
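+#
+# For example (hypothetical), a target supplying its own runtime library could
+# remove this config:
+#   configs -= [ "//build/config/compiler:runtime_library" ]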
+
+config("runtime_library") {
+  cflags = []
+  defines = []
+  ldflags = []
+  lib_dirs = []
+  libs = []
+
+  if (is_component_build) {
+    # Component mode: dynamic CRT.
+    defines += [ "COMPONENT_BUILD" ]
+    if (is_win) {
+      # Since the library is shared, it requires exceptions or will give errors
+      # about things not matching, so keep exceptions on.
+      if (is_debug) {
+        cflags += [ "/MDd" ]
+      } else {
+        cflags += [ "/MD" ]
+      }
+    }
+  } else {
+    # Static CRT.
+    if (is_win) {
+      # We don't use exceptions, and when we link statically we can just get
+      # rid of them entirely.
+      defines += [ "_HAS_EXCEPTIONS=0" ]
+      if (is_debug) {
+        cflags += [ "/MTd" ]
+      } else {
+        cflags += [ "/MT" ]
+      }
+    }
+  }
+
+  if (is_win) {
+    defines += [
+      "__STD_C",
+      "__STDC_CONSTANT_MACROS",
+      "__STDC_FORMAT_MACROS",
+      "_CRT_RAND_S",
+      "_CRT_SECURE_NO_DEPRECATE",
+      "_SCL_SECURE_NO_DEPRECATE",
+    ]
+  }
+
+  # Stlport setup. Android uses a different (smaller) version of the STL.
+  if (is_android) {
+    if (is_clang) {
+      # Work around incompatibilities between bionic and clang headers.
+      defines += [
+        "__compiler_offsetof=__builtin_offsetof",
+        "nan=__builtin_nan",
+      ]
+    }
+
+    defines += [
+      "USE_STLPORT=1",
+      "_STLP_USE_PTR_SPECIALIZATIONS=1",
+      "__GNU_SOURCE=1",  # Necessary for clone().
+    ]
+
+    ldflags += [
+      "-Wl,--warn-shared-textrel",
+      "-nostdlib",
+    ]
+
+    # NOTE: The stlport header include paths below are specified in cflags
+    # rather than include_dirs because they need to come after include_dirs.
+    # Think of them like system headers, but don't use '-isystem' because the
+    # arm-linux-androideabi-4.4.3 toolchain (circa Gingerbread) will exhibit
+    # strange errors. The include ordering here is important; change with
+    # caution.
+    if (use_system_stlport) {
+      cflags += [
+        # For libstdc++/include, which is used by stlport.
+        "-I" + rebase_path("$android_src/bionic", root_build_dir),
+        "-I" + rebase_path("$android_src/external/stlport/stlport",
+                           root_build_dir),
+      ]
+      libs += [
+        "stlport",
+      ]
+    } else {
+      android_stlport_root = "$android_ndk_root/sources/cxx-stl/stlport"
+
+      cflags += [
+        "-isystem" + rebase_path("$android_stlport_root/stlport",
+                                 root_build_dir)
+      ]
+      lib_dirs += [ "$android_stlport_root/libs/$android_app_abi" ]
+
+      if (component_mode == "shared_library") {
+        libs += [ "stlport_shared" ]
+      } else {
+        libs += [ "stlport_static" ]
+      }
+    }
+
+    if (cpu_arch == "mipsel") {
+      libs += [
+        # The ld linker is used for mips Android, and ld does not accept a
+        # library absolute path prefixed by "-l". Since libgcc does not exist
+        # in the mips sysroot, the proper library will be linked by name.
+        # TODO(gordanac): Remove once gold linker is used for mips Android.
+        "gcc",
+      ]
+    } else {
+      libs += [
+        # Manually link the libgcc.a that the cross compiler uses. This is
+        # absolute because the linker will look inside the sysroot if it's not.
+        rebase_path(android_libgcc_file),
+      ]
+    }
+
+    libs += [
+      "c",
+      "dl",
+      "m",
+    ]
+
+  }
+}
+
+# chromium_code ---------------------------------------------------------------
+#
+# Toggles between higher and lower warnings for code that is (or isn't)
+# part of Chromium.
+
+config("chromium_code") {
+  if (is_win) {
+    cflags = [
+      "/W4",  # Warning level 4.
+    ]
+  } else {
+    cflags = [
+      "-Wall",
+      "-Wextra",
+
+      # GCC turns on -Wsign-compare for C++ under -Wall, but clang doesn't,
+      # so we specify it explicitly.
+      # TODO(fischman): remove this if http://llvm.org/PR10448 obsoletes it.
+      # http://code.google.com/p/chromium/issues/detail?id=90453
+      "-Wsign-compare",
+    ]
+
+    # In Chromium code, we define __STDC_foo_MACROS in order to get the
+    # C99 macros on Mac and Linux.
+    defines = [
+      "__STDC_CONSTANT_MACROS",
+      "__STDC_FORMAT_MACROS",
+    ]
+  }
+}
+config("no_chromium_code") {
+  cflags = []
+  cflags_cc = []
+  defines = []
+
+  if (is_win) {
+    cflags += [
+      "/W3",  # Warning level 3.
+      "/wd4800",  # Disable warning when forcing value to bool.
+    ]
+    defines += [
+      "_CRT_NONSTDC_NO_WARNINGS",
+      "_CRT_NONSTDC_NO_DEPRECATE",
+    ]
+  }
+
+  if (is_linux) {
+    # Don't warn about ignoring the return value from e.g. close(). This is
+    # off by default in some gccs but on by default in others. BSD systems do
+    # not support this option, since they are usually using gcc 4.2.1, which
+    # does not have this flag yet.
+    cflags += [ "-Wno-unused-result" ]
+  }
+
+  if (is_linux || is_android) {
+    cflags += [
+      # Don't warn about printf format problems. This is off by default in gcc
+      # but on in Ubuntu's gcc(!).
+      "-Wno-format",
+    ]
+    cflags_cc += [
+      # Don't warn about hash_map in third-party code.
+      "-Wno-deprecated",
+    ]
+  }
+
+  if (is_android_webview_build) {
+    # There is a class of warning which:
+    #  1) Android always enables and also treats as errors
+    #  2) Chromium ignores in third party code
+    # So we suppress those warnings again when building with the Android
+    # build system.
+    cflags += [
+      "-Wno-address",
+      "-Wno-format-security",
+      "-Wno-return-type",
+      "-Wno-sequence-point",
+    ]
+    cflags_cc += [ "-Wno-non-virtual-dtor" ]
+  }
+}
+
+# rtti ------------------------------------------------------------------------
+#
+# Allows turning Run-Time Type Identification on or off.
+
+config("rtti") {
+  if (is_win) {
+    cflags_cc = [ "/GR" ]
+  }
+}
+config("no_rtti") {
+  if (is_win) {
+    cflags_cc = [ "/GR-" ]
+  } else {
+    cflags_cc = [ "-fno-rtti" ]
+  }
+}
+
+# Warnings ---------------------------------------------------------------------
+#
+# This is where we disable various warnings that we've decided aren't
+# worthwhile, and enable special warnings.
+
+config("default_warnings") {
+  if (is_win) {
+    cflags = [
+      "/WX",      # Treat warnings as errors.
+
+      # Warnings permanently disabled:
+
+      # TODO(GYP) The GYP build doesn't have this globally enabled but disabled
+      # for a bunch of individual targets. Re-enable this globally when those
+      # targets are fixed.
+      "/wd4018",  # Comparing signed and unsigned values.
+
+      # C4127: conditional expression is constant
+      # This warning can in theory catch dead code and other problems, but
+      # triggers in far too many desirable cases where the conditional
+      # expression is either set by macros or corresponds to some legitimate
+      # compile-time constant expression (due to constant template args,
+      # conditionals comparing the sizes of different types, etc.).  Some of
+      # these can be worked around, but it's not worth it.
+      "/wd4127",
+
+      # C4251: 'identifier' : class 'type' needs to have dll-interface to be
+      #        used by clients of class 'type2'
+      # This is necessary for the shared library build.
+      "/wd4251",
+
+      # C4351: new behavior: elements of array 'array' will be default
+      #        initialized
+      # This is a silly "warning" that basically just alerts you that the
+      # compiler is going to actually follow the language spec like it's
+      # supposed to, instead of not following it like old buggy versions did.
+      # There's absolutely no reason to turn this on.
+      "/wd4351",
+
+      # C4355: 'this': used in base member initializer list
+      # It's commonly useful to pass |this| to objects in a class' initializer
+      # list.  While this warning can catch real bugs, most of the time the
+      # constructors in question don't attempt to call methods on the passed-in
+      # pointer (until later), and annotating every legit usage of this is
+      # simply more hassle than the warning is worth.
+      "/wd4355",
+
+      # C4503: 'identifier': decorated name length exceeded, name was
+      #        truncated
+      # This only means that some long error messages might have truncated
+      # identifiers in the presence of lots of templates.  It has no effect on
+      # program correctness and there's no real reason to waste time trying to
+      # prevent it.
+      "/wd4503",
+
+      # C4611: interaction between 'function' and C++ object destruction is
+      #        non-portable
+      # This warning is unavoidable when using e.g. setjmp/longjmp.  MSDN
+      # suggests using exceptions instead of setjmp/longjmp for C++, but
+      # Chromium code compiles without exception support.  We therefore have to
+      # use setjmp/longjmp for e.g. JPEG decode error handling, which means we
+      # have to turn off this warning (and be careful about how object
+      # destruction happens in such cases).
+      "/wd4611",
+
+      # Warnings to evaluate and possibly fix/reenable later:
+
+      "/wd4100",  # Unreferenced formal function parameter.
+      "/wd4189",  # A variable was declared and initialized but never used.
+      "/wd4244",  # Conversion: possible loss of data.
+      "/wd4481",  # Nonstandard extension: override specifier.
+      "/wd4505",  # Unreferenced local function has been removed.
+      "/wd4510",  # Default constructor could not be generated.
+      "/wd4512",  # Assignment operator could not be generated.
+      "/wd4610",  # Class can never be instantiated, constructor required.
+    ]
+  } else {
+    # Common GCC warning setup.
+    cflags = [
+      # Enables.
+      "-Wendif-labels",  # Weird old-style text after an #endif.
+      "-Werror",  # Warnings as errors.
+
+      # Disables.
+      "-Wno-missing-field-initializers",  # "struct foo f = {0};"
+      "-Wno-unused-parameter",  # Unused function parameters.
+    ]
+    cflags_cc = []
+
+    if (is_mac) {
+      cflags += [
+        "-Wnewline-eof",
+      ]
+    }
+
+    if (is_clang) {
+      cflags += [
+        # This warns on using ints as initializers for floats in
+        # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
+        # which happens in several places in chrome code. Not sure if
+        # this is worth fixing.
+        "-Wno-c++11-narrowing",
+
+        # Don't die on dtoa code that uses a char as an array index.
+        # This is required solely for base/third_party/dmg_fp/dtoa.cc.
+        # TODO(brettw) move this to that project then!
+        "-Wno-char-subscripts",
+
+        # Warns on switches on enums that cover all enum values but
+        # also contain a default: branch. Chrome is full of that.
+        "-Wno-covered-switch-default",
+
+        # Clang considers the `register` keyword as deprecated, but e.g.
+        # code generated by flex (used in angle) contains that keyword.
+        # http://crbug.com/255186
+        "-Wno-deprecated-register",
+
+        # TODO(thakis): This used to be implied by -Wno-unused-function,
+        # which we no longer use. Check if it makes sense to remove
+        # this as well. http://crbug.com/316352
+        "-Wno-unneeded-internal-declaration",
+
+        # TODO(thakis): Remove, http://crbug.com/263960
+        "-Wno-reserved-user-defined-literal",
+
+        # TODO(hans): Clean this up. Or disable with finer granularity.
+        "-Wno-unused-local-typedef",
+      ]
+    }
+    if (gcc_version >= 48) {
+      cflags_cc += [
+        # See comment for -Wno-c++11-narrowing.
+        "-Wno-narrowing",
+        # TODO(thakis): Remove, http://crbug.com/263960
+        "-Wno-literal-suffix",
+      ]
+    }
+
+    # Suppress warnings about ABI changes on ARM (Clang doesn't give this
+    # warning).
+    if (cpu_arch == "arm" && !is_clang) {
+      cflags += [ "-Wno-psabi" ]
+    }
+
+    if (is_android) {
+      # Disable any additional warnings enabled by the Android build system but
+      # which chromium does not build cleanly with (when treating warning as
+      # errors).
+      cflags += [
+        "-Wno-extra",
+        "-Wno-ignored-qualifiers",
+        "-Wno-type-limits",
+      ]
+      cflags_cc += [
+        # Disabling c++0x-compat should be handled in WebKit, but
+        # this currently doesn't work because gcc_version is not set
+        # correctly when building with the Android build system.
+        # TODO(torne): Fix this in WebKit.
+        "-Wno-error=c++0x-compat",
+        # Other things unrelated to -Wextra:
+        "-Wno-non-virtual-dtor",
+        "-Wno-sign-promo",
+      ]
+    }
+
+    if (gcc_version >= 48) {
+      # Don't warn about the "typedef 'foo' locally defined but not used"
+      # for gcc 4.8.
+      # TODO: remove this flag once all builds work. See crbug.com/227506
+      cflags += [
+        "-Wno-unused-local-typedefs",
+      ]
+    }
+  }
+}
+
+# This will generate warnings when using Clang if code generates exit-time
+# destructors, which will slow down closing the program.
+# TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
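+#
+# Example (hypothetical target):
+#   source_set("browser") {
+#     configs += [ "//build/config/compiler:wexit_time_destructors" ]
+#   }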
+config("wexit_time_destructors") {
+  if (is_clang) {
+    cflags = [ "-Wexit-time-destructors" ]
+  }
+}
+
+# Optimization -----------------------------------------------------------------
+#
+# Note that BUILDCONFIG.gn sets up a variable "default_optimization_config"
+# which it will assign to the config it implicitly applies to every target. If
+# you want to override the optimization level for your target, remove this
+# config (which will expand differently for debug or release builds), and then
+# add back the one you want to override it with:
+#
+#   configs -= default_optimization_config
+#   configs += [ "//build/config/compiler/optimize_max" ]
+
+# Shared settings for both "optimize" and "optimize_max" configs.
+if (is_win) {
+  common_optimize_on_cflags = [
+    "/O2",
+    "/Ob2",  # both explicit and auto inlining.
+    "/Oy-",  # disable omitting frame pointers, must be after /o2.
+    "/Os",   # favor size over speed.
+  ]
+  common_optimize_on_ldflags = []
+} else {
+  common_optimize_on_cflags = [
+    # Don't emit the GCC version ident directives, they just end up in the
+    # .comment section taking up binary size.
+    "-fno-ident",
+    # Put data and code in their own sections, so that unused symbols
+    # can be removed at link time with --gc-sections.
+    "-fdata-sections",
+    "-ffunction-sections",
+  ]
+  common_optimize_on_ldflags = []
+
+  if (is_android) {
+    common_optimize_on_cflags += [
+      "-fomit-frame-pointer",
+    ]
+    common_optimize_on_ldflags += [
+      # Warn in case of text relocations.
+      "-Wl,--warn-shared-textrel",
+    ]
+  }
+
+  if (is_mac) {
+    if (symbol_level == 2) {
+      # Mac dead code stripping requires symbols.
+      common_optimize_on_ldflags += [
+        "-Wl,-dead_strip",
+      ]
+    }
+  } else {
+    # Non-Mac Posix linker flags.
+    common_optimize_on_ldflags += [
+      # Specifically tell the linker to perform optimizations.
+      # See http://lwn.net/Articles/192624/ .
+      "-Wl,-O1",
+      "-Wl,--as-needed",
+      "-Wl,--gc-sections",
+    ]
+  }
+}
+
+# Default "optimization on" config. On Windows, this favors size over speed.
+config("optimize") {
+  cflags = common_optimize_on_cflags
+  ldflags = common_optimize_on_ldflags
+  if (is_win) {
+    cflags += [
+      "/Os",   # favor size over speed.
+    ]
+  } else if (is_android || is_ios) {
+    cflags += [
+      "-Os",  # Favor size over speed.
+    ]
+  } else {
+    cflags += [
+      "-O2",
+    ]
+  }
+}
+
+# Turn off optimizations.
+config("no_optimize") {
+  if (is_win) {
+    cflags = [
+      "/Od",  # Disable optimization.
+      "/Ob0",  # Disable all inlining (on by default).
+      "/RTC1",  # Runtime checks for stack frame and uninitialized variables.
+    ]
+  } else if (is_android && !android_full_debug) {
+    # On Android we still apply some optimizations that don't affect debugging
+    # much, even when optimization is disabled, to keep the binary size down.
+    cflags = [
+      "-Os",
+      "-fomit-frame-pointer",
+      "-fdata-sections",
+      "-ffunction-sections",
+    ]
+    ldflags = common_optimize_on_ldflags
+  } else {
+    cflags = [ "-O0" ]
+  }
+}
+
+# Turns up the optimization level. On Windows, this implies whole program
+# optimization and link-time code generation which is very expensive and should
+# be used sparingly.
+config("optimize_max") {
+  cflags = common_optimize_on_cflags
+  ldflags = common_optimize_on_ldflags
+  if (is_win) {
+    cflags += [
+      "/Ot",   # Favor speed over size.
+      "/GL",   # Whole program optimization.
+      # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+      # Probably anything that this would catch that wouldn't be caught in a
+      # normal build isn't going to actually be a bug, so the incremental value
+      # of C4702 for PGO builds is likely very small.
+      "/wd4702",
+    ]
+  } else {
+    cflags += [
+      "-O2",
+    ]
+  }
+}
+
+# Symbols ----------------------------------------------------------------------
+
+config("symbols") {
+  if (is_win) {
+    cflags = [ "/Zi" ]  # Produce PDB file, no edit and continue.
+    ldflags = [ "/DEBUG" ]
+  } else {
+    cflags = [ "-g2" ]
+    if (use_debug_fission) {
+      cflags += [ "-gsplit-dwarf" ]
+    }
+  }
+}
+
+config("minimal_symbols") {
+  if (is_win) {
+    # Linker symbols for backtraces only.
+    ldflags = [ "/DEBUG" ]
+  } else {
+    cflags = [ "-g1" ]
+    if (use_debug_fission) {
+      cflags += [ "-gsplit-dwarf" ]
+    }
+  }
+}
+
+config("no_symbols") {
+  if (!is_win) {
+    cflags = [ "-g0" ]
+  }
+}
diff --git a/build/config/crypto.gni b/build/config/crypto.gni
new file mode 100644
index 0000000..20004c3
--- /dev/null
+++ b/build/config/crypto.gni
@@ -0,0 +1,22 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file declares build flags for the SSL library configuration.
+#
+# TODO(brettw) this should probably be moved to src/crypto or somewhere, and
+# the global build dependency on it should be removed.
+
+declare_args() {
+  # Use OpenSSL instead of NSS. This is used for Android, Mac, and NaCl untrusted
+  # code, and is experimental in other cases (see http://crbug.com/62803).
+  use_openssl = is_android || is_mac || is_nacl
+}
+
+# True when we're using OpenSSL for certificate verification and storage. We
+# only do this when we're using OpenSSL on desktop Linux systems. For other
+# systems (Mac/Win/Android) we use the system certificate features.
+use_openssl_certs = use_openssl && (is_linux || is_android)
+
+# Same meaning as use_openssl_certs but for NSS.
+use_nss_certs = !use_openssl && is_linux
diff --git a/build/config/features.gni b/build/config/features.gni
new file mode 100644
index 0000000..54b6159
--- /dev/null
+++ b/build/config/features.gni
@@ -0,0 +1,173 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains feature-related build flags. They should theoretically
+# live next to the features they control, and only things that depend on
+# those features should get the definitions.
+#
+# However, today we have many "bad" dependencies on some of these flags from,
+# e.g. base, so they need to be global.
+#
+# See also build/config/ui.gni
+
+if (is_android) {
+  import("//build/config/android/config.gni")
+}
+
+declare_args() {
+  # Multicast DNS.
+  enable_mdns = is_win || is_linux
+
+  enable_plugins = !is_android && !is_ios
+
+  # Enables Native Client support.
+  # TODO(GYP) enable this when nacl works in GN.
+  enable_nacl = false
+  #enable_nacl = (!is_ios && !is_android)
+
+  # If debug_devtools is set to true, JavaScript files for DevTools are stored
+  # as is and loaded from disk. Otherwise, a concatenated file is stored in
+  # resources.pak. It is still possible to load JS files from disk by passing
+  # --debug-devtools cmdline switch.
+  debug_devtools = false
+
+  # Enables WebRTC.
+  #enable_webrtc = !is_ios  TODO(GYP) use this condition when WebRTC works in
+  #                         the GN build.
+  enable_webrtc = false
+}
+
+# Additional dependent variables -----------------------------------------------
+
+# Set the version of CLD.
+#   0: Don't specify the version. This option is for the Finch testing.
+#   1: Use only CLD1.
+#   2: Use only CLD2.
+if (is_android || is_ios) {
+  cld_version = 1
+} else {
+  cld_version = 2
+}
+
+# libudev usage. This currently only affects the content layer.
+use_udev = is_linux
+
+# Enable the spell checker.
+enable_spellcheck = !is_android
+
+enable_pepper_cdms = enable_plugins && (is_linux || is_mac || is_win)
+
+enable_browser_cdms = is_android
+
+# Enable printing support and UI. This variable is used to configure which
+# parts of printing will be built. 0 disables printing completely, 1 enables
+# it fully, and 2 enables only the codepath to generate a Metafile (usually a
+# PDF or EMF) and disables print preview, cloud print, UI, etc.
+if (is_android) {
+  printing_mode = 2
+} else {
+  printing_mode = 1
+}
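+
+# For example (hypothetical consumer), code that only needs metafile
+# generation could check:
+#   if (printing_mode == 2) {
+#     defines += [ "METAFILE_PRINTING_ONLY" ]  # Hypothetical define.
+#   }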
+
+# The seccomp-bpf sandbox is only supported on three architectures
+# currently.
+# Do not disable seccomp_bpf anywhere without talking to
+# security@chromium.org!
+use_seccomp_bpf = (is_linux || is_android) &&
+  (cpu_arch == "x86" || cpu_arch == "x64" || cpu_arch == "arm")
+
+# Enable notifications everywhere except Android/iOS.
+# Android is http://crbug.com/115320
+enable_notifications = !is_android && !is_ios
+
+# TODO(brettw) this should be moved to net and only dependents get this define.
+disable_ftp_support = is_ios
+
+enable_web_speech = (!is_android && !is_ios)
+
+use_dbus = is_linux
+
+enable_extensions = (!is_android && !is_ios)
+
+# Variable safe_browsing is used to control the build time configuration for
+# safe browsing feature. Safe browsing can be compiled in 3 different levels: 0
+# disables it, 1 enables it fully, and 2 enables only UI and reporting features
+# without enabling phishing and malware detection. This is useful to integrate
+# a third party phishing/malware detection to existing safe browsing logic.
+if (is_android) {
+  safe_browsing_mode = 2
+} else if (is_ios) {
+  safe_browsing_mode = 0
+} else {
+  safe_browsing_mode = 1
+}
+
+enable_configuration_policy = true
+
+# The data acquisition mode for CLD2. Possible values are:
+#   static:     CLD2 data is statically linked to the executable.
+#   standalone: CLD2 data is provided in a standalone file that is
+#               bundled with the executable.
+#   component:  CLD2 data is provided as a Chrome "component" and is
+#               downloaded via the component updater.
+#
+# For more information on switching the CLD2 data source, see:
+#   https://sites.google.com/a/chromium.org/dev/developers/how-tos/compact-language-detector-cld-data-source-configuration
+cld2_data_source = "static"
+
+# Enables support for background apps.
+enable_background = !is_ios && !is_android
+
+enable_task_manager = !is_ios && !is_android
+
+use_cups = is_desktop_linux || is_mac
+
+enable_themes = !is_android && !is_chromeos
+
+# TODO(scottmg) remove this when we've fixed printing.
+win_pdf_metafile_for_printing = true
+
+enable_captive_portal_detection = !is_android && !is_ios
+
+# Enables use of the session service, which is enabled by default.
+# Android stores them separately on the Java side.
+enable_session_service = !is_android && !is_ios
+
+# Whether we are using the rlz library or not.  Platforms like Android send
+# rlz codes for searches but do not use the library.
+enable_rlz = is_chrome_branded && (is_win || is_mac || is_ios || is_chromeos)
+
+enable_plugin_installation = is_win || is_mac
+
+enable_app_list = !is_ios && !is_android
+enable_settings_app = enable_app_list && !is_chromeos
+
+enable_managed_users = !is_ios
+
+enable_service_discovery = enable_mdns || is_mac
+
+enable_autofill_dialog = !is_ios && !(is_android && is_android_webview_build)
+
+enable_wifi_bootstrapping = is_win || is_mac
+
+# Image loader extension is enabled on ChromeOS only.
+enable_image_loader_extension = is_chromeos
+
+enable_remoting = !is_ios && !is_android
+
+enable_google_now = !is_ios && !is_android
+
+enable_one_click_signin = is_win || is_mac || (is_linux && !is_chromeos)
+
+# Chrome OS: whether to also build the upcoming version of
+# ChromeVox, which can then be enabled via a command-line switch.
+enable_chromevox_next = false
+
+# Use brlapi from brltty for braille display support.
+use_brlapi = is_chromeos
+
+# Option controlling the use of GConf (the classic GNOME configuration
+# system).
+# TODO(GYP) also require !embedded to enable.
+use_gconf = is_linux && !is_chromeos
diff --git a/build/config/gcc/BUILD.gn b/build/config/gcc/BUILD.gn
new file mode 100644
index 0000000..28502c2
--- /dev/null
+++ b/build/config/gcc/BUILD.gn
@@ -0,0 +1,44 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This config causes functions not to be automatically exported from shared
+# libraries. By default, all symbols are exported but this means there are
+# lots of exports that slow everything down. In general we explicitly mark
+# which functions we want to export from components.
+#
+# Some third_party code assumes all functions are exported so this is separated
+# into its own config so such libraries can remove this config to make symbols
+# public again.
+#
+# See http://gcc.gnu.org/wiki/Visibility
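+#
+# Example (hypothetical third-party target):
+#   configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]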
+config("symbol_visibility_hidden") {
+  # Note that -fvisibility-inlines-hidden is set globally in the compiler
+  # config since that can almost always be applied.
+  cflags = [ "-fvisibility=hidden" ]
+}
+
+# Settings for executables and shared libraries.
+config("executable_ldconfig") {
+  ldflags = [
+    # Want to pass "\$". GN will re-escape as required for ninja.
+    "-Wl,-rpath=\$ORIGIN/",
+
+    "-Wl,-rpath-link=",
+
+    # Newer binutils don't set DT_RPATH unless you disable "new" dtags
+    # and the new DT_RUNPATH doesn't work without --no-as-needed flag.
+    "-Wl,--disable-new-dtags",
+  ]
+
+  if (is_android) {
+    ldflags += [
+      "-Bdynamic",
+      "-Wl,-z,nocopyreloc",
+    ]
+  }
+}
+
+config("no_exceptions") {
+  cflags_cc = [ "-fno-exceptions" ]
+}
diff --git a/build/config/gcc/gcc_version.gni b/build/config/gcc/gcc_version.gni
new file mode 100644
index 0000000..9b27c90
--- /dev/null
+++ b/build/config/gcc/gcc_version.gni
@@ -0,0 +1,16 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_clang) {
+  gcc_version = 0
+} else if (is_android) {
+  import("//build/config/android/config.gni")
+  if (is_android_webview_build) {
+    gcc_version = 48
+  } else {
+    gcc_version = 49
+  }
+} else {
+  gcc_version = exec_script("../../compiler_version.py", [ "host", "compiler" ], "value")
+}
diff --git a/build/config/ios/BUILD.gn b/build/config/ios/BUILD.gn
new file mode 100644
index 0000000..0886be4
--- /dev/null
+++ b/build/config/ios/BUILD.gn
@@ -0,0 +1,19 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/config/ios/ios_sdk.gni")
+
+config("sdk") {
+  common_flags = [ "-isysroot", sysroot ]
+
+  cflags = common_flags
+  ldflags = common_flags
+
+  if (use_ios_simulator) {
+    cflags += [ "-mios-simulator-version-min=$ios_deployment_target" ]
+  } else {
+    cflags += [ "-miphoneos-version-min=$ios_deployment_target" ]
+  }
+}
diff --git a/build/config/ios/ios_sdk.gni b/build/config/ios/ios_sdk.gni
new file mode 100644
index 0000000..6b81a03
--- /dev/null
+++ b/build/config/ios/ios_sdk.gni
@@ -0,0 +1,30 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # SDK path to use. When empty this will use the default SDK based on the
+  # value of use_ios_simulator.
+  ios_sdk_path = ""
+
+  # Set to true when targeting a simulator build on iOS. False means that the
+  # target is for running on the device. The default value is to use the
+  # Simulator except when targeting GYP's Xcode builds (for compat with the
+  # existing GYP build).
+  use_ios_simulator = true
+
+  # Version of iOS that we're targeting.
+  ios_deployment_target = "6.0"
+}
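+
+# Example (illustrative): target a device build with a newer deployment
+# target:
+#   gn gen out/ios --args='use_ios_simulator=false ios_deployment_target="7.0"'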
+
+if (ios_sdk_path == "") {
+  # Compute default target.
+  if (use_ios_simulator) {
+    _ios_sdk_to_query = "iphonesimulator"
+  } else {
+    _ios_sdk_to_query = "iphoneos"
+  }
+  _ios_sdk_result =
+    exec_script("ios_sdk.py", [ _ios_sdk_to_query ], "list lines")
+  ios_sdk_path = _ios_sdk_result[0]
+}
diff --git a/build/config/ios/ios_sdk.py b/build/config/ios/ios_sdk.py
new file mode 100644
index 0000000..dfec4db
--- /dev/null
+++ b/build/config/ios/ios_sdk.py
@@ -0,0 +1,19 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import subprocess
+import sys
+
+# This script returns the path to the SDK of the given type. Pass the type of
+# SDK you want, which is typically "iphoneos" or "iphonesimulator".
+#
+# In the GYP build, this is done inside GYP itself based on the SDKROOT
+# variable.
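+#
+# Example (illustrative):
+#   python ios_sdk.py iphonesimulator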
+
+if len(sys.argv) != 2:
+  print "Takes one arg (SDK to find)"
+  sys.exit(1)
+
+print subprocess.check_output(['xcodebuild', '-version', '-sdk',
+                               sys.argv[1], 'Path']).strip()
diff --git a/build/config/linux/BUILD.gn b/build/config/linux/BUILD.gn
new file mode 100644
index 0000000..3ca2b73
--- /dev/null
+++ b/build/config/linux/BUILD.gn
@@ -0,0 +1,226 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/features.gni")
+import("//build/config/sysroot.gni")
+import("//build/config/ui.gni")
+import("//tools/generate_library_loader/generate_library_loader.gni")
+
+config("sdk") {
+  if (sysroot != "") {
+    cflags = [ "--sysroot=" + sysroot ]
+    ldflags = [ "--sysroot=" + sysroot ]
+
+    # Need to get some linker flags out of the sysroot.
+    ldflags += [ exec_script("sysroot_ld_path.py",
+        [ rebase_path("//build/linux/sysroot_ld_path.sh", root_build_dir),
+          sysroot ],
+        "value") ]
+  }
+
+  # Set here because OS_CHROMEOS cannot be autodetected in build_config.h like
+  # OS_LINUX and the like.
+  if (is_chromeos) {
+    defines = [ "OS_CHROMEOS" ]
+  }
+}
+
+config("fontconfig") {
+  libs = [ "fontconfig" ]
+}
+
+pkg_config("freetype2") {
+  packages = [ "freetype2" ]
+}
+
+pkg_config("glib") {
+  packages = [ "glib-2.0", "gmodule-2.0", "gobject-2.0", "gthread-2.0" ]
+}
+
+pkg_config("pangocairo") {
+  packages = [ "pangocairo" ]
+}
+
+pkg_config("pangoft2") {
+  packages = [ "pangoft2" ]
+}
+
+pkg_config("udev") {
+  packages = [ "libudev" ]
+}
+
+# Note: if your target also depends on //dbus, you don't need to add this
+# config (it will get added automatically if you depend on //dbus).
+pkg_config("dbus") {
+  packages = [ "dbus-1" ]
+}
+
+if (use_evdev_gestures) {
+  pkg_config("libevdev-cros") {
+    packages = [ "libevdev-cros" ]
+  }
+
+  pkg_config("libgestures") {
+    packages = [ "libgestures" ]
+  }
+}
+
+config("x11") {
+  # Don't bother running pkg-config for these X related libraries since it just
+  # returns the same libs, and forking pkg-config is slow.
+  libs = [
+    "X11",
+    "Xcomposite",
+    "Xcursor",
+    "Xdamage",
+    "Xext",
+    "Xfixes",
+    "Xi",
+    "Xrender",
+    "Xss",
+    "Xtst",
+  ]
+}
+
+config("xcomposite") {
+  libs = [ "Xcomposite" ]
+}
+
+config("xext") {
+  libs = [ "Xext" ]
+}
+
+config("xrandr") {
+  libs = [ "Xrandr" ]
+}
+
+config("xscrnsaver") {
+  libs = [ "Xss" ]
+}
+
+config("xfixes") {
+  libs = [ "Xfixes" ]
+}
+
+config("libcap") {
+  libs = [ "cap" ]
+}
+
+config("xi") {
+  libs = [ "Xi" ]
+}
+
+config("libresolv") {
+  libs = [ "resolv" ]
+}
+
+# CrOS doesn't install GTK, gconf or any gnome packages.
+if (!is_chromeos) {
+  pkg_config("gtk") {
+    # Gtk requires gmodule, but it does not list it as a dependency in some
+    # misconfigured systems.
+    packages = [ "gmodule-2.0", "gtk+-2.0", "gthread-2.0" ]
+  }
+
+  pkg_config("gtkprint") {
+    packages = [ "gtk+-unix-print-2.0" ]
+  }
+
+  pkg_config("gnome_keyring") {
+    packages = [ "gnome-keyring-1" ]
+  }
+
+  pkg_config("gconf") {
+    packages = [ "gconf-2.0" ]
+    defines = [ "USE_GCONF" ]
+  }
+}
+
+# If brlapi isn't needed, don't require it to be installed.
+if (use_brlapi) {
+  config("brlapi_config") {
+    defines = [ "USE_BRLAPI" ]
+  }
+
+  # TODO(GYP) linux_link_brlapi support. Is this needed?
+  generate_library_loader("libbrlapi") {
+    name = "LibBrlapiLoader"
+    output_h = "libbrlapi.h"
+    output_cc = "libbrlapi_loader.cc"
+    header = "<brlapi.h>"
+    config = ":brlapi_config"
+
+    functions = [
+      "brlapi_getHandleSize",
+      "brlapi_error_location",
+      "brlapi_strerror",
+      "brlapi__acceptKeys",
+      "brlapi__openConnection",
+      "brlapi__closeConnection",
+      "brlapi__getDisplaySize",
+      "brlapi__enterTtyModeWithPath",
+      "brlapi__leaveTtyMode",
+      "brlapi__writeDots",
+      "brlapi__readKey",
+    ]
+  }
+}
+
+pkg_config("gio_config") {
+  packages = [ "gio-2.0" ]
+  # glib >=2.40 deprecate g_settings_list_schemas in favor of
+  # g_settings_schema_source_list_schemas. This function is not available on
+  # earlier versions that we still need to support (specifically, 2.32), so
+  # disable the warning with the GLIB_DISABLE_DEPRECATION_WARNINGS define.
+  # TODO(mgiuca): Remove this suppression when we drop support for Ubuntu 13.10
+  # (saucy) and earlier. Update the code to use
+  # g_settings_schema_source_list_schemas instead.
+  defines = [ "USE_GIO", "GLIB_DISABLE_DEPRECATION_WARNINGS" ]
+
+  # TODO(brettw) Theoretically I think ignore_libs should be set so that we
+  # don't link directly to GIO and use the loader generated below. But the gio
+  # target in GYP doesn't make any sense to me and appears to link directly to
+  # GIO in addition to making a loader. With this uncommented, the link in the
+  # component build fails, so I think this is closer to the GYP build.
+  #ignore_libs = true  # Loader generated below.
+}
+
+# This generates a target named "gio".
+generate_library_loader("gio") {
+  name = "LibGioLoader"
+  output_h = "libgio.h"
+  output_cc = "libgio_loader.cc"
+  header = "<gio/gio.h>"
+  config = ":gio_config"
+
+  functions = [
+    "g_settings_new",
+    "g_settings_get_child",
+    "g_settings_get_string",
+    "g_settings_get_boolean",
+    "g_settings_get_int",
+    "g_settings_get_strv",
+    "g_settings_list_schemas",
+  ]
+}
+
+# This generates a target named "libpci".
+generate_library_loader("libpci") {
+  name = "LibPciLoader"
+  output_h = "libpci.h"
+  output_cc = "libpci_loader.cc"
+  header = "<pci/pci.h>"
+
+  functions = [
+    "pci_alloc",
+    "pci_init",
+    "pci_cleanup",
+    "pci_scan_bus",
+    "pci_fill_info",
+    "pci_lookup_name",
+  ]
+}
+
+# Looking for libspeechd? Use //third_party/speech-dispatcher
diff --git a/build/config/linux/pkg-config.py b/build/config/linux/pkg-config.py
new file mode 100644
index 0000000..60304d4
--- /dev/null
+++ b/build/config/linux/pkg-config.py
@@ -0,0 +1,169 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import subprocess
+import sys
+import re
+from optparse import OptionParser
+
+# This script runs pkg-config, optionally filtering out some results, and
+# returns the result.
+#
+# The result will be [ <includes>, <cflags>, <libs>, <lib_dirs>, <ldflags> ]
+# where each member is itself a list of strings.
+#
+# You can filter out matches using "-v <regexp>" where all results from
+# pkgconfig matching the given regular expression will be ignored. You can
+# specify more than one regular expression by specifying "-v" more than once.
+#
+# You can specify a sysroot using "-s <sysroot>" where sysroot is the absolute
+# system path to the sysroot used for compiling. This script will attempt to
+# generate correct paths for the sysroot.
+#
+# When using a sysroot, you must also specify the architecture via
+# "-a <arch>" where arch is either "x86" or "x64".
+
+# If this is run on non-Linux platforms, just return nothing and indicate
+# success. This allows us to "kind of emulate" a Linux build from other
+# platforms.
+if sys.platform.find("linux") == -1:
+  print "[[],[],[],[],[]]"
+  sys.exit(0)
+
+
+def SetConfigPath(options):
+  """Set the PKG_CONFIG_PATH environment variable.
+  This takes into account any sysroot and architecture specification from the
+  options on the given command line."""
+
+  sysroot = options.sysroot
+  if not sysroot:
+    sysroot = ""
+
+  # Compute the library path name based on the architecture.
+  arch = options.arch
+  if sysroot and not arch:
+    print "You must specify an architecture via -a if using a sysroot."
+    sys.exit(1)
+  if arch == 'x64':
+    libpath = 'lib64'
+  else:
+    libpath = 'lib'
+
+  # Add the sysroot path to the environment's PKG_CONFIG_PATH
+  config_path = sysroot + '/usr/' + libpath + '/pkgconfig'
+  config_path += ':' + sysroot + '/usr/share/pkgconfig'
+  if 'PKG_CONFIG_PATH' in os.environ:
+    os.environ['PKG_CONFIG_PATH'] += ':' + config_path
+  else:
+    os.environ['PKG_CONFIG_PATH'] = config_path
+
+
+def GetPkgConfigPrefixToStrip(args):
+  """Returns the prefix from pkg-config where packages are installed.
+  This returned prefix is the one that should be stripped from the beginning of
+  directory names to take into account sysroots."""
+  # Some sysroots, like the Chromium OS ones, may generate paths that are not
+  # relative to the sysroot. For example,
+  # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all
+  # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr)
+  # instead of relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
+  # To support this correctly, it's necessary to extract the prefix to strip
+  # from pkg-config's |prefix| variable.
+  prefix = subprocess.check_output(["pkg-config", "--variable=prefix"] + args,
+      env=os.environ)
+  if prefix[-4:] == '/usr':
+    return prefix[:-4]
+  return prefix
+
+
+def MatchesAnyRegexp(flag, list_of_regexps):
+  """Returns true if the first argument matches any regular expression in the
+  given list."""
+  for regexp in list_of_regexps:
+    if regexp.search(flag) != None:
+      return True
+  return False
+
+
+def RewritePath(path, strip_prefix, sysroot):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(strip_prefix):
+      path = path[len(strip_prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+parser = OptionParser()
+parser.add_option('-p', action='store', dest='pkg_config', type='string',
+                  default='pkg-config')
+parser.add_option('-v', action='append', dest='strip_out', type='string')
+parser.add_option('-s', action='store', dest='sysroot', type='string')
+parser.add_option('-a', action='store', dest='arch', type='string')
+(options, args) = parser.parse_args()
+
+# Make a list of regular expressions to strip out.
+strip_out = []
+if options.strip_out != None:
+  for regexp in options.strip_out:
+    strip_out.append(re.compile(regexp))
+
+SetConfigPath(options)
+if options.sysroot:
+  prefix = GetPkgConfigPrefixToStrip(args)
+else:
+  prefix = ''
+
+try:
+  flag_string = subprocess.check_output(
+      [ options.pkg_config, "--cflags", "--libs-only-l", "--libs-only-L" ] +
+      args, env=os.environ)
+  # For now just split on spaces to get the args out. This will break if
+  # pkgconfig returns quoted things with spaces in them, but that doesn't seem
+  # to happen in practice.
+  all_flags = flag_string.strip().split(' ')
+except (OSError, subprocess.CalledProcessError):
+  print "Could not run pkg-config."
+  sys.exit(1)
+
+
+sysroot = options.sysroot
+if not sysroot:
+  sysroot = ''
+
+includes = []
+cflags = []
+libs = []
+lib_dirs = []
+ldflags = []
+
+for flag in all_flags[:]:
+  if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out):
+    continue
+
+  if flag[:2] == '-l':
+    libs.append(RewritePath(flag[2:], prefix, sysroot))
+  elif flag[:2] == '-L':
+    lib_dirs.append(RewritePath(flag[2:], prefix, sysroot))
+  elif flag[:2] == '-I':
+    includes.append(RewritePath(flag[2:], prefix, sysroot))
+  elif flag[:3] == '-Wl':
+    ldflags.append(flag)
+  elif flag == '-pthread':
+    # Many libs specify "-pthread" which we don't need since we always include
+    # this anyway. Removing it here prevents a bunch of duplicate inclusions on
+    # the command line.
+    pass
+  else:
+    cflags.append(flag)
+
+# Output a GN value: a list of five lists (includes, cflags, libs, lib_dirs,
+# ldflags). The JSON formatter prints GN-compatible lists when everything is a
+# list of strings.
+print json.dumps([includes, cflags, libs, lib_dirs, ldflags])
diff --git a/build/config/linux/pkg_config.gni b/build/config/linux/pkg_config.gni
new file mode 100644
index 0000000..378863e
--- /dev/null
+++ b/build/config/linux/pkg_config.gni
@@ -0,0 +1,71 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# Defines a config specifying the result of running pkg-config for the given
+# packages. Put the package names you want to query in the "packages" variable
+# inside the template invocation.
+#
+# You can also add defines via the "defines" variable. This can be useful to
+# add this to the config to pass defines that the library expects to get by
+# users of its headers.
+#
+# Example:
+#   pkg_config("mything") {
+#     packages = [ "mything1", "mything2" ]
+#     defines = [ "ENABLE_AWESOME" ]
+#   }
+#
+# You can also use "extra args" to filter out results (see pkg-config.py):
+#   extra_args = [ "-v, "foo" ]
+# To ignore libs and ldflags (only cflags/defines will be set, which is useful
+# when doing manual dynamic linking), set:
+#   ignore_libs = true
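+#
+# Example of a cflags/defines-only config (illustrative):
+#   pkg_config("glib") {
+#     packages = [ "glib-2.0" ]
+#     ignore_libs = true
+#   }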
+
+declare_args() {
+  # A pkg-config wrapper to call instead of trying to find and call the right
+  # pkg-config directly. Wrappers like this are common in cross-compilation
+  # environments.
+  # Leaving it blank defaults to searching PATH for 'pkg-config' and relying on
+  # the sysroot mechanism to find the right .pc files.
+  pkg_config = ""
+}
+
+template("pkg_config") {
+  assert(defined(invoker.packages),
+        "Variable |packages| must be defined to be a list in pkg_config.")
+  config(target_name) {
+    if (sysroot != "") {
+      # Pass the sysroot if we're using one (it requires the CPU arch also).
+      args = ["-s", sysroot, "-a", cpu_arch] + invoker.packages
+    } else if (pkg_config != "") {
+      args = ["-p", pkg_config] + invoker.packages
+    } else {
+      args = invoker.packages
+    }
+
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+
+    pkgresult = exec_script("//build/config/linux/pkg-config.py",
+                            args, "value")
+    include_dirs = pkgresult[0]
+    cflags = pkgresult[1]
+
+    if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
+      libs = pkgresult[2]
+      lib_dirs = pkgresult[3]
+      ldflags = pkgresult[4]
+    }
+
+    if (defined(invoker.defines)) {
+      defines = invoker.defines
+    }
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+  }
+}
diff --git a/build/config/linux/sysroot_ld_path.py b/build/config/linux/sysroot_ld_path.py
new file mode 100644
index 0000000..4bce7ee
--- /dev/null
+++ b/build/config/linux/sysroot_ld_path.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file takes two arguments, the relative location of the shell script that
+# does the checking, and the name of the sysroot.
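+#
+# Example (illustrative):
+#   python sysroot_ld_path.py build/linux/sysroot_ld_path.sh /path/to/sysroot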
+
+# TODO(brettw) the build/linux/sysroot_ld_path.sh script should be rewritten in
+# Python in this file.
+
+import subprocess
+import sys
+
+if len(sys.argv) != 3:
+  print "Need two arguments"
+  sys.exit(1)
+
+result = subprocess.check_output([sys.argv[1], sys.argv[2]]).strip()
+
+print '"' + result + '"'
diff --git a/build/config/locales.gni b/build/config/locales.gni
new file mode 100644
index 0000000..a628007
--- /dev/null
+++ b/build/config/locales.gni
@@ -0,0 +1,118 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Note: keep in sync with below.
+locales = [
+  "am",
+  "ar",
+  "bg",
+  "bn",
+  "ca",
+  "cs",
+  "da",
+  "de",
+  "el",
+  "en-GB",
+  "en-US",
+  "es-419",
+  "es",
+  "et",
+  "fa",
+  "fi",
+  "fil",
+  "fr",
+  "gu",
+  "he",
+  "hi",
+  "hr",
+  "hu",
+  "id",
+  "it",
+  "ja",
+  "kn",
+  "ko",
+  "lt",
+  "lv",
+  "ml",
+  "mr",
+  "ms",
+  "nb",
+  "nl",
+  "pl",
+  "pt-BR",
+  "pt-PT",
+  "ro",
+  "ru",
+  "sk",
+  "sl",
+  "sr",
+  "sv",
+  "sw",
+  "ta",
+  "te",
+  "th",
+  "tr",
+  "uk",
+  "vi",
+  "zh-CN",
+  "zh-TW",
+]
+
+# Same as the locales list but in the format Mac expects for output files:
+# it uses underscores instead of hyphens, and "en" instead of "en-US".
+locales_as_mac_outputs = [
+  "am",
+  "ar",
+  "bg",
+  "bn",
+  "ca",
+  "cs",
+  "da",
+  "de",
+  "el",
+  "en_GB",
+  "en",
+  "es_419",
+  "es",
+  "et",
+  "fa",
+  "fi",
+  "fil",
+  "fr",
+  "gu",
+  "he",
+  "hi",
+  "hr",
+  "hu",
+  "id",
+  "it",
+  "ja",
+  "kn",
+  "ko",
+  "lt",
+  "lv",
+  "ml",
+  "mr",
+  "ms",
+  "nb",
+  "nl",
+  "pl",
+  "pt_BR",
+  "pt_PT",
+  "ro",
+  "ru",
+  "sk",
+  "sl",
+  "sr",
+  "sv",
+  "sw",
+  "ta",
+  "te",
+  "th",
+  "tr",
+  "uk",
+  "vi",
+  "zh_CN",
+  "zh_TW",
+]
diff --git a/build/config/mac/BUILD.gn b/build/config/mac/BUILD.gn
new file mode 100644
index 0000000..2ebf458
--- /dev/null
+++ b/build/config/mac/BUILD.gn
@@ -0,0 +1,35 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+config("sdk") {
+  common_flags = [
+    "-isysroot", sysroot,
+    "-mmacosx-version-min=10.6"
+  ]
+
+  cflags = common_flags
+  ldflags = common_flags
+}
+
+# On Mac, this is used for everything except static libraries.
+config("mac_dynamic_flags") {
+  ldflags = [
+    "-Wl,-search_paths_first",
+    "-L.",
+    # Path for loading shared libraries for unbundled binaries.
+    "-Wl,-rpath,@loader_path/.",
+    # Path for loading shared libraries for bundled binaries. Walks back up
+    # from Binary.app/Contents/MacOS to the directory containing the bundle.
+    "-Wl,-rpath,@loader_path/../../..",
+  ]
+}
+
+# On Mac, this is used only for executables.
+config("mac_executable_flags") {
+  ldflags = [
+    "-Wl,-pie",  # Position independent.
+  ]
+}
diff --git a/build/config/mac/mac_sdk.gni b/build/config/mac/mac_sdk.gni
new file mode 100644
index 0000000..aa03332
--- /dev/null
+++ b/build/config/mac/mac_sdk.gni
@@ -0,0 +1,34 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Minimum supported version of the Mac SDK.
+  mac_sdk_min = "10.6"
+
+  # Path to a specific version of the Mac SDK, not including a slash at the
+  # end. If empty, the path to the lowest version greater than or equal to
+  # mac_sdk_min is used.
+  mac_sdk_path = ""
+}
+
+find_sdk_args = [ "--print_sdk_path" ]
+if (is_chrome_branded && is_official_build) {
+  find_sdk_args += [ "--verify", mac_sdk_min, "--sdk_path=" + mac_sdk_path ]
+} else {
+  find_sdk_args += [ mac_sdk_min ]
+}
+# The tool will print the SDK path on the first line, and the version on the
+# second line.
+find_sdk_lines =
+    exec_script("//build/mac/find_sdk.py", find_sdk_args, "list lines")
+mac_sdk_version = find_sdk_lines[1]
+if (mac_sdk_path == "") {
+  # TODO(brettw) http://crbug.com/335325  when everybody moves to Xcode 5 we
+  # can remove the --print_sdk_path argument to find_sdk and instead just use
+  # the following two lines to get the path. Although it looks longer here, it
+  # saves forking a process in find_sdk.py so will be faster.
+  #mac_sdk_root = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX"
+  #mac_sdk_path = mac_sdk_root + mac_sdk_version + ".sdk"
+  mac_sdk_path = find_sdk_lines[0]
+}
diff --git a/build/config/sysroot.gni b/build/config/sysroot.gni
new file mode 100644
index 0000000..2b04c56
--- /dev/null
+++ b/build/config/sysroot.gni
@@ -0,0 +1,50 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This header file defines the "sysroot" variable which is the absolute path
+# of the sysroot. If no sysroot applies, the variable will be an empty string.
+
+if (is_android) {
+  import("//build/config/android/config.gni")
+  if (!is_android_webview_build) {
+    if (cpu_arch == "x86") {
+      sysroot = rebase_path("$android_ndk_root/$x86_android_sysroot_subdir")
+    } else if (cpu_arch == "arm") {
+      sysroot = rebase_path("$android_ndk_root/$arm_android_sysroot_subdir")
+    } else if (cpu_arch == "mipsel") {
+      sysroot = rebase_path("$android_ndk_root/$mips_android_sysroot_subdir")
+    } else {
+      sysroot = ""
+    }
+  } else {
+    sysroot = ""
+  }
+} else if (is_linux && is_chrome_branded && is_official_build && !is_chromeos) {
+  # For official builds, use the sysroot checked into the internal source repo
+  # so that the builds work on older versions of Linux.
+  if (cpu_arch == "x64") {
+    sysroot =
+        rebase_path("//chrome/installer/linux/debian_wheezy_amd64-sysroot")
+  } else if (cpu_arch == "x86") {
+    sysroot = rebase_path("//chrome/installer/linux/debian_wheezy_i386-sysroot")
+  } else {
+    # Any other builds don't use a sysroot.
+    sysroot = ""
+  }
+} else if (is_linux && !is_chromeos) {
+  if (cpu_arch == "mipsel") {
+    sysroot = rebase_path("//mipsel-sysroot/sysroot")
+  } else {
+    sysroot = ""
+  }
+} else if (is_mac) {
+  import("//build/config/mac/mac_sdk.gni")
+
+  sysroot = mac_sdk_path
+} else if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+  sysroot = ios_sdk_path
+} else {
+  sysroot = ""
+}
diff --git a/build/config/ui.gni b/build/config/ui.gni
new file mode 100644
index 0000000..9c159a2
--- /dev/null
+++ b/build/config/ui.gni
@@ -0,0 +1,67 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains UI-related build flags. It should theoretically be in the
+# src/ui directory and only things that depend on the ui module should get the
+# definitions.
+#
+# However, today we have many "bad" dependencies on some of these flags from,
+# e.g. base, so they need to be global.
+#
+# See also build/config/features.gni
+
+declare_args() {
+  # Indicates if Ash is enabled. Ash is the Aura Shell, which provides a
+  # desktop-like environment for Aura. Requires use_aura = true.
+  use_ash = is_win || is_linux
+
+  # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux
+  # that does not require X11.
+  use_ozone = false
+
+  # Support ChromeOS touchpad gestures with ozone.
+  use_evdev_gestures = false
+
+  # Indicates if Aura is enabled. Aura is a low-level windowing library, sort
+  # of a replacement for GDI or GTK.
+  use_aura = is_win || is_linux
+
+  # XInput2 multitouch support. Zero means disabled, nonzero indicates the
+  # minimum XI2 version. For example, use_xi2_mt=2 means XI2.2 or above.
+  use_xi2_mt = 2
+
+  # True means the UI is built using the "views" framework.
+  toolkit_views = is_win || is_chromeos || use_aura
+}
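+
+# These flag defaults can be overridden in GN args, e.g. (illustrative):
+#   gn gen out/Default --args="use_ozone=true use_aura=true"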
+
+# Additional dependent variables -----------------------------------------------
+#
+# These variables depend on other variables and can't be set externally.
+
+if (is_linux) {
+  use_cairo = true
+  use_pango = true
+} else {
+  use_cairo = false
+  use_pango = false
+}
+
+# Use GPU-accelerated cross-process image transport by default on Linux builds
+# with the Aura window manager.
+ui_compositor_image_transport = use_aura && is_linux
+
+use_default_render_theme = use_aura || is_linux
+
+# Indicates if the UI toolkit depends on X11.
+use_x11 = is_linux && !use_ozone
+
+use_ozone_evdev = use_ozone
+
+use_glib = is_linux
+
+use_clipboard_aurax11 = is_linux && use_aura && use_x11
+
+use_athena = false
+
+enable_hidpi = is_mac || is_chromeos || is_win
diff --git a/build/config/win/BUILD.gn b/build/config/win/BUILD.gn
new file mode 100644
index 0000000..0f00a1e
--- /dev/null
+++ b/build/config/win/BUILD.gn
@@ -0,0 +1,168 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/win/visual_studio_version.gni")
+
+# Compiler setup for the Windows SDK. Applied to all targets.
+config("sdk") {
+  # The include path is the stuff returned by the script.
+  #include_dirs = msvc_config[0]  TODO(brettw) make this work.
+
+  defines = [
+    "_ATL_NO_OPENGL",
+    "_WINDOWS",
+    "CERT_CHAIN_PARA_HAS_EXTRA_FIELDS",
+    "NTDDI_VERSION=0x06020000",
+    "PSAPI_VERSION=1",
+    "WIN32",
+  ]
+
+  include_dirs = system_include_dirs
+
+  if (is_visual_studio_express) {
+    # https://code.google.com/p/chromium/issues/detail?id=372451#c20
+    # Warning 4702 ("Unreachable code") should be re-enabled once Express users
+    # are updated to VS2013 Update 2.
+    cflags = [ "/wd4702" ]
+  } else {
+    # Only supported on non-Express versions.
+    defines += [ "_SECURE_ATL" ]
+  }
+}
+
+# Sets the default Windows build version. This is separated because some
+# targets need to manually override it for their compiles.
+config("winver") {
+  defines = [
+    "_WIN32_WINNT=0x0602",
+    "WINVER=0x0602",
+  ]
+}
+
+# Linker flags for Windows SDK setup, this is applied only to EXEs and DLLs.
+config("sdk_link") {
+  if (cpu_arch == "x64") {
+    ldflags = [ "/MACHINE:X64" ]
+    lib_dirs = [
+      "$windows_sdk_path\Lib\win8\um\x64",
+      "$visual_studio_path\VC\lib\amd64",
+      "$visual_studio_path\VC\atlmfc\lib\amd64",
+    ]
+    if (is_visual_studio_express) {
+      lib_dirs += [ "$wdk_path/lib/ATL/amd64" ]
+    }
+  } else {
+    ldflags = [
+      "/MACHINE:X86",
+      "/SAFESEH",  # Not compatible with x64 so use only for x86.
+    ]
+    lib_dirs = [
+      "$windows_sdk_path\Lib\win8\um\x86",
+      "$visual_studio_path\VC\lib",
+      "$visual_studio_path\VC\atlmfc\lib",
+    ]
+    if (is_visual_studio_express) {
+      lib_dirs += [ "$wdk_path/lib/ATL/i386" ]
+    }
+    if (!is_asan) {
+      ldflags += [ "/largeaddressaware" ]
+    }
+  }
+
+  if (is_visual_studio_express) {
+    # Explicitly required when using the ATL with express.
+    libs = [ "atlthunk.lib" ]
+
+    # ATL 8.0 included in WDK 7.1 makes the linker generate almost eight
+    # hundred LNK4254 and LNK4078 warnings:
+    #   - warning LNK4254: section 'ATL' (50000040) merged into '.rdata'
+    #     (40000040) with different attributes
+    #   - warning LNK4078: multiple 'ATL' sections found with different
+    #     attributes
+    ldflags += [ "/ignore:4254", "/ignore:4078" ]
+  }
+}
+
+# This default linker setup is provided separately from the SDK setup so
+# targets who want different library configurations can remove this and specify
+# their own.
+config("common_linker_setup") {
+  ldflags = [
+    "/FIXED:NO",
+    "/ignore:4199",
+    "/ignore:4221",
+    "/NXCOMPAT",
+  ]
+
+  # ASLR makes debugging with windbg difficult because Chrome.exe and
+  # Chrome.dll share the same base name. As a result, windbg will name the
+  # Chrome.dll module like chrome_<base address>, where <base address>
+  # typically changes with each launch. This in turn means that breakpoints in
+  # Chrome.dll don't stick from one launch to the next. For this reason, we
+  # turn ASLR off in debug builds.
+  if (is_debug) {
+    ldflags += [ "/DYNAMICBASE:NO" ]
+  } else {
+    ldflags += [ "/DYNAMICBASE" ]
+  }
+
+  # Delay loaded DLLs.
+  ldflags += [
+    "/DELAYLOAD:dbghelp.dll",
+    "/DELAYLOAD:dwmapi.dll",
+    "/DELAYLOAD:shell32.dll",
+    "/DELAYLOAD:uxtheme.dll",
+  ]
+}
+
+# Subsystem --------------------------------------------------------------------
+
+config("console") {
+  ldflags = [ "/SUBSYSTEM:CONSOLE" ]
+}
+config("windowed") {
+  ldflags = [ "/SUBSYSTEM:WINDOWS" ]
+}
+
+# Incremental linking ----------------------------------------------------------
+
+config("incremental_linking") {
+  ldflags = [ "/INCREMENTAL" ]
+}
+config("no_incremental_linking") {
+  ldflags = [ "/INCREMENTAL:NO" ]
+}
+
+# Character set ----------------------------------------------------------------
+
+# Not including this config means "ansi" (8-bit system codepage).
+config("unicode") {
+  defines = [
+    "_UNICODE",
+    "UNICODE",
+  ]
+}
+
+# Lean and mean ----------------------------------------------------------------
+
+# Some third party code might not compile with WIN32_LEAN_AND_MEAN so we have
+# to have a separate config for it. Remove this config from your target to
+# get the "bloaty and accomodating" version of windows.h.
+config("lean_and_mean") {
+  defines = [
+    "WIN32_LEAN_AND_MEAN",
+  ]
+}
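+
+# For example (illustrative), a target that needs the full windows.h can opt
+# out with:
+#   executable("needs_full_windows_h") {
+#     configs -= [ "//build/config/win:lean_and_mean" ]
+#   }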
+
+# Nominmax --------------------------------------------------------------------
+
+# Some third party code defines NOMINMAX before including windows.h, which
+# then causes warnings when it's been previously defined on the command line.
+# For such targets, this config can be removed.
+
+config("nominmax") {
+  defines = [
+    "NOMINMAX",
+  ]
+}
diff --git a/build/config/win/visual_studio_version.gni b/build/config/win/visual_studio_version.gni
new file mode 100644
index 0000000..c0b18c7
--- /dev/null
+++ b/build/config/win/visual_studio_version.gni
@@ -0,0 +1,64 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Path to Visual Studio. If empty, the default is used which is to use the
+  # automatic toolchain in depot_tools. If set, you must also set the
+  # visual_studio_version and wdk_path.
+  visual_studio_path = ""
+
+  # Version of Visual Studio pointed to by the visual_studio_path.
+  # Use "2013" for Visual Studio 2013, or "2013e" for the Express version.
+  visual_studio_version = ""
+
+  # Directory of the Windows driver kit. If visual_studio_path is empty, this
+  # will be auto-filled.
+  wdk_path = ""
+
+  # Full path to the Windows SDK, not including a backslash at the end.
+  # This value is the default location, override if you have a different
+  # installation location.
+  windows_sdk_path = "C:\Program Files (x86)\Windows Kits\8.0"
+
+  # The list of include directories that are treated as "system" include
+  # directories. TODO(scottmg): These are incorrectly put on the command line
+  # in GN, they should really be stored into %INCLUDE%.
+  system_include_dirs = []
+}
+
+if (visual_studio_path == "") {
+  toolchain_data =
+      exec_script("../../vs_toolchain.py", [ "get_toolchain_dir" ], "scope")
+  visual_studio_path = toolchain_data.vs_path
+  windows_sdk_path = toolchain_data.sdk_path
+  visual_studio_version = toolchain_data.vs_version
+  wdk_path = toolchain_data.wdk_dir
+} else {
+  assert(visual_studio_version != "",
+         "You must set the visual_studio_version if you set the path")
+  assert(wdk_path != "",
+         "You must set the wdk_path if you set the visual studio path")
+}
+
+# Set when using the "Express" version of a Visual Studio version we support.
+is_visual_studio_express = (visual_studio_version == "2013e")
+
+
+# The Windows SDK include directories must be first. They both have a sal.h,
+# and the SDK one is newer and the SDK uses some newer features from it not
+# present in the Visual Studio one.
+system_include_dirs = [
+  "$windows_sdk_path\Include\shared",
+  "$windows_sdk_path\Include\um",
+  "$windows_sdk_path\Include\winrt",
+  "$visual_studio_path\VC\include",
+  "$visual_studio_path\VC\atlmfc\include",
+]
+
+if (is_visual_studio_express) {
+  system_include_dirs += [
+    "$wdk_path/inc/atl71",
+    "$wdk_path/inc/mfc42",
+  ]
+}
diff --git a/build/copy_test_data_ios.gypi b/build/copy_test_data_ios.gypi
new file mode 100644
index 0000000..576a0f2
--- /dev/null
+++ b/build/copy_test_data_ios.gypi
@@ -0,0 +1,53 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to copy test data files into
+# an iOS app bundle. To use this the following variables need to be defined:
+#   test_data_files: list: paths to test data files or directories
+#   test_data_prefix: string: a directory prefix that will be prepended to each
+#                             output path.  Generally, this should be the base
+#                             directory of the gypi file containing the unittest
+#                             target (e.g. "base" or "chrome").
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_unittests',
+#   'conditions': [
+#     ['OS == "ios"', {
+#       'actions': [
+#         {
+#           'action_name': 'copy_test_data',
+#           'variables': {
+#             'test_data_files': [
+#               'path/to/datafile.txt',
+#               'path/to/data/directory/',
+#             ]
+#             'test_data_prefix' : 'prefix',
+#           },
+#           'includes': ['path/to/this/gypi/file'],
+#         },
+#       ],
+#     }],
+# }
+#
+
+{
+  'inputs': [
+    # The |-o <(test_data_prefix)| is ignored; it is there to work around a
+    # caching bug in gyp (https://code.google.com/p/gyp/issues/detail?id=112).
+    # It caches command output when the string is the same, so if two copy
+    # steps have the same relative paths, there can be bogus cache hits that
+    # cause compile failures unless something varies.
+    '<!@pymod_do_main(copy_test_data_ios -o <(test_data_prefix) --inputs <(test_data_files))',
+  ],
+  'outputs': [
+    '<!@pymod_do_main(copy_test_data_ios -o <(PRODUCT_DIR)/<(_target_name).app/<(test_data_prefix) --outputs <(test_data_files))',
+  ],
+  'action': [
+    'python',
+    '<(DEPTH)/build/copy_test_data_ios.py',
+    '-o', '<(PRODUCT_DIR)/<(_target_name).app/<(test_data_prefix)',
+    '<@(_inputs)',
+  ],
+}
diff --git a/build/copy_test_data_ios.py b/build/copy_test_data_ios.py
new file mode 100755
index 0000000..6f0302f
--- /dev/null
+++ b/build/copy_test_data_ios.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies test data files or directories into a given output directory."""
+
+import optparse
+import os
+import shutil
+import sys
+
+class WrongNumberOfArgumentsException(Exception):
+  pass
+
+def EscapePath(path):
+  """Returns a path with spaces escaped."""
+  return path.replace(" ", "\\ ")
+
+def ListFilesForPath(path):
+  """Returns a list of all the files under a given path."""
+  output = []
+  # Ignore revision control metadata directories.
+  if (os.path.basename(path).startswith('.git') or
+      os.path.basename(path).startswith('.svn')):
+    return output
+
+  # Files get returned without modification.
+  if not os.path.isdir(path):
+    output.append(path)
+    return output
+
+  # Directories get recursively expanded.
+  contents = os.listdir(path)
+  for item in contents:
+    full_path = os.path.join(path, item)
+    output.extend(ListFilesForPath(full_path))
+  return output
+
+def CalcInputs(inputs):
+  """Computes the full list of input files for a set of command-line arguments.
+  """
+  # |inputs| is a list of paths, which may be directories.
+  output = []
+  for input in inputs:
+    output.extend(ListFilesForPath(input))
+  return output
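+
+# For example (illustrative): given data/a.txt and data/sub/b.txt,
+# CalcInputs(['data']) returns ['data/a.txt', 'data/sub/b.txt'] (listing order
+# may vary).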
+
+def CopyFiles(relative_filenames, output_basedir):
+  """Copies files to the given output directory."""
+  for file in relative_filenames:
+    relative_dirname = os.path.dirname(file)
+    output_dir = os.path.join(output_basedir, relative_dirname)
+    output_filename = os.path.join(output_basedir, file)
+
+    # In cases where a directory has turned into a file or vice versa, delete it
+    # before copying it below.
+    if os.path.exists(output_dir) and not os.path.isdir(output_dir):
+      os.remove(output_dir)
+    if os.path.exists(output_filename) and os.path.isdir(output_filename):
+      shutil.rmtree(output_filename)
+
+    if not os.path.exists(output_dir):
+      os.makedirs(output_dir)
+    shutil.copy(file, output_filename)
+
+def DoMain(argv):
+  parser = optparse.OptionParser()
+  usage = 'Usage: %prog -o <output_dir> [--inputs] [--outputs] <input_files>'
+  parser.set_usage(usage)
+  parser.add_option('-o', dest='output_dir')
+  parser.add_option('--inputs', action='store_true', dest='list_inputs')
+  parser.add_option('--outputs', action='store_true', dest='list_outputs')
+  options, arglist = parser.parse_args(argv)
+
+  if len(arglist) == 0:
+    raise WrongNumberOfArgumentsException('<input_files> required.')
+
+  files_to_copy = CalcInputs(arglist)
+  escaped_files = [EscapePath(x) for x in CalcInputs(arglist)]
+  if options.list_inputs:
+    return '\n'.join(escaped_files)
+
+  if not options.output_dir:
+    raise WrongNumberOfArgumentsException('-o required.')
+
+  if options.list_outputs:
+    outputs = [os.path.join(options.output_dir, x) for x in escaped_files]
+    return '\n'.join(outputs)
+
+  CopyFiles(files_to_copy, options.output_dir)
+  return
+
+def main(argv):
+  try:
+    result = DoMain(argv[1:])
+  except WrongNumberOfArgumentsException, e:
+    print >>sys.stderr, e
+    return 1
+  if result:
+    print result
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/cp.py b/build/cp.py
new file mode 100755
index 0000000..0f32536
--- /dev/null
+++ b/build/cp.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copy a file.
+
+This module works much like the cp posix command - it takes 2 arguments:
+(src, dst) and copies the file with path |src| to |dst|.
+"""
+
+import os
+import shutil
+import sys
+
+
+def Main(src, dst):
+  # Use copy instead of copyfile to ensure the executable bit is copied.
+  return shutil.copy(src, os.path.normpath(dst))
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1], sys.argv[2]))
diff --git a/build/detect_host_arch.py b/build/detect_host_arch.py
new file mode 100755
index 0000000..19579eb
--- /dev/null
+++ b/build/detect_host_arch.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Outputs host CPU architecture in format recognized by gyp."""
+
+import platform
+import re
+import sys
+
+
+def HostArch():
+  """Returns the host architecture with a predictable string."""
+  host_arch = platform.machine()
+
+  # Convert machine type to format recognized by gyp.
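+  # (e.g. 'i686' -> 'ia32', 'x86_64' -> 'x64', 'armv7l' -> 'arm').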
+  if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
+    host_arch = 'ia32'
+  elif host_arch in ['x86_64', 'amd64']:
+    host_arch = 'x64'
+  elif host_arch.startswith('arm'):
+    host_arch = 'arm'
+
+  # platform.machine is based on the running kernel. It's possible to use a
+  # 64-bit kernel with a 32-bit userland, e.g. to give the linker slightly
+  # more memory. Distinguish between different userland bitnesses by querying
+  # the python binary.
+  if host_arch == 'x64' and platform.architecture()[0] == '32bit':
+    host_arch = 'ia32'
+
+  return host_arch
+
+def DoMain(_):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return HostArch()
+
+if __name__ == '__main__':
+  print DoMain([])
diff --git a/build/dir_exists.py b/build/dir_exists.py
new file mode 100755
index 0000000..70d367e
--- /dev/null
+++ b/build/dir_exists.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes True if the argument is a directory."""
+
+import os.path
+import sys
+
+def main():
+  sys.stdout.write(_is_dir(sys.argv[1]))
+  return 0
+
+def _is_dir(dir_name):
+  return str(os.path.isdir(dir_name))
+
+def DoMain(args):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return _is_dir(args[0])
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/download_nacl_toolchains.py b/build/download_nacl_toolchains.py
new file mode 100755
index 0000000..3d6c64f
--- /dev/null
+++ b/build/download_nacl_toolchains.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shim to run nacl toolchain download script only if there is a nacl dir."""
+
+import os
+import shutil
+import sys
+
+
+def Main(args):
+  # Exit early if disable_nacl=1.
+  if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''):
+    return 0
+  script_dir = os.path.dirname(os.path.abspath(__file__))
+  src_dir = os.path.dirname(script_dir)
+  nacl_dir = os.path.join(src_dir, 'native_client')
+  nacl_build_dir = os.path.join(nacl_dir, 'build')
+  package_version_dir = os.path.join(nacl_build_dir, 'package_version')
+  package_version = os.path.join(package_version_dir, 'package_version.py')
+  if not os.path.exists(package_version):
+    print "Can't find '%s'" % package_version
+    print 'Presumably you are intentionally building without NativeClient.'
+    print 'Skipping NativeClient toolchain download.'
+    sys.exit(0)
+  sys.path.insert(0, package_version_dir)
+  import package_version
+
+  # BUG:
+  # We remove the --optional-pnacl argument and by default exclude the PNaCl
+  # toolchain from the download. However, if the bot name looks like a PNaCl
+  # SDK bot, then we go ahead and download it. This prevents increasing the
+  # gclient sync time for developers, or standard Chrome bots.
+  if '--optional-pnacl' in args:
+    args.remove('--optional-pnacl')
+    use_pnacl = False
+    buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '')
+    if 'pnacl' in buildbot_name and 'sdk' in buildbot_name:
+      use_pnacl = True
+    if use_pnacl:
+      print '\n*** DOWNLOADING PNACL TOOLCHAIN ***\n'
+    else:
+      args.extend(['--exclude', 'pnacl_newlib'])
+
+  # Only download the ARM gcc toolchain if we are building for ARM
+  # TODO(olonho): we need to invent more reliable way to get build
+  # configuration info, to know if we're building for ARM.
+  if 'target_arch=arm' not in os.environ.get('GYP_DEFINES', ''):
+      args.extend(['--exclude', 'nacl_arm_newlib'])
+
+  args.append('sync')
+  package_version.main(args)
+
+  # Because we are no longer extracting the toolchain, it is best to delete
+  # the old extracted ones so that no stale toolchains are left behind. This
+  # also would catch any stale code that happens to work because it is using
+  # an old extracted toolchain that was left behind.
+  toolchain_dir = os.path.join(nacl_dir, 'toolchain')
+  for toolchain_item in os.listdir(toolchain_dir):
+    toolchain_path = os.path.join(toolchain_dir, toolchain_item)
+    if os.path.isdir(toolchain_path) and not toolchain_item.startswith('.'):
+      shutil.rmtree(toolchain_path)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/build/env_dump.py b/build/env_dump.py
new file mode 100755
index 0000000..21edfe6
--- /dev/null
+++ b/build/env_dump.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script can either source a file and dump the environment changes it
+# makes, or simply dump the current environment as JSON into a file.
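+#
+# Example (illustrative):
+#   env_dump.py --output-json env.json path/to/envsetup.sh
+# writes the changes that envsetup.sh makes to the environment into env.json.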
+
+import json
+import optparse
+import os
+import pipes
+import subprocess
+import sys
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-f', '--output-json',
+                    help='File to dump the environment as JSON into.')
+  parser.add_option(
+      '-d', '--dump-mode', action='store_true',
+      help='Dump the environment to sys.stdout and exit immediately.')
+
+  parser.disable_interspersed_args()
+  options, args = parser.parse_args()
+  if options.dump_mode:
+    if args or options.output_json:
+      parser.error('Cannot specify args or --output-json with --dump-mode.')
+    json.dump(dict(os.environ), sys.stdout)
+  else:
+    if not options.output_json:
+      parser.error('Requires --output-json option.')
+
+    envsetup_cmd = ' '.join(map(pipes.quote, args))
+    full_cmd = [
+        'bash', '-c',
+        '. %s > /dev/null; %s -d' % (envsetup_cmd, os.path.abspath(__file__))
+    ]
+    try:
+      output = subprocess.check_output(full_cmd)
+    except Exception:
+      sys.exit('Error running %s and dumping environment.' % envsetup_cmd)
+
+    env_diff = {}
+    new_env = json.loads(output)
+    for k, val in new_env.items():
+      if k == '_' or (k in os.environ and os.environ[k] == val):
+        continue
+      env_diff[k] = val
+    with open(options.output_json, 'w') as f:
+      json.dump(env_diff, f)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/escape_unicode.py b/build/escape_unicode.py
new file mode 100755
index 0000000..859ba5d
--- /dev/null
+++ b/build/escape_unicode.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Convert any unicode characters found in the input file to C literals."""
+
+import codecs
+import optparse
+import os
+import sys
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  usage = 'Usage: %prog -o <output_dir> <input_file>'
+  parser.set_usage(usage)
+  parser.add_option('-o', dest='output_dir')
+
+  options, arglist = parser.parse_args(argv)
+
+  if not options.output_dir:
+    print "output_dir required"
+    return 1
+
+  if len(arglist) != 2:
+    print "input_file required"
+    return 1
+
+  in_filename = arglist[1]
+
+  if not in_filename.endswith('.utf8'):
+    print "input_file should end in .utf8"
+    return 1
+
+  out_filename = os.path.join(options.output_dir, os.path.basename(
+      os.path.splitext(in_filename)[0]))
+
+  WriteEscapedFile(in_filename, out_filename)
+  return 0
+
+
+def WriteEscapedFile(in_filename, out_filename):
+  input_data = codecs.open(in_filename, 'r', 'utf8').read()
+  with codecs.open(out_filename, 'w', 'ascii') as out_file:
+    for i, char in enumerate(input_data):
+      if ord(char) > 127:
+        out_file.write(repr(char.encode('utf8'))[1:-1])
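+        # A hex digit immediately following the escape would be absorbed into
+        # the generated \xNN sequence when this output is used in a C string
+        # literal, so emit "" to terminate the escape.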
+        next_char = input_data[i + 1:i + 2]
+        if next_char and next_char in '0123456789abcdefABCDEF':
+          out_file.write('""')
+      else:
+        out_file.write(char.encode('ascii'))
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/extract_from_cab.py b/build/extract_from_cab.py
new file mode 100755
index 0000000..080370c
--- /dev/null
+++ b/build/extract_from_cab.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts a single file from a CAB archive."""
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+def run_quiet(*args):
+  """Run 'expand' suppressing noisy output. Returns returncode from process."""
+  popen = subprocess.Popen(args, stdout=subprocess.PIPE)
+  out, _ = popen.communicate()
+  if popen.returncode:
+    # expand emits errors to stdout, so if we fail, then print that out.
+    print out
+  return popen.returncode
+
+def main():
+  if len(sys.argv) != 4:
+    print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
+    return 1
+
+  [cab_path, archived_file, output_dir] = sys.argv[1:]
+
+  # Expand.exe does its work in a temporary directory with a fixed name,
+  # created within the given output directory. This is a problem for
+  # concurrent extractions, so create a unique temp dir within the desired
+  # output directory to work around this limitation.
+  temp_dir = tempfile.mkdtemp(dir=output_dir)
+
+  try:
+    # Invoke the Windows expand utility to extract the file.
+    level = run_quiet('expand', cab_path, '-F:' + archived_file, temp_dir)
+    if level == 0:
+      # Move the output file into place, preserving expand.exe's behavior of
+      # paving over any preexisting file.
+      output_file = os.path.join(output_dir, archived_file)
+      try:
+        os.remove(output_file)
+      except OSError:
+        pass
+      os.rename(os.path.join(temp_dir, archived_file), output_file)
+  finally:
+    shutil.rmtree(temp_dir, True)
+
+  if level != 0:
+    return level
+
+  # The expand utility preserves the modification date and time of the archived
+  # file. Touch the extracted file. This helps build systems that compare the
+  # modification times of input and output files to determine whether to do an
+  # action.
+  os.utime(os.path.join(output_dir, archived_file), None)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/filename_rules.gypi b/build/filename_rules.gypi
new file mode 100644
index 0000000..1bef75f
--- /dev/null
+++ b/build/filename_rules.gypi
@@ -0,0 +1,118 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This gypi file defines the patterns used for determining whether a
+# file is excluded from the build on a given platform.  It is
+# included by common.gypi for chromium_code.
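+#
+# For example (illustrative): in a non-Windows build, files like foo_win.cc or
+# foo_win_unittest.cc and anything under a win/ subdirectory are excluded from
+# 'sources' by the first rule below.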
+
+{
+  'target_conditions': [
+    ['OS!="win" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_win(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)win/'],
+                    ['exclude', '(^|/)win_[^/]*\\.(h|cc)$'] ],
+    }],
+    ['OS!="mac" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_(cocoa|mac)(_unittest)?\\.(h|cc|mm?)$'],
+                    ['exclude', '(^|/)(cocoa|mac)/'] ],
+    }],
+    ['OS!="ios" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ios(_unittest)?\\.(h|cc|mm?)$'],
+                    ['exclude', '(^|/)ios/'] ],
+    }],
+    ['(OS!="mac" and OS!="ios") or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '\\.mm?$' ] ],
+    }],
+    # Do not exclude the linux files on *BSD since most of them can be
+    # shared at this point.
+    # In case a file is not needed, it is going to be excluded later on.
+    # TODO(evan): the above is not correct; we shouldn't build _linux
+    # files on non-linux.
+    ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_linux(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)linux/'],
+      ],
+    }],
+    ['OS!="android" or _toolset=="host" or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_android(_unittest)?\\.cc$'],
+        ['exclude', '(^|/)android/'],
+      ],
+    }],
+    ['OS=="win" and >(nacl_untrusted_build)==0', {
+      'sources/': [
+        ['exclude', '_posix(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)posix/'],
+      ],
+    }],
+    ['<(chromeos)!=1 or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_chromeos(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)chromeos/'],
+      ],
+    }],
+    ['>(nacl_untrusted_build)==0', {
+      'sources/': [
+        ['exclude', '_nacl(_unittest)?\\.(h|cc)$'],
+      ],
+    }],
+    ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_xdg(_unittest)?\\.(h|cc)$'],
+      ],
+    }],
+    ['<(use_x11)!=1 or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_(x|x11)(_interactive_uitest|_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)x11_[^/]*\\.(h|cc)$'],
+        ['exclude', '(^|/)x11/'],
+        ['exclude', '(^|/)x/'],
+      ],
+    }],
+    ['<(toolkit_views)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_views(_browsertest|_unittest)?\\.(h|cc)$'] ]
+    }],
+    ['<(use_aura)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aura(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)aura/'],
+      ]
+    }],
+    ['<(use_aura)==0 or <(use_x11)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aurax11(_browsertest|_unittest)?\\.(h|cc)$'] ]
+    }],
+    ['<(use_aura)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aurawin\\.(h|cc)$'] ]
+    }],
+    ['<(use_aura)==0 or OS!="linux" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_auralinux\\.(h|cc)$'] ]
+    }],
+    ['<(use_ash)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ash(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)ash/'],
+      ]
+    }],
+    ['<(use_ash)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ashwin\\.(h|cc)$'] ]
+    }],
+    ['<(use_ozone)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ozone(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)ozone/'],
+      ]
+    }],
+    ['<(use_ozone_evdev)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_evdev(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)evdev/'],
+      ]
+    }],
+    ['<(ozone_platform_dri)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_dri(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)dri/'],
+      ]
+    }],
+    ['<(use_pango)==0', {
+      'sources/': [ ['exclude', '(^|_)pango(_util|_browsertest|_unittest)?\\.(h|cc)$'], ],
+    }],
+  ]
+}
diff --git a/build/find_isolated_tests.py b/build/find_isolated_tests.py
new file mode 100755
index 0000000..c5b3ab7
--- /dev/null
+++ b/build/find_isolated_tests.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Scans build output directory for .isolated files, calculates their SHA1
+hashes, stores final list in JSON document and then removes *.isolated files
+found (to ensure no stale *.isolated stay around on the next build).
+
+Used to figure out what tests were build in isolated mode to trigger these
+tests to run on swarming.
+
+For more info see:
+https://sites.google.com/a/chromium.org/dev/developers/testing/isolated-testing
+"""
+
+import glob
+import hashlib
+import json
+import optparse
+import os
+import re
+import sys
+
+
+def hash_file(filepath):
+  """Calculates the hash of a file without reading it all in memory at once."""
+  digest = hashlib.sha1()
+  with open(filepath, 'rb') as f:
+    while True:
+      chunk = f.read(1024*1024)
+      if not chunk:
+        break
+      digest.update(chunk)
+  return digest.hexdigest()
+
+
+def main():
+  parser = optparse.OptionParser(
+      usage='%prog --build-dir <path> --output-json <path>',
+      description=sys.modules[__name__].__doc__)
+  parser.add_option(
+      '--build-dir',
+      help='Path to a directory to search for *.isolated files.')
+  parser.add_option(
+      '--output-json',
+      help='File to dump JSON results into.')
+
+  options, _ = parser.parse_args()
+  if not options.build_dir:
+    parser.error('--build-dir option is required')
+  if not options.output_json:
+    parser.error('--output-json option is required')
+
+  result = {}
+
+  # Get the file hash values and output the pair.
+  pattern = os.path.join(options.build_dir, '*.isolated')
+  for filepath in sorted(glob.glob(pattern)):
+    test_name = os.path.splitext(os.path.basename(filepath))[0]
+    if re.match(r'^.+?\.\d$', test_name):
+      # It's a split .isolated file, e.g. foo.0.isolated. Ignore these.
+      continue
+
+    # TODO(csharp): Remove deletion once the isolate tracked dependencies are
+    # inputs for the isolated files.
+    sha1_hash = hash_file(filepath)
+    os.remove(filepath)
+    result[test_name] = sha1_hash
+
+  with open(options.output_json, 'wb') as f:
+    json.dump(result, f)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/gdb-add-index b/build/gdb-add-index
new file mode 100755
index 0000000..687e9f5
--- /dev/null
+++ b/build/gdb-add-index
@@ -0,0 +1,161 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Saves the gdb index for a given binary and its shared library dependencies.
+#
+# This will run gdb index in parallel on a number of binaries using SIGUSR1
+# as the communication mechanism to simulate a semaphore. Because of the
+# nature of this technique, using "set -e" is very difficult. The SIGUSR1
+# terminates a "wait" with an error which we need to interpret.
+#
+# When modifying this code, most of the real logic is in the index_one_file
+# function. The rest is cleanup + semaphore plumbing.
+
+# Cleanup temp directory and ensure all child jobs are dead-dead.
+function on_exit {
+  trap "" EXIT USR1  # Avoid reentrancy.
+
+  local jobs=$(jobs -p)
+  if [ -n "$jobs" ]; then
+    echo -n "Killing outstanding index jobs..."
+    kill -KILL $(jobs -p)
+    wait
+    echo "done"
+  fi
+
+  if [ -f "$DIRECTORY" ]; then
+    echo -n "Removing temp directory $DIRECTORY..."
+    rm -rf $DIRECTORY
+    echo done
+  fi
+}
+
+# Add index to one binary.
+function index_one_file {
+  local file=$1
+  local basename=$(basename "$file")
+  local should_index="${SHOULD_INDEX}"
+
+  local readelf_out=$(readelf -S "$file")
+  if [[ $readelf_out =~ "gdb_index" ]]; then
+    if [ "${REMOVE_INDEX}" = 1 ]; then
+      objcopy --remove-section .gdb_index "$file"
+      echo "Removed index from $basename."
+    else
+      echo "Skipped $basename -- already contains index."
+      should_index=0
+    fi
+  fi
+
+  if [ "${should_index}" = 1 ]; then
+    local start=$(date +"%s%N")
+    echo "Adding index to $basename..."
+
+    gdb -batch "$file" -ex "save gdb-index $DIRECTORY" -ex "quit"
+    local index_file="$DIRECTORY/$basename.gdb-index"
+    if [ -f "$index_file" ]; then
+      objcopy --add-section .gdb_index="$index_file" \
+        --set-section-flags .gdb_index=readonly "$file" "$file"
+      local finish=$(date +"%s%N")
+      local elapsed=$(((finish - start)/1000000))
+      echo "   ...$basename indexed. [${elapsed}ms]"
+    else
+      echo "   ...$basename unindexable."
+    fi
+  fi
+}
+
+# Functions that, when combined, concurrently index all files in the
+# FILES_TO_INDEX array. The global FILES_TO_INDEX is declared in the main body
+# of the script.
+function async_index {
+  # Start a background subshell to run the index command.
+  {
+    index_one_file $1
+    kill -SIGUSR1 $$  # $$ resolves to the parent script.
+    exit 129  # See comment above wait loop at bottom.
+  } &
+}
+
+CUR_FILE_NUM=0
+function index_next {
+  if (( CUR_FILE_NUM >= ${#FILES_TO_INDEX[@]} )); then
+    return
+  fi
+
+  async_index "${FILES_TO_INDEX[CUR_FILE_NUM]}"
+  ((CUR_FILE_NUM += 1)) || true
+}
+
+
+########
+### Main body of the script.
+
+REMOVE_INDEX=0
+SHOULD_INDEX=1
+while getopts ":f:r" opt; do
+  case $opt in
+    f)
+      REMOVE_INDEX=1
+      shift
+      ;;
+    r)
+      REMOVE_INDEX=1
+      SHOULD_INDEX=0
+      shift
+      ;;
+    *)
+      echo "Invalid option: -$OPTARG" >&2
+      ;;
+  esac
+done
+
+if [[ ! $# == 1 ]]; then
+  echo "Usage: $0 [-f] [-r] path-to-binary"
+  echo "  -f forces replacement of an existing index."
+  echo "  -r removes the index section."
+  exit 1
+fi
+
+FILENAME="$1"
+if [[ ! -f "$FILENAME" ]]; then
+  echo "Path $FILENAME does not exist."
+  exit 1
+fi
+
+# Ensure we clean up on exit.
+trap on_exit EXIT
+
+# We're good to go! Create temp directory for index files.
+DIRECTORY=$(mktemp -d)
+echo "Made temp directory $DIRECTORY."
+
+# Create array with the filename and all shared libraries that
+# have the same dirname. The dirname is a signal that these
+# shared libraries were part of the same build as the binary.
+declare -a FILES_TO_INDEX=($FILENAME
+ $(ldd "$FILENAME" 2>/dev/null \
+  | grep $(dirname "$FILENAME") \
+  | sed "s/.*[ \t]\(.*\) (.*/\1/")
+)
+
+# Start concurrent indexing.
+trap index_next USR1
+
+# 4 is an arbitrary default. When changing, remember we are likely IO bound
+# so basing this off the number of cores is not sensible.
+INDEX_TASKS=${INDEX_TASKS:-4}
+for ((i=0;i<${INDEX_TASKS};i++)); do
+  index_next
+done
+
+# Do a wait loop. Bash waits that terminate due to a trap have an exit
+# code > 128. We also ensure that our subshell's "normal" exit occurs with
+# an exit code > 128. This allows us to consider a > 128 exit code as
+# an indication that the loop should continue. Unfortunately, it also means
+# we cannot use set -e since technically the "wait" is failing.
+wait
+while (( $? > 128 )); do
+  wait
+done
diff --git a/build/get_landmines.py b/build/get_landmines.py
new file mode 100755
index 0000000..f15d8db
--- /dev/null
+++ b/build/get_landmines.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This file emits the list of reasons why a particular build needs to be clobbered
+(or a list of 'landmines').
+"""
+
+import sys
+
+import landmine_utils
+
+
+builder = landmine_utils.builder
+distributor = landmine_utils.distributor
+gyp_defines = landmine_utils.gyp_defines
+gyp_msvs_version = landmine_utils.gyp_msvs_version
+platform = landmine_utils.platform
+
+
+def print_landmines():
+  """
+  ALL LANDMINES ARE EMITTED FROM HERE.
+  """
+  if (distributor() == 'goma' and platform() == 'win32' and
+      builder() == 'ninja'):
+    print 'Need to clobber winja goma due to backend cwd cache fix.'
+  if platform() == 'android':
+    print 'Clobber: To delete newly generated mojo class files.'
+  if platform() == 'win' and builder() == 'ninja':
+    print 'Compile on cc_unittests fails due to symbols removed in r185063.'
+  if platform() == 'linux' and builder() == 'ninja':
+    print 'Builders switching from make to ninja will clobber on this.'
+  if platform() == 'mac':
+    print 'Switching from bundle to unbundled dylib (issue 14743002).'
+  if platform() in ('win', 'mac'):
+    print ('Improper dependency for create_nmf.py broke in r240802, '
+           'fixed in r240860.')
+  if (platform() == 'win' and builder() == 'ninja' and
+      gyp_msvs_version() == '2012' and
+      gyp_defines().get('target_arch') == 'x64' and
+      gyp_defines().get('dcheck_always_on') == '1'):
+    print "Switched win x64 trybots from VS2010 to VS2012."
+  if (platform() == 'win' and builder() == 'ninja' and
+      gyp_msvs_version().startswith('2013')):
+    print "Switched win from VS2010 to VS2013."
+    print "Update to VS2013 Update 2."
+  print 'Need to clobber everything due to an IDL change in r154579 (blink)'
+  print 'Need to clobber everything due to gen file moves in r175513 (Blink)'
+  if (platform() != 'ios'):
+    print 'Clobber to get rid of obsolete test plugin after r248358'
+    print 'Clobber to rebuild GN files for V8'
+  print 'Need to clobber everything due to build_nexe change in nacl r13424'
+  print '[chromium-dev] PSA: clobber build needed for IDR_INSPECTOR_* compil...'
+  print 'blink_resources.grd changed: crbug.com/400860'
+  print 'ninja dependency cycle: crbug.com/408192'
+  if platform() == 'android':
+    print 'Clobber: To delete stale generated .java files.'
+    print 'Delete stale generated .java files again. crbug.com/349592'
+
+
+def main():
+  print_landmines()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/get_syzygy_binaries.py b/build/get_syzygy_binaries.py
new file mode 100755
index 0000000..79a186d
--- /dev/null
+++ b/build/get_syzygy_binaries.py
@@ -0,0 +1,446 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A utility script for downloading versioned Syzygy binaries."""
+
+import cStringIO
+import hashlib
+import errno
+import json
+import logging
+import optparse
+import os
+import re
+import shutil
+import stat
+import sys
+import subprocess
+import urllib2
+import zipfile
+
+
+_LOGGER = logging.getLogger(os.path.basename(__file__))
+
+# The URL where official builds are archived.
+_SYZYGY_ARCHIVE_URL = ('http://syzygy-archive.commondatastorage.googleapis.com/'
+    'builds/official/%(revision)s')
+
+# A JSON file containing the state of the download directory. If this file and
+# directory state do not agree, then the binaries will be downloaded and
+# installed again.
+_STATE = '.state'
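+#
+# An illustrative .state file:
+#   {
+#     "revision": "0123456789abcdef0123456789abcdef01234567",
+#     "contents": {"exe/instrument.exe": "<32-char md5 digest>"}
+#   }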
+
+# This matches an integer (an SVN revision number) or a SHA1 value (a GIT hash).
+# The archive exclusively uses lowercase GIT hashes.
+_REVISION_RE = re.compile(r'^(?:\d+|[a-f0-9]{40})$')
+
+# This matches an MD5 hash.
+_MD5_RE = re.compile('^[a-f0-9]{32}$')
+
+# List of resources to be downloaded and installed. These are tuples with the
+# following format:
+# (basename, logging name, relative installation path, extraction filter)
+_RESOURCES = [
+  ('benchmark.zip', 'benchmark', '', None),
+  ('binaries.zip', 'binaries', 'exe', None),
+  ('symbols.zip', 'symbols', 'exe',
+      lambda x: x.filename.endswith('.dll.pdb')),
+  ('include.zip', 'include', 'include', None),
+  ('lib.zip', 'library', 'lib', None)]
+
+
+def _Shell(*cmd, **kw):
+  """Runs |cmd|, returns the results from Popen(cmd).communicate()."""
+  _LOGGER.debug('Executing %s.', cmd)
+  prog = subprocess.Popen(cmd, shell=True, **kw)
+
+  stdout, stderr = prog.communicate()
+  if prog.returncode != 0:
+    raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode))
+  return (stdout, stderr)
+
+
+def _LoadState(output_dir):
+  """Loads the contents of the state file for a given |output_dir|, returning
+  None if it doesn't exist.
+  """
+  path = os.path.join(output_dir, _STATE)
+  if not os.path.exists(path):
+    _LOGGER.debug('No state file found.')
+    return None
+  with open(path, 'rb') as f:
+    _LOGGER.debug('Reading state file: %s', path)
+    try:
+      return json.load(f)
+    except ValueError:
+      _LOGGER.debug('Invalid state file.')
+      return None
+
+
+def _SaveState(output_dir, state, dry_run=False):
+  """Saves the |state| dictionary to the given |output_dir| as a JSON file."""
+  path = os.path.join(output_dir, _STATE)
+  _LOGGER.debug('Writing state file: %s', path)
+  if dry_run:
+    return
+  with open(path, 'wb') as f:
+    f.write(json.dumps(state, sort_keys=True, indent=2))
+
+
+def _Md5(path):
+  """Returns the MD5 hash of the file at |path|, which must exist."""
+  return hashlib.md5(open(path, 'rb').read()).hexdigest()
+
+
+def _StateIsValid(state):
+  """Returns true if the given state structure is valid."""
+  if not isinstance(state, dict):
+    _LOGGER.debug('State must be a dict.')
+    return False
+  r = state.get('revision', None)
+  if not isinstance(r, basestring) or not _REVISION_RE.match(r):
+    _LOGGER.debug('State contains an invalid revision.')
+    return False
+  c = state.get('contents', None)
+  if not isinstance(c, dict):
+    _LOGGER.debug('State must contain a contents dict.')
+    return False
+  for (relpath, md5) in c.iteritems():
+    if not isinstance(relpath, basestring) or len(relpath) == 0:
+      _LOGGER.debug('State contents dict contains an invalid path.')
+      return False
+    if not isinstance(md5, basestring) or not _MD5_RE.match(md5):
+      _LOGGER.debug('State contents dict contains an invalid MD5 digest.')
+      return False
+  return True
+
+
+def _BuildActualState(stored, revision, output_dir):
+  """Builds the actual state using the provided |stored| state as a template.
+  Only examines files listed in the stored state, causing the script to ignore
+  files that have been added to the directories locally. |stored| must be a
+  valid state dictionary.
+  """
+  contents = {}
+  state = { 'revision': revision, 'contents': contents }
+  for relpath, md5 in stored['contents'].iteritems():
+    abspath = os.path.abspath(os.path.join(output_dir, relpath))
+    if os.path.isfile(abspath):
+      m = _Md5(abspath)
+      contents[relpath] = m
+
+  return state
+
+
+def _StatesAreConsistent(stored, actual):
+  """Validates whether two state dictionaries are consistent. Both must be valid
+  state dictionaries. Additional entries in |actual| are ignored.
+  """
+  if stored['revision'] != actual['revision']:
+    _LOGGER.debug('Mismatched revision number.')
+    return False
+  cont_stored = stored['contents']
+  cont_actual = actual['contents']
+  for relpath, md5 in cont_stored.iteritems():
+    if relpath not in cont_actual:
+      _LOGGER.debug('Missing content: %s', relpath)
+      return False
+    if md5 != cont_actual[relpath]:
+      _LOGGER.debug('Modified content: %s', relpath)
+      return False
+  return True
+
+
+def _GetCurrentState(revision, output_dir):
+  """Loads the current state and checks to see if it is consistent. Returns
+  a tuple (state, bool). The returned state will always be valid, even if an
+  invalid state is present on disk.
+  """
+  stored = _LoadState(output_dir)
+  if not _StateIsValid(stored):
+    _LOGGER.debug('State is invalid.')
+    # Return a valid but empty state.
+    return ({'revision': '0', 'contents': {}}, False)
+  actual = _BuildActualState(stored, revision, output_dir)
+  # If the script has been modified, consider the state invalid.
+  path = os.path.join(output_dir, _STATE)
+  if os.path.getmtime(__file__) > os.path.getmtime(path):
+    return (stored, False)
+  # Otherwise, explicitly validate the state.
+  if not _StatesAreConsistent(stored, actual):
+    return (stored, False)
+  return (stored, True)
+
+
+def _DirIsEmpty(path):
+  """Returns true if the given directory is empty, false otherwise."""
+  for root, dirs, files in os.walk(path):
+    return not dirs and not files
+
+
+def _RmTreeHandleReadOnly(func, path, exc):
+  """An error handling function for use with shutil.rmtree. This will
+  detect failures to remove read-only files, and will change their properties
+  prior to removing them. This is necessary on Windows as os.remove will return
+  an access error for read-only files, and git repos contain read-only
+  pack/index files.
+  """
+  excvalue = exc[1]
+  if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES:
+    _LOGGER.debug('Removing read-only path: %s', path)
+    os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
+    func(path)
+  else:
+    raise
+
+
+def _RmTree(path):
+  """A wrapper of shutil.rmtree that handles read-only files."""
+  shutil.rmtree(path, ignore_errors=False, onerror=_RmTreeHandleReadOnly)
+
+
+def _CleanState(output_dir, state, dry_run=False):
+  """Cleans up files/directories in |output_dir| that are referenced by
+  the given |state|. Raises an error if there are local changes. Returns a
+  dictionary of files that were deleted.
+  """
+  _LOGGER.debug('Deleting files from previous installation.')
+  deleted = {}
+
+  # Generate a list of files to delete, relative to |output_dir|.
+  contents = state['contents']
+  files = sorted(contents.keys())
+
+  # Try to delete the files. Keep track of directories to delete as well.
+  dirs = {}
+  for relpath in files:
+    fullpath = os.path.join(output_dir, relpath)
+    fulldir = os.path.dirname(fullpath)
+    dirs[fulldir] = True
+    if os.path.exists(fullpath):
+      # If somehow the file has become a directory, complain about it.
+      if os.path.isdir(fullpath):
+        raise Exception('Directory exists where file expected: %s' % fullpath)
+
+      # Double check that the file doesn't have local changes. If it does
+      # then refuse to delete it.
+      if relpath in contents:
+        stored_md5 = contents[relpath]
+        actual_md5 = _Md5(fullpath)
+        if actual_md5 != stored_md5:
+          raise Exception('File has local changes: %s' % fullpath)
+
+      # The file is unchanged so it can safely be deleted.
+      _LOGGER.debug('Deleting file "%s".', fullpath)
+      deleted[relpath] = True
+      if not dry_run:
+        os.unlink(fullpath)
+
+  # Sort directories from longest name to shortest. This lets us remove empty
+  # directories from the most nested paths first.
+  dirs = sorted(dirs.keys(), key=lambda x: len(x), reverse=True)
+  for p in dirs:
+    if os.path.exists(p) and _DirIsEmpty(p):
+      _LOGGER.debug('Deleting empty directory "%s".', p)
+      if not dry_run:
+        _RmTree(p)
+
+  return deleted
+
+
+def _Download(url):
+  """Downloads the given URL and returns the contents as a string."""
+  response = urllib2.urlopen(url)
+  if response.code != 200:
+    raise RuntimeError('Failed to download "%s".' % url)
+  return response.read()
+
+
+def _InstallBinaries(options, deleted={}):
+  """Installs Syzygy binaries. This assumes that the output directory has
+  already been cleaned, as it will refuse to overwrite existing files."""
+  contents = {}
+  state = { 'revision': options.revision, 'contents': contents }
+  archive_url = _SYZYGY_ARCHIVE_URL % { 'revision': options.revision }
+  for (base, name, subdir, filt) in _RESOURCES:
+    # Create the output directory if it doesn't exist.
+    fulldir = os.path.join(options.output_dir, subdir)
+    if os.path.isfile(fulldir):
+      raise Exception('File exists where a directory needs to be created: %s' %
+                      fulldir)
+    if not os.path.exists(fulldir):
+      _LOGGER.debug('Creating directory: %s', fulldir)
+      if not options.dry_run:
+        os.makedirs(fulldir)
+
+    # Download the archive.
+    url = archive_url + '/' + base
+    _LOGGER.debug('Retrieving %s archive at "%s".', name, url)
+    data = _Download(url)
+
+    _LOGGER.debug('Unzipping %s archive.', name)
+    archive = zipfile.ZipFile(cStringIO.StringIO(data))
+    for entry in archive.infolist():
+      if not filt or filt(entry):
+        fullpath = os.path.normpath(os.path.join(fulldir, entry.filename))
+        relpath = os.path.relpath(fullpath, options.output_dir)
+        if os.path.exists(fullpath):
+          # If in a dry-run, take into account the fact that the file *would*
+          # have been deleted.
+          if options.dry_run and relpath in deleted:
+            pass
+          else:
+            raise Exception('Path already exists: %s' % fullpath)
+
+        # Extract the file and update the state dictionary.
+        _LOGGER.debug('Extracting "%s".', fullpath)
+        if not options.dry_run:
+          archive.extract(entry.filename, fulldir)
+          md5 = _Md5(fullpath)
+          contents[relpath] = md5
+          if sys.platform == 'cygwin':
+            os.chmod(fullpath, os.stat(fullpath).st_mode | stat.S_IXUSR)
+
+  return state
+
+
+def _ParseCommandLine():
+  """Parses the command-line and returns an options structure."""
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--dry-run', action='store_true', default=False,
+      help='If true then will simply list actions that would be performed.')
+  option_parser.add_option('--force', action='store_true', default=False,
+      help='Force an installation even if the binaries are up to date.')
+  option_parser.add_option('--output-dir', type='string',
+      help='The path where the binaries will be installed. Existing binaries '
+           'will only be overwritten if not up to date.')
+  option_parser.add_option('--overwrite', action='store_true', default=False,
+      help='If specified then the installation will happily delete and rewrite '
+           'the entire output directory, blasting any local changes.')
+  option_parser.add_option('--revision', type='string',
+      help='The SVN revision or GIT hash associated with the required version.')
+  option_parser.add_option('--revision-file', type='string',
+      help='A text file containing an SVN revision or GIT hash.')
+  option_parser.add_option('--verbose', dest='log_level', action='store_const',
+      default=logging.INFO, const=logging.DEBUG,
+      help='Enables verbose logging.')
+  option_parser.add_option('--quiet', dest='log_level', action='store_const',
+      default=logging.INFO, const=logging.ERROR,
+      help='Disables all output except for errors.')
+  options, args = option_parser.parse_args()
+  if args:
+    option_parser.error('Unexpected arguments: %s' % args)
+  if not options.output_dir:
+    option_parser.error('Must specify --output-dir.')
+  if not options.revision and not options.revision_file:
+    option_parser.error('Must specify one of --revision or --revision-file.')
+  if options.revision and options.revision_file:
+    option_parser.error('Must not specify both --revision and --revision-file.')
+
+  # Configure logging.
+  logging.basicConfig(level=options.log_level)
+
+  # If a revision file has been specified then read it.
+  if options.revision_file:
+    with open(options.revision_file, 'rb') as f:
+      options.revision = f.read().strip()
+    _LOGGER.debug('Parsed revision "%s" from file "%s".',
+                 options.revision, options.revision_file)
+
+  # Ensure that the specified SVN revision or GIT hash is valid.
+  if not _REVISION_RE.match(options.revision):
+    option_parser.error('Must specify a valid SVN or GIT revision.')
+
+  # This just makes output prettier to read.
+  options.output_dir = os.path.normpath(options.output_dir)
+
+  return options
+
+
+def _RemoveOrphanedFiles(options):
+  """This is run on non-Windows systems to remove orphaned files that may have
+  been downloaded by a previous version of this script.
+  """
+  # Reconfigure logging to output info messages. This will allow inspection of
+  # cleanup status on non-Windows buildbots.
+  _LOGGER.setLevel(logging.INFO)
+
+  output_dir = os.path.abspath(options.output_dir)
+
+  # We only want to clean up the folder in 'src/third_party/syzygy', and we
+  # expect to be called with that as an output directory. This guards against
+  # deleting unrelated files if the script is run from an alternate location,
+  # or isn't invoked from the gclient hooks.
+  expected_syzygy_dir = os.path.abspath(os.path.join(
+      os.path.dirname(__file__), '..', 'third_party', 'syzygy'))
+  expected_output_dir = os.path.join(expected_syzygy_dir, 'binaries')
+  if expected_output_dir != output_dir:
+    _LOGGER.info('Unexpected output directory, skipping cleanup.')
+    return
+
+  if not os.path.isdir(expected_syzygy_dir):
+    _LOGGER.info('Output directory does not exist, skipping cleanup.')
+    return
+
+  def OnError(function, path, excinfo):
+    """Logs error encountered by shutil.rmtree."""
+    _LOGGER.error('Error when running %s(%s)', function, path, exc_info=excinfo)
+
+  _LOGGER.info('Removing orphaned files from %s', expected_syzygy_dir)
+  if not options.dry_run:
+    shutil.rmtree(expected_syzygy_dir, True, OnError)
+
+
+def main():
+  options = _ParseCommandLine()
+
+  if options.dry_run:
+    _LOGGER.debug('Performing a dry-run.')
+
+  # We only care about Windows platforms, as the Syzygy binaries aren't used
+  # elsewhere. However, there was a short period of time where this script
+  # wasn't gated on OS types, and those OSes downloaded and installed binaries.
+  # This cleans up orphaned files on those operating systems.
+  if sys.platform not in ('win32', 'cygwin'):
+    return _RemoveOrphanedFiles(options)
+
+  # Load the current installation state, and validate it against the
+  # requested installation.
+  state, is_consistent = _GetCurrentState(options.revision, options.output_dir)
+
+  # Decide whether or not an install is necessary.
+  if options.force:
+    _LOGGER.debug('Forcing reinstall of binaries.')
+  elif is_consistent:
+    # Avoid doing any work if the contents of the directory are consistent.
+    _LOGGER.debug('State unchanged, no reinstall necessary.')
+    return
+
+  # Under normal logging this is the only message that will be reported.
+  _LOGGER.info('Installing revision %s Syzygy binaries.',
+               options.revision[0:12])
+
+  # Clean up the old state to begin with.
+  deleted = []
+  if options.overwrite:
+    if os.path.exists(options.output_dir):
+      # If overwrite was specified then take a heavy-handed approach.
+      _LOGGER.debug('Deleting entire installation directory.')
+      if not options.dry_run:
+        _RmTree(options.output_dir)
+  else:
+    # Otherwise only delete things that the previous installation put in place,
+    # and take care to preserve any local changes.
+    deleted = _CleanState(options.output_dir, state, options.dry_run)
+
+  # Install the new binaries. In a dry-run this will actually download the
+  # archives, but it won't write anything to disk.
+  state = _InstallBinaries(options, deleted)
+
+  # Build and save the state for the directory.
+  _SaveState(options.output_dir, state, options.dry_run)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/git-hooks/OWNERS b/build/git-hooks/OWNERS
new file mode 100644
index 0000000..3e327dc
--- /dev/null
+++ b/build/git-hooks/OWNERS
@@ -0,0 +1,3 @@
+set noparent
+szager@chromium.org
+cmp@chromium.org
diff --git a/build/git-hooks/pre-commit b/build/git-hooks/pre-commit
new file mode 100755
index 0000000..41b5963
--- /dev/null
+++ b/build/git-hooks/pre-commit
@@ -0,0 +1,60 @@
+#!/bin/sh
+
+submodule_diff() {
+  if test -n "$2"; then
+    git diff-tree -r --ignore-submodules=dirty "$1" "$2" | grep -e '^:160000' -e '^:...... 160000' | xargs
+  else
+    git diff-index --cached --ignore-submodules=dirty "$1" | grep -e '^:160000' -e '^:...... 160000' | xargs
+  fi
+}
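+
+# Note: 160000 is the mode git uses for gitlink (submodule) entries, so the
+# greps above match only submodule changes.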
+
+if git rev-parse --verify --quiet --no-revs MERGE_HEAD; then
+  merge_base=$(git merge-base HEAD MERGE_HEAD)
+  if test -z "$(submodule_diff $merge_base HEAD)"; then
+    # Most up-to-date submodules are in MERGE_HEAD.
+    head_ref=MERGE_HEAD
+  else
+    # Most up-to-date submodules are in HEAD.
+    head_ref=HEAD
+  fi
+else
+  # No merge in progress. Submodules must match HEAD.
+  head_ref=HEAD
+fi
+
+submods=$(submodule_diff $head_ref)
+if test "$submods"; then
+  echo "You are trying to commit changes to the following submodules:" 1>&2
+  echo 1>&2
+  echo $submods | cut -d ' ' -f 6 | sed 's/^/  /g' 1>&2
+  cat <<EOF 1>&2
+
+Submodule commits are not allowed.  Please run:
+
+  git status --ignore-submodules=dirty
+
+and/or:
+
+  git diff-index --cached --ignore-submodules=dirty HEAD
+
+... to see what's in your index.
+
+If you're really and truly trying to roll the version of a submodule, you should
+commit the new version to DEPS instead.
+EOF
+  exit 1
+fi
+
+gitmodules_diff() {
+  git diff-index --cached "$1" .gitmodules
+}
+
+if [ "$(git ls-files .gitmodules)" ] && [ "$(gitmodules_diff $head_ref)" ]; then
+  cat <<EOF 1>&2
+You are trying to commit a change to .gitmodules.  That is not allowed.
+To make changes to submodule names/paths, edit DEPS.
+EOF
+  exit 1
+fi
+
+exit 0
diff --git a/build/gn_helpers.py b/build/gn_helpers.py
new file mode 100644
index 0000000..3b0647d
--- /dev/null
+++ b/build/gn_helpers.py
@@ -0,0 +1,39 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions useful when writing scripts that are run from GN's
+exec_script function."""
+
+class GNException(Exception):
+  pass
+
+
+def ToGNString(value, allow_dicts = True):
+  """Prints the given value to stdout.
+
+  allow_dicts indicates if this function will allow converting dictionaries
+  to GN scopes. This is only possible at the top level, you can't nest a
+  GN scope in a list, so this should be set to False for recursive calls."""
+  if isinstance(value, str):
+    if value.find('\n') >= 0:
+      raise GNException("Trying to print a string with a newline in it.")
+    return '"' + value.replace('"', '\\"') + '"'
+
+  if isinstance(value, list):
+    return '[ %s ]' % ', '.join(ToGNString(v) for v in value)
+
+  if isinstance(value, dict):
+    if not allow_dicts:
+      raise GNException("Attempting to recursively print a dictionary.")
+    result = ""
+    for key in value:
+      if not isinstance(key, str):
+        raise GNException("Dictionary key is not a string.")
+      result += "%s = %s\n" % (key, ToGNString(value[key], False))
+    return result
+
+  if isinstance(value, int):
+    return str(value)
+
+  raise GNException("Unsupported type when printing to GN.")
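+
+
+# Example (illustrative): ToGNString({'sources': ['a.cc', 'b.cc'], 'count': 2})
+# returns (key order may vary):
+#   sources = [ "a.cc", "b.cc" ]
+#   count = 2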
diff --git a/build/gn_run_binary.py b/build/gn_run_binary.py
new file mode 100644
index 0000000..7d83f61
--- /dev/null
+++ b/build/gn_run_binary.py
@@ -0,0 +1,22 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script for GN to run an arbitrary binary. See compiled_action.gni.
+
+Run with:
+  python gn_run_binary.py <binary_name> [args ...]
+"""
+
+import sys
+import subprocess
+
+# This script is designed to run binaries produced by the current build. We
+# always prefix it with "./" to avoid picking up system versions that might
+# also be on the path.
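+#
+# For example (illustrative), "python gn_run_binary.py my_tool --out foo.h"
+# ends up running "./my_tool --out foo.h" and exits with that command's
+# return code.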
+path = './' + sys.argv[1]
+
+# The rest of the arguments are passed directly to the executable.
+args = [path] + sys.argv[2:]
+
+sys.exit(subprocess.call(args))
diff --git a/build/go/go.py b/build/go/go.py
new file mode 100755
index 0000000..2e4f99b
--- /dev/null
+++ b/build/go/go.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script invokes the go build tool.
+Must be called as follows:
+python go.py <go-binary> <build directory> <output file> <src directory>
+<CGO_CFLAGS> <CGO_LDFLAGS> <go-binary options>
+e.g.
+python go.py /usr/lib/google-golang/bin/go out/build out/a.out .. "-I."
+"-L. -ltest" test -c test/test.go
+"""
+
+import argparse
+import os
+import shutil
+import sys
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('go_binary')
+  parser.add_argument('build_directory')
+  parser.add_argument('output_file')
+  parser.add_argument('src_root')
+  parser.add_argument('cgo_cflags')
+  parser.add_argument('cgo_ldflags')
+  parser.add_argument('go_option', nargs='*')
+  args = parser.parse_args()
+  go_binary = args.go_binary
+  build_dir = args.build_directory
+  out_file = os.path.abspath(args.output_file)
+  # The src directory specified is relative. We need this as an absolute path.
+  src_root = os.path.abspath(args.src_root)
+  # GOPATH must be absolute, and point to one directory up from |src_root|.
+  go_path = os.path.abspath(os.path.join(src_root, ".."))
+  go_options = args.go_option
+  try:
+    shutil.rmtree(build_dir, True)
+    os.mkdir(build_dir)
+  except OSError:  # e.g. the build directory already exists.
+    pass
+  old_directory = os.getcwd()
+  os.chdir(build_dir)
+  os.environ["GOPATH"] = go_path
+  os.environ["CGO_CFLAGS"] = args.cgo_cflags
+  os.environ["CGO_LDFLAGS"] = args.cgo_ldflags
+  os.system("%s %s" % (go_binary, " ".join(go_options)))
+  out_files = [ f for f in os.listdir(".") if os.path.isfile(f)]
+  if out_files:
+    shutil.move(out_files[0], out_file)
+  os.chdir(old_directory)
+  try:
+    shutil.rmtree(build_dir, True)
+  except OSError:
+    pass
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/go/rules.gni b/build/go/rules.gni
new file mode 100644
index 0000000..ab703d6
--- /dev/null
+++ b/build/go/rules.gni
@@ -0,0 +1,62 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # By default, there is no go build tool, because go builds are not supported.
+  go_build_tool = ""
+}
+
+# Declare a go test binary target.
+#
+# The target generates a go test executable, linking against other C code,
+# which is compiled into a static library and linked against Go.
+#
+# Only works on linux. |go_build_tool| must be set to the absolute path
+# of the go build tool.
+#
+# Variables (all required)
+#   sources: list of .go files to compile
+#   static_library_sources: list of C sources needed for the static library
+#   deps: dependencies for the static library
+
+template("go_test_binary") {
+  # Only available on linux for now.
+  assert(is_linux)
+  assert(defined(invoker.sources))
+  assert(go_build_tool != "")
+
+  static_library_name = target_name + "_static_library"
+
+  static_library(static_library_name) {
+    sources = invoker.static_library_sources
+    deps = invoker.deps
+    complete_static_lib = true
+  }
+
+  action(target_name) {
+    deps = [
+      ":$static_library_name",
+    ]
+    script = "//build/go/go.py"
+    outputs = [ "${target_out_dir}/${target_name}" ]
+    # Since go test does not permit specifying an output directory or output
+    # binary name, we create a temporary build directory, and the python
+    # script will later identify the output, copy it to the target location,
+    # and clean up the temporary build directory.
+    build_dir = "${target_out_dir}/${target_name}_build"
+    args = [
+      "--", 
+      "${go_build_tool}",
+      rebase_path(build_dir, root_build_dir),
+      rebase_path(target_out_dir, root_build_dir) + "/${target_name}",
+      rebase_path("//", root_build_dir),
+      "-I" + rebase_path("//"),
+      " -L" + rebase_path(target_out_dir) +
+      " -L" + rebase_path(root_build_dir + "/obj/third_party/libevent") +
+      " -l" + static_library_name +
+      " -lstdc++ -lpthread -lm -lglib-2.0 -levent",
+      "test", "-c",
+    ] + rebase_path(invoker.sources, build_dir)
+  }
+}
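+
+# Example use (a sketch; the target and file names are hypothetical):
+#
+#   go_test_binary("foo_go_test") {
+#     sources = [ "foo_test.go" ]
+#     static_library_sources = [ "foo_helper.c" ]
+#     deps = [ "//base" ]
+#   }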
diff --git a/build/grit_action.gypi b/build/grit_action.gypi
new file mode 100644
index 0000000..ab7a70b
--- /dev/null
+++ b/build/grit_action.gypi
@@ -0,0 +1,42 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit in a
+# consistent manner. To use this the following variables need to be
+# defined:
+#   grit_grd_file: string: grd file path
+#   grit_out_dir: string: the output directory path
+
+# It would be really nice to do this with a rule instead of actions, but it
+# would need to determine inputs and outputs via grit_info on a per-file
+# basis. GYP rules don't currently support that. They could be extended to
+# do this, but then every generator would need to be updated to handle this.
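+#
+# A typical use looks like this (a sketch; the action and file names are
+# illustrative, and grit_out_dir is defined by the including target):
+#   'actions': [
+#     {
+#       'action_name': 'generate_my_resources',
+#       'variables': {
+#         'grit_grd_file': 'my_resources.grd',
+#       },
+#       'includes': [ '../build/grit_action.gypi' ],
+#     },
+#   ],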
+
+{
+  'variables': {
+    'grit_cmd': ['python', '<(DEPTH)/tools/grit/grit.py'],
+    'grit_resource_ids%': '<(DEPTH)/tools/gritsettings/resource_ids',
+    # This makes it possible to add more defines in specific targets,
+    # instead of build/common.gypi .
+    'grit_additional_defines%': [],
+    'grit_rc_header_format%': [],
+  },
+  'inputs': [
+    '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+        '--inputs <(grit_grd_file) -f "<(grit_resource_ids)")',
+  ],
+  'outputs': [
+    '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+        '--outputs \'<(grit_out_dir)\' '
+        '<(grit_grd_file) -f "<(grit_resource_ids)")',
+  ],
+  'action': ['<@(grit_cmd)',
+             '-i', '<(grit_grd_file)', 'build',
+             '-f', '<(grit_resource_ids)',
+             '-o', '<(grit_out_dir)',
+             '<@(grit_defines)',
+             '<@(grit_additional_defines)',
+             '<@(grit_rc_header_format)'],
+  'message': 'Generating resources from <(grit_grd_file)',
+}
diff --git a/build/grit_target.gypi b/build/grit_target.gypi
new file mode 100644
index 0000000..179f986
--- /dev/null
+++ b/build/grit_target.gypi
@@ -0,0 +1,31 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target that will have one or more
+# uses of grit_action.gypi. To use this the following variables need to be
+# defined:
+#   grit_out_dir: string: the output directory path
+
+# DO NOT USE THIS FILE. Instead, use qualified includes.
+# TODO: Convert everything to qualified includes, and delete this file,
+# http://crbug.com/401588
+{
+  'conditions': [
+    # If the target is a direct binary, it needs to be able to find the header,
+    # otherwise it is probably a supporting target just for grit, so the
+    # include dir needs to be set on anything that depends on this action.
+    ['_type=="executable" or _type=="shared_library" or \
+      _type=="loadable_module" or _type=="static_library"', {
+      'include_dirs': [
+        '<(grit_out_dir)',
+      ],
+    }, {
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(grit_out_dir)',
+        ],
+      },
+    }],
+  ],
+}
diff --git a/build/gyp_chromium b/build/gyp_chromium
new file mode 100755
index 0000000..326919c
--- /dev/null
+++ b/build/gyp_chromium
@@ -0,0 +1,322 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is a wrapper for Chromium that adds some support for how GYP
+# is invoked by Chromium beyond what can be done in the gclient hooks.
+
+import glob
+import gyp_environment
+import os
+import re
+import shlex
+import subprocess
+import string
+import sys
+import vs_toolchain
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+
+sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+import gyp
+
+# Assume this file is in a one-level-deep subdirectory of the source root.
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# Add paths so that pymod_do_main(...) can import files.
+sys.path.insert(1, os.path.join(chrome_src, 'build', 'android', 'gyp'))
+sys.path.insert(1, os.path.join(chrome_src, 'tools'))
+sys.path.insert(1, os.path.join(chrome_src, 'tools', 'generate_shim_headers'))
+sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit'))
+sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'chromecast', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'native_client', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'native_client_sdk', 'src',
+    'build_tools'))
+sys.path.insert(1, os.path.join(chrome_src, 'remoting', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'liblouis'))
+sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'WebKit',
+    'Source', 'build', 'scripts'))
+
+# On Windows, Psyco shortens warm runs of build/gyp_chromium by about
+# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70
+# seconds.  However, memory usage of build/gyp_chromium with Psyco
+# maxes out at about 158 MB vs. 132 MB without it.
+#
+# Psyco uses native libraries, so we need to load a different
+# installation depending on which OS we are running under. It has not
+# been tested whether using Psyco on our Mac and Linux builds is worth
+# it (the GYP running time is a lot shorter, so the JIT startup cost
+# may not be worth it).
+if sys.platform == 'win32':
+  try:
+    sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32'))
+    import psyco
+  except ImportError:
+    psyco = None
+else:
+  psyco = None
+
+
+def GetSupplementalFiles():
+  """Returns a list of the supplemental files that are included in all GYP
+  sources."""
+  return glob.glob(os.path.join(chrome_src, '*', 'supplement.gypi'))
+
+
+def ProcessGypDefinesItems(items):
+  """Converts a list of strings to a list of key-value pairs."""
+  result = []
+  for item in items:
+    tokens = item.split('=', 1)
+    # Some GYP variables have hyphens, which we don't support.
+    if len(tokens) == 2:
+      result += [(tokens[0], tokens[1])]
+    else:
+      # No value supplied, treat it as a boolean and set it. Note that we
+      # use the string '1' here so we have a consistent definition whether
+      # you do 'foo=1' or 'foo'.
+      result += [(tokens[0], '1')]
+  return result
+
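+# For example (illustrative), ProcessGypDefinesItems(['foo=1', 'bar'])
+# returns [('foo', '1'), ('bar', '1')].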
+
+def GetGypVars(supplemental_files):
+  """Returns a dictionary of all GYP vars."""
+  # Find the .gyp directory in the user's home directory.
+  home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
+  if home_dot_gyp:
+    home_dot_gyp = os.path.expanduser(home_dot_gyp)
+  if not home_dot_gyp:
+    home_vars = ['HOME']
+    if sys.platform in ('cygwin', 'win32'):
+      home_vars.append('USERPROFILE')
+    for home_var in home_vars:
+      home = os.getenv(home_var)
+      if home is not None:
+        home_dot_gyp = os.path.join(home, '.gyp')
+        if not os.path.exists(home_dot_gyp):
+          home_dot_gyp = None
+        else:
+          break
+
+  if home_dot_gyp:
+    include_gypi = os.path.join(home_dot_gyp, "include.gypi")
+    if os.path.exists(include_gypi):
+      supplemental_files += [include_gypi]
+
+  # GYP defines from the supplemental.gypi files.
+  supp_items = []
+  for supplement in supplemental_files:
+    with open(supplement, 'r') as f:
+      try:
+        file_data = eval(f.read(), {'__builtins__': None}, None)
+      except SyntaxError, e:
+        e.filename = os.path.abspath(supplement)
+        raise
+      variables = file_data.get('variables', [])
+      for v in variables:
+        supp_items += [(v, str(variables[v]))]
+
+  # GYP defines from the environment.
+  env_items = ProcessGypDefinesItems(
+      shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+  # GYP defines from the command line. We can't use optparse since we want
+  # to ignore all arguments other than "-D".
+  cmdline_input_items = []
+  for i in range(1, len(sys.argv)):
+    if sys.argv[i].startswith('-D'):
+      if sys.argv[i] == '-D' and i + 1 < len(sys.argv):
+        cmdline_input_items += [sys.argv[i + 1]]
+      elif len(sys.argv[i]) > 2:
+        cmdline_input_items += [sys.argv[i][2:]]
+  cmdline_items = ProcessGypDefinesItems(cmdline_input_items)
+
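+  # dict() keeps the last binding for duplicate keys, so command-line -D
+  # definitions override GYP_DEFINES, which override supplemental files.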
+  vars_dict = dict(supp_items + env_items + cmdline_items)
+  return vars_dict
+
+
+def GetOutputDirectory():
+  """Returns the output directory that GYP will use."""
+  # GYP generator flags from the command line. We can't use optparse since we
+  # want to ignore all arguments other than "-G".
+  needle = '-Goutput_dir='
+  cmdline_input_items = []
+  for item in sys.argv[1:]:
+    if item.startswith(needle):
+      return item[len(needle):]
+
+  env_items = shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', ''))
+  needle = 'output_dir='
+  for item in env_items:
+    if item.startswith(needle):
+      return item[len(needle):]
+
+  return "out"
+
+
+def additional_include_files(supplemental_files, args=[]):
+  """
+  Returns a list of additional (.gypi) files to include, without duplicating
+  ones that are already specified on the command line. The list of supplemental
+  include files is passed in as an argument.
+  """
+  # Determine the include files specified on the command line.
+  # This doesn't cover all the different option formats you can use,
+  # but it's mainly intended to avoid duplicating flags on the automatic
+  # makefile regeneration, which only uses this format.
+  specified_includes = set()
+  for arg in args:
+    if arg.startswith('-I') and len(arg) > 2:
+      specified_includes.add(os.path.realpath(arg[2:]))
+
+  result = []
+  def AddInclude(path):
+    if os.path.realpath(path) not in specified_includes:
+      result.append(path)
+
+  # Always include common.gypi.
+  AddInclude(os.path.join(script_dir, 'common.gypi'))
+
+  # Optionally add supplemental .gypi files if present.
+  for supplement in supplemental_files:
+    AddInclude(supplement)
+
+  return result
+
+
+if __name__ == '__main__':
+  # Disabling garbage collection saves about 1 second out of 16 on a Linux
+  # z620 workstation. Since this is a short-lived process, it's not a problem
+  # to leak a few cyclic references in order to spare the CPU cycles for
+  # scanning the heap.
+  import gc
+  gc.disable()
+
+  args = sys.argv[1:]
+
+  use_analyzer = len(args) and args[0] == '--analyzer'
+  if use_analyzer:
+    args.pop(0)
+    os.environ['GYP_GENERATORS'] = 'analyzer'
+    args.append('-Gconfig_path=' + args.pop(0))
+    args.append('-Ganalyzer_output_path=' + args.pop(0))
+
+  if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
+    print 'Skipping gyp_chromium due to GYP_CHROMIUM_NO_ACTION env var.'
+    sys.exit(0)
+
+  # Use the Psyco JIT if available.
+  if psyco:
+    psyco.profile()
+    print "Enabled Psyco JIT."
+
+  # Fall back on hermetic python if we happen to get run under cygwin.
+  # TODO(bradnelson): take this out once this issue is fixed:
+  #    http://code.google.com/p/gyp/issues/detail?id=177
+  if sys.platform == 'cygwin':
+    import find_depot_tools
+    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
+    python_dir = sorted(glob.glob(os.path.join(depot_tools_path,
+                                               'python2*_bin')))[-1]
+    env = os.environ.copy()
+    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
+    p = subprocess.Popen(
+       [os.path.join(python_dir, 'python.exe')] + sys.argv,
+       env=env, shell=False)
+    p.communicate()
+    sys.exit(p.returncode)
+
+  # This could give false positives since it doesn't actually do real option
+  # parsing.  Oh well.
+  gyp_file_specified = False
+  for arg in args:
+    if arg.endswith('.gyp'):
+      gyp_file_specified = True
+      break
+
+  gyp_environment.SetEnvironment()
+
+  # If we didn't get a file, check an env var, and then fall back to
+  # assuming 'all.gyp' from the same directory as the script.
+  if not gyp_file_specified:
+    gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
+    if gyp_file:
+      # Note that CHROMIUM_GYP_FILE values can't have backslashes as
+      # path separators even on Windows due to the use of shlex.split().
+      args.extend(shlex.split(gyp_file))
+    else:
+      args.append(os.path.join(script_dir, 'all.gyp'))
+
+  # There shouldn't be a circular dependency relationship between .gyp files,
+  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
+  # currently exist.  The check for circular dependencies is currently
+  # bypassed on other platforms, but is left enabled on the Mac, where a
+  # violation of the rule causes Xcode to misbehave badly.
+  # TODO(mark): Find and kill remaining circular dependencies, and remove this
+  # option.  http://crbug.com/35878.
+  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
+  # list.
+  if sys.platform not in ('darwin',):
+    args.append('--no-circular-check')
+
+  # We explicitly don't support the make gyp generator (crbug.com/348686). Be
+  # nice and fail here, rather than choking in gyp.
+  if re.search(r'(^|,|\s)make($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
+    print 'Error: make gyp generator not supported (check GYP_GENERATORS).'
+    sys.exit(1)
+
+  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
+  # to enforce syntax checking.
+  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
+  if syntax_check and int(syntax_check):
+    args.append('--check')
+
+  supplemental_includes = GetSupplementalFiles()
+  gyp_vars_dict = GetGypVars(supplemental_includes)
+
+  # TODO(dmikurube): Remove these checks and messages after a while.
+  if ('linux_use_tcmalloc' in gyp_vars_dict or
+      'android_use_tcmalloc' in gyp_vars_dict):
+    print '*****************************************************************'
+    print '"linux_use_tcmalloc" and "android_use_tcmalloc" are deprecated!'
+    print '-----------------------------------------------------------------'
+    print 'You specified "linux_use_tcmalloc" or "android_use_tcmalloc" in'
+    print 'your GYP_DEFINES. Please switch to "use_allocator" now.'
+    print 'See http://crbug.com/345554 for the details.'
+    print '*****************************************************************'
+
+  # Automatically turn on crosscompile support for platforms that need it.
+  # (The Chrome OS build sets CC_host / CC_target which implicitly enables
+  # this mode.)
+  if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
+          gyp_vars_dict.get('OS') in ['android', 'ios'],
+          'GYP_CROSSCOMPILE' not in os.environ)):
+    os.environ['GYP_CROSSCOMPILE'] = '1'
+  if gyp_vars_dict.get('OS') == 'android':
+    args.append('--check')
+
+  args.extend(
+      ['-I' + i for i in additional_include_files(supplemental_includes, args)])
+
+  args.extend(['-D', 'gyp_output_dir=' + GetOutputDirectory()])
+
+  if not use_analyzer:
+    print 'Updating projects from gyp files...'
+    sys.stdout.flush()
+
+  # Off we go...
+  gyp_rc = gyp.main(args)
+
+  if not use_analyzer:
+    vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
+    if vs2013_runtime_dll_dirs:
+      x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
+      vs_toolchain.CopyVsRuntimeDlls(
+        os.path.join(chrome_src, GetOutputDirectory()),
+        (x86_runtime, x64_runtime))
+
+  sys.exit(gyp_rc)
diff --git a/build/gyp_chromium.py b/build/gyp_chromium.py
new file mode 100644
index 0000000..f9e8ac8
--- /dev/null
+++ b/build/gyp_chromium.py
@@ -0,0 +1,18 @@
+# Copyright 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is (possibly, depending on python version) imported by
+# gyp_chromium when GYP_PARALLEL=1 and it creates sub-processes
+# through the multiprocessing library.
+
+# On Windows, importing in Python 2.6 (fixed in 2.7) doesn't search for
+# imports that don't end in .py (and aren't directories with an
+# __init__.py). This wrapper makes "import gyp_chromium" work with
+# those old versions and makes it possible to execute gyp_chromium.py
+# directly on Windows where the extension is useful.
+
+import os
+
+path = os.path.abspath(os.path.split(__file__)[0])
+execfile(os.path.join(path, 'gyp_chromium'))
diff --git a/build/gyp_environment.py b/build/gyp_environment.py
new file mode 100644
index 0000000..fb50645
--- /dev/null
+++ b/build/gyp_environment.py
@@ -0,0 +1,33 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Sets up various automatic gyp environment variables. These are used by
+gyp_chromium and landmines.py which run at different stages of runhooks. To
+make sure settings are consistent between them, all setup should happen here.
+"""
+
+import gyp_helper
+import os
+import sys
+import vs_toolchain
+
+def SetEnvironment():
+  """Sets defaults for GYP_* variables."""
+  gyp_helper.apply_chromium_gyp_env()
+
+  # Default to ninja on linux and windows, but only if no generator has
+  # explicitly been set.
+  # Also default to ninja on mac, but only when not building chrome/ios.
+  # . -f / --format has precedence over the env var, no need to check for it
+  # . set the env var only if it hasn't been set yet
+  # . chromium.gyp_env has been applied to os.environ at this point already
+  if sys.platform.startswith(('linux', 'win', 'freebsd')) and \
+      not os.environ.get('GYP_GENERATORS'):
+    os.environ['GYP_GENERATORS'] = 'ninja'
+  elif sys.platform == 'darwin' and not os.environ.get('GYP_GENERATORS') and \
+      'OS=ios' not in os.environ.get('GYP_DEFINES', ''):
+    os.environ['GYP_GENERATORS'] = 'ninja'
+
+  vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
diff --git a/build/gyp_helper.py b/build/gyp_helper.py
new file mode 100644
index 0000000..eadc7a5
--- /dev/null
+++ b/build/gyp_helper.py
@@ -0,0 +1,54 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file helps gyp_chromium and landmines correctly set up the gyp
+# environment from chromium.gyp_env on disk.
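+#
+# A chromium.gyp_env file is a Python dict literal, e.g. (an illustrative
+# sketch; only keys in supported_vars below are applied):
+#   { 'GYP_DEFINES': 'OS=android', 'GYP_GENERATORS': 'ninja' }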
+
+import os
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.dirname(SCRIPT_DIR)
+
+
+def apply_gyp_environment_from_file(file_path):
+  """Reads in a *.gyp_env file and applies the valid keys to os.environ."""
+  if not os.path.exists(file_path):
+    return
+  with open(file_path, 'rU') as f:
+    file_contents = f.read()
+  try:
+    file_data = eval(file_contents, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = os.path.abspath(file_path)
+    raise
+  supported_vars = (
+      'CC',
+      'CC_wrapper',
+      'CHROMIUM_GYP_FILE',
+      'CHROMIUM_GYP_SYNTAX_CHECK',
+      'CXX',
+      'CXX_wrapper',
+      'GYP_DEFINES',
+      'GYP_GENERATOR_FLAGS',
+      'GYP_CROSSCOMPILE',
+      'GYP_GENERATOR_OUTPUT',
+      'GYP_GENERATORS',
+      'GYP_MSVS_VERSION',
+  )
+  for var in supported_vars:
+    file_val = file_data.get(var)
+    if file_val:
+      if var in os.environ:
+        print 'INFO: Environment value for "%s" overrides value in %s.' % (
+            var, os.path.abspath(file_path)
+        )
+      else:
+        os.environ[var] = file_val
+
+
+def apply_chromium_gyp_env():
+  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
+    # Update the environment based on chromium.gyp_env
+    path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
+    apply_gyp_environment_from_file(path)
diff --git a/build/gypi_to_gn.py b/build/gypi_to_gn.py
new file mode 100644
index 0000000..a107f94
--- /dev/null
+++ b/build/gypi_to_gn.py
@@ -0,0 +1,167 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Converts a given gypi file to a python scope and writes the result to stdout.
+
+It is assumed that the file contains a toplevel dictionary, and this script
+will return that dictionary as a GN "scope" (see example below). This script
+does not know anything about GYP and it will not expand variables or execute
+conditions.
+
+It will strip conditions blocks.
+
+A variables block at the top level will be flattened so that the variables
+appear in the root dictionary. This way they can be returned to the GN code.
+
+Say your_file.gypi looked like this:
+  {
+     'sources': [ 'a.cc', 'b.cc' ],
+     'defines': [ 'ENABLE_DOOM_MELON' ],
+  }
+
+You would call it like this:
+  gypi_values = exec_script("//build/gypi_to_gn.py",
+                            [ rebase_path("your_file.gypi") ],
+                            "scope",
+                            [ "your_file.gypi" ])
+
+Notes:
+ - The rebase_path call converts the gypi file from being relative to the
+   current build file to being system absolute for calling the script, which
+   will have a different current directory than this file.
+
+ - The "scope" parameter tells GN to interpret the result as a series of GN
+   variable assignments.
+
+ - The last file argument to exec_script tells GN that the given file is a
+   dependency of the build so Ninja can automatically re-run GN if the file
+   changes.
+
+Read the values into a target like this:
+  component("mycomponent") {
+    sources = gypi_values.sources
+    defines = gypi_values.defines
+  }
+
+Sometimes your .gypi file will include paths relative to a different
+directory than the current .gn file. In this case, you can rebase them to
+be relative to the current directory.
+  sources = rebase_path(gypi_values.sources, ".",
+                        "//path/gypi/input/values/are/relative/to")
+
+This script tolerates a toplevel dictionary either with or without a
+'variables' block. If the toplevel dictionary just contains one item called
+'variables', it will be collapsed away and the result will be the contents
+of that dictionary. Some
+.gypi files are written with or without this, depending on how they expect to
+be embedded into a .gyp file.
+
+This script also has the ability to replace certain substrings in the input.
+Generally this is used to emulate GYP variable expansion. If you passed the
+argument "--replace=<(foo)=bar" then all instances of "<(foo)" in strings in
+the input will be replaced with "bar":
+
+  gypi_values = exec_script("//build/gypi_to_gn.py",
+                            [ rebase_path("your_file.gypi"),
+                              "--replace=<(foo)=bar"],
+                            "scope",
+                            [ "your_file.gypi" ])
+
+"""
+
+import gn_helpers
+from optparse import OptionParser
+import sys
+
+def LoadPythonDictionary(path):
+  with open(path) as f:
+    file_string = f.read()
+  try:
+    file_data = eval(file_string, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = path
+    raise
+  except Exception, e:
+    raise Exception("Unexpected error while reading %s: %s" % (path, str(e)))
+
+  assert isinstance(file_data, dict), "%s does not eval to a dictionary" % path
+
+  # Flatten any variables to the top level.
+  if 'variables' in file_data:
+    file_data.update(file_data['variables'])
+    del file_data['variables']
+
+  # Strip any conditions.
+  if 'conditions' in file_data:
+    del file_data['conditions']
+  if 'target_conditions' in file_data:
+    del file_data['target_conditions']
+
+  # Strip targets in the toplevel, since some files define these and we can't
+  # slurp them in.
+  if 'targets' in file_data:
+    del file_data['targets']
+
+  return file_data
+
+
+def ReplaceSubstrings(values, search_for, replace_with):
+  """Recursively replaces substrings in a value.
+
+  Replaces all occurrences of "search_for" with "replace_with" in all
+  strings occurring in "values". This is done by recursively iterating into
+  lists as well as the keys and values of dictionaries."""
+  if isinstance(values, str):
+    return values.replace(search_for, replace_with)
+
+  if isinstance(values, list):
+    return [ReplaceSubstrings(v, search_for, replace_with) for v in values]
+
+  if isinstance(values, dict):
+    # For dictionaries, do the search for both the key and values.
+    result = {}
+    for key, value in values.items():
+      new_key = ReplaceSubstrings(key, search_for, replace_with)
+      new_value = ReplaceSubstrings(value, search_for, replace_with)
+      result[new_key] = new_value
+    return result
+
+  # Assume everything else is unchanged.
+  return values
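+
+# For example (illustrative), ReplaceSubstrings({'a<(foo)': ['<(foo)1']},
+# '<(foo)', 'bar') returns {'abar': ['bar1']}.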
+
+def main():
+  parser = OptionParser()
+  parser.add_option("-r", "--replace", action="append",
+    help="Replaces substrings. If passed a=b, replaces all substrs a with b.")
+  (options, args) = parser.parse_args()
+
+  if len(args) != 1:
+    raise Exception("Need one argument which is the .gypi file to read.")
+
+  data = LoadPythonDictionary(args[0])
+  if options.replace:
+    # Do replacements for all specified patterns.
+    for replace in options.replace:
+      split = replace.split('=')
+      # Allow "foo=" to replace with nothing.
+      if len(split) == 1:
+        split.append('')
+      assert len(split) == 2, "Replacement must be of the form 'key=value'."
+      data = ReplaceSubstrings(data, split[0], split[1])
+
+  # Sometimes .gypi files use the GYP syntax with percents at the end of the
+  # variable name (to indicate not to overwrite a previously-defined value):
+  #   'foo%': 'bar',
+  # Convert these to regular variables.
+  # Iterate over a copy of the keys (data.keys() returns a list in Python 2)
+  # since the loop mutates the dict.
+  for key in data.keys():
+    if len(key) > 1 and key[-1] == '%':
+      data[key[:-1]] = data[key]
+      del data[key]
+
+  print gn_helpers.ToGNString(data)
+
+if __name__ == '__main__':
+  try:
+    main()
+  except Exception, e:
+    print str(e)
+    sys.exit(1)
diff --git a/build/host_jar.gypi b/build/host_jar.gypi
new file mode 100644
index 0000000..6ccc1bd
--- /dev/null
+++ b/build/host_jar.gypi
@@ -0,0 +1,102 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule to build
+# a JAR file for use on a host in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_jar',
+#   'type': 'none',
+#   'variables': {
+#     'src_paths': [
+#       'path/to/directory',
+#       'path/to/other/directory',
+#       'path/to/individual_file.java',
+#       ...
+#     ],
+#   },
+#   'includes': [ 'path/to/this/gypi/file' ],
+# }
+#
+# Required variables:
+#   src_paths - A list of all paths containing java files that should be
+#     included in the jar. Paths can be either directories or files.
+# Optional/automatic variables:
+#   excluded_src_paths - A list of all paths that should be excluded from
+#     the jar.
+#   generated_src_dirs - Directories containing additional .java files
+#     generated at build time.
+#   input_jars_paths - A list of paths to the jars that should be included
+#     in the classpath.
+#   main_class - The class containing the main() function that should be called
+#     when running the jar file.
+#   jar_excluded_classes - A list of .class files that should be excluded
+#     from the jar.
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/setup.gyp:build_output_dirs',
+  ],
+  'variables': {
+    'classes_dir': '<(intermediate_dir)/classes',
+    'excluded_src_paths': [],
+    'generated_src_dirs': [],
+    'input_jars_paths': [],
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+    'jar_dir': '<(PRODUCT_DIR)/lib.java',
+    'jar_excluded_classes': [],
+    'jar_name': '<(_target_name).jar',
+    'jar_path': '<(jar_dir)/<(jar_name)',
+    'main_class%': '',
+    'stamp': '<(intermediate_dir)/jar.stamp',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['<(jar_path)']
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'javac_<(_target_name)',
+      'message': 'Compiling <(_target_name) java sources',
+      'variables': {
+        'extra_options': [],
+        'java_sources': [ '<!@(find <@(src_paths) -name "*.java")' ],
+        'conditions': [
+          ['"<(excluded_src_paths)" != ""', {
+            'java_sources!': ['<!@(find <@(excluded_src_paths) -name "*.java")']
+          }],
+          ['"<(jar_excluded_classes)" != ""', {
+            'extra_options': ['--excluded-classes=<(jar_excluded_classes)']
+          }],
+          ['">(main_class)" != ""', {
+            'extra_options': ['--main-class=>(main_class)']
+          }]
+        ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/javac.py',
+        '^@(java_sources)',
+        '>@(input_jars_paths)',
+      ],
+      'outputs': [
+        '<(jar_path)',
+        '<(stamp)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/javac.py',
+        '--classpath=>(input_jars_paths)',
+        '--src-gendirs=>(generated_src_dirs)',
+        '--chromium-code=<(chromium_code)',
+        '--stamp=<(stamp)',
+        '--jar-path=<(jar_path)',
+        '<@(extra_options)',
+        '^@(java_sources)',
+      ],
+    },
+  ]
+}
+
diff --git a/build/host_prebuilt_jar.gypi b/build/host_prebuilt_jar.gypi
new file mode 100644
index 0000000..feed5ca
--- /dev/null
+++ b/build/host_prebuilt_jar.gypi
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule to
+# copy a prebuilt JAR for use on a host to the output directory.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_prebuilt_jar',
+#   'type': 'none',
+#   'variables': {
+#     'jar_path': 'path/to/prebuilt.jar',
+#   },
+#   'includes': [ 'path/to/this/gypi/file' ],
+# }
+#
+# Required variables:
+#   jar_path - The path to the prebuilt jar.
+
+{
+  'dependencies': [
+  ],
+  'variables': {
+    'dest_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).jar',
+    'src_path': '<(jar_path)',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': [
+        '<(dest_path)',
+      ]
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'copy_prebuilt_jar',
+      'message': 'Copy <(src_path) to <(dest_path)',
+      'inputs': [
+        '<(src_path)',
+      ],
+      'outputs': [
+        '<(dest_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/cp.py', '<(src_path)', '<(dest_path)',
+      ],
+    }
+  ]
+}
diff --git a/build/install-build-deps-android.sh b/build/install-build-deps-android.sh
new file mode 100755
index 0000000..e740910
--- /dev/null
+++ b/build/install-build-deps-android.sh
@@ -0,0 +1,87 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium on android that
+# requires sudo privileges.
+# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions
+
+# This script installs the OpenJDK 7 packages (jre and jdk) and switches the
+# default Java to OpenJDK 7 via update-java-alternatives.
+
+if ! uname -m | egrep -q "i686|x86_64"; then
+  echo "Only x86 architectures are currently supported" >&2
+  exit
+fi
+
+# Install first the default Linux build deps.
+"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \
+    --no-syms --no-arm --no-chromeos-fonts --no-nacl --no-prompt "$@"
+
+# The temporary directory used to store output of update-java-alternatives
+TEMPDIR=$(mktemp -d)
+cleanup() {
+  local status=${?}
+  trap - EXIT
+  rm -rf "${TEMPDIR}"
+  exit ${status}
+}
+trap cleanup EXIT
+
+sudo apt-get update
+
+# Fix deps
+sudo apt-get -f install
+
+# Install deps
+# This step differs depending on what Ubuntu release we are running
+# on since the package names are different, and Sun's Java must
+# be installed manually on late-model versions.
+
+# common
+sudo apt-get -y install lighttpd python-pexpect xvfb x11-utils
+
+# A few binaries in the Android SDK require 32-bit libraries on the host.
+sudo apt-get -y install lib32z1 g++-multilib
+
+# On Trusty-based systems you can't compile V8's mksnapshot without this one.
+# It is compiled for the host, using the -m32 flag, so it needs some 32 bit
+# development support. It seems harmless on older Linux releases.
+sudo apt-get -y install linux-libc-dev:i386
+
+sudo apt-get -y install ant
+
+# Install openjdk and openjre 7 stuff
+sudo apt-get -y install openjdk-7-jre openjdk-7-jdk
+
+# Switch the version of Java to openjdk 7.
+# Some Java plugins (e.g. for Firefox and Mozilla) are not required for the
+# build, so failures to update them are treated only as warnings. Any
+# update-java-alternatives errors that are not '*-javaplugin.so' related
+# stop the script from completing successfully.
+if ! sudo update-java-alternatives -s java-1.7.0-openjdk-amd64 \
+           >& "${TEMPDIR}"/update-java-alternatives.out
+then
+  # Check that there are the expected javaplugin.so errors for the update
+  if grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out >& \
+      /dev/null
+  then
+    # Print as warnings all the javaplugin.so errors
+    echo 'WARNING: java-6-sun has no alternatives for the following plugins:'
+    grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
+  fi
+  # Check if there are any errors that are not javaplugin.so
+  if grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out \
+      >& /dev/null
+  then
+    # If there are non-javaplugin.so errors, treat as errors and exit
+    echo 'ERRORS: Failed to update alternatives for java-6-sun:'
+    grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
+    exit 1
+  fi
+fi
+
+echo "install-build-deps-android.sh complete."
diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh
new file mode 100755
index 0000000..a0f11db
--- /dev/null
+++ b/build/install-build-deps.sh
@@ -0,0 +1,410 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium (well, ideally, anyway)
+# See http://code.google.com/p/chromium/wiki/LinuxBuildInstructions
+# and http://code.google.com/p/chromium/wiki/LinuxBuild64Bit
+
+usage() {
+  echo "Usage: $0 [--options]"
+  echo "Options:"
+  echo "--[no-]syms: enable or disable installation of debugging symbols"
+  echo "--[no-]arm: enable or disable installation of arm cross toolchain"
+  echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
+       "fonts"
+  echo "--[no-]nacl: enable or disable installation of prerequisites for"\
+       "building standalone NaCl and all its toolchains"
+  echo "--no-prompt: silently select standard options/defaults"
+  echo "--quick-check: quickly try to determine if dependencies are installed"
+  echo "               (this avoids interactive prompts and sudo commands,"
+  echo "               so might not be 100% accurate)"
+  echo "--unsupported: attempt installation even on unsupported systems"
+  echo "Script will prompt interactively if options not given."
+  exit 1
+}
+
+# Checks whether a particular package is available in the repos.
+# USAGE: $ package_exists <package name>
+package_exists() {
+  apt-cache pkgnames | grep -x "$1" > /dev/null 2>&1
+}
+
+# These default to on because (some) bots need them and it keeps things
+# simple for the bot setup if all bots just run the script in its default
+# mode.  Developers who don't want stuff they don't need installed on their
+# own workstations can pass --no-arm --no-nacl when running the script.
+do_inst_arm=1
+do_inst_nacl=1
+
+while test "$1" != ""
+do
+  case "$1" in
+  --syms)                   do_inst_syms=1;;
+  --no-syms)                do_inst_syms=0;;
+  # TODO(phajdan.jr): Remove the lib32 flags when nothing else refers to them.
+  --lib32)                  do_inst_lib32=1;;
+  --no-lib32)               do_inst_lib32=0;;
+  --arm)                    do_inst_arm=1;;
+  --no-arm)                 do_inst_arm=0;;
+  --chromeos-fonts)         do_inst_chromeos_fonts=1;;
+  --no-chromeos-fonts)      do_inst_chromeos_fonts=0;;
+  --nacl)                   do_inst_nacl=1;;
+  --no-nacl)                do_inst_nacl=0;;
+  --no-prompt)              do_default=1
+                            do_quietly="-qq --assume-yes"
+    ;;
+  --quick-check)            do_quick_check=1;;
+  --unsupported)            do_unsupported=1;;
+  *) usage;;
+  esac
+  shift
+done
+
+# Check for lsb_release command in $PATH
+if ! which lsb_release > /dev/null; then
+  echo "ERROR: lsb_release not found in \$PATH" >&2
+  exit 1;
+fi
+
+lsb_release=$(lsb_release --codename --short)
+ubuntu_codenames="(precise|quantal|raring|saucy|trusty)"
+if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
+  if [[ ! $lsb_release =~ $ubuntu_codenames ]]; then
+    echo "ERROR: Only Ubuntu 12.04 (precise) through 14.04 (trusty) are"\
+        "currently supported" >&2
+    exit 1
+  fi
+
+  if ! uname -m | egrep -q "i686|x86_64"; then
+    echo "Only x86 architectures are currently supported" >&2
+    exit 1
+  fi
+fi
+
+if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then
+  echo "Running as non-root user."
+  echo "You might have to enter your password one or more times for 'sudo'."
+  echo
+fi
+
+# Packages needed for chromeos only
+chromeos_dev_list="libbluetooth-dev libxkbcommon-dev"
+
+# Packages needed for development
+dev_list="apache2.2-bin bison cdbs curl dpkg-dev elfutils devscripts fakeroot
+          flex fonts-thai-tlwg g++ git-core git-svn gperf language-pack-da
+          language-pack-fr language-pack-he language-pack-zh-hant
+          libapache2-mod-php5 libasound2-dev libbrlapi-dev libav-tools
+          libbz2-dev libcairo2-dev libcap-dev libcups2-dev libcurl4-gnutls-dev
+          libdrm-dev libelf-dev libexif-dev libgconf2-dev libgl1-mesa-dev
+          libglib2.0-dev libglu1-mesa-dev libgnome-keyring-dev libgtk2.0-dev
+          libkrb5-dev libnspr4-dev libnss3-dev libpam0g-dev libpci-dev
+          libpulse-dev libsctp-dev libspeechd-dev libsqlite3-dev libssl-dev
+          libudev-dev libwww-perl libxslt1-dev libxss-dev libxt-dev libxtst-dev
+          mesa-common-dev openbox patch perl php5-cgi pkg-config python
+          python-cherrypy3 python-crypto python-dev python-openssl
+          python-psutil rpm ruby subversion ttf-dejavu-core ttf-indic-fonts
+          ttf-kochi-gothic ttf-kochi-mincho wdiff xfonts-mathml zip
+          $chromeos_dev_list"
+
+# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
+# NaCl binaries.
+if file /sbin/init | grep -q 'ELF 64-bit'; then
+  dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
+fi
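+# ("file /sbin/init" prints a description such as "ELF 64-bit LSB executable"
+# on 64-bit hosts, which is what the grep above keys on.)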
+
+# Run-time libraries required by chromeos only
+chromeos_lib_list="libpulse0 libbz2-1.0"
+
+# Full list of required run-time libraries
+lib_list="libatk1.0-0 libc6 libasound2 libcairo2 libcap2 libcups2 libexpat1
+          libexif12 libfontconfig1 libfreetype6 libglib2.0-0 libgnome-keyring0
+          libgtk2.0-0 libpam0g libpango1.0-0 libpci3 libpcre3 libpixman-1-0
+          libpng12-0 libspeechd2 libstdc++6 libsqlite3-0 libx11-6
+          libxau6 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxdmcp6
+          libxext6 libxfixes3 libxi6 libxinerama1 libxrandr2 libxrender1
+          libxtst6 zlib1g $chromeos_lib_list"
+
+# Debugging symbols for all of the run-time libraries
+dbg_list="libatk1.0-dbg libc6-dbg libcairo2-dbg libfontconfig1-dbg
+          libglib2.0-0-dbg libgtk2.0-0-dbg libpango1.0-0-dbg libpcre3-dbg
+          libpixman-1-0-dbg libsqlite3-0-dbg libx11-6-dbg libxau6-dbg
+          libxcb1-dbg libxcomposite1-dbg libxcursor1-dbg libxdamage1-dbg
+          libxdmcp6-dbg libxext6-dbg libxfixes3-dbg libxi6-dbg libxinerama1-dbg
+          libxrandr2-dbg libxrender1-dbg libxtst6-dbg zlib1g-dbg
+          libstdc++6-4.6-dbg"
+
+# arm cross toolchain packages needed to build chrome on armhf
+arm_list="libc6-dev-armhf-cross
+          linux-libc-dev-armhf-cross
+          g++-arm-linux-gnueabihf
+          linux-libc-dev:i386"
+
+# Packages to build NaCl, its toolchains, and its ports.
+naclports_list="ant autoconf bison cmake gawk intltool xutils-dev xsltproc"
+nacl_list="g++-mingw-w64-i686 lib32z1-dev
+           libasound2:i386 libcap2:i386 libelf-dev:i386 libexif12:i386
+           libfontconfig1:i386 libgconf-2-4:i386 libglib2.0-0:i386 libgpm2:i386
+           libgtk2.0-0:i386 libncurses5:i386 lib32ncurses5-dev
+           libnss3:i386 libpango1.0-0:i386
+           libssl0.9.8:i386 libtinfo-dev libtinfo-dev:i386 libtool
+           libxcomposite1:i386 libxcursor1:i386 libxdamage1:i386 libxi6:i386
+           libxrandr2:i386 libxss1:i386 libxtst6:i386 texinfo xvfb
+           ${naclports_list}"
+
+# Find the proper version of libgbm-dev. We can't just install libgbm-dev as
+# it depends on mesa, and only one version of mesa can exist on the system.
+# Hence we must match the installed version or this entire script will fail.
+mesa_variant=""
+for variant in "-lts-quantal" "-lts-raring" "-lts-saucy" "-lts-trusty"; do
+  if dpkg-query -Wf'${Status}' libgl1-mesa-glx${variant} | \
+       grep -q " ok installed"; then
+    mesa_variant="${variant}"
+  fi
+done
+dev_list="${dev_list} libgbm-dev${mesa_variant}
+          libgles2-mesa-dev${mesa_variant}"
+nacl_list="${nacl_list} libgl1-mesa-glx${mesa_variant}:i386"
+
+# Some package names have changed over time
+if package_exists ttf-mscorefonts-installer; then
+  dev_list="${dev_list} ttf-mscorefonts-installer"
+else
+  dev_list="${dev_list} msttcorefonts"
+fi
+if package_exists libnspr4-dbg; then
+  dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg"
+  lib_list="${lib_list} libnspr4 libnss3"
+else
+  dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg"
+  lib_list="${lib_list} libnspr4-0d libnss3-1d"
+fi
+if package_exists libjpeg-dev; then
+  dev_list="${dev_list} libjpeg-dev"
+else
+  dev_list="${dev_list} libjpeg62-dev"
+fi
+if package_exists libudev1; then
+  dev_list="${dev_list} libudev1"
+  nacl_list="${nacl_list} libudev1:i386"
+else
+  dev_list="${dev_list} libudev0"
+  nacl_list="${nacl_list} libudev0:i386"
+fi
+if package_exists libbrlapi0.6; then
+  dev_list="${dev_list} libbrlapi0.6"
+else
+  dev_list="${dev_list} libbrlapi0.5"
+fi
+
+
+# Some packages are only needed if the distribution actually supports
+# installing them.
+if package_exists appmenu-gtk; then
+  lib_list="$lib_list appmenu-gtk"
+fi
+
+# Waits for the user to press 'Y' or 'N'. Either uppercase or lowercase is
+# accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has
+# been provided to yes_no(), the function also accepts RETURN as a user input.
+# The parameter specifies the exit code that should be returned in that case.
+# The function will echo the user's selection followed by a newline character.
+# Users can abort the function by pressing CTRL-C. This will call "exit 1".
+yes_no() {
+  if [ 0 -ne "${do_default-0}" ] ; then
+    [ $1 -eq 0 ] && echo "Y" || echo "N"
+    return $1
+  fi
+  local c
+  while :; do
+    c="$(trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT
+         stty -echo iuclc -icanon 2>/dev/null
+         dd count=1 bs=1 2>/dev/null | od -An -tx1)"
+    case "$c" in
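+      # od prints each key's hex code with a leading space: 0a is RETURN,
+      # 79 is 'y', 6e is 'n' (iuclc above maps uppercase input to lowercase).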
+      " 0a") if [ -n "$1" ]; then
+               [ $1 -eq 0 ] && echo "Y" || echo "N"
+               return $1
+             fi
+             ;;
+      " 79") echo "Y"
+             return 0
+             ;;
+      " 6e") echo "N"
+             return 1
+             ;;
+      "")    echo "Aborted" >&2
+             exit 1
+             ;;
+      *)     # The user pressed an unrecognized key. As we are not echoing
+             # any incorrect user input, alert the user by ringing the bell.
+             (tput bel) 2>/dev/null
+             ;;
+    esac
+  done
+}
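+# Example usage, a prompt that defaults to "no" (the argument is the status
+# yes_no returns when the user just presses RETURN):
+#   echo -n "Continue (y/N) "
+#   if yes_no 1; then echo "continuing"; fi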
+
+if test "$do_inst_syms" = "" && test 0 -eq ${do_quick_check-0}
+then
+  echo "This script installs all tools and libraries needed to build Chromium."
+  echo ""
+  echo "For most of the libraries, it can also install debugging symbols, which"
+  echo "will allow you to debug code in the system libraries. Most developers"
+  echo "won't need these symbols."
+  echo -n "Do you want me to install them for you (y/N) "
+  if yes_no 1; then
+    do_inst_syms=1
+  fi
+fi
+if test "$do_inst_syms" = "1"; then
+  echo "Including debugging symbols."
+else
+  echo "Skipping debugging symbols."
+  dbg_list=
+fi
+
+# When cross building for arm on 64-bit systems the host binaries
+# that are part of v8 need to be compiled with -m32 which means
+# that basic multilib support is needed.
+if file /sbin/init | grep -q 'ELF 64-bit'; then
+  if [ "$lsb_release" = "trusty" ]; then
+    # gcc-multilib conflicts with the arm cross compiler in trusty but
+    # g++-4.8-multilib gives us the 32-bit support that we need.
+    arm_list="$arm_list g++-4.8-multilib"
+  else
+    arm_list="$arm_list g++-multilib"
+  fi
+fi
+
+if test "$do_inst_arm" = "1" ; then
+  echo "Including ARM cross toolchain."
+else
+  echo "Skipping ARM cross toolchain."
+  arm_list=
+fi
+
+if test "$do_inst_nacl" = "1"; then
+  echo "Including NaCl, NaCl toolchain, NaCl ports dependencies."
+else
+  echo "Skipping NaCl, NaCl toolchain, NaCl ports dependencies."
+  nacl_list=
+fi
+
+packages="$(
+  echo "${dev_list} ${lib_list} ${dbg_list} ${arm_list} ${nacl_list}" |
+  tr " " "\n" | sort -u | tr "\n" " "
+)"
+
+if [ 1 -eq "${do_quick_check-0}" ] ; then
+  failed_check="$(dpkg-query -W -f '${PackageSpec}:${Status}\n' \
+    ${packages} 2>&1 | grep -v "ok installed" || :)"
+  if [ -n "${failed_check}" ]; then
+    echo
+    nomatch="$(echo "${failed_check}" | \
+      sed -e "s/^No packages found matching \(.*\).$/\1/;t;d")"
+    missing="$(echo "${failed_check}" | \
+      sed -e "/^No packages found matching/d;s/^\(.*\):.*$/\1/")"
+    if [ "$nomatch" ]; then
+      # Distinguish between packages that aren't available to the system at
+      # all (i.e. not in any repo) and packages that are known to apt but
+      # have no dpkg record because they were never installed.
+      unknown=""
+      for p in ${nomatch}; do
+        if apt-cache show ${p} > /dev/null 2>&1; then
+          missing="${p}\n${missing}"
+        else
+          unknown="${p}\n${unknown}"
+        fi
+      done
+      if [ -n "${unknown}" ]; then
+        echo "WARNING: The following packages are unknown to your system"
+        echo "(maybe missing a repo or need to 'sudo apt-get update'):"
+        echo -e "${unknown}" | sed -e "s/^/  /"
+      fi
+    fi
+    if [ -n "${missing}" ]; then
+      echo "WARNING: The following packages are not installed:"
+      echo -e "${missing}" | sed -e "s/^/  /"
+    fi
+    exit 1
+  fi
+  exit 0
+fi
+
+sudo apt-get update
+
+# We initially run "apt-get" with the --reinstall option and parse its output.
+# This way, we can find all the packages that need to be newly installed
+# without accidentally promoting any packages from "auto" to "manual".
+# We then re-run "apt-get" with just the list of missing packages.
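+#
+# As a sketch, the relevant part of apt-get's output looks like
+# (hypothetical package names):
+#   The following NEW packages will be installed:
+#     foo bar
+# and the sed invocation below keeps only the indented package lines that
+# follow the marker, i.e. "foo bar".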
+echo "Finding missing packages..."
+# Intentionally leaving $packages unquoted so it's more readable.
+echo "Packages required: " $packages
+echo
+new_list_cmd="sudo apt-get install --reinstall $(echo $packages)"
+if new_list="$(yes n | LANGUAGE=en LANG=C $new_list_cmd)"; then
+  # We probably never hit the following line.
+  echo "No missing packages, and the packages are up-to-date."
+elif [ $? -eq 1 ]; then
+  # We expect apt-get to have an exit status of 1.
+  # This indicates that we cancelled the install with "yes n|".
+  new_list=$(echo "$new_list" |
+    sed -e '1,/The following NEW packages will be installed:/d;s/^  //;t;d')
+  new_list=$(echo "$new_list" | sed 's/ *$//')
+  if [ -z "$new_list" ] ; then
+    echo "No missing packages, and the packages are up-to-date."
+  else
+    echo "Installing missing packages: $new_list."
+    sudo apt-get install ${do_quietly-} ${new_list}
+  fi
+  echo
+else
+  # An apt-get exit status of 100 indicates that a real error has occurred.
+
+  # I am intentionally leaving out the '"'s around new_list_cmd,
+  # as this makes it easier to cut and paste the output
+  echo "The following command failed: " ${new_list_cmd}
+  echo
+  echo "It produces the following output:"
+  yes n | $new_list_cmd || true
+  echo
+  echo "You will have to install the above packages yourself."
+  echo
+  exit 100
+fi
+
+# Install the Chrome OS default fonts. This must go after running
+# apt-get, since install-chromeos-fonts depends on curl.
+if test "$do_inst_chromeos_fonts" != "0"; then
+  echo
+  echo "Installing Chrome OS fonts."
+  dir=$(dirname "$0")
+  if ! sudo $dir/linux/install-chromeos-fonts.py; then
+    echo "ERROR: The installation of the Chrome OS default fonts failed."
+    if [ "$(stat -f -c %T "$dir")" = "nfs" ]; then
+      echo "The reason is that your repo is installed on a remote file system."
+    else
+      echo "This is expected if your repo is installed on a remote file system."
+    fi
+    echo "It is recommended to install your repo on a local file system."
+    echo "You can skip the installation of the Chrome OS default founts with"
+    echo "the command line option: --no-chromeos-fonts."
+    exit 1
+  fi
+else
+  echo "Skipping installation of Chrome OS fonts."
+fi
+
+if test "$do_inst_nacl" = "1"; then
+  echo "Installing symbolic links for NaCl."
+  if [ ! -r /usr/lib/i386-linux-gnu/libcrypto.so ]; then
+    sudo ln -fs libcrypto.so.0.9.8 /usr/lib/i386-linux-gnu/libcrypto.so
+  fi
+  if [ ! -r /usr/lib/i386-linux-gnu/libssl.so ]; then
+    sudo ln -fs libssl.so.0.9.8 /usr/lib/i386-linux-gnu/libssl.so
+  fi
+else
+  echo "Skipping symbolic links for NaCl."
+fi
diff --git a/build/install-chroot.sh b/build/install-chroot.sh
new file mode 100755
index 0000000..c060f60
--- /dev/null
+++ b/build/install-chroot.sh
@@ -0,0 +1,852 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script installs Debian-derived distributions in a chroot environment.
+# It can, for example, be used to set up an accurate 32bit build and test
+# environment when otherwise working on a 64bit machine.
+# N. B. it is unlikely that this script will ever work on anything other than a
+# Debian-derived system.
+
+# Older Debian based systems had both "admin" and "adm" groups, with "admin"
+# apparently being used in more places. Newer distributions have standardized
+# on just the "adm" group. Check /etc/group for the preferred name of the
+# administrator group.
+admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm)
+
+usage() {
+  echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]"
+  echo "-b dir       additional directories that should be bind mounted,"
+  echo '             or "NONE".'
+  echo "             Default: if local filesystems present, ask user for help"
+  echo "-g group,... groups that can use the chroot unauthenticated"
+  echo "             Default: '${admin}' and current user's group ('$(id -gn)')"
+  echo "-l           List all installed chroot environments"
+  echo "-m mirror    an alternate repository mirror for package downloads"
+  echo "-s           configure default deb-srcs"
+  echo "-c           always copy 64bit helper binaries to 32bit chroot"
+  echo "-h           this help message"
+}
+
+process_opts() {
+  local OPTNAME OPTIND OPTERR OPTARG
+  while getopts ":b:g:lm:sch" OPTNAME; do
+    case "$OPTNAME" in
+      b)
+        if [ "${OPTARG}" = "NONE" -a -z "${bind_mounts}" ]; then
+          bind_mounts="${OPTARG}"
+        else
+          if [ "${bind_mounts}" = "NONE" -o "${OPTARG}" = "${OPTARG#/}" -o \
+               ! -d "${OPTARG}" ]; then
+            echo "Invalid -b option(s)"
+            usage
+            exit 1
+          fi
+          bind_mounts="${bind_mounts}
+${OPTARG} ${OPTARG} none rw,bind 0 0"
+        fi
+        ;;
+      g)
+        [ -n "${OPTARG}" ] &&
+          chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}"
+        ;;
+      l)
+        list_all_chroots
+        exit
+        ;;
+      m)
+        if [ -n "${mirror}" ]; then
+          echo "You can only specify exactly one mirror location"
+          usage
+          exit 1
+        fi
+        mirror="$OPTARG"
+        ;;
+      s)
+        add_srcs="y"
+        ;;
+      c)
+        copy_64="y"
+        ;;
+      h)
+        usage
+        exit 0
+        ;;
+      \:)
+        echo "'-$OPTARG' needs an argument."
+        usage
+        exit 1
+        ;;
+      *)
+        echo "invalid command-line option: $OPTARG"
+        usage
+        exit 1
+        ;;
+    esac
+  done
+
+  if [ $# -ge ${OPTIND} ]; then
+    eval echo "Unexpected command line argument: \${${OPTIND}}"
+    usage
+    exit 1
+  fi
+}
+
+list_all_chroots() {
+  for i in /var/lib/chroot/*; do
+    i="${i##*/}"
+    [ "${i}" = "*" ] && continue
+    [ -x "/usr/local/bin/${i%bit}" ] || continue
+    grep -qs "^\[${i%bit}\]\$" /etc/schroot/schroot.conf || continue
+    [ -r "/etc/schroot/script-${i}" -a \
+      -r "/etc/schroot/mount-${i}" ] || continue
+    echo "${i%bit}"
+  done
+}
+
+getkey() {
+  (
+    trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT HUP
+    stty -echo iuclc -icanon 2>/dev/null
+    dd count=1 bs=1 2>/dev/null
+  )
+}
+
+chr() {
+  printf "\\$(printf '%03o' "$1")"
+}
+
+ord() {
+  printf '%d' $(printf '%c' "$1" | od -tu1 -An)
+}
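+# For example, "chr 65" prints "A" and "ord A" prints 65; the mount-point
+# menu below uses these helpers to map list indices to selection letters.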
+
+is_network_drive() {
+  stat -c %T -f "$1/" 2>/dev/null |
+    egrep -qs '^nfs|cifs|smbfs'
+}
+
+# Check that we are running as a regular user
+[ "$(id -nu)" = root ] && {
+  echo "Run this script as a regular user and provide your \"sudo\""           \
+       "password if requested" >&2
+  exit 1
+}
+
+process_opts "$@"
+
+echo "This script will help you through the process of installing a"
+echo "Debian or Ubuntu distribution in a chroot environment. You will"
+echo "have to provide your \"sudo\" password when requested."
+echo
+
+# Error handler
+trap 'exit 1' INT TERM QUIT HUP
+trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT
+
+# Install any missing applications that this script relies on. If these packages
+# are already installed, don't force another "apt-get install". That would
+# prevent them from being auto-removed, if they ever become eligible for that.
+# And as this script only needs the packages once, there is no good reason to
+# introduce a hard dependency on things such as dchroot and debootstrap.
+dep=
+for i in dchroot debootstrap libwww-perl; do
+  [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+done
+[ -n "$dep" ] && sudo apt-get -y install $dep
+sudo apt-get -y install schroot
+
+# Create directory for chroot
+sudo mkdir -p /var/lib/chroot
+
+# Find chroot environments that can be installed with debootstrap
+targets="$(cd /usr/share/debootstrap/scripts
+           ls | grep '^[a-z]*$')"
+
+# Ask user to pick one of the available targets
+echo "The following targets are available to be installed in a chroot:"
+j=1; for i in $targets; do
+  printf '%4d: %s\n' "$j" "$i"
+  j=$(($j+1))
+done
+while :; do
+  printf "Which target would you like to install: "
+  read n
+  [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break
+done
+j=1; for i in $targets; do
+  [ "$j" -eq "$n" ] && { distname="$i"; break; }
+  j=$(($j+1))
+done
+echo
+
+# On x86-64, ask whether the user wants to install x86-32 or x86-64
+archflag=
+arch=
+if [ "$(uname -m)" = x86_64 ]; then
+  while :; do
+    echo "You are running a 64bit kernel. This allows you to install either a"
+    printf "32bit or a 64bit chroot environment. %s"                           \
+           "Which one do you want (32, 64) "
+    read arch
+    [ "${arch}" == 32 -o "${arch}" == 64 ] && break
+  done
+  [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64"
+  arch="${arch}bit"
+  echo
+fi
+target="${distname}${arch}"
+
+# Don't accidentally overwrite an existing installation
+[ -d /var/lib/chroot/"${target}" ] && {
+  while :; do
+    echo "This chroot already exists on your machine."
+    if schroot -l --all-sessions 2>&1 |
+       sed 's/^session://' |
+       grep -qs "^${target%bit}-"; then
+      echo "And it appears to be in active use. Terminate all programs that"
+      echo "are currently using the chroot environment and then re-run this"
+      echo "script."
+      echo "If you still get an error message, you might have stale mounts"
+      echo "that you forgot to delete. You can always clean up mounts by"
+      echo "executing \"${target%bit} -c\"."
+      exit 1
+    fi
+    echo "I can abort installation, I can overwrite the existing chroot,"
+    echo "or I can delete the old one and then exit. What would you like to"
+    printf "do (a/o/d)? "
+    read choice
+    case "${choice}" in
+      a|A) exit 1;;
+      o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;;
+      d|D) sudo rm -rf "/var/lib/chroot/${target}"      \
+                       "/usr/local/bin/${target%bit}"   \
+                       "/etc/schroot/mount-${target}"   \
+                       "/etc/schroot/script-${target}"
+           sudo sed -ni '/^[[]'"${target%bit}"']$/,${
+                         :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+                       "/etc/schroot/schroot.conf"
+           trap '' INT TERM QUIT HUP
+           trap '' EXIT
+           echo "Deleted!"
+           exit 0;;
+    esac
+  done
+  echo
+}
+sudo mkdir -p /var/lib/chroot/"${target}"
+
+# Offer to include additional standard repositories for Ubuntu-based chroots.
+alt_repos=
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" && {
+  while :; do
+    echo "Would you like to add ${distname}-updates and ${distname}-security "
+    printf "to the chroot's sources.list (y/n)? "
+    read alt_repos
+    case "${alt_repos}" in
+      y|Y)
+        alt_repos="y"
+        break
+      ;;
+      n|N)
+        break
+      ;;
+    esac
+  done
+  echo
+}
+
+# Check for non-standard file system mount points and ask the user whether
+# they should be imported into the chroot environment
+# We limit to the first 26 mount points that match some basic heuristics,
+# because a) that allows us to enumerate choices with a single character,
+# and b) if we find more than 26 mount points, then these are probably
+# false-positives and something is very unusual about the system's
+# configuration. No need to spam the user with even more information that
+# is likely completely irrelevant.
+if [ -z "${bind_mounts}" ]; then
+  mounts="$(awk '$2 != "/" && $2 !~ "^/boot" && $2 !~ "^/home" &&
+                 $2 !~ "^/media" && $2 !~ "^/run" &&
+                 ($3 ~ "ext[2-4]" || $3 == "reiserfs" || $3 == "btrfs" ||
+                 $3 == "xfs" || $3 == "jfs" || $3 == "u?msdos" ||
+                 $3 == "v?fat" || $3 == "hfs" || $3 == "ntfs" ||
+                 $3 ~ "nfs[4-9]?" || $3 == "smbfs" || $3 == "cifs") {
+                   print $2
+                 }' /proc/mounts |
+            head -n26)"
+  if [ -n "${mounts}" ]; then
+    echo "You appear to have non-standard mount points that you"
+    echo "might want to import into the chroot environment:"
+    echo
+    sel=
+    while :; do
+      # Print a menu, listing all non-default mounts of local or network
+      # file systems.
+      j=1; for m in ${mounts}; do
+        c="$(printf $(printf '\\%03o' $((64+$j))))"
+        echo "$sel" | grep -qs $c &&
+          state="mounted in chroot" || state="$(tput el)"
+        printf "   $c) %-40s${state}\n" "$m"
+        j=$(($j+1))
+      done
+      # Allow user to interactively (de-)select any of the entries
+      echo
+      printf "Select mount points that you want to be included or press %s" \
+             "SPACE to continue"
+      c="$(getkey | tr a-z A-Z)"
+      [ "$c" == " " ] && { echo; echo; break; }
+      if [ -z "$c" ] ||
+         [ "$c" '<' 'A' -o $(ord "$c") -gt $((64 + $(ord "$j"))) ]; then
+          # Invalid input, ring the console bell
+          tput bel
+      else
+        # Toggle the selection for the given entry
+        if echo "$sel" | grep -qs $c; then
+          sel="$(printf "$sel" | sed "s/$c//")"
+        else
+          sel="$sel$c"
+        fi
+      fi
+      # Reposition cursor to the top of the list of entries
+      tput cuu $(($j + 1))
+      echo
+    done
+  fi
+  j=1; for m in ${mounts}; do
+    c="$(chr $(($j + 64)))"
+    if echo "$sel" | grep -qs $c; then
+      bind_mounts="${bind_mounts}$m $m none rw,bind 0 0
+"
+    fi
+    j=$(($j+1))
+  done
+fi
+
+# Remove stale entry from /etc/schroot/schroot.conf. Entries start
+# with the target name in square brackets, followed by an arbitrary
+# number of lines. The entry stops when either the end of file has
+# been reached, or when the beginning of a new target is encountered.
+# This means we cannot easily match for a range of lines in
+# "sed". Instead, we actually have to iterate over each line and check
+# whether it is the beginning of a new entry.
+sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p'       \
+         /etc/schroot/schroot.conf
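+# For example, given a schroot.conf containing
+#   [precise32]
+#   description=stale entry
+#   [other]
+#   description=unrelated entry
+# and a target of "precise32bit", the command above deletes the "[precise32]"
+# section and leaves the "[other]" section untouched.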
+
+# Download base system. This takes some time
+if [ -z "${mirror}" ]; then
+ grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+   mirror="http://archive.ubuntu.com/ubuntu" ||
+   mirror="http://ftp.us.debian.org/debian"
+fi
+
+sudo ${http_proxy:+http_proxy="${http_proxy}"} debootstrap ${archflag} \
+    "${distname}" "/var/lib/chroot/${target}"  "$mirror"
+
+# Add new entry to /etc/schroot/schroot.conf
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+  brand="Ubuntu" || brand="Debian"
+if [ -z "${chroot_groups}" ]; then
+  chroot_groups="${admin},$(id -gn)"
+fi
+# Older versions of schroot wanted a "priority=" line, whereas recent
+# versions deprecate "priority=" and warn if they see it. We don't have
+# a good feature test, but scanning for the string "priority=" in the
+# existing "schroot.conf" file is a good indication of what to do.
+priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
+           echo 'priority=3' || :)
+sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+script-config=script-${target}
+${priority}
+
+EOF
+
+# Set up a list of mount points that is specific to this
+# chroot environment.
+sed '/^FSTAB=/s,"[^"]*","/etc/schroot/mount-'"${target}"'",' \
+         /etc/schroot/script-defaults |
+  sudo sh -c 'cat >/etc/schroot/script-'"${target}"
+sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
+  /etc/schroot/mount-defaults |
+  sudo sh -c 'cat > /etc/schroot/mount-'"${target}"
+
+# Add the extra mount points that the user told us about
+[ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+  printf "${bind_mounts}" |
+    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+
+# If this system has a "/media" mountpoint, import it into the chroot
+# environment. Most modern distributions use this mount point to
+# automatically mount devices such as CDROMs, USB sticks, etc...
+if [ -d /media ] &&
+   ! grep -qs '^/media' /etc/schroot/mount-"${target}"; then
+  echo '/media /media none rw,rbind 0 0' |
+    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+fi
+
+# Share /dev/shm, /run and /run/shm.
+grep -qs '^/dev/shm' /etc/schroot/mount-"${target}" ||
+  echo '/dev/shm /dev/shm none rw,bind 0 0' |
+    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+if [ ! -d "/var/lib/chroot/${target}/run" ] &&
+   ! grep -qs '^/run' /etc/schroot/mount-"${target}"; then
+  echo '/run /run none rw,bind 0 0' |
+    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+fi
+if ! grep -qs '^/run/shm' /etc/schroot/mount-"${target}"; then
+  { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
+                   echo '/dev/shm /run/shm none rw,bind 0 0'; } |
+    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+fi
+
+# Set up a special directory that changes contents depending on the target
+# that is executing.
+d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
+s="${d}/.${target}"
+echo "${s} ${d} none rw,bind 0 0" |
+  sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+mkdir -p "${s}"
+
+# Install a helper script to launch commands in the chroot
+sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
+#!/bin/bash
+
+chroot="${0##*/}"
+
+wrap() {
+  # Word-wrap the text passed-in on stdin. Optionally, on continuation lines
+  # insert the same number of spaces as the number of characters in the
+  # parameter(s) passed to this function.
+  # If the "fold" program cannot be found, or if the actual width of the
+  # terminal cannot be determined, this function doesn't attempt to do any
+  # wrapping.
+  local f="$(type -P fold)"
+  [ -z "${f}" ] && { cat; return; }
+  local c="$(stty -a </dev/tty 2>/dev/null |
+             sed 's/.*columns[[:space:]]*\([0-9]*\).*/\1/;t;d')"
+  [ -z "${c}" ] && { cat; return; }
+  local i="$(echo "$*"|sed 's/./ /g')"
+  local j="$(printf %s "${i}"|wc -c)"
+  if [ "${c}" -gt "${j}" ]; then
+    dd bs=1 count="${j}" 2>/dev/null
+    "${f}" -sw "$((${c}-${j}))" | sed '2,$s/^/'"${i}"'/'
+  else
+    "${f}" -sw "${c}"
+  fi
+}
+
+help() {
+  echo "Usage ${0##*/} [-h|--help] [-c|--clean] [-C|--clean-all] [-l|--list] [--] args" | wrap "Usage ${0##*/} "
+  echo "  help:      print this message"                                                | wrap "             "
+  echo "  list:      list all known chroot environments"                                | wrap "             "
+  echo "  clean:     remove all old chroot sessions for \"${chroot}\""                  | wrap "             "
+  echo "  clean-all: remove all old chroot sessions for all environments"               | wrap "             "
+  exit 0
+}
+
+clean() {
+  local s t rc
+  rc=0
+  for s in $(schroot -l --all-sessions); do
+    t="${s#session:}"
+    if [ -n "$1" ]; then
+      [ "${t#${chroot}-}" == "${t}" ] && continue
+    fi
+    if ls -l /proc/*/{cwd,fd} 2>/dev/null |
+       fgrep -qs "/var/lib/schroot/mount/${t}"; then
+      echo "Session \"${t}\" still has active users, not cleaning up" | wrap
+      rc=1
+      continue
+    fi
+    sudo schroot -c "${s}" -e || rc=1
+  done
+  exit ${rc}
+}
+
+list() {
+  for e in $(schroot -l); do
+    e="${e#chroot:}"
+    [ -x "/usr/local/bin/${e}" ] || continue
+    if schroot -l --all-sessions 2>/dev/null |
+       sed 's/^session://' |
+       grep -qs "^${e}-"; then
+      echo "${e} is currently active"
+    else
+      echo "${e}"
+    fi
+  done
+  exit 0
+}
+
+while [ "$#" -ne 0 ]; do
+  case "$1" in
+    --)             shift; break;;
+    -h|--help)      shift; help;;
+    -l|--list)      shift; list;;
+    -c|--clean)     shift; clean "${chroot}";;
+    -C|--clean-all) shift; clean;;
+    *)              break;;
+  esac
+done
+
+# Start a new chroot session and keep track of the session id. We inject this
+# id into all processes that run inside the chroot. Unless they go out of their
+# way to clear their environment, we can then later identify our child and
+# grand-child processes by scanning their environment.
+session="$(schroot -c "${chroot}" -b)"
+export CHROOT_SESSION_ID="${session}"
+
+if [ $# -eq 0 ]; then
+  # Run an interactive shell session
+  schroot -c "${session}" -r -p
+else
+  # Run a command inside of the chroot environment
+  p="$1"; shift
+  schroot -c "${session}" -r -p "$p" -- "$@"
+fi
+rc=$?
+
+# Compute the inode of the root directory inside of the chroot environment.
+i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. |
+     awk '{ print $1 }') 2>/dev/null
+other_pids=
+while [ -n "$i" ]; do
+  # Identify processes by the inode number of their root directory. Then
+  # remove all processes that we know belong to other sessions. We use
+  # "sort | uniq -u" to do what amounts to a "set substraction operation".
+  pids=$({ ls -id1 /proc/*/root/. 2>/dev/null |
+         sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
+                 t
+                 d';
+         echo "${other_pids}";
+         echo "${other_pids}"; } | sort | uniq -u) >/dev/null 2>&1
+  # Kill all processes that are still left running in the session. This is
+  # typically an assortment of daemon processes that were started
+  # automatically. They result in us being unable to tear down the session
+  # cleanly.
+  [ -z "${pids}" ] && break
+  for j in $pids; do
+    # Unfortunately, the way that schroot sets up sessions makes it hard to
+    # tell one session apart from another. This can result in us attempting
+    # to kill processes in other sessions. We make a best effort to avoid
+    # doing so.
+    k="$( ( xargs -0 -n1 </proc/$j/environ ) 2>/dev/null |
+         sed 's/^CHROOT_SESSION_ID=/x/;t1;d;:1;q')"
+    if [ -n "${k}" -a "${k#x}" != "${session}" ]; then
+      other_pids="${other_pids}
+${j}"
+      continue
+    fi
+    kill -9 "$j"
+  done
+done
+# End the chroot session. This should clean up all temporary files. But if we
+# earlier failed to terminate all (daemon) processes inside of the session,
+# deleting the session could fail. When that happens, the user has to manually
+# clean up the stale files by invoking us with "--clean" after having killed
+# all running processes.
+schroot -c "${session}" -e
+exit $rc
+EOF
+sudo chown root:root /usr/local/bin/"${target%bit}"
+sudo chmod 755 /usr/local/bin/"${target%bit}"
+
+# Add the standard Ubuntu update repositories if requested.
+[ "${alt_repos}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb .* [^ -]\+ main$/p
+             s/^\(deb .* [^ -]\+\) main/\1-security main/
+             p
+             t1
+             d
+             :1;s/-security main/-updates main/
+             t
+             d' "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add a few more repositories to the chroot
+[ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i 's/ main$/ main restricted universe multiverse/' \
+         "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add the Ubuntu "partner" repository, if available
+if [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+   HEAD "http://archive.canonical.com/ubuntu/dists/${distname}/partner" \
+   >&/dev/null; then
+  sudo sh -c '
+    echo "deb http://archive.canonical.com/ubuntu" \
+         "'"${distname}"' partner" \
+      >>"/var/lib/chroot/'"${target}"'/etc/apt/sources.list"'
+fi
+
+# Add source repositories, if the user requested we do so
+[ "${add_srcs}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb[^-]/p
+             s/^deb\([^-]\)/deb-src\1/' \
+         "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Set apt proxy if host has set http_proxy
+if [ -n "${http_proxy}" ]; then
+  sudo sh -c '
+    echo "Acquire::http::proxy \"'"${http_proxy}"'\";" \
+        >>"/var/lib/chroot/'"${target}"'/etc/apt/apt.conf"'
+fi
+
+# Update packages
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  apt-get update; apt-get -y dist-upgrade' || :
+
+# Install a couple of missing packages
+for i in debian-keyring ubuntu-keyring locales sudo; do
+  [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] ||
+    sudo "/usr/local/bin/${target%bit}" apt-get -y install "$i" || :
+done
+
+# Configure locales
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  l='"${LANG:-en_US}"'; l="${l%%.*}"
+  [ -r /etc/locale.gen ] &&
+    sed -i "s/^# \($l\)/\1/" /etc/locale.gen
+  locale-gen $LANG en_US en_US.UTF-8' || :
+
+# Enable multi-arch support, if available
+sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null &&
+  [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && {
+  sudo sed -i 's/ / [arch=amd64,i386] /' \
+              "/var/lib/chroot/${target}/etc/apt/sources.list"
+  [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] &&
+  sudo "/usr/local/bin/${target%bit}" dpkg --add-architecture \
+      $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) >&/dev/null ||
+    echo foreign-architecture \
+        $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) |
+      sudo sh -c \
+        "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'"
+}
+
+# Configure "sudo" package
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  egrep -qs '"'^$(id -nu) '"' /etc/sudoers ||
+  echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers'
+
+# Install a few more commonly used packages
+sudo "/usr/local/bin/${target%bit}" apt-get -y install                         \
+  autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool     \
+  strace
+
+# If running a 32bit environment on a 64bit machine, install a few binaries
+# as 64bit. This is only done automatically if the chroot distro is the same as
+# the host, otherwise there might be incompatibilities in build settings or
+# runtime dependencies. The user can force it with the '-c' flag.
+host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \
+  cut -d "=" -f 2)
+if [ "${copy_64}" = "y" -o \
+    "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
+    file /bin/bash 2>/dev/null | grep -q x86-64; then
+  readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \
+    'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
+  sudo "/usr/local/bin/${target%bit}" apt-get -y install                       \
+    lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1
+  dep=
+  for i in binutils gdb; do
+    [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+  done
+  [ -n "$dep" ] && sudo apt-get -y install $dep
+  sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64"
+  for i in libbfd libpython; do
+    lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
+           grep -s "$i" | awk '{ print $3 }')"
+    if [ -n "$lib" -a -r "$lib" ]; then
+      sudo cp "$lib" "/var/lib/chroot/${target}/usr/local/lib/amd64"
+    fi
+  done
+  for lib in libssl libcrypt; do
+    for path in /usr/lib /usr/lib/x86_64-linux-gnu; do
+      sudo cp $path/$lib* \
+              "/var/lib/chroot/${target}/usr/local/lib/amd64/" >&/dev/null || :
+    done
+  done
+  for i in gdb ld; do
+    sudo cp /usr/bin/$i "/var/lib/chroot/${target}/usr/local/lib/amd64/"
+    sudo sh -c "cat >'/var/lib/chroot/${target}/usr/local/bin/$i'" <<EOF
+#!/bin/sh
+exec /lib64/ld-linux-x86-64.so.2 --library-path /usr/local/lib/amd64 \
+  /usr/local/lib/amd64/$i "\$@"
+EOF
+    sudo chmod 755 "/var/lib/chroot/${target}/usr/local/bin/$i"
+  done
+fi
+
+
+# If the install-build-deps.sh script can be found, offer to run it now
+script="$(dirname $(readlink -f "$0"))/install-build-deps.sh"
+if [ -x "${script}" ]; then
+  while :; do
+    echo
+    echo "If you plan on building Chrome inside of the new chroot environment,"
+    echo "you now have to install the build dependencies. Do you want me to"
+    printf "start the script that does this for you (y/n)? "
+    read install_deps
+    case "${install_deps}" in
+      y|Y)
+        echo
+        # We prefer running the script in-place, but this might not be
+        # possible, if it lives on a network filesystem that denies
+        # access to root.
+        tmp_script=
+        if ! sudo /usr/local/bin/"${target%bit}" \
+            sh -c "[ -x '${script}' ]" >&/dev/null; then
+          tmp_script="/tmp/${script##*/}"
+          cp "${script}" "${tmp_script}"
+        fi
+        # Some distributions automatically start an instance of the system-
+        # wide dbus daemon, cron daemon or logging daemon when installing
+        # the Chrome build dependencies. This prevents the chroot session
+        # from being closed. So, we always try to shut down any running
+        # instance of cron, dbus and rsyslog.
+        sudo /usr/local/bin/"${target%bit}" sh -c "${script} --no-lib32;
+              rc=$?;
+              /etc/init.d/cron stop >/dev/null 2>&1 || :;
+              /etc/init.d/rsyslog stop >/dev/null 2>&1 || :;
+              /etc/init.d/dbus stop >/dev/null 2>&1 || :;
+              exit $rc"
+        rc=$?
+        [ -n "${tmp_script}" ] && rm -f "${tmp_script}"
+        [ $rc -ne 0 ] && exit $rc
+        break
+      ;;
+      n|N)
+        break
+      ;;
+    esac
+  done
+  echo
+fi
+
+# Check whether ~/chroot is on a (slow) network file system and offer to
+# relocate it. Also offer relocation if the user appears to have multiple
+# spindles (as indicated by "${bind_mounts}" being non-empty).
+# We only offer this option if it doesn't look as if a chroot environment
+# is currently active. Otherwise, relocation is unlikely to work and it
+# can be difficult for the user to recover from the failed attempt to relocate
+# the ~/chroot directory.
+# We don't aim to solve this problem for every configuration,
+# but try to help with the common cases. For more advanced configuration
+# options, the user can always manually adjust things.
+mkdir -p "${HOME}/chroot/"
+if [ ! -h "${HOME}/chroot" ] &&
+   ! egrep -qs '^[^[:space:]]*/chroot' /etc/fstab &&
+   { [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] ||
+     is_network_drive "${HOME}/chroot"; } &&
+   ! egrep -qs '/var/lib/[^/]*chroot/.*/chroot' /proc/mounts; then
+  echo "${HOME}/chroot is currently located on the same device as your"
+  echo "home directory."
+  echo "This might not be what you want. Do you want me to move it somewhere"
+  echo "else?"
+  # If the computer has multiple spindles, many users configure all or part of
+  # the secondary hard disk to be writable by the primary user of this machine.
+  # Make some reasonable effort to detect this type of configuration and
+  # then offer a good location for where to put the ~/chroot directory.
+  suggest=
+  for i in $(echo "${bind_mounts}"|cut -d ' ' -f 1); do
+    if [ -d "$i" -a -w "$i" -a \( ! -a "$i/chroot" -o -w "$i/chroot/." \) ] &&
+       ! is_network_drive "$i"; then
+      suggest="$i"
+    else
+      for j in "$i/"*; do
+        if [ -d "$j" -a -w "$j" -a \
+             \( ! -a "$j/chroot" -o -w "$j/chroot/." \) ] &&
+           ! is_network_drive "$j"; then
+          suggest="$j"
+        else
+          for k in "$j/"*; do
+            if [ -d "$k" -a -w "$k" -a \
+                 \( ! -a "$k/chroot" -o -w "$k/chroot/." \) ] &&
+               ! is_network_drive "$k"; then
+              suggest="$k"
+              break
+            fi
+          done
+        fi
+        [ -n "${suggest}" ] && break
+      done
+    fi
+    [ -n "${suggest}" ] && break
+  done
+  def_suggest="${HOME}"
+  if [ -n "${suggest}" ]; then
+    # For home directories that reside on network drives, make our suggestion
+    # the default option. For home directories that reside on a local drive,
+    # require that the user manually enters the new location.
+    if is_network_drive "${HOME}"; then
+      def_suggest="${suggest}"
+    else
+      echo "A good location would probably be in \"${suggest}\""
+    fi
+  fi
+  while :; do
+    printf "Physical location [${def_suggest}]: "
+    read dir
+    [ -z "${dir}" ] && dir="${def_suggest}"
+    [ "${dir%%/}" == "${HOME%%/}" ] && break
+    if ! [ -d "${dir}" -a -w "${dir}" ] ||
+       [ -a "${dir}/chroot" -a ! -w "${dir}/chroot/." ]; then
+      echo "Cannot write to ${dir}/chroot. Please try again"
+    else
+      mv "${HOME}/chroot" "${dir}/chroot"
+      ln -s "${dir}/chroot" "${HOME}/chroot"
+      for i in $(list_all_chroots); do
+        sudo "$i" mkdir -p "${dir}/chroot"
+      done
+      sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-*
+      break
+    fi
+  done
+fi
+
+# Clean up package files
+sudo schroot -c "${target%bit}" -p -- apt-get clean
+sudo apt-get clean
+
+trap '' INT TERM QUIT HUP
+trap '' EXIT
+
+# Let the user know what we did
+cat <<EOF
+
+
+Successfully installed ${distname} ${arch}
+
+You can run programs inside of the chroot by invoking the
+"/usr/local/bin/${target%bit}" command.
+
+This command can be used with arguments, in order to just run a single
+program inside of the chroot environment (e.g. "${target%bit} make chrome")
+or without arguments, in order to run an interactive shell session inside
+of the chroot environment.
+
+If you need to run things as "root", you can use "sudo" (e.g. try
+"sudo ${target%bit} apt-get update").
+
+Your home directory is shared between the host and the chroot. But I
+configured "${HOME}/chroot" to be private to the chroot environment.
+You can use it for files that need to differ between environments. This
+would be a good place to store binaries that you have built from your
+source files.
+
+For Chrome, this probably means you want to make your "out" directory a
+symbolic link that points somewhere inside of "${HOME}/chroot".
+
+You still need to run "gclient runhooks" whenever you switch from building
+outside of the chroot to inside of the chroot. But you will find that you
+don't have to repeatedly erase and then completely rebuild all your object
+and binary files.
+
+EOF
diff --git a/build/internal/README.chromium b/build/internal/README.chromium
new file mode 100644
index 0000000..4624830
--- /dev/null
+++ b/build/internal/README.chromium
@@ -0,0 +1,24 @@
+Internal property sheets:
+  essential.vsprops
+    Contains the common settings used throughout the projects. Is included by either ..\debug.vsprops or ..\release.vsprops, so in general, it is not included directly.
+
+  release_defaults.vsprops
+    Included by ..\release.vsprops. Its settings are overridden by release_impl$(CHROME_BUILD_TYPE).vsprops. Uses the default VS setting, which is "Maximize Speed". Results in a relatively fast build with a reasonable optimization level, but without whole program optimization, to reduce build time.
+
+  release_impl.vsprops
+    Included by ..\release.vsprops by default when CHROME_BUILD_TYPE is undefined. Includes release_defaults.vsprops.
+
+  release_impl_checksenabled.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_checksenabled. Matches what release_defaults.vsprops does, but doesn't actually inherit from it as we couldn't quite get that working. The only difference is that _DEBUG is set instead of NDEBUG. Used for keeping debug checks enabled with a build that is fast enough to dogfood with.
+
+  release_impl_official.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_official. Includes release_defaults.vsprops. Enables Whole Program Optimizations (WPO), which doubles the build time. Results in much more optimized build. Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_instrument.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_instrument. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) instrumentation (first pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_optimize.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_optimize. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) optimization (second pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_purify.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_purify. Includes release_defaults.vsprops. Disables optimizations. Used with Purify to test without debug tools and without optimization; i.e. NDEBUG is defined but the compiler doesn't optimize the binary.
diff --git a/build/internal/release_defaults.gypi b/build/internal/release_defaults.gypi
new file mode 100644
index 0000000..1bf674a
--- /dev/null
+++ b/build/internal/release_defaults.gypi
@@ -0,0 +1,18 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'StringPooling': 'true',
+    },
+    'VCLinkerTool': {
+      # No incremental linking.
+      'LinkIncremental': '1',
+      # Eliminate Unreferenced Data (/OPT:REF).
+      'OptimizeReferences': '2',
+      # Folding on (/OPT:ICF).
+      'EnableCOMDATFolding': '2',
+    },
+  },
+}
diff --git a/build/internal/release_impl.gypi b/build/internal/release_impl.gypi
new file mode 100644
index 0000000..5ac0e09
--- /dev/null
+++ b/build/internal/release_impl.gypi
@@ -0,0 +1,17 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'includes': ['release_defaults.gypi'],
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'OmitFramePointers': 'false',
+      # The above is not sufficient (http://crbug.com/106711): it
+      # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+      # perform FPO regardless, so we must explicitly disable.
+      # We still want the false setting above to avoid having
+      # "/Oy /Oy-" and warnings about overriding.
+      'AdditionalOptions': ['/Oy-'],
+    },
+  },
+}
diff --git a/build/internal/release_impl_official.gypi b/build/internal/release_impl_official.gypi
new file mode 100644
index 0000000..d0729a9
--- /dev/null
+++ b/build/internal/release_impl_official.gypi
@@ -0,0 +1,42 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'includes': ['release_defaults.gypi'],
+  'defines': ['OFFICIAL_BUILD'],
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'InlineFunctionExpansion': '2',
+      'EnableIntrinsicFunctions': 'true',
+      'EnableFiberSafeOptimizations': 'true',
+      'OmitFramePointers': 'false',
+      # The above is not sufficient (http://crbug.com/106711): it
+      # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+      # perform FPO regardless, so we must explicitly disable.
+      # We still want the false setting above to avoid having
+      # "/Oy /Oy-" and warnings about overriding.
+      'AdditionalOptions': ['/Oy-'],
+    },
+    'VCLibrarianTool': {
+      'AdditionalOptions': [
+        '/ltcg',
+        '/expectedoutputsize:120000000'
+      ],
+    },
+    'VCLinkerTool': {
+      'AdditionalOptions': [
+        '/time',
+        # This may reduce memory fragmentation during linking.
+        # The expected size is 40*1024*1024, which gives us about 10M of
+        # headroom as of Dec 16, 2011.
+        '/expectedoutputsize:41943040',
+      ],
+      # The /PROFILE flag causes the linker to add a "FIXUP" debug stream to
+      # the generated PDB. According to MSDN documentation, this flag is only
+      # available (or perhaps supported) in the Enterprise (team development)
+      # version of Visual Studio. If this blocks your official build, simply
+      # comment out this line, then re-run "gclient runhooks".
+      'Profile': 'true',
+    },
+  },
+}
diff --git a/build/inverse_depth.py b/build/inverse_depth.py
new file mode 100755
index 0000000..ce7a6ab
--- /dev/null
+++ b/build/inverse_depth.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+
+def DoMain(argv):
+  depth = argv[0]
+  return os.path.relpath(os.getcwd(), os.path.abspath(depth))
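+# For example, when run from /src/build with a depth of "..", DoMain(['..'])
+# returns 'build': the current directory expressed relative to the depth
+# directory.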
+
+
+def main(argv):
+  if len(argv) < 2:
+    print "USAGE: inverse_depth.py depth"
+    return 1
+  print DoMain(argv[1:])
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/ios/PRESUBMIT.py b/build/ios/PRESUBMIT.py
new file mode 100644
index 0000000..a0d32f8
--- /dev/null
+++ b/build/ios/PRESUBMIT.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Chromium presubmit script for src/tools/ios.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into gcl.
+"""
+
+import os
+
+WHITELIST_FILE = 'build/ios/grit_whitelist.txt'
+
+def _CheckWhitelistSorted(input_api, output_api):
+  for path in input_api.LocalPaths():
+    if WHITELIST_FILE == path:
+      lines = open(os.path.join('../..', WHITELIST_FILE)).readlines()
+      i = 0
+      while i < len(lines) - 1 and lines[i] <= lines[i + 1]:
+        i += 1
+      if i < len(lines) - 1:
+        return [output_api.PresubmitError(
+            'The file ' + WHITELIST_FILE + ' must be sorted.  ' +
+            'First offending line: #' + str(i + 2))]
+  return []
+
+def _CommonChecks(input_api, output_api):
+  """Checks common to both upload and commit."""
+  results = []
+  results.extend(_CheckWhitelistSorted(input_api, output_api))
+  return results
+
+def CheckChangeOnUpload(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  return results
+
+def CheckChangeOnCommit(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  return results
diff --git a/build/ios/chrome_ios.croc b/build/ios/chrome_ios.croc
new file mode 100644
index 0000000..938a2e9
--- /dev/null
+++ b/build/ios/chrome_ios.croc
@@ -0,0 +1,71 @@
+# -*- python -*-
+# Crocodile config file for Chromium iOS.
+#
+# Note that Chromium iOS also uses the config file at src/build/common.croc.
+#
+# See src/tools/code_coverage/example.croc for more info on config files.
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Exclude everything to negate whatever is in src/build/common.croc
+    {
+      'regexp' : '.*',
+      'include' : 0,
+    },
+
+    # Include all directories (but not the files in the directories).
+    # This is a workaround for how croc.py walks the directory tree. See the
+    # TODO in the AddFiles method of src/tools/code_coverage/croc.py
+    {
+      'regexp' : '.*/$',
+      'include' : 1,
+    },
+
+    # Include any file with an 'ios' directory in the path.
+    {
+      'regexp' : '.*/ios/.*',
+      'include' : 1,
+      'add_if_missing' : 1,
+    },
+
+    # Include any file that ends with _ios.
+    {
+      'regexp' : '.*_ios\\.(c|cc|m|mm)$',
+      'include' : 1,
+      'add_if_missing' : 1,
+    },
+
+    # Include any file that ends with _ios_unittest (and label it a test).
+    {
+      'regexp' : '.*_ios_unittest\\.(c|cc|m|mm)$',
+      'include' : 1,
+      'add_if_missing' : 1,
+      'group' : 'test',
+    },
+
+    # Don't scan for executable lines in uninstrumented header files
+    {
+      'regexp' : '.*\\.(h|hpp)$',
+      'add_if_missing' : 0,
+    },
+
+    # Don't measure coverage of perftests.
+    {
+      'regexp' : '.*perftest\\.(c|cc|m|mm)$',
+      'include' : 0,
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.m$',
+      'language' : 'ObjC',
+    },
+    {
+      'regexp' : '.*\\.mm$',
+      'language' : 'ObjC++',
+    },
+  ],
+}
diff --git a/build/ios/clean_env.py b/build/ios/clean_env.py
new file mode 100755
index 0000000..548e2b9
--- /dev/null
+++ b/build/ios/clean_env.py
@@ -0,0 +1,77 @@
+#!/usr/bin/python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+def Main(argv):
+  """This is like 'env -i', but it uses a whitelist of env variables to allow
+  through to the command being run.  It attempts to strip off Xcode-added
+  values from PATH.
+  """
+  # Note: An attempt was made to do something like: env -i bash -lc '[command]'
+  # but that fails to set the things set by login (USER, etc.), so instead
+  # the only approach that seems to work is to have a whitelist.
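+  # Hypothetical example (not part of the build): running
+  #   clean_env.py ADD_TO_PATH=/opt/ninja/bin GYP_GENERATORS=ninja ninja -C out
+  # runs `ninja -C out` with only the whitelisted variables, a filtered PATH
+  # with /opt/ninja/bin prepended, and GYP_GENERATORS=ninja added.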
+  env_key_whitelist = (
+    'HOME',
+    'LOGNAME',
+    # 'PATH' added below (but filtered).
+    'PWD',
+    'SHELL',
+    'TEMP',
+    'TMPDIR',
+    'USER'
+  )
+
+  # Need something to run.
+  # TODO(lliabraa): Make this output a usage string and exit (here and below).
+  assert(len(argv) > 0)
+
+  add_to_path = []
+  first_entry = argv[0]
+  if first_entry.startswith('ADD_TO_PATH='):
+    argv = argv[1:]
+    add_to_path = first_entry.replace('ADD_TO_PATH=', '', 1).split(':')
+
+  # Still need something to run.
+  assert(len(argv) > 0)
+
+  clean_env = {}
+
+  # Pull over the whitelisted keys.
+  for key in env_key_whitelist:
+    val = os.environ.get(key, None)
+    if val is not None:
+      clean_env[key] = val
+
+  # Collect the developer dir as set via Xcode, defaulting it.
+  dev_prefix = os.environ.get('DEVELOPER_DIR', '/Developer/')
+  if dev_prefix[-1:] != '/':
+    dev_prefix += '/'
+
+  # Now pull in PATH, but remove anything Xcode might have added.
+  initial_path = os.environ.get('PATH', '')
+  filtered_chunks = \
+      [x for x in initial_path.split(':') if not x.startswith(dev_prefix)]
+  if filtered_chunks:
+    clean_env['PATH'] = ':'.join(add_to_path + filtered_chunks)
+
+  # Add any KEY=VALUE args before the command to the cleaned environment.
+  args = argv[:]
+  while args and '=' in args[0]:
+    (key, val) = args[0].split('=', 1)
+    clean_env[key] = val
+    args = args[1:]
+
+  # Still need something to run.
+  assert(len(args) > 0)
+
+  # Off it goes...
+  os.execvpe(args[0], args, clean_env)
+  # Should never get here, so return a distinctive, non-zero status code.
+  return 66
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/build/ios/grit_whitelist.txt b/build/ios/grit_whitelist.txt
new file mode 100644
index 0000000..354cb50
--- /dev/null
+++ b/build/ios/grit_whitelist.txt
@@ -0,0 +1,1046 @@
+IDR_ABOUT_DOM_DISTILLER_CSS
+IDR_ABOUT_DOM_DISTILLER_HTML
+IDR_ABOUT_DOM_DISTILLER_JS
+IDR_ABOUT_VERSION_CSS
+IDR_ABOUT_VERSION_HTML
+IDR_ABOUT_VERSION_JS
+IDR_BOOKMARK_BAR_FOLDER
+IDR_CLOSE_1_MASK
+IDR_CLOSE_1_P
+IDR_CRASHES_HTML
+IDR_CRASHES_JS
+IDR_CREDITS_HTML
+IDR_CREDITS_JS
+IDR_DEFAULT_FAVICON
+IDR_DIR_HEADER_HTML
+IDR_HISTORY_HTML
+IDR_HISTORY_JS
+IDR_INCOGNITO_TAB_HTML
+IDR_INFOBAR_AUTOLOGIN
+IDR_INFOBAR_RESTORE_SESSION
+IDR_INFOBAR_SAVE_PASSWORD
+IDR_INPUT_ALERT
+IDR_NET_ERROR_HTML
+IDR_NET_EXPORT_HTML
+IDR_NET_EXPORT_JS
+IDR_OMAHA_HTML
+IDR_OMAHA_JS
+IDR_OMNIBOX_EXTENSION_APP
+IDR_OMNIBOX_HTTP
+IDR_OMNIBOX_HTTPS_INVALID
+IDR_OMNIBOX_HTTPS_VALID
+IDR_OMNIBOX_HTTPS_WARNING
+IDR_OMNIBOX_SEARCH
+IDR_OMNIBOX_STAR
+IDR_PAGEINFO_BAD
+IDR_PAGEINFO_GOOD
+IDR_PAGEINFO_INFO
+IDR_PAGEINFO_WARNING_MAJOR
+IDR_PAGEINFO_WARNING_MINOR
+IDR_PRERENDER
+IDR_PRINTER_FAVICON
+IDR_PRODUCT_LOGO_26
+IDR_SAD_TAB
+IDR_SIGNIN_INTERNALS_INDEX_HTML
+IDR_SIGNIN_INTERNALS_INDEX_JS
+IDR_SSL_BLOCKING_HTML
+IDR_SSL_ROAD_BLOCK_HTML
+IDR_SYNC_INTERNALS_ABOUT_JS
+IDR_SYNC_INTERNALS_CHROME_SYNC_JS
+IDR_SYNC_INTERNALS_DATA_JS
+IDR_SYNC_INTERNALS_EVENTS_JS
+IDR_SYNC_INTERNALS_INDEX_HTML
+IDR_SYNC_INTERNALS_INDEX_JS
+IDR_SYNC_INTERNALS_SEARCH_JS
+IDR_SYNC_INTERNALS_SYNC_LOG_JS
+IDR_SYNC_INTERNALS_SYNC_NODE_BROWSER_JS
+IDR_SYNC_INTERNALS_SYNC_SEARCH_JS
+IDR_SYNC_INTERNALS_TYPES_JS
+IDR_THROBBER
+IDR_TRANSLATE_JS
+IDR_WEBUI_I18N_PROCESS_JS
+IDR_WEBUI_I18N_TEMPLATE2_JS
+IDR_WEBUI_I18N_TEMPLATE_JS
+IDR_WEBUI_IMAGES_SELECT
+IDR_WEBUI_JSTEMPLATE_JS
+IDR_WEBUI_JS_LOAD_TIME_DATA
+IDR_WEBUI_JS_UTIL
+IDS_ABOUT_MAC
+IDS_ABOUT_VERSION_COMMAND_LINE
+IDS_ABOUT_VERSION_COMPANY_NAME
+IDS_ABOUT_VERSION_COPYRIGHT
+IDS_ABOUT_VERSION_EXECUTABLE_PATH
+IDS_ABOUT_VERSION_OFFICIAL
+IDS_ABOUT_VERSION_OS
+IDS_ABOUT_VERSION_PATH_NOTFOUND
+IDS_ABOUT_VERSION_PROFILE_PATH
+IDS_ABOUT_VERSION_TITLE
+IDS_ABOUT_VERSION_UNOFFICIAL
+IDS_ABOUT_VERSION_USER_AGENT
+IDS_ABOUT_VERSION_VARIATIONS
+IDS_ACCEPT_LANGUAGES
+IDS_ACCNAME_BACK
+IDS_ACCNAME_CLEAR_TEXT
+IDS_ACCNAME_FORWARD
+IDS_ACCNAME_LOCATION
+IDS_ACCNAME_VOICE_SEARCH
+IDS_ALLOW_INSECURE_CONTENT_BUTTON
+IDS_ALTERNATE_NAV_URL_VIEW_LABEL
+IDS_APP_CANCEL
+IDS_APP_OK
+IDS_APP_UNTITLED_SHORTCUT_FILE_NAME
+IDS_AUTOCOMPLETE_SEARCH_DESCRIPTION
+IDS_AUTOFILL_ADDRESS_LINE_SEPARATOR
+IDS_AUTOFILL_ADDRESS_SUMMARY_SEPARATOR
+IDS_AUTOFILL_CC_AMEX
+IDS_AUTOFILL_CC_DINERS
+IDS_AUTOFILL_CC_DISCOVER
+IDS_AUTOFILL_CC_INFOBAR_ACCEPT
+IDS_AUTOFILL_CC_INFOBAR_DENY
+IDS_AUTOFILL_CC_INFOBAR_TEXT
+IDS_AUTOFILL_CC_JCB
+IDS_AUTOFILL_CC_MASTERCARD
+IDS_AUTOFILL_CC_UNION_PAY
+IDS_AUTOFILL_CC_VISA
+IDS_AUTOFILL_CLEAR_FORM_MENU_ITEM
+IDS_AUTOFILL_DIALOG_PRIVACY_POLICY_LINK
+IDS_AUTOFILL_FIELD_LABEL_AREA
+IDS_AUTOFILL_FIELD_LABEL_COUNTY
+IDS_AUTOFILL_FIELD_LABEL_DEPARTMENT
+IDS_AUTOFILL_FIELD_LABEL_DISTRICT
+IDS_AUTOFILL_FIELD_LABEL_EMIRATE
+IDS_AUTOFILL_FIELD_LABEL_ISLAND
+IDS_AUTOFILL_FIELD_LABEL_PARISH
+IDS_AUTOFILL_FIELD_LABEL_POSTAL_CODE
+IDS_AUTOFILL_FIELD_LABEL_PREFECTURE
+IDS_AUTOFILL_FIELD_LABEL_PROVINCE
+IDS_AUTOFILL_FIELD_LABEL_STATE
+IDS_AUTOFILL_FIELD_LABEL_ZIP_CODE
+IDS_AUTOFILL_OPTIONS_POPUP
+IDS_AUTOFILL_WARNING_FORM_DISABLED
+IDS_AUTOFILL_WARNING_INSECURE_CONNECTION
+IDS_AUTOLOGIN_INFOBAR_CANCEL_BUTTON
+IDS_AUTOLOGIN_INFOBAR_MESSAGE
+IDS_AUTOLOGIN_INFOBAR_OK_BUTTON
+IDS_BLOCKED_DISPLAYING_INSECURE_CONTENT
+IDS_BLOCK_INSECURE_CONTENT_BUTTON
+IDS_BOOKMARK_ADD_EDITOR_TITLE
+IDS_BOOKMARK_ALL_TABS_DIALOG_TITLE
+IDS_BOOKMARK_BAR_FOLDER_NAME
+IDS_BOOKMARK_BAR_MANAGED_FOLDER_DEFAULT_NAME
+IDS_BOOKMARK_BAR_MANAGED_FOLDER_DOMAIN_NAME
+IDS_BOOKMARK_BAR_MOBILE_FOLDER_NAME
+IDS_BOOKMARK_BAR_OTHER_FOLDER_NAME
+IDS_BOOKMARK_BAR_REDO
+IDS_BOOKMARK_BAR_REDO_ADD
+IDS_BOOKMARK_BAR_REDO_DELETE
+IDS_BOOKMARK_BAR_REDO_EDIT
+IDS_BOOKMARK_BAR_REDO_MOVE
+IDS_BOOKMARK_BAR_REDO_REORDER
+IDS_BOOKMARK_BAR_UNDO
+IDS_BOOKMARK_BAR_UNDO_ADD
+IDS_BOOKMARK_BAR_UNDO_DELETE
+IDS_BOOKMARK_BAR_UNDO_EDIT
+IDS_BOOKMARK_BAR_UNDO_MOVE
+IDS_BOOKMARK_BAR_UNDO_REORDER
+IDS_BOOKMARK_BUBBLE_CHOOSER_ANOTHER_FOLDER
+IDS_BOOKMARK_BUBBLE_REMOVE_BOOKMARK
+IDS_BOOKMARK_EDITOR_CONFIRM_DELETE
+IDS_BOOKMARK_EDITOR_NEW_FOLDER_NAME
+IDS_BOOKMARK_EDITOR_TITLE
+IDS_BOOKMARK_FOLDER_CHOOSER_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_WINDOW_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_WINDOW_TITLE_NEW
+IDS_BOOKMARK_MANAGER_FOLDER_SECTION
+IDS_BOOKMARK_MANAGER_FOLDER_TITLE
+IDS_BOOKMARK_MANAGER_NAME_INPUT_PLACE_HOLDER
+IDS_BOOKMARK_MANAGER_REMOVE_TITLE
+IDS_BOOKMARK_MANAGER_URL_INPUT_PLACE_HOLDER
+IDS_BOOKMARK_NEW_FOLDER_BUTTON_TITLE
+IDS_CANCEL
+IDS_CERT_ERROR_AUTHORITY_INVALID_DESCRIPTION
+IDS_CERT_ERROR_AUTHORITY_INVALID_DETAILS
+IDS_CERT_ERROR_AUTHORITY_INVALID_EXTRA_INFO_2
+IDS_CERT_ERROR_AUTHORITY_INVALID_TITLE
+IDS_CERT_ERROR_COMMON_NAME_INVALID_DESCRIPTION
+IDS_CERT_ERROR_COMMON_NAME_INVALID_DETAILS
+IDS_CERT_ERROR_COMMON_NAME_INVALID_EXTRA_INFO_2
+IDS_CERT_ERROR_COMMON_NAME_INVALID_TITLE
+IDS_CERT_ERROR_CONTAINS_ERRORS_DESCRIPTION
+IDS_CERT_ERROR_CONTAINS_ERRORS_DETAILS
+IDS_CERT_ERROR_CONTAINS_ERRORS_EXTRA_INFO_2
+IDS_CERT_ERROR_CONTAINS_ERRORS_TITLE
+IDS_CERT_ERROR_EXPIRED_DESCRIPTION
+IDS_CERT_ERROR_EXPIRED_DETAILS
+IDS_CERT_ERROR_EXPIRED_DETAILS_EXTRA_INFO_2
+IDS_CERT_ERROR_EXPIRED_TITLE
+IDS_CERT_ERROR_EXTRA_INFO_1
+IDS_CERT_ERROR_EXTRA_INFO_TITLE
+IDS_CERT_ERROR_INVALID_CERT_DESCRIPTION
+IDS_CERT_ERROR_INVALID_CERT_DETAILS
+IDS_CERT_ERROR_INVALID_CERT_EXTRA_INFO_2
+IDS_CERT_ERROR_INVALID_CERT_TITLE
+IDS_CERT_ERROR_NOT_YET_VALID_DESCRIPTION
+IDS_CERT_ERROR_NOT_YET_VALID_DETAILS
+IDS_CERT_ERROR_NOT_YET_VALID_DETAILS_EXTRA_INFO_2
+IDS_CERT_ERROR_NOT_YET_VALID_TITLE
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_DESCRIPTION
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_DETAILS
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_TITLE
+IDS_CERT_ERROR_REVOKED_CERT_DESCRIPTION
+IDS_CERT_ERROR_REVOKED_CERT_DETAILS
+IDS_CERT_ERROR_REVOKED_CERT_EXTRA_INFO_2
+IDS_CERT_ERROR_REVOKED_CERT_TITLE
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_DESCRIPTION
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_DETAILS
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_TITLE
+IDS_CERT_ERROR_UNKNOWN_ERROR_DESCRIPTION
+IDS_CERT_ERROR_UNKNOWN_ERROR_DETAILS
+IDS_CERT_ERROR_UNKNOWN_ERROR_TITLE
+IDS_CERT_ERROR_WEAK_KEY_DESCRIPTION
+IDS_CERT_ERROR_WEAK_KEY_DETAILS
+IDS_CERT_ERROR_WEAK_KEY_EXTRA_INFO_2
+IDS_CERT_ERROR_WEAK_KEY_TITLE
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_DESCRIPTION
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_DETAILS
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_EXTRA_INFO_2
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_TITLE
+IDS_CHROME_TO_DEVICE_PRINT_TO_PHONE
+IDS_CHROME_TO_DEVICE_SNAPSHOTS
+IDS_COULDNT_OPEN_PROFILE_ERROR
+IDS_CRASHES_BUG_LINK_LABEL
+IDS_CRASHES_CRASH_COUNT_BANNER_FORMAT
+IDS_CRASHES_CRASH_HEADER_FORMAT
+IDS_CRASHES_CRASH_TIME_FORMAT
+IDS_CRASHES_DISABLED_HEADER
+IDS_CRASHES_DISABLED_MESSAGE
+IDS_CRASHES_NO_CRASHES_MESSAGE
+IDS_CRASHES_TITLE
+IDS_CREDIT_CARD_NUMBER_PREVIEW_FORMAT
+IDS_DEFAULT_AVATAR_NAME_10
+IDS_DEFAULT_AVATAR_NAME_11
+IDS_DEFAULT_AVATAR_NAME_12
+IDS_DEFAULT_AVATAR_NAME_13
+IDS_DEFAULT_AVATAR_NAME_14
+IDS_DEFAULT_AVATAR_NAME_15
+IDS_DEFAULT_AVATAR_NAME_16
+IDS_DEFAULT_AVATAR_NAME_17
+IDS_DEFAULT_AVATAR_NAME_18
+IDS_DEFAULT_AVATAR_NAME_19
+IDS_DEFAULT_AVATAR_NAME_20
+IDS_DEFAULT_AVATAR_NAME_21
+IDS_DEFAULT_AVATAR_NAME_22
+IDS_DEFAULT_AVATAR_NAME_23
+IDS_DEFAULT_AVATAR_NAME_24
+IDS_DEFAULT_AVATAR_NAME_25
+IDS_DEFAULT_AVATAR_NAME_26
+IDS_DEFAULT_AVATAR_NAME_8
+IDS_DEFAULT_AVATAR_NAME_9
+IDS_DEFAULT_ENCODING
+IDS_DEFAULT_PROFILE_NAME
+IDS_DEFAULT_TAB_TITLE
+IDS_DELETE
+IDS_DISABLE_TOUCH_ADJUSTMENT_DESCRIPTION
+IDS_DISABLE_TOUCH_ADJUSTMENT_NAME
+IDS_DOM_DISTILLER_VIEWER_FAILED_TO_FIND_ARTICLE_CONTENT
+IDS_DOM_DISTILLER_VIEWER_FAILED_TO_FIND_ARTICLE_TITLE
+IDS_DOM_DISTILLER_VIEWER_LOADING_STRING
+IDS_DOM_DISTILLER_VIEWER_NO_DATA_CONTENT
+IDS_DOM_DISTILLER_VIEWER_NO_DATA_TITLE
+IDS_DOM_DISTILLER_VIEWER_VIEW_ORIGINAL
+IDS_DOM_DISTILLER_WEBUI_ENTRY_ADD
+IDS_DOM_DISTILLER_WEBUI_ENTRY_ADD_FAILED
+IDS_DOM_DISTILLER_WEBUI_ENTRY_URL
+IDS_DOM_DISTILLER_WEBUI_FETCHING_ENTRIES
+IDS_DOM_DISTILLER_WEBUI_REFRESH
+IDS_DOM_DISTILLER_WEBUI_TITLE
+IDS_DOM_DISTILLER_WEBUI_VIEW_URL
+IDS_DOM_DISTILLER_WEBUI_VIEW_URL_FAILED
+IDS_DONE
+IDS_EDIT_FIND_MAC
+IDS_EMPTY_KEYWORD_VALUE
+IDS_ERRORPAGES_BUTTON_LESS
+IDS_ERRORPAGES_BUTTON_LOAD_STALE
+IDS_ERRORPAGES_BUTTON_LOAD_STALE_HELP
+IDS_ERRORPAGES_BUTTON_MORE
+IDS_ERRORPAGES_BUTTON_RELOAD
+IDS_ERRORPAGES_DETAILS_ADDRESS_UNREACHABLE
+IDS_ERRORPAGES_DETAILS_BAD_GATEWAY
+IDS_ERRORPAGES_DETAILS_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_DETAILS_BLOCKED
+IDS_ERRORPAGES_DETAILS_BLOCKED_BY_ADMINISTRATOR
+IDS_ERRORPAGES_DETAILS_CACHE_MISS
+IDS_ERRORPAGES_DETAILS_CACHE_READ_FAILURE
+IDS_ERRORPAGES_DETAILS_CONNECTION_CLOSED
+IDS_ERRORPAGES_DETAILS_CONNECTION_FAILED
+IDS_ERRORPAGES_DETAILS_CONNECTION_REFUSED
+IDS_ERRORPAGES_DETAILS_CONNECTION_RESET
+IDS_ERRORPAGES_DETAILS_DNS_PROBE_RUNNING
+IDS_ERRORPAGES_DETAILS_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_DETAILS_EMPTY_RESPONSE
+IDS_ERRORPAGES_DETAILS_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_DETAILS_FILE_NOT_FOUND
+IDS_ERRORPAGES_DETAILS_FORBIDDEN
+IDS_ERRORPAGES_DETAILS_GATEWAY_TIMEOUT
+IDS_ERRORPAGES_DETAILS_GONE
+IDS_ERRORPAGES_DETAILS_HTTP_VERSION_NOT_SUPPORTED
+IDS_ERRORPAGES_DETAILS_INTERNAL_SERVER_ERROR
+IDS_ERRORPAGES_DETAILS_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_DETAILS_NAME_NOT_RESOLVED
+IDS_ERRORPAGES_DETAILS_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_DETAILS_NETWORK_CHANGED
+IDS_ERRORPAGES_DETAILS_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_DETAILS_NOT_IMPLEMENTED
+IDS_ERRORPAGES_DETAILS_PINNING_FAILURE
+IDS_ERRORPAGES_DETAILS_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_CONTENT_DISPOSITION
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_CONTENT_LENGTH
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_LOCATION
+IDS_ERRORPAGES_DETAILS_SERVICE_UNAVAILABLE
+IDS_ERRORPAGES_DETAILS_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_DETAILS_SSL_UNSAFE_NEGOTIATION
+IDS_ERRORPAGES_DETAILS_TEMPORARILY_THROTTLED
+IDS_ERRORPAGES_DETAILS_TIMED_OUT
+IDS_ERRORPAGES_DETAILS_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_DETAILS_UNKNOWN
+IDS_ERRORPAGES_ERROR_CODE
+IDS_ERRORPAGES_HEADING_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_HEADING_BLOCKED
+IDS_ERRORPAGES_HEADING_BLOCKED_BY_ADMINISTRATOR
+IDS_ERRORPAGES_HEADING_CACHE_MISS
+IDS_ERRORPAGES_HEADING_CACHE_READ_FAILURE
+IDS_ERRORPAGES_HEADING_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_HEADING_DUPLICATE_HEADERS
+IDS_ERRORPAGES_HEADING_EMPTY_RESPONSE
+IDS_ERRORPAGES_HEADING_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_HTTP_SERVER_ERROR
+IDS_ERRORPAGES_HEADING_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_HEADING_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_HEADING_NOT_AVAILABLE
+IDS_ERRORPAGES_HEADING_NOT_FOUND
+IDS_ERRORPAGES_HEADING_PINNING_FAILURE
+IDS_ERRORPAGES_HEADING_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_HEADING_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_HEADING_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_HEADING_WEAK_SERVER_EPHEMERAL_DH_KEY
+IDS_ERRORPAGES_HTTP_POST_WARNING
+IDS_ERRORPAGES_NET_BUTTON_DETAILS
+IDS_ERRORPAGES_NET_BUTTON_HIDE_DETAILS
+IDS_ERRORPAGES_SUGGESTION_CHECK_CONNECTION_BODY
+IDS_ERRORPAGES_SUGGESTION_CHECK_CONNECTION_HEADER
+IDS_ERRORPAGES_SUGGESTION_CONTACT_ADMINISTRATOR
+IDS_ERRORPAGES_SUGGESTION_DNS_CONFIG
+IDS_ERRORPAGES_SUGGESTION_FIREWALL_CONFIG
+IDS_ERRORPAGES_SUGGESTION_LEARNMORE_BODY
+IDS_ERRORPAGES_SUGGESTION_NETWORK_PREDICTION
+IDS_ERRORPAGES_SUGGESTION_PROXY_CONFIG
+IDS_ERRORPAGES_SUGGESTION_PROXY_DISABLE_PLATFORM
+IDS_ERRORPAGES_SUGGESTION_RELOAD
+IDS_ERRORPAGES_SUGGESTION_RELOAD_REPOST_BODY
+IDS_ERRORPAGES_SUGGESTION_RELOAD_REPOST_HEADER
+IDS_ERRORPAGES_SUGGESTION_VIEW_POLICIES
+IDS_ERRORPAGES_SUMMARY_ADDRESS_UNREACHABLE
+IDS_ERRORPAGES_SUMMARY_BAD_GATEWAY
+IDS_ERRORPAGES_SUMMARY_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_SUMMARY_BLOCKED
+IDS_ERRORPAGES_SUMMARY_BLOCKED_BY_ADMINISTRATOR
+IDS_ERRORPAGES_SUMMARY_CACHE_MISS
+IDS_ERRORPAGES_SUMMARY_CACHE_READ_FAILURE
+IDS_ERRORPAGES_SUMMARY_CONNECTION_REFUSED
+IDS_ERRORPAGES_SUMMARY_CONNECTION_RESET
+IDS_ERRORPAGES_SUMMARY_DNS_PROBE_RUNNING
+IDS_ERRORPAGES_SUMMARY_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_SUMMARY_DUPLICATE_HEADERS
+IDS_ERRORPAGES_SUMMARY_EMPTY_RESPONSE
+IDS_ERRORPAGES_SUMMARY_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_SUMMARY_FORBIDDEN
+IDS_ERRORPAGES_SUMMARY_GATEWAY_TIMEOUT
+IDS_ERRORPAGES_SUMMARY_GONE
+IDS_ERRORPAGES_SUMMARY_INTERNAL_SERVER_ERROR
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED_INSTRUCTIONS_TEMPLATE
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED_PLATFORM
+IDS_ERRORPAGES_SUMMARY_NAME_NOT_RESOLVED
+IDS_ERRORPAGES_SUMMARY_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_SUMMARY_NETWORK_CHANGED
+IDS_ERRORPAGES_SUMMARY_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_SUMMARY_NOT_AVAILABLE
+IDS_ERRORPAGES_SUMMARY_NOT_FOUND
+IDS_ERRORPAGES_SUMMARY_PINNING_FAILURE
+IDS_ERRORPAGES_SUMMARY_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_SUMMARY_SERVICE_UNAVAILABLE
+IDS_ERRORPAGES_SUMMARY_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_SUMMARY_TEMPORARILY_THROTTLED
+IDS_ERRORPAGES_SUMMARY_TIMED_OUT
+IDS_ERRORPAGES_SUMMARY_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_SUMMARY_WEAK_SERVER_EPHEMERAL_DH_KEY
+IDS_ERRORPAGES_SUMMARY_WEBSITE_CANNOT_HANDLE
+IDS_ERRORPAGES_TITLE_ACCESS_DENIED
+IDS_ERRORPAGES_TITLE_BLOCKED
+IDS_ERRORPAGES_TITLE_LOAD_FAILED
+IDS_ERRORPAGES_TITLE_NOT_AVAILABLE
+IDS_ERRORPAGES_TITLE_NOT_FOUND
+IDS_EXTENSION_KEYWORD_COMMAND
+IDS_FEEDBACK_REPORT_PAGE_TITLE
+IDS_FEEDBACK_REPORT_URL_LABEL
+IDS_FEEDBACK_SEND_REPORT
+IDS_FEEDBACK_USER_EMAIL_LABEL
+IDS_FILE_BROWSER_OPEN_LABEL
+IDS_FIND_IN_PAGE_CLOSE_TOOLTIP
+IDS_FIND_IN_PAGE_COUNT
+IDS_FIND_IN_PAGE_NEXT_TOOLTIP
+IDS_FIND_IN_PAGE_PREVIOUS_TOOLTIP
+IDS_FLAGS_ACCELERATED_FIXED_ROOT_BACKGROUND_DESCRIPTION
+IDS_FLAGS_ACCELERATED_FIXED_ROOT_BACKGROUND_NAME
+IDS_FLAGS_ALLOW_NACL_SOCKET_API_DESCRIPTION
+IDS_FLAGS_ALLOW_NACL_SOCKET_API_NAME
+IDS_FLAGS_ALLOW_TOUCHPAD_THREE_FINGER_CLICK_DESCRIPTION
+IDS_FLAGS_ALLOW_TOUCHPAD_THREE_FINGER_CLICK_NAME
+IDS_FLAGS_COMPOSITED_LAYER_BORDERS
+IDS_FLAGS_COMPOSITED_LAYER_BORDERS_DESCRIPTION
+IDS_FLAGS_COMPOSITING_FOR_FIXED_POSITION_DESCRIPTION
+IDS_FLAGS_COMPOSITING_FOR_FIXED_POSITION_HIGH_DPI
+IDS_FLAGS_COMPOSITING_FOR_FIXED_POSITION_NAME
+IDS_FLAGS_COMPOSITING_FOR_TRANSITION_DESCRIPTION
+IDS_FLAGS_COMPOSITING_FOR_TRANSITION_NAME
+IDS_FLAGS_CONFLICTS_CHECK_DESCRIPTION
+IDS_FLAGS_CONFLICTS_CHECK_NAME
+IDS_FLAGS_DEBUG_PACKED_APP_DESCRIPTION
+IDS_FLAGS_DEBUG_PACKED_APP_NAME
+IDS_FLAGS_DEBUG_SHORTCUTS_DESCRIPTION
+IDS_FLAGS_DEBUG_SHORTCUTS_NAME
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_DESCRIPTION
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_GRANDE
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_NAME
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_SHORT
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_TALL
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_VENTI
+IDS_FLAGS_DEFAULT_TILE_WIDTH_DESCRIPTION
+IDS_FLAGS_DEFAULT_TILE_WIDTH_GRANDE
+IDS_FLAGS_DEFAULT_TILE_WIDTH_NAME
+IDS_FLAGS_DEFAULT_TILE_WIDTH_SHORT
+IDS_FLAGS_DEFAULT_TILE_WIDTH_TALL
+IDS_FLAGS_DEFAULT_TILE_WIDTH_VENTI
+IDS_FLAGS_DISABLE
+IDS_FLAGS_DISABLE_ACCELERATED_2D_CANVAS_DESCRIPTION
+IDS_FLAGS_DISABLE_ACCELERATED_2D_CANVAS_NAME
+IDS_FLAGS_DISABLE_ACCELERATED_VIDEO_DECODE_DESCRIPTION
+IDS_FLAGS_DISABLE_ACCELERATED_VIDEO_DECODE_NAME
+IDS_FLAGS_DISABLE_BOOT_ANIMATION
+IDS_FLAGS_DISABLE_BOOT_ANIMATION_DESCRIPTION
+IDS_FLAGS_DISABLE_GESTURE_REQUIREMENT_FOR_MEDIA_PLAYBACK_DESCRIPTION
+IDS_FLAGS_DISABLE_GESTURE_REQUIREMENT_FOR_MEDIA_PLAYBACK_NAME
+IDS_FLAGS_DISABLE_HYPERLINK_AUDITING_DESCRIPTION
+IDS_FLAGS_DISABLE_HYPERLINK_AUDITING_NAME
+IDS_FLAGS_DISABLE_PNACL_DESCRIPTION
+IDS_FLAGS_DISABLE_PNACL_NAME
+IDS_FLAGS_DISABLE_SOFTWARE_RASTERIZER_DESCRIPTION
+IDS_FLAGS_DISABLE_SOFTWARE_RASTERIZER_NAME
+IDS_FLAGS_DISABLE_WEBGL_DESCRIPTION
+IDS_FLAGS_DISABLE_WEBGL_NAME
+IDS_FLAGS_DISABLE_WEBRTC_DESCRIPTION
+IDS_FLAGS_DISABLE_WEBRTC_NAME
+IDS_FLAGS_ENABLE
+IDS_FLAGS_ENABLE_ASYNC_DNS_DESCRIPTION
+IDS_FLAGS_ENABLE_ASYNC_DNS_NAME
+IDS_FLAGS_ENABLE_CARRIER_SWITCHING
+IDS_FLAGS_ENABLE_CARRIER_SWITCHING_DESCRIPTION
+IDS_FLAGS_ENABLE_DEFERRED_IMAGE_DECODING_DESCRIPTION
+IDS_FLAGS_ENABLE_DEFERRED_IMAGE_DECODING_NAME
+IDS_FLAGS_ENABLE_DEVTOOLS_EXPERIMENTS_DESCRIPTION
+IDS_FLAGS_ENABLE_DEVTOOLS_EXPERIMENTS_NAME
+IDS_FLAGS_ENABLE_DOWNLOAD_RESUMPTION_DESCRIPTION
+IDS_FLAGS_ENABLE_DOWNLOAD_RESUMPTION_NAME
+IDS_FLAGS_ENABLE_EXPERIMENTAL_CANVAS_FEATURES_DESCRIPTION
+IDS_FLAGS_ENABLE_EXPERIMENTAL_CANVAS_FEATURES_NAME
+IDS_FLAGS_ENABLE_GESTURE_TAP_HIGHLIGHTING_DESCRIPTION
+IDS_FLAGS_ENABLE_GESTURE_TAP_HIGHLIGHTING_NAME
+IDS_FLAGS_ENABLE_JAVASCRIPT_HARMONY_DESCRIPTION
+IDS_FLAGS_ENABLE_JAVASCRIPT_HARMONY_NAME
+IDS_FLAGS_ENABLE_NACL_DEBUG_DESCRIPTION
+IDS_FLAGS_ENABLE_NACL_DEBUG_NAME
+IDS_FLAGS_ENABLE_NACL_DESCRIPTION
+IDS_FLAGS_ENABLE_NACL_NAME
+IDS_FLAGS_ENABLE_PANELS_DESCRIPTION
+IDS_FLAGS_ENABLE_PANELS_NAME
+IDS_FLAGS_ENABLE_PASSWORD_GENERATION_DESCRIPTION
+IDS_FLAGS_ENABLE_PASSWORD_GENERATION_NAME
+IDS_FLAGS_ENABLE_PINCH_SCALE_DESCRIPTION
+IDS_FLAGS_ENABLE_PINCH_SCALE_NAME
+IDS_FLAGS_ENABLE_REQUEST_TABLET_SITE_DESCRIPTION
+IDS_FLAGS_ENABLE_REQUEST_TABLET_SITE_NAME
+IDS_FLAGS_ENABLE_SCREEN_CAPTURE_DESCRIPTION
+IDS_FLAGS_ENABLE_SCREEN_CAPTURE_NAME
+IDS_FLAGS_ENABLE_SIMPLE_CACHE_BACKEND_DESCRIPTION
+IDS_FLAGS_ENABLE_SIMPLE_CACHE_BACKEND_NAME
+IDS_FLAGS_ENABLE_SMOOTH_SCROLLING_DESCRIPTION
+IDS_FLAGS_ENABLE_SMOOTH_SCROLLING_NAME
+IDS_FLAGS_ENABLE_SUGGESTIONS_SERVICE_DESCRIPTION
+IDS_FLAGS_ENABLE_SUGGESTIONS_SERVICE_NAME
+IDS_FLAGS_ENABLE_SYNCED_NOTIFICATIONS_DESCRIPTION
+IDS_FLAGS_ENABLE_SYNCED_NOTIFICATIONS_NAME
+IDS_FLAGS_ENABLE_TCP_FAST_OPEN_DESCRIPTION
+IDS_FLAGS_ENABLE_TCP_FAST_OPEN_NAME
+IDS_FLAGS_ENABLE_TOUCH_DRAG_DROP_DESCRIPTION
+IDS_FLAGS_ENABLE_TOUCH_DRAG_DROP_NAME
+IDS_FLAGS_ENABLE_TOUCH_EDITING_DESCRIPTION
+IDS_FLAGS_ENABLE_TOUCH_EDITING_NAME
+IDS_FLAGS_EXPERIMENTAL_EXTENSION_APIS_DESCRIPTION
+IDS_FLAGS_EXPERIMENTAL_EXTENSION_APIS_NAME
+IDS_FLAGS_EXPERIMENTAL_WEB_PLATFORM_FEATURES_DESCRIPTION
+IDS_FLAGS_EXPERIMENTAL_WEB_PLATFORM_FEATURES_NAME
+IDS_FLAGS_EXTENSIONS_ON_CHROME_URLS_DESCRIPTION
+IDS_FLAGS_EXTENSIONS_ON_CHROME_URLS_NAME
+IDS_FLAGS_FORCE_ACCELERATED_OVERFLOW_SCROLL_MODE_DESCRIPTION
+IDS_FLAGS_FORCE_ACCELERATED_OVERFLOW_SCROLL_MODE_NAME
+IDS_FLAGS_FORCE_HIGH_DPI_DESCRIPTION
+IDS_FLAGS_FORCE_HIGH_DPI_NAME
+IDS_FLAGS_IGNORE_GPU_BLACKLIST_DESCRIPTION
+IDS_FLAGS_IGNORE_GPU_BLACKLIST_NAME
+IDS_FLAGS_IMPL_SIDE_PAINTING_DESCRIPTION
+IDS_FLAGS_IMPL_SIDE_PAINTING_NAME
+IDS_FLAGS_LONG_TITLE
+IDS_FLAGS_NACL_DEBUG_MASK_DESCRIPTION
+IDS_FLAGS_NACL_DEBUG_MASK_NAME
+IDS_FLAGS_NOT_AVAILABLE
+IDS_FLAGS_NO_EXPERIMENTS_AVAILABLE
+IDS_FLAGS_NO_UNSUPPORTED_EXPERIMENTS
+IDS_FLAGS_NTP_OTHER_SESSIONS_MENU_DESCRIPTION
+IDS_FLAGS_NTP_OTHER_SESSIONS_MENU_NAME
+IDS_FLAGS_PERFORMANCE_MONITOR_GATHERING_DESCRIPTION
+IDS_FLAGS_PERFORMANCE_MONITOR_GATHERING_NAME
+IDS_FLAGS_RELAUNCH_BUTTON
+IDS_FLAGS_RELAUNCH_NOTICE
+IDS_FLAGS_RESET_ALL_BUTTON
+IDS_FLAGS_SAVE_PAGE_AS_MHTML_DESCRIPTION
+IDS_FLAGS_SAVE_PAGE_AS_MHTML_NAME
+IDS_FLAGS_SHOW_AUTOFILL_TYPE_PREDICTIONS_DESCRIPTION
+IDS_FLAGS_SHOW_AUTOFILL_TYPE_PREDICTIONS_NAME
+IDS_FLAGS_SHOW_FPS_COUNTER
+IDS_FLAGS_SHOW_FPS_COUNTER_DESCRIPTION
+IDS_FLAGS_SHOW_TOUCH_HUD_DESCRIPTION
+IDS_FLAGS_SHOW_TOUCH_HUD_NAME
+IDS_FLAGS_SILENT_DEBUGGER_EXTENSION_API_DESCRIPTION
+IDS_FLAGS_SILENT_DEBUGGER_EXTENSION_API_NAME
+IDS_FLAGS_SPELLCHECK_AUTOCORRECT
+IDS_FLAGS_SPELLCHECK_AUTOCORRECT_DESCRIPTION
+IDS_FLAGS_STACKED_TAB_STRIP_DESCRIPTION
+IDS_FLAGS_STACKED_TAB_STRIP_NAME
+IDS_FLAGS_TABLE_TITLE
+IDS_FLAGS_THREADED_COMPOSITING_MODE_DESCRIPTION
+IDS_FLAGS_THREADED_COMPOSITING_MODE_NAME
+IDS_FLAGS_UNSUPPORTED_TABLE_TITLE
+IDS_FLAGS_WALLET_SERVICE_USE_SANDBOX_DESCRIPTION
+IDS_FLAGS_WALLET_SERVICE_USE_SANDBOX_NAME
+IDS_FLAGS_WARNING_HEADER
+IDS_FLAGS_WARNING_TEXT
+IDS_FULLSCREEN
+IDS_GENERIC_EXPERIMENT_CHOICE_AUTOMATIC
+IDS_GENERIC_EXPERIMENT_CHOICE_DEFAULT
+IDS_GENERIC_EXPERIMENT_CHOICE_DISABLED
+IDS_GENERIC_EXPERIMENT_CHOICE_ENABLED
+IDS_GOOGLE_URL_TRACKER_INFOBAR_DONT_SWITCH
+IDS_GOOGLE_URL_TRACKER_INFOBAR_LEARN_MORE
+IDS_GOOGLE_URL_TRACKER_INFOBAR_MESSAGE
+IDS_GOOGLE_URL_TRACKER_INFOBAR_SWITCH
+IDS_GROUP_BY_DOMAIN_LABEL
+IDS_HISTORY_ACTION_MENU_DESCRIPTION
+IDS_HISTORY_BLOCKED_VISIT_TEXT
+IDS_HISTORY_BROWSERESULTS
+IDS_HISTORY_CONTINUED
+IDS_HISTORY_DATE_WITH_RELATIVE_TIME
+IDS_HISTORY_DELETE_PRIOR_VISITS_CONFIRM_BUTTON
+IDS_HISTORY_DELETE_PRIOR_VISITS_WARNING
+IDS_HISTORY_FILTER_ALLOWED
+IDS_HISTORY_FILTER_ALLOW_ITEMS
+IDS_HISTORY_FILTER_BLOCKED
+IDS_HISTORY_FILTER_BLOCK_ITEMS
+IDS_HISTORY_HAS_SYNCED_RESULTS
+IDS_HISTORY_INTERVAL
+IDS_HISTORY_IN_CONTENT_PACK
+IDS_HISTORY_LOADING
+IDS_HISTORY_LOCK_BUTTON
+IDS_HISTORY_MORE_FROM_SITE
+IDS_HISTORY_NEWER
+IDS_HISTORY_NEWEST
+IDS_HISTORY_NO_RESULTS
+IDS_HISTORY_NO_SEARCH_RESULTS
+IDS_HISTORY_NO_SYNCED_RESULTS
+IDS_HISTORY_NUMBER_VISITS
+IDS_HISTORY_OLDER
+IDS_HISTORY_OPEN_CLEAR_BROWSING_DATA_DIALOG
+IDS_HISTORY_RANGE_ALL_TIME
+IDS_HISTORY_RANGE_LABEL
+IDS_HISTORY_RANGE_MONTH
+IDS_HISTORY_RANGE_NEXT
+IDS_HISTORY_RANGE_PREVIOUS
+IDS_HISTORY_RANGE_TODAY
+IDS_HISTORY_RANGE_WEEK
+IDS_HISTORY_REMOVE_PAGE
+IDS_HISTORY_REMOVE_SELECTED_ITEMS
+IDS_HISTORY_SEARCHRESULTSFOR
+IDS_HISTORY_SEARCH_BUTTON
+IDS_HISTORY_TITLE
+IDS_HISTORY_UNKNOWN_DEVICE
+IDS_HISTORY_UNLOCK_BUTTON
+IDS_HTTP_POST_WARNING
+IDS_HTTP_POST_WARNING_RESEND
+IDS_HTTP_POST_WARNING_TITLE
+IDS_IMPORT_FROM_FIREFOX
+IDS_IMPORT_FROM_ICEWEASEL
+IDS_KEYWORD_SEARCH
+IDS_LEARN_MORE
+IDS_LIBADDRESSINPUT_ADDRESS_LINE_1_LABEL
+IDS_LIBADDRESSINPUT_AREA
+IDS_LIBADDRESSINPUT_COUNTRY_OR_REGION_LABEL
+IDS_LIBADDRESSINPUT_COUNTY
+IDS_LIBADDRESSINPUT_DEPARTMENT
+IDS_LIBADDRESSINPUT_DISTRICT
+IDS_LIBADDRESSINPUT_DO_SI
+IDS_LIBADDRESSINPUT_EMIRATE
+IDS_LIBADDRESSINPUT_ISLAND
+IDS_LIBADDRESSINPUT_LOCALITY_LABEL
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_POSTAL_CODE
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_POSTAL_CODE_URL
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_ZIP
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_ZIP_URL
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_FIELD
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_POSTAL_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_POSTAL_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_ZIP_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_ZIP_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_OBLAST
+IDS_LIBADDRESSINPUT_ORGANIZATION_LABEL
+IDS_LIBADDRESSINPUT_PARISH
+IDS_LIBADDRESSINPUT_POSTAL_CODE_LABEL
+IDS_LIBADDRESSINPUT_PO_BOX_FORBIDDEN_VALUE
+IDS_LIBADDRESSINPUT_PREFECTURE
+IDS_LIBADDRESSINPUT_PROVINCE
+IDS_LIBADDRESSINPUT_RECIPIENT_LABEL
+IDS_LIBADDRESSINPUT_STATE
+IDS_LIBADDRESSINPUT_UNKNOWN_VALUE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_POSTAL_CODE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_POSTAL_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_POSTAL_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_ZIP
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_ZIP_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_ZIP_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_ZIP_CODE_LABEL
+IDS_LOGIN_DIALOG_OK_BUTTON_LABEL
+IDS_LOGIN_DIALOG_PASSWORD_FIELD
+IDS_LOGIN_DIALOG_TITLE
+IDS_LOGIN_DIALOG_USERNAME_FIELD
+IDS_MANAGED_USER_AVATAR_LABEL
+IDS_MOBILE_WELCOME_URL
+IDS_NACL_DEBUG_MASK_CHOICE_DEBUG_ALL
+IDS_NACL_DEBUG_MASK_CHOICE_EXCLUDE_UTILS_PNACL
+IDS_NACL_DEBUG_MASK_CHOICE_INCLUDE_DEBUG
+IDS_NETWORK_PREDICTION_ENABLED_DESCRIPTION
+IDS_NET_EXPORT_NO_EMAIL_ACCOUNTS_ALERT_MESSAGE
+IDS_NET_EXPORT_NO_EMAIL_ACCOUNTS_ALERT_TITLE
+IDS_NEW_INCOGNITO_WINDOW_MAC
+IDS_NEW_TAB_CHROME_WELCOME_PAGE_TITLE
+IDS_NEW_TAB_MOST_VISITED
+IDS_NEW_TAB_OTHER_SESSIONS_COLLAPSE_SESSION
+IDS_NEW_TAB_OTHER_SESSIONS_EXPAND_SESSION
+IDS_NEW_TAB_OTHER_SESSIONS_OPEN_ALL
+IDS_NEW_TAB_RECENTLY_CLOSED
+IDS_NEW_TAB_RESTORE_THUMBNAILS_SHORT_LINK
+IDS_NEW_TAB_THUMBNAIL_REMOVED_NOTIFICATION
+IDS_NEW_TAB_TITLE
+IDS_NEW_TAB_UNDO_THUMBNAIL_REMOVE
+IDS_NUMBERED_PROFILE_NAME
+IDS_OK
+IDS_OMNIBOX_EMPTY_HINT
+IDS_ONE_CLICK_SIGNIN_CONFIRM_EMAIL_DIALOG_CANCEL_BUTTON
+IDS_OPEN_TABS_NOTYETSYNCED
+IDS_OPEN_TABS_PROMOCOMPUTER
+IDS_OPTIONS_ADVANCED_SECTION_TITLE_PRIVACY
+IDS_OPTIONS_DISABLE_WEB_SERVICES
+IDS_OPTIONS_ENABLE_LOGGING
+IDS_OPTIONS_IMPROVE_BROWSING_EXPERIENCE
+IDS_OPTIONS_PROXIES_CONFIGURE_BUTTON
+IDS_OTHER_DEVICES_X_MORE
+IDS_PAGEINFO_ADDRESS
+IDS_PAGEINFO_CERT_INFO_BUTTON
+IDS_PAGEINFO_PARTIAL_ADDRESS
+IDS_PAGE_INFO_HELP_CENTER_LINK
+IDS_PAGE_INFO_INTERNAL_PAGE
+IDS_PAGE_INFO_SECURITY_BUTTON_ACCESSIBILITY_LABEL
+IDS_PAGE_INFO_SECURITY_TAB_DEPRECATED_SIGNATURE_ALGORITHM
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_CONNECTION_TEXT
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_INSECURE_CONTENT_ERROR
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_INSECURE_CONTENT_WARNING
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_SENTENCE_LINK
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTION_DETAILS
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTION_DETAILS_AEAD
+IDS_PAGE_INFO_SECURITY_TAB_FALLBACK_MESSAGE
+IDS_PAGE_INFO_SECURITY_TAB_FIRST_VISITED_TODAY
+IDS_PAGE_INFO_SECURITY_TAB_INSECURE_IDENTITY
+IDS_PAGE_INFO_SECURITY_TAB_NON_UNIQUE_NAME
+IDS_PAGE_INFO_SECURITY_TAB_NOT_ENCRYPTED_CONNECTION_TEXT
+IDS_PAGE_INFO_SECURITY_TAB_NO_REVOCATION_MECHANISM
+IDS_PAGE_INFO_SECURITY_TAB_RENEGOTIATION_MESSAGE
+IDS_PAGE_INFO_SECURITY_TAB_SECURE_IDENTITY
+IDS_PAGE_INFO_SECURITY_TAB_SECURE_IDENTITY_EV
+IDS_PAGE_INFO_SECURITY_TAB_SSL_VERSION
+IDS_PAGE_INFO_SECURITY_TAB_UNABLE_TO_CHECK_REVOCATION
+IDS_PAGE_INFO_SECURITY_TAB_UNKNOWN_PARTY
+IDS_PAGE_INFO_SECURITY_TAB_VISITED_BEFORE_TODAY
+IDS_PAGE_INFO_SECURITY_TAB_WEAK_ENCRYPTION_CONNECTION_TEXT
+IDS_PAGE_INFO_SITE_INFO_TITLE
+IDS_PASSWORDS_EXCEPTIONS_TAB_TITLE
+IDS_PASSWORDS_SHOW_PASSWORDS_TAB_TITLE
+IDS_PASSWORD_MANAGER_BLACKLIST_BUTTON
+IDS_PASSWORD_MANAGER_SAVE_BUTTON
+IDS_PASSWORD_MANAGER_SAVE_PASSWORD_PROMPT
+IDS_PAST_TIME_TODAY
+IDS_PAST_TIME_YESTERDAY
+IDS_PDF_INFOBAR_ALWAYS_USE_READER_BUTTON
+IDS_PLATFORM_LABEL
+IDS_PLUGIN_CONFIRM_INSTALL_DIALOG_ACCEPT_BUTTON
+IDS_PLUGIN_CONFIRM_INSTALL_DIALOG_TITLE
+IDS_POLICY_DEFAULT_SEARCH_DISABLED
+IDS_POLICY_DEPRECATED
+IDS_POLICY_DM_STATUS_HTTP_STATUS_ERROR
+IDS_POLICY_DM_STATUS_REQUEST_FAILED
+IDS_POLICY_DM_STATUS_REQUEST_INVALID
+IDS_POLICY_DM_STATUS_RESPONSE_DECODING_ERROR
+IDS_POLICY_DM_STATUS_SERVICE_ACTIVATION_PENDING
+IDS_POLICY_DM_STATUS_SERVICE_DEPROVISIONED
+IDS_POLICY_DM_STATUS_SERVICE_DEVICE_ID_CONFLICT
+IDS_POLICY_DM_STATUS_SERVICE_DEVICE_NOT_FOUND
+IDS_POLICY_DM_STATUS_SERVICE_DOMAIN_MISMATCH
+IDS_POLICY_DM_STATUS_SERVICE_INVALID_SERIAL_NUMBER
+IDS_POLICY_DM_STATUS_SERVICE_MANAGEMENT_NOT_SUPPORTED
+IDS_POLICY_DM_STATUS_SERVICE_MANAGEMENT_TOKEN_INVALID
+IDS_POLICY_DM_STATUS_SERVICE_MISSING_LICENSES
+IDS_POLICY_DM_STATUS_SERVICE_POLICY_NOT_FOUND
+IDS_POLICY_DM_STATUS_SUCCESS
+IDS_POLICY_DM_STATUS_TEMPORARY_UNAVAILABLE
+IDS_POLICY_DM_STATUS_UNKNOWN_ERROR
+IDS_POLICY_INVALID_BOOKMARK
+IDS_POLICY_INVALID_PROXY_MODE_ERROR
+IDS_POLICY_INVALID_SEARCH_URL_ERROR
+IDS_POLICY_LEVEL_ERROR
+IDS_POLICY_LIST_ENTRY_ERROR
+IDS_POLICY_NOT_SPECIFIED_ERROR
+IDS_POLICY_OUT_OF_RANGE_ERROR
+IDS_POLICY_OVERRIDDEN
+IDS_POLICY_PROXY_BOTH_SPECIFIED_ERROR
+IDS_POLICY_PROXY_MODE_AUTO_DETECT_ERROR
+IDS_POLICY_PROXY_MODE_DISABLED_ERROR
+IDS_POLICY_PROXY_MODE_FIXED_SERVERS_ERROR
+IDS_POLICY_PROXY_MODE_PAC_URL_ERROR
+IDS_POLICY_PROXY_MODE_SYSTEM_ERROR
+IDS_POLICY_PROXY_NEITHER_SPECIFIED_ERROR
+IDS_POLICY_SCHEMA_VALIDATION_ERROR
+IDS_POLICY_STORE_STATUS_BAD_STATE
+IDS_POLICY_STORE_STATUS_LOAD_ERROR
+IDS_POLICY_STORE_STATUS_OK
+IDS_POLICY_STORE_STATUS_PARSE_ERROR
+IDS_POLICY_STORE_STATUS_SERIALIZE_ERROR
+IDS_POLICY_STORE_STATUS_STORE_ERROR
+IDS_POLICY_STORE_STATUS_UNKNOWN_ERROR
+IDS_POLICY_STORE_STATUS_VALIDATION_ERROR
+IDS_POLICY_SUBKEY_ERROR
+IDS_POLICY_TYPE_ERROR
+IDS_POLICY_VALIDATION_BAD_INITIAL_SIGNATURE
+IDS_POLICY_VALIDATION_BAD_KEY_VERIFICATION_SIGNATURE
+IDS_POLICY_VALIDATION_BAD_SIGNATURE
+IDS_POLICY_VALIDATION_BAD_TIMESTAMP
+IDS_POLICY_VALIDATION_BAD_USERNAME
+IDS_POLICY_VALIDATION_ERROR_CODE_PRESENT
+IDS_POLICY_VALIDATION_OK
+IDS_POLICY_VALIDATION_PAYLOAD_PARSE_ERROR
+IDS_POLICY_VALIDATION_POLICY_PARSE_ERROR
+IDS_POLICY_VALIDATION_UNKNOWN_ERROR
+IDS_POLICY_VALIDATION_WRONG_POLICY_TYPE
+IDS_POLICY_VALIDATION_WRONG_SETTINGS_ENTITY_ID
+IDS_POLICY_VALIDATION_WRONG_TOKEN
+IDS_PREFERENCES_CORRUPT_ERROR
+IDS_PREFERENCES_UNREADABLE_ERROR
+IDS_PRINT
+IDS_PRIVACY_POLICY_URL
+IDS_PRODUCT_NAME
+IDS_PROFILES_GUEST_PROFILE_NAME
+IDS_PROFILES_LOCAL_PROFILE_STATE
+IDS_PROFILE_TOO_NEW_ERROR
+IDS_SAD_TAB_MESSAGE
+IDS_SAD_TAB_TITLE
+IDS_SAFE_BROWSING_MALWARE_BACK_BUTTON
+IDS_SAFE_BROWSING_MALWARE_BACK_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_COLLAB_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_DIAGNOSTIC_PAGE
+IDS_SAFE_BROWSING_MALWARE_FEAR_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_LABEL
+IDS_SAFE_BROWSING_MALWARE_QUESTION_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_TITLE
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION1
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION1_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION2
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION2_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION3
+IDS_SAFE_BROWSING_MALWARE_V2_DETAILS
+IDS_SAFE_BROWSING_MALWARE_V2_DETAILS_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_V2_HEADLINE_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_LEARN_MORE
+IDS_SAFE_BROWSING_MALWARE_V2_PROCEED_LINK
+IDS_SAFE_BROWSING_MALWARE_V2_REPORTING_AGREE
+IDS_SAFE_BROWSING_MALWARE_V2_SEE_MORE
+IDS_SAFE_BROWSING_MALWARE_V2_TITLE
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION1
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION2
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION3
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION_AGREE
+IDS_SAFE_BROWSING_MULTI_MALWARE_PROCEED_BUTTON
+IDS_SAFE_BROWSING_MULTI_PHISHING_DESCRIPTION1
+IDS_SAFE_BROWSING_MULTI_THREAT_DESCRIPTION1
+IDS_SAFE_BROWSING_MULTI_THREAT_DESCRIPTION2
+IDS_SAFE_BROWSING_MULTI_THREAT_TITLE
+IDS_SAFE_BROWSING_PHISHING_BACK_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_COLLAB_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_FEAR_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_LABEL
+IDS_SAFE_BROWSING_PHISHING_QUESTION_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_REPORT_ERROR
+IDS_SAFE_BROWSING_PHISHING_TITLE
+IDS_SAFE_BROWSING_PHISHING_V2_DESCRIPTION1
+IDS_SAFE_BROWSING_PHISHING_V2_DESCRIPTION2
+IDS_SAFE_BROWSING_PHISHING_V2_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_V2_REPORT_ERROR
+IDS_SAFE_BROWSING_PHISHING_V2_TITLE
+IDS_SAFE_BROWSING_PRIVACY_POLICY_PAGE_V2
+IDS_SAFE_BROWSING_PRIVACY_POLICY_URL
+IDS_SECURE_CONNECTION_EV
+IDS_SESSION_CRASHED_VIEW_MESSAGE
+IDS_SESSION_CRASHED_VIEW_RESTORE_BUTTON
+IDS_SETTINGS_SHOW_ADVANCED_SETTINGS
+IDS_SHORT_PRODUCT_NAME
+IDS_SHOW_HISTORY
+IDS_SIGNED_IN_WITH_SYNC_DISABLED
+IDS_SIGNED_IN_WITH_SYNC_SUPPRESSED
+IDS_SIGNIN_ERROR_BUBBLE_VIEW_TITLE
+IDS_SSL_BLOCKING_PAGE_TITLE
+IDS_SYNC_ACCOUNT_SYNCING_TO_USER
+IDS_SYNC_ACCOUNT_SYNCING_TO_USER_WITH_MANAGE_LINK
+IDS_SYNC_AUTHENTICATING_LABEL
+IDS_SYNC_BASIC_ENCRYPTION_DATA
+IDS_SYNC_CLEAR_USER_DATA
+IDS_SYNC_CONFIGURE_ENCRYPTION
+IDS_SYNC_DATATYPE_AUTOFILL
+IDS_SYNC_DATATYPE_BOOKMARKS
+IDS_SYNC_DATATYPE_PASSWORDS
+IDS_SYNC_DATATYPE_PREFERENCES
+IDS_SYNC_DATATYPE_TABS
+IDS_SYNC_DATATYPE_TYPED_URLS
+IDS_SYNC_EMPTY_PASSPHRASE_ERROR
+IDS_SYNC_ENABLE_SYNC_ON_ACCOUNT
+IDS_SYNC_ENCRYPTION_SECTION_TITLE
+IDS_SYNC_ENTER_GOOGLE_PASSPHRASE_BODY
+IDS_SYNC_ENTER_PASSPHRASE_BODY
+IDS_SYNC_ENTER_PASSPHRASE_BODY_WITH_DATE
+IDS_SYNC_ENTER_PASSPHRASE_TITLE
+IDS_SYNC_ERROR_BUBBLE_VIEW_TITLE
+IDS_SYNC_ERROR_SIGNING_IN
+IDS_SYNC_FULL_ENCRYPTION_DATA
+IDS_SYNC_INVALID_USER_CREDENTIALS
+IDS_SYNC_LOGIN_INFO_OUT_OF_DATE
+IDS_SYNC_LOGIN_SETTING_UP
+IDS_SYNC_MENU_PRE_SYNCED_LABEL
+IDS_SYNC_MENU_SYNCED_LABEL
+IDS_SYNC_NTP_PASSWORD_ENABLE
+IDS_SYNC_NTP_PASSWORD_PROMO
+IDS_SYNC_NTP_SETUP_IN_PROGRESS
+IDS_SYNC_OPTIONS_GROUP_NAME
+IDS_SYNC_OTHER_SIGN_IN_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_PASSPHRASE_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_PASSPHRASE_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_PASSPHRASE_ERROR_WRENCH_MENU_ITEM
+IDS_SYNC_PASSPHRASE_LABEL
+IDS_SYNC_PASSPHRASE_MISMATCH_ERROR
+IDS_SYNC_PASSPHRASE_MSG_EXPLICIT_POSTFIX
+IDS_SYNC_PASSPHRASE_MSG_EXPLICIT_PREFIX
+IDS_SYNC_PASSWORD_SYNC_ATTENTION
+IDS_SYNC_PROMO_NTP_BUBBLE_MESSAGE
+IDS_SYNC_PROMO_TAB_TITLE
+IDS_SYNC_RELOGIN_LINK_LABEL
+IDS_SYNC_SERVER_IS_UNREACHABLE
+IDS_SYNC_SERVICE_UNAVAILABLE
+IDS_SYNC_SETUP_ERROR
+IDS_SYNC_SIGN_IN_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_SIGN_IN_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_SIGN_IN_ERROR_WRENCH_MENU_ITEM
+IDS_SYNC_START_SYNC_BUTTON_LABEL
+IDS_SYNC_STATUS_UNRECOVERABLE_ERROR
+IDS_SYNC_STOP_AND_RESTART_SYNC
+IDS_SYNC_TIME_JUST_NOW
+IDS_SYNC_TIME_NEVER
+IDS_SYNC_UNAVAILABLE_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_UNAVAILABLE_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_UNRECOVERABLE_ERROR_HELP_URL
+IDS_SYNC_UPGRADE_CLIENT
+IDS_SYSTEM_FLAGS_OWNER_ONLY
+IDS_TIME_DAYS_1ST_DEFAULT
+IDS_TIME_DAYS_1ST_FEW
+IDS_TIME_DAYS_1ST_MANY
+IDS_TIME_DAYS_1ST_SINGULAR
+IDS_TIME_DAYS_1ST_TWO
+IDS_TIME_DAYS_1ST_ZERO
+IDS_TIME_DAYS_DEFAULT
+IDS_TIME_DAYS_FEW
+IDS_TIME_DAYS_MANY
+IDS_TIME_DAYS_SINGULAR
+IDS_TIME_DAYS_TWO
+IDS_TIME_DAYS_ZERO
+IDS_TIME_ELAPSED_DAYS_DEFAULT
+IDS_TIME_ELAPSED_DAYS_FEW
+IDS_TIME_ELAPSED_DAYS_MANY
+IDS_TIME_ELAPSED_DAYS_SINGULAR
+IDS_TIME_ELAPSED_DAYS_TWO
+IDS_TIME_ELAPSED_DAYS_ZERO
+IDS_TIME_ELAPSED_HOURS_DEFAULT
+IDS_TIME_ELAPSED_HOURS_FEW
+IDS_TIME_ELAPSED_HOURS_MANY
+IDS_TIME_ELAPSED_HOURS_SINGULAR
+IDS_TIME_ELAPSED_HOURS_TWO
+IDS_TIME_ELAPSED_HOURS_ZERO
+IDS_TIME_ELAPSED_MINS_DEFAULT
+IDS_TIME_ELAPSED_MINS_FEW
+IDS_TIME_ELAPSED_MINS_MANY
+IDS_TIME_ELAPSED_MINS_SINGULAR
+IDS_TIME_ELAPSED_MINS_TWO
+IDS_TIME_ELAPSED_MINS_ZERO
+IDS_TIME_ELAPSED_SECS_DEFAULT
+IDS_TIME_ELAPSED_SECS_FEW
+IDS_TIME_ELAPSED_SECS_MANY
+IDS_TIME_ELAPSED_SECS_SINGULAR
+IDS_TIME_ELAPSED_SECS_TWO
+IDS_TIME_ELAPSED_SECS_ZERO
+IDS_TIME_HOURS_1ST_DEFAULT
+IDS_TIME_HOURS_1ST_FEW
+IDS_TIME_HOURS_1ST_MANY
+IDS_TIME_HOURS_1ST_SINGULAR
+IDS_TIME_HOURS_1ST_TWO
+IDS_TIME_HOURS_1ST_ZERO
+IDS_TIME_HOURS_2ND_DEFAULT
+IDS_TIME_HOURS_2ND_FEW
+IDS_TIME_HOURS_2ND_MANY
+IDS_TIME_HOURS_2ND_SINGULAR
+IDS_TIME_HOURS_2ND_TWO
+IDS_TIME_HOURS_2ND_ZERO
+IDS_TIME_HOURS_DEFAULT
+IDS_TIME_HOURS_FEW
+IDS_TIME_HOURS_MANY
+IDS_TIME_HOURS_SINGULAR
+IDS_TIME_HOURS_TWO
+IDS_TIME_HOURS_ZERO
+IDS_TIME_LONG_MINS_1ST_DEFAULT
+IDS_TIME_LONG_MINS_1ST_FEW
+IDS_TIME_LONG_MINS_1ST_MANY
+IDS_TIME_LONG_MINS_1ST_SINGULAR
+IDS_TIME_LONG_MINS_1ST_TWO
+IDS_TIME_LONG_MINS_1ST_ZERO
+IDS_TIME_LONG_MINS_2ND_DEFAULT
+IDS_TIME_LONG_MINS_2ND_FEW
+IDS_TIME_LONG_MINS_2ND_MANY
+IDS_TIME_LONG_MINS_2ND_SINGULAR
+IDS_TIME_LONG_MINS_2ND_TWO
+IDS_TIME_LONG_MINS_2ND_ZERO
+IDS_TIME_LONG_MINS_DEFAULT
+IDS_TIME_LONG_MINS_FEW
+IDS_TIME_LONG_MINS_MANY
+IDS_TIME_LONG_MINS_SINGULAR
+IDS_TIME_LONG_MINS_TWO
+IDS_TIME_LONG_MINS_ZERO
+IDS_TIME_LONG_SECS_2ND_DEFAULT
+IDS_TIME_LONG_SECS_2ND_FEW
+IDS_TIME_LONG_SECS_2ND_MANY
+IDS_TIME_LONG_SECS_2ND_SINGULAR
+IDS_TIME_LONG_SECS_2ND_TWO
+IDS_TIME_LONG_SECS_2ND_ZERO
+IDS_TIME_LONG_SECS_DEFAULT
+IDS_TIME_LONG_SECS_FEW
+IDS_TIME_LONG_SECS_MANY
+IDS_TIME_LONG_SECS_SINGULAR
+IDS_TIME_LONG_SECS_TWO
+IDS_TIME_LONG_SECS_ZERO
+IDS_TIME_MINS_DEFAULT
+IDS_TIME_MINS_FEW
+IDS_TIME_MINS_MANY
+IDS_TIME_MINS_SINGULAR
+IDS_TIME_MINS_TWO
+IDS_TIME_MINS_ZERO
+IDS_TIME_REMAINING_DAYS_DEFAULT
+IDS_TIME_REMAINING_DAYS_FEW
+IDS_TIME_REMAINING_DAYS_MANY
+IDS_TIME_REMAINING_DAYS_SINGULAR
+IDS_TIME_REMAINING_DAYS_TWO
+IDS_TIME_REMAINING_DAYS_ZERO
+IDS_TIME_REMAINING_HOURS_DEFAULT
+IDS_TIME_REMAINING_HOURS_FEW
+IDS_TIME_REMAINING_HOURS_MANY
+IDS_TIME_REMAINING_HOURS_SINGULAR
+IDS_TIME_REMAINING_HOURS_TWO
+IDS_TIME_REMAINING_HOURS_ZERO
+IDS_TIME_REMAINING_LONG_MINS_DEFAULT
+IDS_TIME_REMAINING_LONG_MINS_FEW
+IDS_TIME_REMAINING_LONG_MINS_MANY
+IDS_TIME_REMAINING_LONG_MINS_SINGULAR
+IDS_TIME_REMAINING_LONG_MINS_TWO
+IDS_TIME_REMAINING_LONG_MINS_ZERO
+IDS_TIME_REMAINING_LONG_SECS_DEFAULT
+IDS_TIME_REMAINING_LONG_SECS_FEW
+IDS_TIME_REMAINING_LONG_SECS_MANY
+IDS_TIME_REMAINING_LONG_SECS_SINGULAR
+IDS_TIME_REMAINING_LONG_SECS_TWO
+IDS_TIME_REMAINING_LONG_SECS_ZERO
+IDS_TIME_REMAINING_MINS_DEFAULT
+IDS_TIME_REMAINING_MINS_FEW
+IDS_TIME_REMAINING_MINS_MANY
+IDS_TIME_REMAINING_MINS_SINGULAR
+IDS_TIME_REMAINING_MINS_TWO
+IDS_TIME_REMAINING_MINS_ZERO
+IDS_TIME_REMAINING_SECS_DEFAULT
+IDS_TIME_REMAINING_SECS_FEW
+IDS_TIME_REMAINING_SECS_MANY
+IDS_TIME_REMAINING_SECS_SINGULAR
+IDS_TIME_REMAINING_SECS_TWO
+IDS_TIME_REMAINING_SECS_ZERO
+IDS_TIME_SECS_DEFAULT
+IDS_TIME_SECS_FEW
+IDS_TIME_SECS_MANY
+IDS_TIME_SECS_SINGULAR
+IDS_TIME_SECS_TWO
+IDS_TIME_SECS_ZERO
+IDS_TOOLTIP_STAR
+IDS_TOUCH_EVENTS_DESCRIPTION
+IDS_TOUCH_EVENTS_NAME
+IDS_TRANSLATE_INFOBAR_ACCEPT
+IDS_TRANSLATE_INFOBAR_AFTER_MESSAGE
+IDS_TRANSLATE_INFOBAR_AFTER_MESSAGE_AUTODETERMINED_SOURCE_LANGUAGE
+IDS_TRANSLATE_INFOBAR_BEFORE_MESSAGE
+IDS_TRANSLATE_INFOBAR_DENY
+IDS_TRANSLATE_INFOBAR_ERROR_CANT_CONNECT
+IDS_TRANSLATE_INFOBAR_ERROR_CANT_TRANSLATE
+IDS_TRANSLATE_INFOBAR_ERROR_SAME_LANGUAGE
+IDS_TRANSLATE_INFOBAR_OPTIONS_ABOUT
+IDS_TRANSLATE_INFOBAR_OPTIONS_ALWAYS
+IDS_TRANSLATE_INFOBAR_OPTIONS_NEVER_TRANSLATE_LANG
+IDS_TRANSLATE_INFOBAR_OPTIONS_NEVER_TRANSLATE_SITE
+IDS_TRANSLATE_INFOBAR_OPTIONS_REPORT_ERROR
+IDS_TRANSLATE_INFOBAR_RETRY
+IDS_TRANSLATE_INFOBAR_REVERT
+IDS_TRANSLATE_INFOBAR_TRANSLATING_TO
+IDS_TRANSLATE_INFOBAR_UNKNOWN_PAGE_LANGUAGE
+IDS_TRANSLATE_INFOBAR_UNSUPPORTED_PAGE_LANGUAGE
+IDS_UPGRADE_AVAILABLE
+IDS_UPGRADE_AVAILABLE_BUTTON
+IDS_WEB_FONT_FAMILY
+IDS_WEB_FONT_SIZE
diff --git a/build/ios/mac_build.gypi b/build/ios/mac_build.gypi
new file mode 100644
index 0000000..4da21eb
--- /dev/null
+++ b/build/ios/mac_build.gypi
@@ -0,0 +1,85 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Xcode throws an error if an iOS target depends on a Mac OS X target. So
+# any place a utility program needs to be built and run, an action is
+# used to run ninja as a script to work around this.
+# Example:
+# {
+#   'target_name': 'foo',
+#   'type': 'none',
+#   'variables': {
+#     # The name of a directory used for ninja. This cannot be shared with
+#     # another mac build.
+#     'ninja_output_dir': 'ninja-foo',
+#     # The full path to the location in which the ninja executable should be
+#     # placed. This cannot be shared with another mac build.
+#     'ninja_product_dir':
+#       '<(DEPTH)/xcodebuild/<(ninja_output_dir)/<(CONFIGURATION_NAME)',
+#     # The list of all the gyp files that contain the targets to run.
+#     're_run_targets': [
+#       'foo.gyp',
+#     ],
+#   },
+#   'includes': ['path_to/mac_build.gypi'],
+#   'actions': [
+#     {
+#       'action_name': 'compile foo',
+#       'inputs': [],
+#       'outputs': [],
+#       'action': [
+#         '<@(ninja_cmd)',
+#         # All the targets to build.
+#         'foo1',
+#         'foo2',
+#       ],
+#     },
+#   ],
+# }
+{
+  'variables': {
+    'variables': {
+      'parent_generator%': '<(GENERATOR)',
+    },
+    'parent_generator%': '<(parent_generator)',
+    # Common ninja command line flags.
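+    # When expanded in an action below, this runs, roughly (sketch):
+    #   clean_env.py ADD_TO_PATH=<current PATH> ninja -C <ninja_product_dir> ...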
+    'ninja_cmd': [
+      # Bounce through clean_env to clean up the environment so things
+      # set by the iOS build don't pollute the Mac build.
+      '<(DEPTH)/build/ios/clean_env.py',
+      # ninja must be found in the PATH.
+      'ADD_TO_PATH=<!(echo $PATH)',
+      'ninja',
+      '-C',
+      '<(ninja_product_dir)',
+    ],
+
+    # Common syntax to rerun gyp to generate the Mac projects.
+    're_run_gyp': [
+      'build/gyp_chromium',
+      '--depth=.',
+      # Don't use anything set for the iOS side of things.
+      '--ignore-environment',
+      # Generate for ninja
+      '--format=ninja',
+      # Generate files into xcodebuild/ninja
+      '-Goutput_dir=xcodebuild/<(ninja_output_dir)',
+      # nacl isn't in the iOS checkout, so make sure it's turned off.
+      '-Ddisable_nacl=1',
+      # Pass through the Mac SDK version.
+      '-Dmac_sdk=<(mac_sdk)',
+      '-Dparent_generator=<(parent_generator)'
+    ],
+
+    # Rerun gyp for each of the projects needed. This is what actually
+    # generates the projects on disk.
+    're_run_gyp_execution':
+      '<!(cd <(DEPTH) && <@(re_run_gyp) <@(re_run_targets))',
+  },
+  # Since these are used to generate things needed by other targets, make
+  # them hard dependencies so they are always built first.
+  'hard_dependency': 1,
+}
diff --git a/build/isolate.gypi b/build/isolate.gypi
new file mode 100644
index 0000000..17bf3c7
--- /dev/null
+++ b/build/isolate.gypi
@@ -0,0 +1,130 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to "build" .isolate files into a .isolated file.
+#
+# To use this, create a gyp target with the following form:
+# 'conditions': [
+#   ['test_isolation_mode != "noop"', {
+#     'targets': [
+#       {
+#         'target_name': 'foo_test_run',
+#         'type': 'none',
+#         'dependencies': [
+#           'foo_test',
+#         ],
+#         'includes': [
+#           '../build/isolate.gypi',
+#         ],
+#         'sources': [
+#           'foo_test.isolate',
+#         ],
+#       },
+#     ],
+#   }],
+# ],
+#
+# Note: foo_test.isolate is both included and listed as a source file. This is
+# an inherent property of the .isolate format, which permits defining GYP
+# variables but is stricter than GYP so that isolate.py can read it.
+#
+# The generated .isolated file will be:
+#   <(PRODUCT_DIR)/foo_test.isolated
+#
+# See http://dev.chromium.org/developers/testing/isolated-testing/for-swes
+# for more information.
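+#
+# For orientation, a minimal foo_test.isolate might look like the following
+# (an illustrative sketch, not a file from this tree):
+# {
+#   'variables': {
+#     'command': ['<(PRODUCT_DIR)/foo_test'],
+#     'files': ['<(PRODUCT_DIR)/foo_test'],
+#   },
+# }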
+
+{
+  'includes': [
+    '../build/util/version.gypi',
+  ],
+  'rules': [
+    {
+      'rule_name': 'isolate',
+      'extension': 'isolate',
+      'inputs': [
+        # Files that are known to be involved in this step.
+        '<(DEPTH)/tools/isolate_driver.py',
+        '<(DEPTH)/tools/swarming_client/isolate.py',
+        '<(DEPTH)/tools/swarming_client/run_isolated.py',
+      ],
+      'outputs': [],
+      'action': [
+        'python',
+        '<(DEPTH)/tools/isolate_driver.py',
+        '<(test_isolation_mode)',
+        '--isolated', '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated',
+        '--isolate', '<(RULE_INPUT_PATH)',
+
+        # Variables should use the -V FOO=<(FOO) form so frequent values,
+        # like '0' or '1', aren't stripped out by GYP. Run 'isolate.py help' for
+        # more details.
+        #
+        # This list needs to be kept in sync with the cmd line options
+        # in src/build/android/pylib/gtest/setup.py.
+
+        # Path variables are used to replace file paths when loading a .isolate
+        # file
+        '--path-variable', 'DEPTH', '<(DEPTH)',
+        '--path-variable', 'PRODUCT_DIR', '<(PRODUCT_DIR) ',
+
+        # Extra variables are replaced on the 'command' entry and on paths in
+        # the .isolate file but are not considered relative paths.
+        '--extra-variable', 'version_full=<(version_full)',
+
+        '--config-variable', 'OS=<(OS)',
+        '--config-variable', 'CONFIGURATION_NAME=<(CONFIGURATION_NAME)',
+        '--config-variable', 'asan=<(asan)',
+        '--config-variable', 'chromeos=<(chromeos)',
+        '--config-variable', 'component=<(component)',
+        '--config-variable', 'fastbuild=<(fastbuild)',
+        # TODO(kbr): move this to chrome_tests.gypi:gles2_conform_tests_run
+        # once support for user-defined config variables is added.
+        '--config-variable',
+          'internal_gles2_conform_tests=<(internal_gles2_conform_tests)',
+        '--config-variable', 'icu_use_data_file_flag=<(icu_use_data_file_flag)',
+        '--config-variable', 'lsan=<(lsan)',
+        '--config-variable', 'libpeer_target_type=<(libpeer_target_type)',
+        '--config-variable', 'use_openssl=<(use_openssl)',
+        '--config-variable', 'target_arch=<(target_arch)',
+        '--config-variable', 'use_ozone=<(use_ozone)',
+      ],
+      'conditions': [
+        # Note: When gyp merges lists, it appends them to the old value.
+        ['OS=="mac"', {
+          # <(mac_product_name) can contain a space, so don't use FOO=<(FOO)
+          # form.
+          'action': [
+            '--extra-variable', 'mac_product_name', '<(mac_product_name)',
+          ],
+        }],
+        ["test_isolation_outdir!=''", {
+          'action': [ '--isolate-server', '<(test_isolation_outdir)' ],
+        }],
+        ['test_isolation_fail_on_missing == 0', {
+          'action': ['--ignore_broken_items'],
+        }],
+        ["test_isolation_mode == 'prepare'", {
+          'outputs': [
+            '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated.gen.json',
+          ],
+        }, {
+          'outputs': [
+            '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated',
+          ],
+        }],
+      ],
+    },
+  ],
+}
diff --git a/build/jar_file_jni_generator.gypi b/build/jar_file_jni_generator.gypi
new file mode 100644
index 0000000..4c01c8a
--- /dev/null
+++ b/build/jar_file_jni_generator.gypi
@@ -0,0 +1,77 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate JNI bindings for system Java files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'android_jar_jni_headers',
+#   'type': 'none',
+#   'variables': {
+#     'jni_gen_package': 'chrome',
+#     'input_java_class': 'java/io/InputStream.class',
+#   },
+#   'includes': [ '../build/jar_file_jni_generator.gypi' ],
+# },
+#
+# Optional variables:
+#  input_jar_file - The input jar file. If omitted, android_sdk_jar is used.
+
+{
+  'variables': {
+    'jni_generator': '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+    # A comma separated string of include files.
+    'jni_generator_includes%': (
+        'base/android/jni_generator/jni_generator_helper.h'
+    ),
+    'native_exports%': '',
+  },
+  'actions': [
+    {
+      'action_name': 'generate_jni_headers_from_jar_file',
+      'inputs': [
+        '<(jni_generator)',
+        '<(input_jar_file)',
+        '<(android_sdk_jar)',
+      ],
+      'variables': {
+        'java_class_name': '<!(basename <(input_java_class)|sed "s/\.class//")',
+        'input_jar_file%': '<(android_sdk_jar)'
+      },
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni/<(java_class_name)_jni.h',
+      ],
+      'action': [
+        '<(jni_generator)',
+        '-j',
+        '<(input_jar_file)',
+        '--input_file',
+        '<(input_java_class)',
+        '--output_dir',
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni',
+        '--includes',
+        '<(jni_generator_includes)',
+        '--optimize_generation',
+        '<(optimize_jni_generation)',
+        '<(native_exports)',
+      ],
+      'message': 'Generating JNI bindings from <(input_jar_file)/<(input_java_class)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+  'conditions': [
+    ['android_webview_build==1', {
+      'variables': {
+        'native_exports%': '--native_exports',
+      },
+      'dependencies': [
+        '<(DEPTH)/build/android/android_exports.gyp:android_exports',
+      ],
+    }],
+  ],
+}
diff --git a/build/java.gypi b/build/java.gypi
new file mode 100644
index 0000000..bf6f56c
--- /dev/null
+++ b/build/java.gypi
@@ -0,0 +1,328 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my-package_java',
+#   'type': 'none',
+#   'variables': {
+#     'java_in_dir': 'path/to/package/root',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  java_in_dir - The top-level java directory. The src should be in
+#    <java_in_dir>/src.
+# Optional/automatic variables:
+#  additional_input_paths - These paths will be included in the 'inputs' list to
+#    ensure that this target is rebuilt when one of these paths changes.
+#  additional_src_dirs - Additional directories with .java files to be compiled
+#    and included in the output of this target.
+#  generated_src_dirs - Same as additional_src_dirs except used for .java files
+#    that are generated at build time. This should be set automatically by a
+#    target's dependencies. The .java files in these directories are not
+#    included in the 'inputs' list (unlike additional_src_dirs).
+#  input_jars_paths - The path to jars to be included in the classpath. This
+#    should be filled automatically by depending on the appropriate targets.
+#  javac_includes - A list of specific files to include. This is by default
+#    empty, which leads to inclusion of all files specified. May include
+#    wildcard, and supports '**/' for recursive path wildcards, ie.:
+#    '**/MyFileRegardlessOfDirectory.java', '**/IncludedPrefix*.java'.
+#  has_java_resources - Set to 1 if the java target contains an
+#    Android-compatible resources folder named res.  If 1, R_package and
+#    R_package_relpath must also be set.
+#  R_package - The java package in which the R class (which maps resources to
+#    integer IDs) should be generated, e.g. org.chromium.content.
+#  R_package_relpath - Same as R_package, but replace each '.' with '/'.
+#  res_extra_dirs - A list of extra directories containing Android resources.
+#    These directories may be generated at build time.
+#  res_extra_files - A list of the files in res_extra_dirs.
+#  never_lint - Set to 1 to not run lint on this target.
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/setup.gyp:build_output_dirs'
+  ],
+  'variables': {
+    'android_jar': '<(android_sdk)/android.jar',
+    'input_jars_paths': [ '<(android_jar)' ],
+    'additional_src_dirs': [],
+    'javac_includes': [],
+    'jar_name': '<(_target_name).jar',
+    'jar_dir': '<(PRODUCT_DIR)/lib.java',
+    'jar_path': '<(intermediate_dir)/<(jar_name)',
+    'jar_final_path': '<(jar_dir)/<(jar_name)',
+    'jar_excluded_classes': [ '*/R.class', '*/R##*.class' ],
+    'instr_stamp': '<(intermediate_dir)/instr.stamp',
+    'additional_input_paths': [],
+    'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
+    'generated_src_dirs': ['>@(generated_R_dirs)'],
+    'generated_R_dirs': [],
+    'has_java_resources%': 0,
+    'res_extra_dirs': [],
+    'res_extra_files': [],
+    'res_v14_verify_only%': 0,
+    'resource_input_paths': ['>@(res_extra_files)'],
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+    'compile_stamp': '<(intermediate_dir)/compile.stamp',
+    'lint_stamp': '<(intermediate_dir)/lint.stamp',
+    'lint_result': '<(intermediate_dir)/lint_result.xml',
+    'lint_config': '<(intermediate_dir)/lint_config.xml',
+    'never_lint%': 0,
+    'proguard_config%': '',
+    'proguard_preprocess%': '0',
+    'variables': {
+      'variables': {
+        'proguard_preprocess%': 0,
+        'emma_never_instrument%': 0,
+      },
+      'conditions': [
+        ['proguard_preprocess == 1', {
+          'javac_jar_path': '<(intermediate_dir)/<(_target_name).pre.jar'
+        }, {
+          'javac_jar_path': '<(jar_path)'
+        }],
+        ['chromium_code != 0 and emma_coverage != 0 and emma_never_instrument == 0', {
+          'emma_instrument': 1,
+        }, {
+          'emma_instrument': 0,
+        }],
+      ],
+    },
+    'emma_instrument': '<(emma_instrument)',
+    'javac_jar_path': '<(javac_jar_path)',
+  },
+  # This all_dependent_settings is used for java targets only. This will add the
+  # jar path to the classpath of dependent java targets.
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['<(jar_final_path)'],
+      'library_dexed_jars_paths': ['<(dex_path)'],
+    },
+  },
+  'conditions': [
+    ['has_java_resources == 1', {
+      'variables': {
+        'res_dir': '<(java_in_dir)/res',
+        'res_input_dirs': ['<(res_dir)', '<@(res_extra_dirs)'],
+        'resource_input_paths': ['<!@(find <(res_dir) -type f)'],
+
+        'R_dir': '<(intermediate_dir)/java_R',
+        'R_text_file': '<(R_dir)/R.txt',
+
+        'generated_src_dirs': ['<(R_dir)'],
+        'additional_input_paths': ['<(resource_zip_path)', ],
+
+        'dependencies_res_zip_paths': [],
+        'resource_zip_path': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+      },
+      'all_dependent_settings': {
+        'variables': {
+          # Dependent libraries include this target's R.java file via
+          # generated_R_dirs.
+          'generated_R_dirs': ['<(R_dir)'],
+
+          # Dependent libraries and apks include this target's resources via
+          # dependencies_res_zip_paths.
+          'additional_input_paths': ['<(resource_zip_path)'],
+          'dependencies_res_zip_paths': ['<(resource_zip_path)'],
+
+          # additional_res_packages and additional_R_text_files are used to
+          # create this package's R.java files when building the APK.
+          'additional_res_packages': ['<(R_package)'],
+          'additional_R_text_files': ['<(R_text_file)'],
+        },
+      },
+      'actions': [
+        # Generate R.java and crunch image resources.
+        {
+          'action_name': 'process_resources',
+          'message': 'processing resources for <(_target_name)',
+          'variables': {
+            'android_manifest': '<(DEPTH)/build/android/AndroidManifest.xml',
+            # Write the inputs list to a file, so that its mtime is updated when
+            # the list of inputs changes.
+            'inputs_list_file': '>|(java_resources.<(_target_name).gypcmd >@(resource_input_paths))',
+            'process_resources_options': [],
+            'conditions': [
+              ['res_v14_verify_only == 1', {
+                'process_resources_options': ['--v14-verify-only']
+              }],
+            ],
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/process_resources.py',
+            '<(DEPTH)/build/android/gyp/generate_v14_compatible_resources.py',
+            '>@(resource_input_paths)',
+            '>@(dependencies_res_zip_paths)',
+            '>(inputs_list_file)',
+          ],
+          'outputs': [
+            '<(resource_zip_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/process_resources.py',
+            '--android-sdk', '<(android_sdk)',
+            '--android-sdk-tools', '<(android_sdk_tools)',
+            '--non-constant-id',
+
+            '--android-manifest', '<(android_manifest)',
+            '--custom-package', '<(R_package)',
+
+            '--dependencies-res-zips', '>(dependencies_res_zip_paths)',
+            '--resource-dirs', '<(res_input_dirs)',
+
+            '--R-dir', '<(R_dir)',
+            '--resource-zip-out', '<(resource_zip_path)',
+
+            '<@(process_resources_options)',
+          ],
+        },
+      ],
+    }],
+    ['proguard_preprocess == 1', {
+      'actions': [
+        {
+          'action_name': 'proguard_<(_target_name)',
+          'message': 'Proguard preprocessing <(_target_name) jar',
+          'inputs': [
+            '<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/proguard.py',
+            '<(javac_jar_path)',
+            '<(proguard_config)',
+          ],
+          'outputs': [
+            '<(jar_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/proguard.py',
+            '--proguard-path=<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '--input-path=<(javac_jar_path)',
+            '--output-path=<(jar_path)',
+            '--proguard-config=<(proguard_config)',
+            '--classpath=<(android_sdk_jar) >(input_jars_paths)',
+          ]
+        },
+      ],
+    }],
+  ],
+  'actions': [
+    {
+      'action_name': 'javac_<(_target_name)',
+      'message': 'Compiling <(_target_name) java sources',
+      'variables': {
+        'java_sources': ['>!@(find >(java_in_dir)/src >(additional_src_dirs) -name "*.java")'],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/javac.py',
+        '>@(java_sources)',
+        '>@(input_jars_paths)',
+        '>@(additional_input_paths)',
+      ],
+      'outputs': [
+        '<(compile_stamp)',
+        '<(javac_jar_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/javac.py',
+        '--classpath=>(input_jars_paths)',
+        '--src-gendirs=>(generated_src_dirs)',
+        '--javac-includes=<(javac_includes)',
+        '--chromium-code=<(chromium_code)',
+        '--jar-path=<(javac_jar_path)',
+        '--jar-excluded-classes=<(jar_excluded_classes)',
+        '--stamp=<(compile_stamp)',
+        '>@(java_sources)',
+      ]
+    },
+    {
+      'action_name': 'instr_jar_<(_target_name)',
+      'message': 'Instrumenting <(_target_name) jar',
+      'variables': {
+        'input_path': '<(jar_path)',
+        'output_path': '<(jar_final_path)',
+        'stamp_path': '<(instr_stamp)',
+        'instr_type': 'jar',
+      },
+      'outputs': [
+        '<(jar_final_path)',
+      ],
+      'inputs': [
+        '<(jar_path)',
+      ],
+      'includes': [ 'android/instr_action.gypi' ],
+    },
+    {
+      'variables': {
+        'src_dirs': [
+          '<(java_in_dir)/src',
+          '>@(additional_src_dirs)',
+        ],
+        'stamp_path': '<(lint_stamp)',
+        'result_path': '<(lint_result)',
+        'config_path': '<(lint_config)',
+        'lint_jar_path': '<(jar_final_path)',
+      },
+      'inputs': [
+        '<(jar_final_path)',
+        '<(compile_stamp)',
+      ],
+      'outputs': [
+        '<(lint_stamp)',
+      ],
+      'includes': [ 'android/lint_action.gypi' ],
+    },
+    {
+      'action_name': 'jar_toc_<(_target_name)',
+      'message': 'Creating <(_target_name) jar.TOC',
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/util/md5_check.py',
+        '<(DEPTH)/build/android/gyp/jar_toc.py',
+        '<(jar_final_path)',
+      ],
+      'outputs': [
+        '<(jar_final_path).TOC',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/jar_toc.py',
+        '--jar-path=<(jar_final_path)',
+        '--toc-path=<(jar_final_path).TOC',
+      ]
+    },
+    {
+      'action_name': 'dex_<(_target_name)',
+      'variables': {
+        'conditions': [
+          ['emma_instrument != 0', {
+            'dex_no_locals': 1,
+          }],
+        ],
+        'dex_input_paths': [ '<(jar_final_path)' ],
+        'output_path': '<(dex_path)',
+      },
+      'includes': [ 'android/dex_action.gypi' ],
+    },
+  ],
+}
diff --git a/build/java_aidl.gypi b/build/java_aidl.gypi
new file mode 100644
index 0000000..8f111fd
--- /dev/null
+++ b/build/java_aidl.gypi
@@ -0,0 +1,78 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java aidl files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'aidl_aidl-file-name',
+#   'type': 'none',
+#   'variables': {
+#     'aidl_interface_file': '<interface-path>/<interface-file>.aidl',
+#     'aidl_import_include': '<(DEPTH)/<path-to-src-dir>',
+#   },
+#   'sources': [
+#     '<input-path1>/<input-file1>.aidl',
+#     '<input-path2>/<input-file2>.aidl',
+#     ...
+#   ],
+#   'includes': ['<path-to-this-file>/java_aidl.gypi'],
+# }
+#
+# The generated java files will be:
+#   <(SHARED_INTERMEDIATE_DIR)/<target-name>/aidl/<input-file1>.java
+#   <(SHARED_INTERMEDIATE_DIR)/<target-name>/aidl/<input-file2>.java
+#   ...
+#
+# Optional variables:
+#  aidl_import_include - This should be an absolute path to your java src folder
+#    that contains the classes that are imported by your aidl files.
+#
+# TODO(cjhopman): dependents need to rebuild when this target's inputs have changed.
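+#
+# For illustration only, a hypothetical target (all paths and names below are
+# placeholders, not files in this repository) might look like:
+# {
+#   'target_name': 'aidl_my_service',
+#   'type': 'none',
+#   'variables': {
+#     'aidl_interface_file': 'java/src/org/example/common.aidl',
+#   },
+#   'sources': [
+#     'java/src/org/example/IMyService.aidl',
+#   ],
+#   'includes': ['../build/java_aidl.gypi'],
+# }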
+
+{
+  'variables': {
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)/aidl',
+    'aidl_import_include%': '',
+    'additional_aidl_arguments': [],
+    'additional_aidl_input_paths': [],
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      'generated_src_dirs': ['<(intermediate_dir)/'],
+    },
+  },
+  'conditions': [
+    ['aidl_import_include != ""', {
+      'variables': {
+        'additional_aidl_arguments': [ '-I<(aidl_import_include)' ],
+        'additional_aidl_input_paths': [ '<!@(find <(aidl_import_include) -name "*.java" | sort)' ],
+      }
+    }],
+  ],
+  'rules': [
+    {
+      'rule_name': 'compile_aidl',
+      'extension': 'aidl',
+      'inputs': [
+        '<(android_sdk)/framework.aidl',
+        '<(aidl_interface_file)',
+        '<@(additional_aidl_input_paths)',
+      ],
+      'outputs': [
+        '<(intermediate_dir)/<(RULE_INPUT_ROOT).java',
+      ],
+      'action': [
+        '<(android_sdk_tools)/aidl',
+        '-p<(android_sdk)/framework.aidl',
+        '-p<(aidl_interface_file)',
+        '<@(additional_aidl_arguments)',
+        '<(RULE_INPUT_PATH)',
+        '<(intermediate_dir)/<(RULE_INPUT_ROOT).java',
+      ],
+    },
+  ],
+}
diff --git a/build/java_apk.gypi b/build/java_apk.gypi
new file mode 100644
index 0000000..fbc5a3a
--- /dev/null
+++ b/build/java_apk.gypi
@@ -0,0 +1,920 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Android APKs in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_package_apk',
+#   'type': 'none',
+#   'variables': {
+#     'apk_name': 'MyPackage',
+#     'java_in_dir': 'path/to/package/root',
+#     'resource_dir': 'path/to/package/root/res',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  apk_name - The final apk will be named <apk_name>.apk
+#  java_in_dir - The top-level java directory. The src should be in
+#    <(java_in_dir)/src.
+# Optional/automatic variables:
+#  additional_input_paths - These paths will be included in the 'inputs' list to
+#    ensure that this target is rebuilt when one of these paths changes.
+#  additional_res_dirs - Additional directories containing Android resources.
+#  additional_res_packages - Package names of the R.java files corresponding to
+#    each directory in additional_res_dirs.
+#  additional_src_dirs - Additional directories with .java files to be compiled
+#    and included in the output of this target.
+#  additional_bundled_libs - Additional libraries that will be stripped and
+#    bundled in the apk.
+#  asset_location - The directory where assets are located.
+#  generated_src_dirs - Same as additional_src_dirs except used for .java files
+#    that are generated at build time. This should be set automatically by a
+#    target's dependencies. The .java files in these directories are not
+#    included in the 'inputs' list (unlike additional_src_dirs).
+#  input_jars_paths - The path to jars to be included in the classpath. This
+#    should be filled automatically by depending on the appropriate targets.
+#  is_test_apk - Set to 1 if building a test apk. This prevents resources from
+#    dependencies from being re-included.
+#  native_lib_target - The target_name of the target which generates the final
+#    shared library to be included in this apk. A stripped copy of the
+#    library will be included in the apk.
+#  resource_dir - The directory for resources.
+#  R_package - A custom Java package to generate the resource file R.java in.
+#    By default, the package given in AndroidManifest.xml will be used.
+#  use_chromium_linker - Enable the content dynamic linker that allows sharing the
+#    RELRO section of the native libraries between the different processes.
+#  load_library_from_zip_file - When using the dynamic linker, load the library
+#    directly out of the zip file.
+#  use_relocation_packer - Enable relocation packing. Relies on the chromium
+#    linker, so use_chromium_linker must also be enabled.
+#  enable_chromium_linker_tests - Enable the content dynamic linker test support
+#    code. This allows a test APK to inject a Linker.TestRunner instance at
+#    runtime. Should only be used by the chromium_linker_test_apk target!!
+#  never_lint - Set to 1 to not run lint on this target.
+#  java_in_dir_suffix - To override the /src suffix on java_in_dir.
+#  app_manifest_version_name - Set the app's 'human readable' version number.
+#  app_manifest_version_code - Set the app's version code.
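+#
+# For illustration only, a hypothetical APK target (all names and paths below
+# are placeholders, not files in this repository) might look like:
+# {
+#   'target_name': 'example_apk',
+#   'type': 'none',
+#   'dependencies': [
+#     'libexample',
+#   ],
+#   'variables': {
+#     'apk_name': 'Example',
+#     'java_in_dir': 'example/android/java',
+#     'resource_dir': 'example/android/java/res',
+#     'native_lib_target': 'libexample',
+#   },
+#   'includes': ['../build/java_apk.gypi'],
+# }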
+{
+  'variables': {
+    'tested_apk_obfuscated_jar_path%': '/',
+    'tested_apk_dex_path%': '/',
+    'additional_input_paths': [],
+    'input_jars_paths': [],
+    'library_dexed_jars_paths': [],
+    'additional_src_dirs': [],
+    'generated_src_dirs': [],
+    'app_manifest_version_name%': '<(android_app_version_name)',
+    'app_manifest_version_code%': '<(android_app_version_code)',
+    # aapt generates this proguard.txt.
+    'generated_proguard_file': '<(intermediate_dir)/proguard.txt',
+    'proguard_enabled%': 'false',
+    'proguard_flags_paths': ['<(generated_proguard_file)'],
+    'jar_name': 'chromium_apk_<(_target_name).jar',
+    'resource_dir%': '<(DEPTH)/build/android/ant/empty/res',
+    'R_package%': '',
+    'additional_R_text_files': [],
+    'dependencies_res_zip_paths': [],
+    'additional_res_packages': [],
+    'additional_bundled_libs%': [],
+    'is_test_apk%': 0,
+    'extensions_to_not_compress%': '',
+    'resource_input_paths': [],
+    'intermediate_dir': '<(PRODUCT_DIR)/<(_target_name)',
+    'asset_location%': '<(intermediate_dir)/assets',
+    'codegen_stamp': '<(intermediate_dir)/codegen.stamp',
+    'package_input_paths': [],
+    'ordered_libraries_file': '<(intermediate_dir)/native_libraries.json',
+    'additional_ordered_libraries_file': '<(intermediate_dir)/additional_native_libraries.json',
+    'native_libraries_template': '<(DEPTH)/base/android/java/templates/NativeLibraries.template',
+    'native_libraries_java_dir': '<(intermediate_dir)/native_libraries_java/',
+    'native_libraries_java_file': '<(native_libraries_java_dir)/NativeLibraries.java',
+    'native_libraries_java_stamp': '<(intermediate_dir)/native_libraries_java.stamp',
+    'native_libraries_template_data_dir': '<(intermediate_dir)/native_libraries/',
+    'native_libraries_template_data_file': '<(native_libraries_template_data_dir)/native_libraries_array.h',
+    'native_libraries_template_version_file': '<(native_libraries_template_data_dir)/native_libraries_version.h',
+    'compile_stamp': '<(intermediate_dir)/compile.stamp',
+    'lint_stamp': '<(intermediate_dir)/lint.stamp',
+    'lint_result': '<(intermediate_dir)/lint_result.xml',
+    'lint_config': '<(intermediate_dir)/lint_config.xml',
+    'never_lint%': 0,
+    'java_in_dir_suffix%': '/src',
+    'instr_stamp': '<(intermediate_dir)/instr.stamp',
+    'jar_stamp': '<(intermediate_dir)/jar.stamp',
+    'obfuscate_stamp': '<(intermediate_dir)/obfuscate.stamp',
+    'pack_arm_relocations_stamp': '<(intermediate_dir)/pack_arm_relocations.stamp',
+    'strip_stamp': '<(intermediate_dir)/strip.stamp',
+    'stripped_libraries_dir': '<(intermediate_dir)/stripped_libraries',
+    'strip_additional_stamp': '<(intermediate_dir)/strip_additional.stamp',
+    'version_stamp': '<(intermediate_dir)/version.stamp',
+    'javac_includes': [],
+    'jar_excluded_classes': [],
+    'javac_jar_path': '<(intermediate_dir)/<(_target_name).javac.jar',
+    'jar_path': '<(PRODUCT_DIR)/lib.java/<(jar_name)',
+    'obfuscated_jar_path': '<(intermediate_dir)/obfuscated.jar',
+    'test_jar_path': '<(PRODUCT_DIR)/test.lib.java/<(apk_name).jar',
+    'dex_path': '<(intermediate_dir)/classes.dex',
+    'emma_device_jar': '<(android_sdk_root)/tools/lib/emma_device.jar',
+    'android_manifest_path%': '<(java_in_dir)/AndroidManifest.xml',
+    'push_stamp': '<(intermediate_dir)/push.stamp',
+    'link_stamp': '<(intermediate_dir)/link.stamp',
+    'package_resources_stamp': '<(intermediate_dir)/package_resources.stamp',
+    'resource_zip_path': '<(intermediate_dir)/<(_target_name).resources.zip',
+    'resource_packaged_apk_name': '<(apk_name)-resources.ap_',
+    'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)',
+    'unsigned_apk_path': '<(intermediate_dir)/<(apk_name)-unsigned.apk',
+    'final_apk_path%': '<(PRODUCT_DIR)/apks/<(apk_name).apk',
+    'incomplete_apk_path': '<(intermediate_dir)/<(apk_name)-incomplete.apk',
+    'apk_install_record': '<(intermediate_dir)/apk_install.record.stamp',
+    'device_intermediate_dir': '/data/data/org.chromium.gyp_managed_install/<(_target_name)/<(CONFIGURATION_NAME)',
+    'symlink_script_host_path': '<(intermediate_dir)/create_symlinks.sh',
+    'symlink_script_device_path': '<(device_intermediate_dir)/create_symlinks.sh',
+    'create_standalone_apk%': 1,
+    'res_v14_verify_only%': 0,
+    'variables': {
+      'variables': {
+        'native_lib_target%': '',
+        'native_lib_version_name%': '',
+        'use_chromium_linker%': 0,
+        'load_library_from_zip_file%': 0,
+        'use_relocation_packer%': 0,
+        'enable_chromium_linker_tests%': 0,
+        'is_test_apk%': 0,
+      },
+      'conditions': [
+        ['gyp_managed_install == 1 and native_lib_target != ""', {
+          'unsigned_standalone_apk_path': '<(intermediate_dir)/<(apk_name)-standalone-unsigned.apk',
+        }, {
+          'unsigned_standalone_apk_path': '<(unsigned_apk_path)',
+        }],
+        ['gyp_managed_install == 1', {
+          'apk_package_native_libs_dir': '<(intermediate_dir)/libs.managed',
+        }, {
+          'apk_package_native_libs_dir': '<(intermediate_dir)/libs',
+        }],
+        ['is_test_apk == 0 and emma_coverage != 0', {
+          'emma_instrument%': 1,
+        },{
+          'emma_instrument%': 0,
+        }],
+      ],
+    },
+    'native_lib_target%': '',
+    'native_lib_version_name%': '',
+    'use_chromium_linker%': 0,
+    'load_library_from_zip_file%': 0,
+    'use_relocation_packer%': 0,
+    'enable_chromium_linker_tests%': 0,
+    'emma_instrument%': '<(emma_instrument)',
+    'apk_package_native_libs_dir': '<(apk_package_native_libs_dir)',
+    'unsigned_standalone_apk_path': '<(unsigned_standalone_apk_path)',
+    'libchromium_android_linker': 'libchromium_android_linker.>(android_product_extension)',
+    'extra_native_libs': [],
+    'native_lib_placeholder_stamp': '<(apk_package_native_libs_dir)/<(android_app_abi)/native_lib_placeholder.stamp',
+    'native_lib_placeholders': [],
+  },
+  # Pass the jar path to the apk's "fake" jar target. This would be better as
+  # direct_dependent_settings, but a variable set by direct_dependent_settings
+  # cannot be lifted into a dependent's all_dependent_settings.
+  'all_dependent_settings': {
+    'conditions': [
+      ['proguard_enabled == "true"', {
+        'variables': {
+          'proguard_enabled': 'true',
+        }
+      }],
+    ],
+    'variables': {
+      'apk_output_jar_path': '<(jar_path)',
+      'tested_apk_obfuscated_jar_path': '<(obfuscated_jar_path)',
+      'tested_apk_dex_path': '<(dex_path)',
+    },
+  },
+  'conditions': [
+    ['resource_dir!=""', {
+      'variables': {
+        'resource_input_paths': [ '<!@(find <(resource_dir) -name "*")' ]
+      },
+    }],
+    ['R_package != ""', {
+      'variables': {
+        # We generate R.java in package R_package (in addition to the package
+        # listed in the AndroidManifest.xml, which is unavoidable).
+        'additional_res_packages': ['<(R_package)'],
+        'additional_R_text_files': ['<(intermediate_dir)/R.txt'],
+      },
+    }],
+    ['native_lib_target != "" and component == "shared_library"', {
+      'dependencies': [
+        '<(DEPTH)/build/android/setup.gyp:copy_system_libraries',
+      ],
+    }],
+    ['use_chromium_linker == 1', {
+      'dependencies': [
+        '<(DEPTH)/base/base.gyp:chromium_android_linker',
+      ],
+    }],
+    ['native_lib_target != ""', {
+      'variables': {
+        'generated_src_dirs': [ '<(native_libraries_java_dir)' ],
+        'native_libs_paths': [
+          '<(SHARED_LIB_DIR)/<(native_lib_target).>(android_product_extension)'
+        ],
+        'package_input_paths': [
+          '<(apk_package_native_libs_dir)/<(android_app_abi)/gdbserver',
+        ],
+      },
+      'copies': [
+        {
+          # gdbserver is always copied into the APK's native libs dir. The ant
+          # build scripts (apkbuilder task) will only include it in a debug
+          # build.
+          'destination': '<(apk_package_native_libs_dir)/<(android_app_abi)',
+          'files': [
+            '<(android_gdbserver)',
+          ],
+        },
+      ],
+      'actions': [
+        {
+          'variables': {
+            'conditions': [
+              ['use_chromium_linker == 1', {
+                'variables': {
+                  'linker_input_libraries': [
+                    '<(SHARED_LIB_DIR)/<(libchromium_android_linker)',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_input_libraries': [],
+                },
+              }],
+            ],
+            'input_libraries': [
+              '<@(native_libs_paths)',
+              '<@(extra_native_libs)',
+              '<@(linker_input_libraries)',
+            ],
+          },
+          'includes': ['../build/android/write_ordered_libraries.gypi'],
+        },
+        {
+          'action_name': 'native_libraries_<(_target_name)',
+          'variables': {
+            'conditions': [
+              ['use_chromium_linker == 1', {
+                'variables': {
+                  'linker_gcc_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_gcc_preprocess_defines': [],
+                },
+              }],
+              ['load_library_from_zip_file == 1', {
+                'variables': {
+                  'linker_load_from_zip_file_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_load_from_zip_file_preprocess_defines': [],
+                },
+              }],
+              ['enable_chromium_linker_tests == 1', {
+                'variables': {
+                  'linker_tests_gcc_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER_TESTS',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_tests_gcc_preprocess_defines': [],
+                },
+              }],
+            ],
+            'gcc_preprocess_defines': [
+              '<@(linker_load_from_zip_file_preprocess_defines)',
+              '<@(linker_gcc_preprocess_defines)',
+              '<@(linker_tests_gcc_preprocess_defines)',
+            ],
+          },
+          'message': 'Creating NativeLibraries.java for <(_target_name)',
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+            '<(ordered_libraries_file)',
+            '<(native_libraries_template)',
+          ],
+          'outputs': [
+            '<(native_libraries_java_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+            '--include-path=',
+            '--output=<(native_libraries_java_file)',
+            '--template=<(native_libraries_template)',
+            '--stamp=<(native_libraries_java_stamp)',
+            '--defines', 'NATIVE_LIBRARIES_LIST=@FileArg(<(ordered_libraries_file):java_libraries_list)',
+            '--defines', 'NATIVE_LIBRARIES_VERSION_NUMBER="<(native_lib_version_name)"',
+            '<@(gcc_preprocess_defines)',
+          ],
+        },
+        {
+          'action_name': 'strip_native_libraries',
+          'variables': {
+            'ordered_libraries_file%': '<(ordered_libraries_file)',
+            'stripped_libraries_dir%': '<(stripped_libraries_dir)',
+            'input_paths': [
+              '<@(native_libs_paths)',
+              '<@(extra_native_libs)',
+            ],
+            'stamp': '<(strip_stamp)'
+          },
+          'includes': ['../build/android/strip_native_libraries.gypi'],
+        },
+        {
+          'action_name': 'pack_arm_relocations',
+          'variables': {
+            'conditions': [
+              ['use_chromium_linker == 1 and use_relocation_packer == 1', {
+                'enable_packing': 1,
+              }, {
+                'enable_packing': 0,
+              }],
+            ],
+            'exclude_packing_list': [
+              '<(libchromium_android_linker)',
+            ],
+            'ordered_libraries_file%': '<(ordered_libraries_file)',
+            'stripped_libraries_dir%': '<(stripped_libraries_dir)',
+            'packed_libraries_dir': '<(libraries_source_dir)',
+            'input_paths': [
+              '<(strip_stamp)',
+            ],
+            'stamp': '<(pack_arm_relocations_stamp)',
+          },
+          'includes': ['../build/android/pack_arm_relocations.gypi'],
+        },
+        {
+          'action_name': 'insert_chromium_version',
+          'variables': {
+            'ordered_libraries_file%': '<(ordered_libraries_file)',
+            'libraries_source_dir%': '<(libraries_source_dir)',
+            'version_string': '<(native_lib_version_name)',
+            'input_paths': [
+              '<(pack_arm_relocations_stamp)',
+            ],
+            'stamp': '<(version_stamp)'
+          },
+          'includes': ['../build/android/insert_chromium_version.gypi'],
+        },
+        {
+          'variables': {
+            'input_libraries': [
+              '<@(additional_bundled_libs)',
+            ],
+            'ordered_libraries_file': '<(additional_ordered_libraries_file)',
+            'subtarget': '_additional_libraries',
+          },
+          'includes': ['../build/android/write_ordered_libraries.gypi'],
+        },
+        {
+          'action_name': 'strip_additional_libraries',
+          'variables': {
+            'ordered_libraries_file': '<(additional_ordered_libraries_file)',
+            'stripped_libraries_dir': '<(libraries_source_dir)',
+            'input_paths': [
+              '<@(additional_bundled_libs)',
+              '<(strip_stamp)',
+            ],
+            'stamp': '<(strip_additional_stamp)'
+          },
+          'includes': ['../build/android/strip_native_libraries.gypi'],
+        },
+        {
+          'action_name': 'Create native lib placeholder files for previous releases',
+          'variables': {
+            'placeholders': ['<@(native_lib_placeholders)'],
+            'conditions': [
+              ['gyp_managed_install == 1', {
+                # This "library" just needs to be put in the .apk. It is not loaded
+                # at runtime.
+                'placeholders': ['libfix.crbug.384638.so'],
+              }]
+            ],
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/create_placeholder_files.py',
+          ],
+          'outputs': [
+            '<(native_lib_placeholder_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/create_placeholder_files.py',
+            '--dest-lib-dir=<(apk_package_native_libs_dir)/<(android_app_abi)/',
+            '--stamp=<(native_lib_placeholder_stamp)',
+            '<@(placeholders)',
+          ],
+        },
+      ],
+      'conditions': [
+        ['gyp_managed_install == 1', {
+          'variables': {
+            'libraries_top_dir': '<(intermediate_dir)/lib.stripped',
+            'libraries_source_dir': '<(libraries_top_dir)/lib/<(android_app_abi)',
+            'device_library_dir': '<(device_intermediate_dir)/lib.stripped',
+            'configuration_name': '<(CONFIGURATION_NAME)',
+          },
+          'dependencies': [
+            '<(DEPTH)/build/android/setup.gyp:get_build_device_configurations',
+          ],
+          'actions': [
+            {
+              'includes': ['../build/android/push_libraries.gypi'],
+            },
+            {
+              'action_name': 'create device library symlinks',
+              'message': 'Creating links on device for <(_target_name)',
+              'inputs': [
+                '<(DEPTH)/build/android/gyp/util/build_utils.py',
+                '<(DEPTH)/build/android/gyp/create_device_library_links.py',
+                '<(apk_install_record)',
+                '<(build_device_config_path)',
+                '<(ordered_libraries_file)',
+              ],
+              'outputs': [
+                '<(link_stamp)'
+              ],
+              'action': [
+                'python', '<(DEPTH)/build/android/gyp/create_device_library_links.py',
+                '--build-device-configuration=<(build_device_config_path)',
+                '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+                '--script-host-path=<(symlink_script_host_path)',
+                '--script-device-path=<(symlink_script_device_path)',
+                '--target-dir=<(device_library_dir)',
+                '--apk=<(incomplete_apk_path)',
+                '--stamp=<(link_stamp)',
+                '--configuration-name=<(CONFIGURATION_NAME)',
+              ],
+            },
+          ],
+          'conditions': [
+            ['create_standalone_apk == 1', {
+              'actions': [
+                {
+                  'action_name': 'create standalone APK',
+                  'variables': {
+                    'inputs': [
+                      '<(ordered_libraries_file)',
+                      '<(strip_additional_stamp)',
+                      '<(version_stamp)',
+                    ],
+                    'input_apk_path': '<(unsigned_apk_path)',
+                    'output_apk_path': '<(unsigned_standalone_apk_path)',
+                    'libraries_top_dir%': '<(libraries_top_dir)',
+                  },
+                  'includes': [ 'android/create_standalone_apk_action.gypi' ],
+                },
+              ],
+            }],
+          ],
+        }, {
+          # gyp_managed_install != 1
+          'variables': {
+            'libraries_source_dir': '<(apk_package_native_libs_dir)/<(android_app_abi)',
+            'package_input_paths': [
+              '<(strip_additional_stamp)',
+              '<(version_stamp)',
+            ],
+          },
+        }],
+      ],
+    }], # native_lib_target != ''
+    ['gyp_managed_install == 0 or create_standalone_apk == 1', {
+      'actions': [
+        {
+          'action_name': 'finalize standalone apk',
+          'variables': {
+            'input_apk_path': '<(unsigned_standalone_apk_path)',
+            'output_apk_path': '<(final_apk_path)',
+          },
+          'includes': [ 'android/finalize_apk_action.gypi']
+        },
+      ],
+      'dependencies': [
+        '<(DEPTH)/build/android/rezip.gyp:rezip_apk_jar',
+      ],
+    }],
+    ['gyp_managed_install == 1', {
+      'actions': [
+        {
+          'action_name': 'finalize incomplete apk',
+          'variables': {
+            'input_apk_path': '<(unsigned_apk_path)',
+            'output_apk_path': '<(incomplete_apk_path)',
+          },
+          'includes': [ 'android/finalize_apk_action.gypi']
+        },
+        {
+          'action_name': 'apk_install_<(_target_name)',
+          'message': 'Installing <(apk_name).apk',
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/apk_install.py',
+            '<(build_device_config_path)',
+            '<(incomplete_apk_path)',
+          ],
+          'outputs': [
+            '<(apk_install_record)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/apk_install.py',
+            '--apk-path=<(incomplete_apk_path)',
+            '--build-device-configuration=<(build_device_config_path)',
+            '--install-record=<(apk_install_record)',
+            '--configuration-name=<(CONFIGURATION_NAME)',
+          ],
+        },
+      ],
+      'dependencies': [
+        '<(DEPTH)/build/android/rezip.gyp:rezip_apk_jar',
+      ],
+    }],
+    ['is_test_apk == 1', {
+      'dependencies': [
+        '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+      ]
+    }],
+  ],
+  'dependencies': [
+    '<(DEPTH)/tools/android/md5sum/md5sum.gyp:md5sum',
+  ],
+  'actions': [
+    {
+      'action_name': 'process_resources',
+      'message': 'processing resources for <(_target_name)',
+      'variables': {
+        # Write the inputs list to a file, so that its mtime is updated when
+        # the list of inputs changes.
+        'inputs_list_file': '>|(apk_codegen.<(_target_name).gypcmd >@(additional_input_paths) >@(resource_input_paths))',
+        'process_resources_options': [],
+        'conditions': [
+          ['is_test_apk == 1', {
+            'dependencies_res_zip_paths=': [],
+            'additional_res_packages=': [],
+          }],
+          ['res_v14_verify_only == 1', {
+            'process_resources_options': ['--v14-verify-only']
+          }],
+        ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/process_resources.py',
+        '<(android_manifest_path)',
+        '>@(additional_input_paths)',
+        '>@(resource_input_paths)',
+        '>@(dependencies_res_zip_paths)',
+        '>(inputs_list_file)',
+      ],
+      'outputs': [
+        '<(resource_zip_path)',
+        '<(generated_proguard_file)',
+        '<(codegen_stamp)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/process_resources.py',
+        '--android-sdk', '<(android_sdk)',
+        '--android-sdk-tools', '<(android_sdk_tools)',
+
+        '--android-manifest', '<(android_manifest_path)',
+        '--dependencies-res-zips', '>(dependencies_res_zip_paths)',
+
+        '--extra-res-packages', '>(additional_res_packages)',
+        '--extra-r-text-files', '>(additional_R_text_files)',
+
+        '--proguard-file', '<(generated_proguard_file)',
+
+        '--resource-dirs', '<(resource_dir)',
+        '--resource-zip-out', '<(resource_zip_path)',
+
+        '--R-dir', '<(intermediate_dir)/gen',
+
+        '--stamp', '<(codegen_stamp)',
+
+        '<@(process_resources_options)',
+      ],
+    },
+    {
+      'action_name': 'javac_<(_target_name)',
+      'message': 'Compiling java for <(_target_name)',
+      'variables': {
+        'gen_src_dirs': [
+          '<(intermediate_dir)/gen',
+          '>@(generated_src_dirs)',
+        ],
+        # If there is a separate find for additional_src_dirs, it will find the
+        # wrong .java files when additional_src_dirs is empty.
+        # TODO(thakis): Gyp caches >! evaluation by command. Both java.gypi and
+        # java_apk.gypi evaluate the same command, and at the moment two targets
+        # set java_in_dir to "java". Add a dummy comment here to make sure
+        # that the two targets (one uses java.gypi, the other java_apk.gypi)
+        # get distinct source lists. Medium-term, make targets list all their
+        # Java files instead of using find. (As is, this will be broken if two
+        # targets use the same java_in_dir and both use java_apk.gypi or
+        # both use java.gypi.)
+        'java_sources': ['>!@(find >(java_in_dir)>(java_in_dir_suffix) >(additional_src_dirs) -name "*.java"  # apk)'],
+
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/javac.py',
+        '>@(java_sources)',
+        '>@(input_jars_paths)',
+        '<(codegen_stamp)',
+      ],
+      'conditions': [
+        ['native_lib_target != ""', {
+          'inputs': [ '<(native_libraries_java_stamp)' ],
+        }],
+      ],
+      'outputs': [
+        '<(compile_stamp)',
+        '<(javac_jar_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/javac.py',
+        '--classpath=>(input_jars_paths) <(android_sdk_jar)',
+        '--src-gendirs=>(gen_src_dirs)',
+        '--javac-includes=<(javac_includes)',
+        '--chromium-code=<(chromium_code)',
+        '--jar-path=<(javac_jar_path)',
+        '--jar-excluded-classes=<(jar_excluded_classes)',
+        '--stamp=<(compile_stamp)',
+        '>@(java_sources)',
+      ],
+    },
+    {
+      'action_name': 'instr_jar_<(_target_name)',
+      'message': 'Instrumenting <(_target_name) jar',
+      'variables': {
+        'input_path': '<(javac_jar_path)',
+        'output_path': '<(jar_path)',
+        'stamp_path': '<(instr_stamp)',
+        'instr_type': 'jar',
+      },
+      'outputs': [
+        '<(instr_stamp)',
+        '<(jar_path)',
+      ],
+      'inputs': [
+        '<(javac_jar_path)',
+      ],
+      'includes': [ 'android/instr_action.gypi' ],
+    },
+    {
+      'variables': {
+        'src_dirs': [
+          '<(java_in_dir)<(java_in_dir_suffix)',
+          '>@(additional_src_dirs)',
+        ],
+        'lint_jar_path': '<(jar_path)',
+        'stamp_path': '<(lint_stamp)',
+        'result_path': '<(lint_result)',
+        'config_path': '<(lint_config)',
+      },
+      'outputs': [
+        '<(lint_stamp)',
+      ],
+      'includes': [ 'android/lint_action.gypi' ],
+    },
+    {
+      'action_name': 'obfuscate_<(_target_name)',
+      'message': 'Obfuscating <(_target_name)',
+      'variables': {
+        'additional_obfuscate_options': [],
+        'additional_obfuscate_input_paths': [],
+        'proguard_out_dir': '<(intermediate_dir)/proguard',
+        'proguard_input_jar_paths': [
+          '>@(input_jars_paths)',
+          '<(jar_path)',
+        ],
+        'target_conditions': [
+          ['is_test_apk == 1', {
+            'additional_obfuscate_options': [
+              '--testapp',
+            ],
+          }],
+          ['is_test_apk == 1 and tested_apk_obfuscated_jar_path != "/"', {
+            'additional_obfuscate_options': [
+              '--tested-apk-obfuscated-jar-path', '>(tested_apk_obfuscated_jar_path)',
+            ],
+            'additional_obfuscate_input_paths': [
+              '>(tested_apk_obfuscated_jar_path).info',
+            ],
+          }],
+          ['proguard_enabled == "true"', {
+            'additional_obfuscate_options': [
+              '--proguard-enabled',
+            ],
+          }],
+        ],
+        'obfuscate_input_jars_paths': [
+          '>@(input_jars_paths)',
+          '<(jar_path)',
+        ],
+      },
+      'conditions': [
+        ['is_test_apk == 1', {
+          'outputs': [
+            '<(test_jar_path)',
+          ],
+        }],
+      ],
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/apk_obfuscate.py',
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '>@(proguard_flags_paths)',
+        '>@(obfuscate_input_jars_paths)',
+        '>@(additional_obfuscate_input_paths)',
+        '<(instr_stamp)',
+      ],
+      'outputs': [
+        '<(obfuscate_stamp)',
+
+        # In non-Release builds, these paths will all be empty files.
+        '<(obfuscated_jar_path)',
+        '<(obfuscated_jar_path).info',
+        '<(obfuscated_jar_path).dump',
+        '<(obfuscated_jar_path).seeds',
+        '<(obfuscated_jar_path).mapping',
+        '<(obfuscated_jar_path).usage',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/apk_obfuscate.py',
+
+        '--configuration-name', '<(CONFIGURATION_NAME)',
+
+        '--android-sdk', '<(android_sdk)',
+        '--android-sdk-tools', '<(android_sdk_tools)',
+        '--android-sdk-jar', '<(android_sdk_jar)',
+
+        '--input-jars-paths=>(proguard_input_jar_paths)',
+        '--proguard-configs=>(proguard_flags_paths)',
+
+        '--test-jar-path', '<(test_jar_path)',
+        '--obfuscated-jar-path', '<(obfuscated_jar_path)',
+
+        '--proguard-jar-path', '<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+
+        '--stamp', '<(obfuscate_stamp)',
+
+        '>@(additional_obfuscate_options)',
+      ],
+    },
+    {
+      'action_name': 'dex_<(_target_name)',
+      'variables': {
+        'dex_input_paths': [
+          '>@(library_dexed_jars_paths)',
+          '<(jar_path)',
+        ],
+        'output_path': '<(dex_path)',
+        'proguard_enabled_input_path': '<(obfuscated_jar_path)',
+      },
+      'target_conditions': [
+        ['emma_instrument != 0', {
+          'variables': {
+            'dex_no_locals': 1,
+            'dex_input_paths': [
+              '<(emma_device_jar)'
+            ],
+          },
+        }],
+        ['is_test_apk == 1 and tested_apk_dex_path != "/"', {
+          'variables': {
+            'dex_additional_options': [
+              '--excluded-paths-file', '>(tested_apk_dex_path).inputs'
+            ],
+          },
+          'inputs': [
+            '>(tested_apk_dex_path).inputs',
+          ],
+        }],
+        ['proguard_enabled == "true"', {
+          'inputs': [ '<(obfuscate_stamp)' ]
+        }, {
+          'inputs': [ '<(instr_stamp)' ]
+        }],
+      ],
+      'includes': [ 'android/dex_action.gypi' ],
+    },
+    {
+      'action_name': 'package_resources',
+      'message': 'packaging resources for <(_target_name)',
+      'variables': {
+        'package_resource_zip_input_paths': [
+          '<(resource_zip_path)',
+          '>@(dependencies_res_zip_paths)',
+        ],
+      },
+      'conditions': [
+        ['is_test_apk == 1', {
+          'variables': {
+            'dependencies_res_zip_paths=': [],
+            'additional_res_packages=': [],
+          }
+        }],
+      ],
+      'inputs': [
+        # TODO: This isn't always rerun correctly, http://crbug.com/351928
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/package_resources.py',
+        '<(android_manifest_path)',
+
+        '>@(package_resource_zip_input_paths)',
+
+        '<(codegen_stamp)',
+      ],
+      'outputs': [
+        '<(resource_packaged_apk_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/package_resources.py',
+        '--android-sdk', '<(android_sdk)',
+        '--android-sdk-tools', '<(android_sdk_tools)',
+
+        '--configuration-name', '<(CONFIGURATION_NAME)',
+
+        '--android-manifest', '<(android_manifest_path)',
+        '--version-code', '<(app_manifest_version_code)',
+        '--version-name', '<(app_manifest_version_name)',
+
+        '--asset-dir', '<(asset_location)',
+        '--resource-zips', '>(package_resource_zip_input_paths)',
+
+        '--no-compress', '<(extensions_to_not_compress)',
+
+        '--apk-path', '<(resource_packaged_apk_path)',
+      ],
+    },
+    {
+      'action_name': 'ant_package_<(_target_name)',
+      'message': 'Packaging <(_target_name)',
+      'variables': {
+        # Write the inputs list to a file, so that its mtime is updated when
+        # the list of inputs changes.
+        'inputs_list_file': '>|(apk_package.<(_target_name).gypcmd >@(package_input_paths))'
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/ant/apk-package.xml',
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/ant.py',
+        '<(dex_path)',
+        '<(codegen_stamp)',
+        '<(obfuscate_stamp)',
+        '<(resource_packaged_apk_path)',
+        '>@(package_input_paths)',
+        '>(inputs_list_file)',
+      ],
+      'outputs': [
+        '<(unsigned_apk_path)',
+      ],
+      'conditions': [
+        ['native_lib_target != ""', {
+          'inputs': ['<(native_lib_placeholder_stamp)'],
+        }],
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/ant.py',
+        '--',
+        '-quiet',
+        '-DDEX_FILE_PATH=<(intermediate_dir)/classes.dex',
+        '-DANDROID_SDK_ROOT=<(android_sdk_root)',
+        '-DANDROID_SDK_TOOLS=<(android_sdk_tools)',
+        '-DRESOURCE_PACKAGED_APK_NAME=<(resource_packaged_apk_name)',
+        '-DAPK_NAME=<(apk_name)',
+        '-DCONFIGURATION_NAME=<(CONFIGURATION_NAME)',
+        '-DNATIVE_LIBS_DIR=<(apk_package_native_libs_dir)',
+        '-DOUT_DIR=<(intermediate_dir)',
+        '-DUNSIGNED_APK_PATH=<(unsigned_apk_path)',
+        '-DEMMA_INSTRUMENT=<(emma_instrument)',
+        '-DEMMA_DEVICE_JAR=<(emma_device_jar)',
+
+        '-Dbasedir=.',
+        '-buildfile',
+        '<(DEPTH)/build/android/ant/apk-package.xml',
+      ]
+    },
+  ],
+}
diff --git a/build/java_prebuilt.gypi b/build/java_prebuilt.gypi
new file mode 100644
index 0000000..a3a8cc0
--- /dev/null
+++ b/build/java_prebuilt.gypi
@@ -0,0 +1,93 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to package prebuilt Java JARs in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my-package_java',
+#   'type': 'none',
+#   'variables': {
+#     'jar_path': 'path/to/your.jar',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  jar_path - The path to the prebuilt Java JAR file.
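+#
+# For illustration only, a hypothetical prebuilt target (the jar path below is
+# a placeholder, not a file in this repository) might look like:
+# {
+#   'target_name': 'example_prebuilt_java',
+#   'type': 'none',
+#   'variables': {
+#     'jar_path': 'path/to/prebuilt/example.jar',
+#   },
+#   'includes': ['../build/java_prebuilt.gypi'],
+# }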
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/setup.gyp:build_output_dirs'
+  ],
+  'variables': {
+    'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+    'android_jar': '<(android_sdk)/android.jar',
+    'input_jars_paths': [ '<(android_jar)' ],
+    'proguard_config%': '',
+    'proguard_preprocess%': '0',
+    'variables': {
+      'variables': {
+        'proguard_preprocess%': 0,
+      },
+      'conditions': [
+        ['proguard_preprocess == 1', {
+          'dex_input_jar_path': '<(intermediate_dir)/<(_target_name).pre.jar'
+        }, {
+          'dex_input_jar_path': '<(jar_path)'
+        }],
+      ],
+    },
+    'dex_input_jar_path': '<(dex_input_jar_path)',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['<(dex_input_jar_path)'],
+      'library_dexed_jars_paths': ['<(dex_path)'],
+    },
+  },
+  'conditions': [
+    ['proguard_preprocess == 1', {
+      'actions': [
+        {
+          'action_name': 'proguard_<(_target_name)',
+          'message': 'Proguard preprocessing <(_target_name) jar',
+          'inputs': [
+            '<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/proguard.py',
+            '<(jar_path)',
+            '<(proguard_config)',
+          ],
+          'outputs': [
+            '<(dex_input_jar_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/proguard.py',
+            '--proguard-path=<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '--input-path=<(jar_path)',
+            '--output-path=<(dex_input_jar_path)',
+            '--proguard-config=<(proguard_config)',
+            '--classpath=>(input_jars_paths)',
+          ]
+        },
+      ],
+    }],
+  ],
+  'actions': [
+    {
+      'action_name': 'dex_<(_target_name)',
+      'message': 'Dexing <(_target_name) jar',
+      'variables': {
+        'dex_input_paths': [
+          '<(dex_input_jar_path)',
+        ],
+        'output_path': '<(dex_path)',
+      },
+      'includes': [ 'android/dex_action.gypi' ],
+    },
+  ],
+}
diff --git a/build/java_strings_grd.gypi b/build/java_strings_grd.gypi
new file mode 100644
index 0000000..7534be5
--- /dev/null
+++ b/build/java_strings_grd.gypi
@@ -0,0 +1,62 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate localized strings.xml from a grd file.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my-package_strings_grd',
+#   'type': 'none',
+#   'variables': {
+#     'grd_file': 'path/to/grd/file',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  grd_file - The path to the grd file to use.
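+#
+# For illustration only, a hypothetical strings target (the grd path below is
+# a placeholder, not a file in this repository) might look like:
+# {
+#   'target_name': 'example_strings_grd',
+#   'type': 'none',
+#   'variables': {
+#     'grd_file': 'path/to/android_example_strings.grd',
+#   },
+#   'includes': ['../build/java_strings_grd.gypi'],
+# }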
+{
+  'variables': {
+    'res_grit_dir': '<(INTERMEDIATE_DIR)/<(_target_name)/res_grit',
+    'grit_grd_file': '<(grd_file)',
+    'resource_zip_path': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+    'grit_additional_defines': ['-E', 'ANDROID_JAVA_TAGGED_ONLY=false'],
+    'grit_out_dir': '<(res_grit_dir)',
+    # resource_ids is unneeded since we don't generate .h headers.
+    'grit_resource_ids': '',
+    'grit_outputs': [
+      '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+          '--outputs \'<(grit_out_dir)\' '
+          '<(grit_grd_file) -f "<(grit_resource_ids)")',
+    ],
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'additional_input_paths': ['<(resource_zip_path)'],
+      'dependencies_res_zip_paths': ['<(resource_zip_path)'],
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'generate_localized_strings_xml',
+      'includes': ['../build/grit_action.gypi'],
+    },
+    {
+      'action_name': 'create_resources_zip',
+      'inputs': [
+          '<(DEPTH)/build/android/gyp/zip.py',
+          '<@(grit_outputs)',
+      ],
+      'outputs': [
+          '<(resource_zip_path)',
+      ],
+      'action': [
+          'python', '<(DEPTH)/build/android/gyp/zip.py',
+          '--input-dir', '<(res_grit_dir)',
+          '--output', '<(resource_zip_path)',
+      ],
+    }
+  ],
+}
diff --git a/build/jni_generator.gypi b/build/jni_generator.gypi
new file mode 100644
index 0000000..6edc512
--- /dev/null
+++ b/build/jni_generator.gypi
@@ -0,0 +1,97 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate jni bindings for Java-files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'base_jni_headers',
+#    'type': 'none',
+#    'sources': [
+#      'android/java/src/org/chromium/base/BuildInfo.java',
+#      ...
+#      ...
+#      'android/java/src/org/chromium/base/SystemMessageHandler.java',
+#    ],
+#    'variables': {
+#      'jni_gen_package': 'base',
+#    },
+#    'includes': [ '../build/jni_generator.gypi' ],
+#  },
+#
+# The generated file name pattern can be seen in the "outputs" section below.
+# (Note that RULE_INPUT_ROOT is the basename of the java file.)
+#
+# See base/android/jni_generator/jni_generator.py for more information about
+# how the JNI bindings are generated.
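+#
+# For example, with 'jni_gen_package': 'base' as above, the input file
+# android/java/src/org/chromium/base/BuildInfo.java produces:
+#   <(SHARED_INTERMEDIATE_DIR)/base/jni/BuildInfo_jni.h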
+
+{
+  'variables': {
+    'jni_generator': '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+    'jni_generator_jarjar_file%': '',
+    'jni_generator_ptr_type%': 'long',
+    # A comma separated string of include files.
+    'jni_generator_includes%': (
+        'base/android/jni_generator/jni_generator_helper.h'
+    ),
+    'native_exports%': '',
+  },
+  'rules': [
+    {
+      'rule_name': 'generate_jni_headers',
+      'extension': 'java',
+      'inputs': [
+        '<(jni_generator)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni/<(RULE_INPUT_ROOT)_jni.h',
+      ],
+      'action': [
+        '<(jni_generator)',
+        '--input_file',
+        '<(RULE_INPUT_PATH)',
+        '--output_dir',
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni',
+        '--includes',
+        '<(jni_generator_includes)',
+        '--optimize_generation',
+        '<(optimize_jni_generation)',
+        '--jarjar',
+        '<(jni_generator_jarjar_file)',
+        '--ptr_type',
+        '<(jni_generator_ptr_type)',
+        '<(native_exports)',
+      ],
+      'message': 'Generating JNI bindings from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+      'conditions': [
+        ['jni_generator_jarjar_file != ""', {
+          'inputs': [
+            '<(jni_generator_jarjar_file)',
+          ],
+        }]
+      ],
+    },
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)',
+    ],
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+  'conditions': [
+    ['android_webview_build==1', {
+      'variables': {
+        'native_exports%': '--native_exports',
+      },
+      'dependencies': [
+        '<(DEPTH)/build/android/android_exports.gyp:android_exports',
+      ],
+    }],
+  ],
+}
+
diff --git a/build/json_schema_api.gni b/build/json_schema_api.gni
new file mode 100644
index 0000000..5857739
--- /dev/null
+++ b/build/json_schema_api.gni
@@ -0,0 +1,209 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines a static library corresponding to the output of schema compiler tools
+# over a set of extensions API schemas (IDL or JSON format). The library target
+# has implicit hard dependencies on all schema files listed by the invoker and
+# is itself a hard dependency.
+#
+# Invocations of this template may use the following variables:
+#
+# sources [required] A list of schema files to be compiled.
+#
+# root_namespace [required]
+#     A Python string substitution pattern used to generate the C++
+#     namespace for each API. Use %(namespace)s to replace with the API
+#     namespace, like "toplevel::%(namespace)s_api".
+#
+# schema_include_rules [optional]
+#     A list of paths to include when searching for referenced objects,
+#     with the namespace separated by a :.
+#     Example:
+#       [ '/foo/bar:Foo::Bar::%(namespace)s' ]
+#
+# schemas [optional, default = false]
+#   Boolean indicating whether C++ code should be generated for the schema
+#   files.
+#
+# bundle [optional, default = false]
+#   Boolean indicating if the schema bundle files should be generated.
+#
+# bundle_registration [optional, default = false]
+#   Boolean indicating if the API registration bundle files should be generated.
+#
+# impl_dir [required if bundle_registration = true, otherwise unused]
+#   The path containing C++ implementations of API functions. This path is
+#   used as the root path when looking for {schema}/{schema}_api.h headers
+#   when generating API registration bundles. Such headers, if found, are
+#   automatically included by the generated code.
+#
+# uncompiled_sources [optional, only used when bundle = true or
+#     bundle_registration = true]
+#   A list of schema files which should not be compiled, but which should still
+#   be processed for API bundle generation.
+#
+# deps [optional]
+#   If any deps are specified they will be inherited by the static library
+#   target.
+#
+# The static library target also inherits the visibility and output_name
+# of its invoker.
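+#
+# For illustration only, a hypothetical invocation (the schema path and
+# namespace below are placeholders) might look like:
+#
+#   json_schema_api("api") {
+#     sources = [ "example.idl" ]
+#     root_namespace = "myproject::api::%(namespace)s"
+#     schemas = true
+#     bundle = true
+#   }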
+
+template("json_schema_api") {
+  assert(defined(invoker.sources),
+         "\"sources\" must be defined for the $target_name template.")
+  assert(defined(invoker.root_namespace),
+         "\"root_namespace\" must be defined for the $target_name template.")
+
+  schemas = defined(invoker.schemas) && invoker.schemas
+  bundle = defined(invoker.bundle) && invoker.bundle
+  bundle_registration = defined(invoker.bundle_registration) &&
+      invoker.bundle_registration
+
+  schema_include_rules = ""
+  if (defined(invoker.schema_include_rules)) {
+    schema_include_rules = invoker.schema_include_rules
+  }
+
+  # Keep a copy of the target_name here since it will be trampled
+  # in nested targets.
+  target_visibility = [ ":$target_name" ]
+
+  generated_config_name = target_name + "_generated_config"
+  config(generated_config_name) {
+    include_dirs = [ target_gen_dir ]
+    visibility = target_visibility
+  }
+
+  sources = invoker.sources
+  root_namespace = invoker.root_namespace
+
+  compiler_root = "//tools/json_schema_compiler"
+  compiler_script = "$compiler_root/compiler.py"
+  compiler_sources = [
+    "$compiler_root/cc_generator.py",
+    "$compiler_root/code.py",
+    "$compiler_root/compiler.py",
+    "$compiler_root/cpp_generator.py",
+    "$compiler_root/cpp_type_generator.py",
+    "$compiler_root/cpp_util.py",
+    "$compiler_root/h_generator.py",
+    "$compiler_root/idl_schema.py",
+    "$compiler_root/model.py",
+    "$compiler_root/util_cc_helper.py",
+  ]
+
+  if (schemas) {
+    schema_generator_name = target_name + "_schema_generator"
+    action_foreach(schema_generator_name) {
+      script = compiler_script
+      inputs = compiler_sources
+      outputs = [
+        "$target_gen_dir/{{source_name_part}}.cc",
+        "$target_gen_dir/{{source_name_part}}.h",
+      ]
+      args = [
+        "{{source}}",
+        "--root=" + rebase_path("//", root_build_dir),
+        "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
+        "--namespace=$root_namespace",
+        "--generator=cpp",
+        "--include-rules=$schema_include_rules" ]
+
+      if (defined(invoker.visibility)) {
+        # If visibility is restricted, add our own target to it.
+        visibility = invoker.visibility + target_visibility
+      }
+    }
+  }
+
+  if (bundle) {
+    uncompiled_sources = []
+    if (defined(invoker.uncompiled_sources)) {
+      uncompiled_sources = invoker.uncompiled_sources
+    }
+
+    bundle_generator_schema_name = target_name + "_bundle_generator_schema"
+    action(bundle_generator_schema_name) {
+      script = compiler_script
+      inputs = compiler_sources + sources + uncompiled_sources
+      outputs = [
+        "$target_gen_dir/generated_schemas.cc",
+        "$target_gen_dir/generated_schemas.h",
+      ]
+      args = [
+        "--root=" + rebase_path("//", root_build_dir),
+        "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
+        "--namespace=$root_namespace",
+        "--generator=cpp-bundle-schema",
+        "--include-rules=$schema_include_rules",
+      ] + rebase_path(sources, root_build_dir) +
+          rebase_path(uncompiled_sources, root_build_dir)
+    }
+  }
+
+  if (bundle_registration) {
+    uncompiled_sources = []
+    if (defined(invoker.uncompiled_sources)) {
+      uncompiled_sources = invoker.uncompiled_sources
+    }
+
+    assert(defined(invoker.impl_dir),
+           "\"impl_dir\" must be defined for the $target_name template.")
+    impl_dir = invoker.impl_dir
+
+    bundle_generator_registration_name = target_name +
+        "_bundle_generator_registration"
+    action(bundle_generator_registration_name) {
+      script = compiler_script
+      inputs = compiler_sources + sources + uncompiled_sources
+      outputs = [
+        "$root_gen_dir/$impl_dir/generated_api_registration.cc",
+        "$root_gen_dir/$impl_dir/generated_api_registration.h",
+      ]
+      args = [
+        "--root=" + rebase_path("//", root_build_dir),
+        "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
+        "--namespace=$root_namespace",
+        "--generator=cpp-bundle-registration",
+        "--impl-dir=" + rebase_path(impl_dir, "//"),
+        "--include-rules=$schema_include_rules",
+      ] + rebase_path(sources, root_build_dir) +
+          rebase_path(uncompiled_sources, root_build_dir)
+    }
+  }
+
+  source_set(target_name) {
+    sources = []
+    deps = []
+    public_deps = []
+
+    if (schemas) {
+      sources += get_target_outputs(":$schema_generator_name")
+      public_deps += [ ":$schema_generator_name" ]
+      deps += [ "//tools/json_schema_compiler:generated_api_util" ]
+    }
+
+    if (bundle) {
+      sources += get_target_outputs(":$bundle_generator_schema_name")
+      deps += [ ":$bundle_generator_schema_name" ]
+    }
+
+    if (bundle_registration) {
+      sources += get_target_outputs(":$bundle_generator_registration_name")
+      deps += [ ":$bundle_generator_registration_name" ]
+    }
+
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    public_configs = [ ":$generated_config_name" ]
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    if (defined(invoker.output_name)) {
+      output_name = invoker.output_name
+    }
+  }
+}
diff --git a/build/json_schema_bundle_compile.gypi b/build/json_schema_bundle_compile.gypi
new file mode 100644
index 0000000..a302013
--- /dev/null
+++ b/build/json_schema_bundle_compile.gypi
@@ -0,0 +1,83 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_files:
+    #     An array of json or idl files that comprise the api model.
+    #   schema_include_rules (optional):
+    #     An array of paths to include when searching for referenced objects,
+    #     with the namespace separated by a :.
+    #     Example:
+    #       [ '/foo/bar:Foo::Bar::%(namespace)s' ]
+    #   cc_dir:
+    #     The directory to put the generated code in.
+    #   root_namespace:
+    #     A Python string substitution pattern used to generate the C++
+    #     namespace for each API. Use %(namespace)s to replace with the API
+    #     namespace, like "toplevel::%(namespace)s_api".
+    #
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
+    # The default root path of API implementation sources is
+    # chrome/browser/extensions/api and can be overridden by setting "impl_dir".
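+    #
+    # An illustrative (hypothetical) use from a target in a .gyp file:
+    #   'variables': {
+    #     'schema_files': [ 'foo.json' ],
+    #     'non_compiled_schema_files': [],
+    #     'cc_dir': 'foo/api',
+    #     'root_namespace': 'foo::api::%(namespace)s',
+    #   },
+    #   'includes': [ '../build/json_schema_bundle_compile.gypi' ],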
+    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+    'api_gen': '<(api_gen_dir)/compiler.py',
+    'generator_files': [
+      '<(api_gen_dir)/cc_generator.py',
+      '<(api_gen_dir)/code.py',
+      '<(api_gen_dir)/compiler.py',
+      '<(api_gen_dir)/cpp_bundle_generator.py',
+      '<(api_gen_dir)/cpp_type_generator.py',
+      '<(api_gen_dir)/cpp_util.py',
+      '<(api_gen_dir)/h_generator.py',
+      '<(api_gen_dir)/idl_schema.py',
+      '<(api_gen_dir)/json_schema.py',
+      '<(api_gen_dir)/model.py',
+      '<(api_gen_dir)/util_cc_helper.py',
+    ],
+    'schema_include_rules': [],
+  },
+  'actions': [
+    {
+      'action_name': 'genapi_bundle_schema',
+      'inputs': [
+        '<@(generator_files)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.h',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.cc',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp-bundle-schema',
+        '--include-rules=<(schema_include_rules)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'message': 'Generating C++ API bundle code for schemas',
+      'process_outputs_as_sources': 1,
+      # Avoid running MIDL compiler on IDL input files.
+      'explicit_idl_action': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ]
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/json_schema_bundle_registration_compile.gypi b/build/json_schema_bundle_registration_compile.gypi
new file mode 100644
index 0000000..8c5af4e
--- /dev/null
+++ b/build/json_schema_bundle_registration_compile.gypi
@@ -0,0 +1,78 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_files:
+    #     An array of .json or .idl files that comprise the API model.
+    #   non_compiled_schema_files:
+    #     An array of schema files that should not be compiled but should
+    #     still be processed for bundle generation.
+    #   impl_dir_:
+    #     The root path of API implementations; also used for the
+    #     output location. (N.B. Named as such to prevent gyp from
+    #     expanding it as a relative path.)
+    #   root_namespace:
+    #     A Python string substitution pattern used to generate the C++
+    #     namespace for each API. Use %(namespace)s to replace with the API
+    #     namespace, like "toplevel::%(namespace)s_api".
+    #
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
+    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+    'api_gen': '<(api_gen_dir)/compiler.py',
+    'generator_files': [
+      '<(api_gen_dir)/cc_generator.py',
+      '<(api_gen_dir)/code.py',
+      '<(api_gen_dir)/compiler.py',
+      '<(api_gen_dir)/cpp_bundle_generator.py',
+      '<(api_gen_dir)/cpp_type_generator.py',
+      '<(api_gen_dir)/cpp_util.py',
+      '<(api_gen_dir)/h_generator.py',
+      '<(api_gen_dir)/idl_schema.py',
+      '<(api_gen_dir)/json_schema.py',
+      '<(api_gen_dir)/model.py',
+      '<(api_gen_dir)/util_cc_helper.py',
+    ],
+  },
+  'actions': [
+    {
+      # GN version: json_schema_api.gni
+      'action_name': 'genapi_bundle_registration',
+      'inputs': [
+        '<@(generator_files)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(impl_dir_)/generated_api_registration.h',
+        '<(SHARED_INTERMEDIATE_DIR)/<(impl_dir_)/generated_api_registration.cc',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp-bundle-registration',
+        '--impl-dir=<(impl_dir_)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'message': 'Generating C++ API bundle code for function registration',
+      'process_outputs_as_sources': 1,
+      # Avoid running MIDL compiler on IDL input files.
+      'explicit_idl_action': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ]
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/json_schema_compile.gypi b/build/json_schema_compile.gypi
new file mode 100644
index 0000000..6e5727a
--- /dev/null
+++ b/build/json_schema_compile.gypi
@@ -0,0 +1,123 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_files:
+    #     An array of .json or .idl files that comprise the API model.
+    #   schema_include_rules (optional):
+    #     An array of paths to include when searching for referenced objects,
+    #     with the namespace separated by a ':'.
+    #     Example:
+    #       [ '/foo/bar:Foo::Bar::%(namespace)s' ]
+    #   cc_dir:
+    #     The directory to put the generated code in.
+    #   root_namespace:
+    #     A Python string substitution pattern used to generate the C++
+    #     namespace for each API. Use %(namespace)s to replace with the API
+    #     namespace, like "toplevel::%(namespace)s_api".
+    #
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
+    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+    'api_gen': '<(api_gen_dir)/compiler.py',
+    'schema_include_rules': [],
+  },
+  'rules': [
+    {
+      # GN version: json_schema_api.gni
+      'rule_name': 'genapi',
+      'msvs_external_rule': 1,
+      'extension': 'json',
+      'inputs': [
+        '<(api_gen_dir)/cc_generator.py',
+        '<(api_gen_dir)/code.py',
+        '<(api_gen_dir)/compiler.py',
+        '<(api_gen_dir)/cpp_generator.py',
+        '<(api_gen_dir)/cpp_type_generator.py',
+        '<(api_gen_dir)/cpp_util.py',
+        '<(api_gen_dir)/h_generator.py',
+        '<(api_gen_dir)/json_schema.py',
+        '<(api_gen_dir)/model.py',
+        '<(api_gen_dir)/util.cc',
+        '<(api_gen_dir)/util.h',
+        '<(api_gen_dir)/util_cc_helper.py',
+        # TODO(calamity): uncomment this when gyp on windows behaves like other
+        # platforms. List expansions of filepaths in inputs expand to different
+        # things.
+        # '<@(schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).h',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp',
+        '--include-rules=<(schema_include_rules)'
+      ],
+      'message': 'Generating C++ code from <(RULE_INPUT_PATH) json files',
+      'process_outputs_as_sources': 1,
+    },
+    {
+      'rule_name': 'genapi_idl',
+      'msvs_external_rule': 1,
+      'extension': 'idl',
+      'inputs': [
+        '<(api_gen_dir)/cc_generator.py',
+        '<(api_gen_dir)/code.py',
+        '<(api_gen_dir)/compiler.py',
+        '<(api_gen_dir)/cpp_generator.py',
+        '<(api_gen_dir)/cpp_type_generator.py',
+        '<(api_gen_dir)/cpp_util.py',
+        '<(api_gen_dir)/h_generator.py',
+        '<(api_gen_dir)/idl_schema.py',
+        '<(api_gen_dir)/model.py',
+        '<(api_gen_dir)/util.cc',
+        '<(api_gen_dir)/util.h',
+        '<(api_gen_dir)/util_cc_helper.py',
+        # TODO(calamity): uncomment this when gyp on windows behaves like other
+        # platforms. List expansions of filepaths in inputs expand to different
+        # things.
+        # '<@(schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).h',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp',
+        '--include-rules=<(schema_include_rules)'
+      ],
+      'message': 'Generating C++ code from <(RULE_INPUT_PATH) IDL files',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  'dependencies':[
+    '<(DEPTH)/tools/json_schema_compiler/api_gen_util.gyp:api_gen_util',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ]
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/json_to_struct.gypi b/build/json_to_struct.gypi
new file mode 100644
index 0000000..57271c8
--- /dev/null
+++ b/build/json_to_struct.gypi
@@ -0,0 +1,50 @@
+# Copyright 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_file: a JSON file that comprises the structure model.
+    #   namespace: the C++ namespace that all generated files go under
+    #   cc_dir: path to generated files
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
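+    #
+    # An illustrative (hypothetical) use from a target in a .gyp file:
+    #   'variables': {
+    #     'schema_file': 'my_data.json',
+    #     'namespace': 'my_namespace',
+    #     'cc_dir': 'my/generated',
+    #   },
+    #   'sources': [ '<(schema_file)' ],
+    #   'includes': [ '../build/json_to_struct.gypi' ],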
+    'struct_gen_dir': '<(DEPTH)/tools/json_to_struct',
+    'struct_gen': '<(struct_gen_dir)/json_to_struct.py',
+  },
+  'rules': [
+    {
+      # GN version: //tools/json_to_struct/json_to_struct.gni
+      'rule_name': 'genstaticinit',
+      'extension': 'json',
+      'inputs': [
+        '<(struct_gen_dir)/element_generator.py',
+        '<(struct_gen_dir)/json_to_struct.py',
+        '<(struct_gen_dir)/struct_generator.py',
+        '<(schema_file)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_ROOT).h',
+      ],
+      'action': [
+        'python',
+        '<(struct_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--destbase=<(SHARED_INTERMEDIATE_DIR)',
+        '--destdir=<(cc_dir)',
+        '--namespace=<(namespace)',
+        '--schema=<(schema_file)',
+      ],
+      'message': 'Generating C++ static initializers from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/landmine_utils.py b/build/landmine_utils.py
new file mode 100644
index 0000000..7737832
--- /dev/null
+++ b/build/landmine_utils.py
@@ -0,0 +1,114 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import functools
+import logging
+import os
+import shlex
+import sys
+
+
+def memoize(default=None):
+  """This decorator caches the return value of a parameterless pure function"""
+  def memoizer(func):
+    val = []
+    @functools.wraps(func)
+    def inner():
+      if not val:
+        ret = func()
+        val.append(ret if ret is not None else default)
+        if logging.getLogger().isEnabledFor(logging.INFO):
+          print '%s -> %r' % (func.__name__, val[0])
+      return val[0]
+    return inner
+  return memoizer
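+# Illustrative usage (hypothetical function): decorating a parameterless
+# function as
+#   @memoize(default='')
+#   def distributor_flag():
+#     return os.environ.get('DISTRIBUTOR_FLAG')
+# runs the body at most once; later calls return the cached value, with ''
+# standing in if the body returned None.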
+
+
+@memoize()
+def IsWindows():
+  return sys.platform in ['win32', 'cygwin']
+
+
+@memoize()
+def IsLinux():
+  return sys.platform.startswith(('linux', 'freebsd'))
+
+
+@memoize()
+def IsMac():
+  return sys.platform == 'darwin'
+
+
+@memoize()
+def gyp_defines():
+  """Parses and returns GYP_DEFINES env var as a dictionary."""
+  return dict(arg.split('=', 1)
+      for arg in shlex.split(os.environ.get('GYP_DEFINES', '')))
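+# For example, GYP_DEFINES='OS=android target_arch=arm' parses to
+# {'OS': 'android', 'target_arch': 'arm'}.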
+
+@memoize()
+def gyp_msvs_version():
+  return os.environ.get('GYP_MSVS_VERSION', '')
+
+@memoize()
+def distributor():
+  """
+  Returns a string which is the distributed build engine in use (if any).
+  Possible values: 'goma', 'ib', None
+  """
+  if 'goma' in gyp_defines():
+    return 'goma'
+  elif IsWindows():
+    if 'CHROME_HEADLESS' in os.environ:
+      return 'ib' # use (win and !goma and headless) as approximation of ib
+
+
+@memoize()
+def platform():
+  """
+  Returns a string representing the platform this build is targeted for.
+  Possible values: 'win', 'mac', 'linux', 'ios', 'android'
+  """
+  if 'OS' in gyp_defines():
+    if 'android' in gyp_defines()['OS']:
+      return 'android'
+    else:
+      return gyp_defines()['OS']
+  elif IsWindows():
+    return 'win'
+  elif IsLinux():
+    return 'linux'
+  else:
+    return 'mac'
+
+
+@memoize()
+def builder():
+  """
+  Returns a string representing the build engine (not compiler) to use.
+  Possible values: 'make', 'ninja', 'xcode', 'msvs', 'scons'
+  """
+  if 'GYP_GENERATORS' in os.environ:
+    # for simplicity, only support the first explicit generator
+    generator = os.environ['GYP_GENERATORS'].split(',')[0]
+    if generator.endswith('-android'):
+      return generator.split('-')[0]
+    elif generator.endswith('-ninja'):
+      return 'ninja'
+    else:
+      return generator
+  else:
+    if platform() == 'android':
+      # Good enough for now? Do any android bots use make?
+      return 'ninja'
+    elif platform() == 'ios':
+      return 'xcode'
+    elif IsWindows():
+      return 'ninja'
+    elif IsLinux():
+      return 'ninja'
+    elif IsMac():
+      return 'ninja'
+    else:
+      assert False, 'Don\'t know what builder we\'re using!'
diff --git a/build/landmines.py b/build/landmines.py
new file mode 100755
index 0000000..a034864
--- /dev/null
+++ b/build/landmines.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script runs every build as the first hook (See DEPS). If it detects that
+the build should be clobbered, it will delete the contents of the build
+directory.
+
+A landmine is tripped when a builder checks out a different revision, and the
+diff between the new landmines and the old ones is non-null. At this point, the
+build is clobbered.
+"""
+
+import difflib
+import errno
+import gyp_environment
+import logging
+import optparse
+import os
+import shutil
+import sys
+import subprocess
+import time
+
+import landmine_utils
+
+
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+
+def get_build_dir(build_tool, is_iphone=False):
+  """
+  Returns the absolute path of the output directory for the given build tool.
+  Examples:
+    r'c:\b\build\slave\win\build\src\out'
+    '/mnt/data/b/build/slave/linux/build/src/out'
+    '/b/build/slave/ios_rel_device/build/src/xcodebuild'
+
+  Keep this function in sync with tools/build/scripts/slave/compile.py
+  """
+  ret = None
+  if build_tool == 'xcode':
+    ret = os.path.join(SRC_DIR, 'xcodebuild')
+  elif build_tool in ['make', 'ninja', 'ninja-ios']:  # TODO: Remove ninja-ios.
+    ret = os.path.join(SRC_DIR, os.environ.get('CHROMIUM_OUT_DIR', 'out'))
+  else:
+    raise NotImplementedError('Unexpected GYP_GENERATORS (%s)' % build_tool)
+  return os.path.abspath(ret)
+
+
+def clobber_if_necessary(new_landmines):
+  """Does the work of setting, planting, and triggering landmines."""
+  out_dir = get_build_dir(landmine_utils.builder())
+  landmines_path = os.path.normpath(os.path.join(out_dir, '..', '.landmines'))
+  try:
+    os.makedirs(out_dir)
+  except OSError as e:
+    # Only ignore "already exists"; re-raise any other failure.
+    if e.errno != errno.EEXIST:
+      raise
+
+  if os.path.exists(landmines_path):
+    with open(landmines_path, 'r') as f:
+      old_landmines = f.readlines()
+    if old_landmines != new_landmines:
+      old_date = time.ctime(os.stat(landmines_path).st_ctime)
+      diff = difflib.unified_diff(old_landmines, new_landmines,
+          fromfile='old_landmines', tofile='new_landmines',
+          fromfiledate=old_date, tofiledate=time.ctime(), n=0)
+      sys.stdout.write('Clobbering due to:\n')
+      sys.stdout.writelines(diff)
+
+      # Clobber contents of build directory but not directory itself: some
+      # checkouts have the build directory mounted.
+      for f in os.listdir(out_dir):
+        path = os.path.join(out_dir, f)
+        if os.path.isfile(path):
+          os.unlink(path)
+        elif os.path.isdir(path):
+          shutil.rmtree(path)
+
+  # Save current set of landmines for next time.
+  with open(landmines_path, 'w') as f:
+    f.writelines(new_landmines)
+
+
+def process_options():
+  """Returns a list of landmine emitting scripts."""
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '-s', '--landmine-scripts', action='append',
+      default=[os.path.join(SRC_DIR, 'build', 'get_landmines.py')],
+      help='Path to the script which emits landmines to stdout. The target '
+           'is passed to this script via option -t. Note that an extra '
+           'script can be specified via an env var EXTRA_LANDMINES_SCRIPT.')
+  parser.add_option('-v', '--verbose', action='store_true',
+      default=('LANDMINES_VERBOSE' in os.environ),
+      help=('Emit some extra debugging information (default off). This option '
+          'is also enabled by the presence of a LANDMINES_VERBOSE environment '
+          'variable.'))
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('Unknown arguments %s' % args)
+
+  logging.basicConfig(
+      level=logging.DEBUG if options.verbose else logging.ERROR)
+
+  extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
+  if extra_script:
+    return options.landmine_scripts + [extra_script]
+  else:
+    return options.landmine_scripts
+
+
+def main():
+  landmine_scripts = process_options()
+
+  if landmine_utils.builder() in ('dump_dependency_json', 'eclipse'):
+    return 0
+
+  gyp_environment.SetEnvironment()
+
+  landmines = []
+  for s in landmine_scripts:
+    proc = subprocess.Popen([sys.executable, s], stdout=subprocess.PIPE)
+    output, _ = proc.communicate()
+    landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
+  clobber_if_necessary(landmines)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/linux/bin/eu-strip.sha1 b/build/linux/bin/eu-strip.sha1
new file mode 100644
index 0000000..43f290a
--- /dev/null
+++ b/build/linux/bin/eu-strip.sha1
@@ -0,0 +1 @@
+0a9b8f68615ce388b65201e6d22da7a9cf2e729c
\ No newline at end of file
diff --git a/build/linux/chrome_linux.croc b/build/linux/chrome_linux.croc
new file mode 100644
index 0000000..f400306
--- /dev/null
+++ b/build/linux/chrome_linux.croc
@@ -0,0 +1,29 @@
+# -*- python -*-
+# Crocodile config file for Chromium linux
+
+# TODO(jhawkins): We'll need to add a chromeos.croc once we get a coverage bot
+# for that platform.
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include non-Linux platform dirs
+    {
+      'regexp' : '.*/(chromeos|views)/',
+      'include' : 0,
+    },
+    # Don't include chromeos, windows, or mac specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|mac|win|views)(\\.|_)',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_linux\\.',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/linux/dump_app_syms b/build/linux/dump_app_syms
new file mode 100755
index 0000000..cbeb676
--- /dev/null
+++ b/build/linux/dump_app_syms
@@ -0,0 +1,36 @@
+#!/bin/sh
+
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Helper script to run dump_syms on Chrome Linux executables and strip
+# them if needed.
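+#
+# Illustrative invocation (paths are hypothetical):
+#   dump_app_syms ./dump_syms 1 out/Release/chrome chrome.sym
+# writes the symbols for out/Release/chrome to chrome.sym, then strips the
+# binary (any second argument other than "0" enables stripping).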
+
+set -e
+
+usage() {
+  echo -n "$0 <dump_syms_exe> <strip_binary> " >&2
+  echo "<binary_with_symbols> <symbols_output>" >&2
+}
+
+
+if [ $# -ne 4 ]; then
+  usage
+  exit 1
+fi
+
+SCRIPTDIR="$(readlink -f "$(dirname "$0")")"
+DUMPSYMS="$1"
+STRIP_BINARY="$2"
+INFILE="$3"
+OUTFILE="$4"
+
+# Dump the symbols from the given binary.
+if [ ! -e "$OUTFILE" -o "$INFILE" -nt "$OUTFILE" ]; then
+  "$DUMPSYMS" -r "$INFILE" > "$OUTFILE"
+fi
+
+if [ "$STRIP_BINARY" != "0" ]; then
+  strip "$INFILE"
+fi
diff --git a/build/linux/install-arm-sysroot.py b/build/linux/install-arm-sysroot.py
new file mode 100755
index 0000000..4d593cc
--- /dev/null
+++ b/build/linux/install-arm-sysroot.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to install ARM root image for cross building of ARM chrome on linux.
+This script can be run manually but is more often run as part of gclient
+hooks. When run from hooks this script should be a no-op on non-linux
+platforms.
+
+The sysroot image could be constructed from scratch based on the current
+state of precise/arm, but for consistency we currently use a pre-built root
+image which was originally designed for building trusted NaCl code. The image
+will normally need to be rebuilt every time Chrome's build dependencies
+change.
+
+Steps to rebuild the arm sysroot image:
+
+- cd $SRC/native_client
+- ./tools/trusted_cross_toolchains/trusted-toolchain-creator.armel.precise.sh \
+    UpdatePackageLists
+- ./tools/trusted_cross_toolchains/trusted-toolchain-creator.armel.precise.sh \
+    BuildJail $SRC/out/arm-sysroot.tar.gz
+- gsutil cp -a public-read $SRC/out/arm-sysroot.tar.gz \
+    nativeclient-archive2/toolchain/$NACL_REV/sysroot-arm-trusted.tgz
+"""
+
+# TODO(sbc): merge this script into:
+#  chrome/installer/linux/sysroot_scripts/install-debian.wheezy.sysroot.py
+
+import hashlib
+import os
+import shutil
+import subprocess
+import sys
+
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+URL_PREFIX = 'https://storage.googleapis.com'
+URL_PATH = 'chrome-linux-sysroot/toolchain'
+REVISION = 285950
+TARBALL = 'debian_wheezy_arm_sysroot.tgz'
+TARBALL_SHA1SUM = 'fc2f54db168887c5190c4c6686c869bedf668b4e'
+
+
+def get_sha1(filename):
+  sha1 = hashlib.sha1()
+  with open(filename, 'rb') as f:
+    while True:
+      # Read in 1mb chunks, so it doesn't all have to be loaded into memory.
+      chunk = f.read(1024*1024)
+      if not chunk:
+        break
+      sha1.update(chunk)
+  return sha1.hexdigest()
+
+
+def main(args):
+  if '--linux-only' in args:
+    # This argument is passed when run from the gclient hooks.
+    # In this case we return early on non-linux platforms
+    # or if GYP_DEFINES doesn't include target_arch=arm
+    if not sys.platform.startswith('linux'):
+      return 0
+
+    if "target_arch=arm" not in os.environ.get('GYP_DEFINES', ''):
+      return 0
+
+  src_root = os.path.dirname(os.path.dirname(SCRIPT_DIR))
+  sysroot = os.path.join(src_root, 'arm-sysroot')
+  url = "%s/%s/%s/%s" % (URL_PREFIX, URL_PATH, REVISION, TARBALL)
+
+  stamp = os.path.join(sysroot, ".stamp")
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == url:
+        print "ARM root image already up-to-date: %s" % sysroot
+        return 0
+
+  print "Installing ARM root image: %s" % sysroot
+  if os.path.isdir(sysroot):
+    shutil.rmtree(sysroot)
+  os.mkdir(sysroot)
+  tarball = os.path.join(sysroot, TARBALL)
+  curl = ['curl', '--fail', '-L', url, '-o', tarball]
+  if os.isatty(sys.stdout.fileno()):
+    curl.append('--progress-bar')
+  else:
+    curl.append('--silent')
+  subprocess.check_call(curl)
+  sha1sum = get_sha1(tarball)
+  if sha1sum != TARBALL_SHA1SUM:
+    print 'Tarball sha1sum is wrong.'
+    print 'Expected %s, actual: %s' % (TARBALL_SHA1SUM, sha1sum)
+    return 1
+  subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
+  os.remove(tarball)
+
+  with open(stamp, 'w') as s:
+    s.write(url)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/linux/install-chromeos-fonts.py b/build/linux/install-chromeos-fonts.py
new file mode 100755
index 0000000..98c3a57
--- /dev/null
+++ b/build/linux/install-chromeos-fonts.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install the Chrome OS fonts on Linux.
+# This script can be run manually (as root), but is also run as part
+# install-build-deps.sh.
+
+import os
+import shutil
+import subprocess
+import sys
+
+URL_PREFIX = 'https://commondatastorage.googleapis.com'
+URL_DIR = 'chromeos-localmirror/distfiles'
+URL_FILE = 'notofonts-20121206.tar.gz'
+FONTS_DIR = '/usr/local/share/fonts'
+
+# The URL matches the URL in the ebuild script in chromiumos. See:
+#  /path/to/chromiumos/src/
+#  third_party/chromiumos-overlay/media-fonts/notofonts/
+#  notofonts-20121206.ebuild
+
+def main(args):
+  if not sys.platform.startswith('linux'):
+    print "Error: %s must be run on Linux." % __file__
+    return 1
+
+  if os.getuid() != 0:
+    print "Error: %s must be run as root." % __file__
+    return 1
+
+  if not os.path.isdir(FONTS_DIR):
+    print "Error: Destination directory does not exist: %s" % FONTS_DIR
+    return 1
+
+  dest_dir = os.path.join(FONTS_DIR, 'chromeos')
+
+  url = "%s/%s/%s" % (URL_PREFIX, URL_DIR, URL_FILE)
+
+  stamp = os.path.join(dest_dir, ".stamp02")
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == url:
+        print "Chrome OS fonts already up-to-date in %s." % dest_dir
+        return 0
+
+  if os.path.isdir(dest_dir):
+    shutil.rmtree(dest_dir)
+  os.mkdir(dest_dir)
+  os.chmod(dest_dir, 0755)
+
+  print "Installing Chrome OS fonts to %s." % dest_dir
+  tarball = os.path.join(dest_dir, URL_FILE)
+  subprocess.check_call(['curl', '-L', url, '-o', tarball])
+  subprocess.check_call(['tar', '--no-same-owner', '--no-same-permissions',
+                         '-xf', tarball, '-C', dest_dir])
+  os.remove(tarball)
+
+  readme = os.path.join(dest_dir, "README")
+  with open(readme, 'w') as s:
+    s.write("This directory and its contents are auto-generated.\n")
+    s.write("It may be deleted and recreated. Do not modify.\n")
+    s.write("Script: %s\n" % __file__)
+
+  with open(stamp, 'w') as s:
+    s.write(url)
+
+  for base, dirs, files in os.walk(dest_dir):
+    for dir in dirs:
+      os.chmod(os.path.join(base, dir), 0755)
+    for file in files:
+      os.chmod(os.path.join(base, file), 0644)
+
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/linux/pkg-config-wrapper b/build/linux/pkg-config-wrapper
new file mode 100755
index 0000000..b759564
--- /dev/null
+++ b/build/linux/pkg-config-wrapper
@@ -0,0 +1,59 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This program wraps around pkg-config to generate the correct include and
+# library paths when cross-compiling using a sysroot.
+# The assumption is that the sysroot contains the .pc files in usr/lib/pkgconfig
+# and usr/share/pkgconfig (relative to the sysroot) and that they output paths
+# relative to some parent path of the sysroot.
+# This assumption is valid for a range of sysroots, in particular: a
+# LSB-compliant root filesystem mounted at the sysroot, and a board build
+# directory of a Chromium OS chroot.
+# Additional directories containing .pc files may be specified by setting
+# the PKG_CONFIG_PATH environment variable; these will be prepended to the
+# generated paths.
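+#
+# Illustrative invocation (paths are hypothetical):
+#   ./pkg-config-wrapper /path/to/sysroot arm lib --cflags --libs glib-2.0
+# prints the glib-2.0 flags with include and library paths rewritten to
+# point under /path/to/sysroot.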
+
+root="$1"
+shift
+target_arch="$1"
+shift
+libpath="$1"
+shift
+
+if [ -z "$root" -o -z "$target_arch" ]
+then
+  echo "usage: $0 /path/to/sysroot target_arch libdir [pkg-config-arguments] package" >&2
+  exit 1
+fi
+
+if [ "$target_arch" = "x64" ]
+then
+  : ${libpath:="lib64"}
+else
+  : ${libpath:="lib"}
+fi
+
+rewrite=`dirname $0`/rewrite_dirs.py
+package=${!#}
+
+config_path=$root/usr/$libpath/pkgconfig:$root/usr/share/pkgconfig
+
+# prepend any paths specified by the environment
+if [ -n "$PKG_CONFIG_PATH" ]
+then
+  config_path="$PKG_CONFIG_PATH:$config_path"
+fi
+
+set -e
+# Some sysroots, like the Chromium OS ones, may generate paths that are not
+# relative to the sysroot. For example,
+# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all paths
+# relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) instead of
+# relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
+# To support this correctly, it's necessary to extract the prefix to strip from
+# pkg-config's |prefix| variable.
+prefix=`PKG_CONFIG_PATH=$config_path pkg-config --variable=prefix "$package" | sed -e 's|/usr$||'`
+result=`PKG_CONFIG_PATH=$config_path pkg-config "$@"`
+echo "$result"| $rewrite --sysroot "$root" --strip-prefix "$prefix"
diff --git a/build/linux/rewrite_dirs.py b/build/linux/rewrite_dirs.py
new file mode 100755
index 0000000..30f22f0
--- /dev/null
+++ b/build/linux/rewrite_dirs.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rewrites paths in -I, -L and other option to be relative to a sysroot."""
+
+import sys
+import os
+import optparse
+
+REWRITE_PREFIX = ['-I',
+                  '-idirafter',
+                  '-imacros',
+                  '-imultilib',
+                  '-include',
+                  '-iprefix',
+                  '-iquote',
+                  '-isystem',
+                  '-L']
+
+def RewritePath(path, opts):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  sysroot = opts.sysroot
+  prefix = opts.strip_prefix
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(prefix):
+      path = path[len(prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
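+# For example (hypothetical values): with --sysroot=/sysroot and
+# --strip-prefix=/build/x86-generic/usr, the flag
+# '-I/build/x86-generic/usr/include' is rewritten to '-I/sysroot/include'.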
+
+def RewriteLine(line, opts):
+  """Rewrites all the paths in recognized options."""
+  args = line.split()
+  count = len(args)
+  i = 0
+  while i < count:
+    for prefix in REWRITE_PREFIX:
+      # The option can be either in the form "-I /path/to/dir" or
+      # "-I/path/to/dir" so handle both.
+      if args[i] == prefix:
+        i += 1
+        try:
+          args[i] = RewritePath(args[i], opts)
+        except IndexError:
+          sys.stderr.write('Missing argument following %s\n' % prefix)
+          break
+      elif args[i].startswith(prefix):
+        args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
+    i += 1
+
+  return ' '.join(args)
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
+  parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
+  opts, args = parser.parse_args(argv[1:])
+
+  for line in sys.stdin.readlines():
+    line = RewriteLine(line.strip(), opts)
+    print line
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/linux/sysroot_ld_path.sh b/build/linux/sysroot_ld_path.sh
new file mode 100755
index 0000000..4b8bf73
--- /dev/null
+++ b/build/linux/sysroot_ld_path.sh
@@ -0,0 +1,100 @@
+#!/bin/sh
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Reads etc/ld.so.conf and/or etc/ld.so.conf.d/*.conf and returns the
+# appropriate linker flags.
+#
+#  sysroot_ld_path.sh /abspath/to/sysroot
+#
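+# For example (hypothetical sysroot): an ld.so.conf entry "/usr/lib" under
+# /path/to/sysroot expands to
+#   -L/path/to/sysroot/usr/lib -Wl,-rpath-link=/path/to/sysroot/usr/lib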
+
+log_error_and_exit() {
+  echo $0: $@
+  exit 1
+}
+
+process_entry() {
+  if [ -z "$1" ] || [ -z "$2" ]; then
+    log_error_and_exit "bad arguments to process_entry()"
+  fi
+  local root="$1"
+  local localpath="$2"
+
+  echo $localpath | grep -qs '^/'
+  if [ $? -ne 0 ]; then
+    log_error_and_exit $localpath does not start with /
+  fi
+  local entry="$root$localpath"
+  echo -L$entry
+  echo -Wl,-rpath-link=$entry
+}
+
+process_ld_so_conf() {
+  if [ -z "$1" ] || [ -z "$2" ]; then
+    log_error_and_exit "bad arguments to process_ld_so_conf()"
+  fi
+  local root="$1"
+  local ld_so_conf="$2"
+
+  # ld.so.conf may include relative include paths. pushd is a bashism.
+  local saved_pwd=$(pwd)
+  cd $(dirname "$ld_so_conf")
+
+  cat "$ld_so_conf" | \
+    while read ENTRY; do
+      echo "$ENTRY" | grep -qs ^include
+      if [ $? -eq 0 ]; then
+        local included_files=$(echo "$ENTRY" | sed 's/^include //')
+        echo "$included_files" | grep -qs ^/
+        if [ $? -eq 0 ]; then
+          if ls $root$included_files >/dev/null 2>&1 ; then
+            for inc_file in $root$included_files; do
+              process_ld_so_conf "$root" "$inc_file"
+            done
+          fi
+        else
+          if ls $(pwd)/$included_files >/dev/null 2>&1 ; then
+            for inc_file in $(pwd)/$included_files; do
+              process_ld_so_conf "$root" "$inc_file"
+            done
+          fi
+        fi
+        continue
+      fi
+
+      echo "$ENTRY" | grep -qs ^/
+      if [ $? -eq 0 ]; then
+        process_entry "$root" "$ENTRY"
+      fi
+    done
+
+  # popd is a bashism
+  cd "$saved_pwd"
+}
+
+# Main
+
+if [ $# -ne 1 ]; then
+  echo "Usage: $0 /abspath/to/sysroot"
+  exit 1
+fi
+
+echo $1 | grep -qs ' '
+if [ $? -eq 0 ]; then
+  log_error_and_exit $1 contains whitespace.
+fi
+
+LD_SO_CONF="$1/etc/ld.so.conf"
+LD_SO_CONF_D="$1/etc/ld.so.conf.d"
+
+if [ -e "$LD_SO_CONF" ]; then
+  process_ld_so_conf "$1" "$LD_SO_CONF" | xargs echo
+elif [ -e "$LD_SO_CONF_D" ]; then
+  find "$LD_SO_CONF_D" -maxdepth 1 -name '*.conf' -print -quit > /dev/null
+  if [ $? -eq 0 ]; then
+    for entry in $LD_SO_CONF_D/*.conf; do
+      process_ld_so_conf "$1" "$entry"
+    done | xargs echo
+  fi
+fi
diff --git a/build/linux/system.gyp b/build/linux/system.gyp
new file mode 100644
index 0000000..8f9b2d8
--- /dev/null
+++ b/build/linux/system.gyp
@@ -0,0 +1,1080 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    'conditions': [
+      ['sysroot!=""', {
+        'pkg-config': '<(chroot_cmd) ./pkg-config-wrapper "<(sysroot)" "<(target_arch)" "<(system_libdir)"',
+      }, {
+        'pkg-config': 'pkg-config',
+      }],
+    ],
+
+    # If any of the linux_link_FOO below are set to 1, then the corresponding
+    # target will be linked against the FOO library (either dynamically or
+    # statically, depending on the pkg-config files), as opposed to loading the
+    # FOO library dynamically with dlopen.
+    'linux_link_libgps%': 0,
+    'linux_link_libpci%': 0,
+    'linux_link_libspeechd%': 0,
+    'linux_link_libbrlapi%': 0,
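+    # For example, a build run with GYP_DEFINES containing
+    # 'linux_link_libpci=1' would link the relevant targets against libpci
+    # directly rather than dlopen()ing it at runtime.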
+  },
+  'conditions': [
+    [ 'chromeos==0 and use_ozone==0', {
+      # Hide GTK and its related dependencies when building for Chrome OS or
+      # Ozone, so they don't get pulled back in; GTK must not be used on
+      # those platforms.
+      'targets': [
+        {
+          'target_name': 'gdk',
+          'type': 'none',
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags gdk-2.0)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gdk-2.0)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gdk-2.0)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'gtk',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'variables': {
+            # gtk requires gmodule, but it does not list it as a dependency
+            # in some misconfigured systems.
+            'gtk_packages': 'gmodule-2.0 gtk+-2.0 gthread-2.0',
+          },
+          'conditions': [
+            ['_toolset=="target"', {
+              'all_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags <(gtk_packages))',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other <(gtk_packages))',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l <(gtk_packages))',
+                ],
+              },
+            }, {
+              'all_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags <(gtk_packages))',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other <(gtk_packages))',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l <(gtk_packages))',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'gtkprint',
+          'type': 'none',
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags gtk+-unix-print-2.0)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gtk+-unix-print-2.0)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gtk+-unix-print-2.0)',
+                ],
+              },
+            }],
+          ],
+        },
+      ],  # targets
+    }],
+    [ 'use_x11==1 or ozone_platform_ozonex==1', {
+      # X11 and related targets are only defined when they can be used
+      # (use_x11==1 or ozone_platform_ozonex==1); this hides them when
+      # use_x11==0.
+      'targets': [
+        {
+          'target_name': 'x11',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags x11)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other x11 xi)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l x11 xi)',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags x11)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other x11 xi)',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l x11 xi)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'xcursor',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xcursor)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xcursor)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xcursor)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xcomposite',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xcomposite)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xcomposite)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xcomposite)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xdamage',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xdamage)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xdamage)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xdamage)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xext',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xext)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xext)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xext)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xfixes',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xfixes)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xfixes)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xfixes)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xi',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xi)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xi)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xi)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xrandr',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags xrandr)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other xrandr)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l xrandr)',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags xrandr)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other xrandr)',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l xrandr)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'xrender',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xrender)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xrender)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xrender)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xtst',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags xtst)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other xtst)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l xtst)',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags xtst)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other xtst)',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l xtst)',
+                ],
+              },
+            }]
+          ]
+        }
+      ],  # targets
+    }],
+    ['use_x11==1 and chromeos==0', {
+      'targets': [
+        {
+          'target_name': 'xscrnsaver',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xscrnsaver)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xscrnsaver)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xscrnsaver)',
+            ],
+          },
+        },
+      ],  # targets
+    }],
+    ['use_evdev_gestures==1', {
+      'targets': [
+        {
+          'target_name': 'libevdev-cros',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags libevdev-cros)'
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other libevdev-cros)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l libevdev-cros)',
+            ],
+          },
+        },
+        {
+          'target_name': 'libgestures',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags libgestures)'
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other libgestures)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l libgestures)',
+            ],
+          },
+        },
+      ],
+    }],
+    ['ozone_platform_gbm==1', {
+      'targets': [
+        {
+          'target_name': 'gbm',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gbm)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gbm)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gbm)',
+            ],
+          },
+        },
+      ],
+    }],
+    ['ozone_platform_dri==1 or ozone_platform_gbm==1', {
+      'targets': [
+        {
+          'target_name': 'libdrm',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags libdrm)',
+            ],
+          },
+          'link_settings': {
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l libdrm)',
+            ],
+          },
+        },
+      ],
+    }],
+    ['use_udev==1', {
+      'targets': [
+        {
+          'target_name': 'udev',
+          'type': 'none',
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags libudev)'
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other libudev)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l libudev)',
+                ],
+              },
+            }],
+          ],
+        },
+      ],
+    }],
+  ],  # conditions
+  'targets': [
+    {
+      'target_name': 'dbus',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(<(pkg-config) --cflags dbus-1)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(<(pkg-config) --libs-only-L --libs-only-other dbus-1)',
+        ],
+        'libraries': [
+          '<!@(<(pkg-config) --libs-only-l dbus-1)',
+        ],
+      },
+    },
+    {
+      'target_name': 'fontconfig',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['use_system_fontconfig==1', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags fontconfig)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other fontconfig)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l fontconfig)',
+                ],
+              },
+            }, {  # use_system_fontconfig==0
+              'dependencies': [
+                '../../third_party/fontconfig/fontconfig.gyp:fontconfig',
+              ],
+              'export_dependent_settings' : [
+                '../../third_party/fontconfig/fontconfig.gyp:fontconfig',
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'freetype2',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags freetype2)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other freetype2)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l freetype2)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gconf',
+      'type': 'none',
+      'conditions': [
+        ['use_gconf==1 and _toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gconf-2.0)',
+            ],
+            'defines': [
+              'USE_GCONF',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gconf-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gconf-2.0)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gio',
+      'type': 'static_library',
+      'conditions': [
+        ['use_gio==1 and _toolset=="target"', {
+          'cflags': [
+            '<!@(<(pkg-config) --cflags gio-2.0)',
+          ],
+          'variables': {
+            'gio_warning_define': [
+              # glib >=2.40 deprecate g_settings_list_schemas in favor of
+              # g_settings_schema_source_list_schemas. This function is not
+              # available on earlier versions that we still need to support
+              # (specifically, 2.32), so disable the warning.
+              # TODO(mgiuca): Remove this suppression (and variable) when we
+              # drop support for Ubuntu 13.10 (saucy) and earlier. Update the
+              # code to use g_settings_schema_source_list_schemas instead.
+              'GLIB_DISABLE_DEPRECATION_WARNINGS',
+            ],
+          },
+          'defines': [
+            '<(gio_warning_define)',
+          ],
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gio-2.0)',
+            ],
+            'defines': [
+              'USE_GIO',
+              '<(gio_warning_define)',
+            ],
+            'include_dirs': [
+              '<(SHARED_INTERMEDIATE_DIR)',
+            ],
+          },
+          'include_dirs': [
+            '../..',
+          ],
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gio-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gio-2.0)',
+            ],
+            'conditions': [
+              ['linux_link_gsettings==0 and OS=="linux"', {
+                'libraries': [
+                  '-ldl',
+                ],
+              }],
+            ],
+          },
+          'hard_dependency': 1,
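+          # generate_library_loader.py (invoked below) emits a header/source
+          # pair that either links the listed g_settings_* symbols directly
+          # or resolves them at runtime via dlopen() when
+          # linux_link_gsettings==0 (hence the -ldl above).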
+          'actions': [
+            {
+              'variables': {
+                'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libgio.h',
+                'output_cc': '<(INTERMEDIATE_DIR)/libgio_loader.cc',
+                'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+              },
+              'action_name': 'generate_libgio_loader',
+              'inputs': [
+                '<(generator)',
+              ],
+              'outputs': [
+                '<(output_h)',
+                '<(output_cc)',
+              ],
+              'action': ['python',
+                         '<(generator)',
+                         '--name', 'LibGioLoader',
+                         '--output-h', '<(output_h)',
+                         '--output-cc', '<(output_cc)',
+                         '--header', '<gio/gio.h>',
+                         '--link-directly=<(linux_link_gsettings)',
+                         'g_settings_new',
+                         'g_settings_get_child',
+                         'g_settings_get_string',
+                         'g_settings_get_boolean',
+                         'g_settings_get_int',
+                         'g_settings_get_strv',
+                         'g_settings_list_schemas',
+              ],
+              'message': 'Generating libgio library loader',
+              'process_outputs_as_sources': 1,
+            },
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'glib',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'variables': {
+        'glib_packages': 'glib-2.0 gmodule-2.0 gobject-2.0 gthread-2.0',
+      },
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags <(glib_packages))',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other <(glib_packages))',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l <(glib_packages))',
+            ],
+          },
+        }, {
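+          # Host toolset: invoke the plain system pkg-config directly; the
+          # target toolset above goes through <(pkg-config), which may be a
+          # cross-compilation wrapper.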
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(pkg-config --cflags <(glib_packages))',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other <(glib_packages))',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l <(glib_packages))',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gnome_keyring',
+      'type': 'none',
+      'conditions': [
+        ['use_gnome_keyring==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+            ],
+            'defines': [
+              'USE_GNOME_KEYRING',
+            ],
+            'conditions': [
+              ['linux_link_gnome_keyring==0', {
+                'defines': ['DLOPEN_GNOME_KEYRING'],
+              }],
+            ],
+          },
+          'conditions': [
+            ['linux_link_gnome_keyring!=0', {
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+                ],
+              },
+            }, {
+              'conditions': [
+                ['OS=="linux"', {
+                  'link_settings': {
+                    'libraries': [
+                      '-ldl',
+                    ],
+                  },
+                }],
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+    {
+      # The unit tests use a few convenience functions from the GNOME
+      # Keyring library directly. We ignore linux_link_gnome_keyring and
+      # link directly in this version of the target to allow this.
+      # *** Do not use this target in the main binary! ***
+      'target_name': 'gnome_keyring_direct',
+      'type': 'none',
+      'conditions': [
+        ['use_gnome_keyring==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+            ],
+            'defines': [
+              'USE_GNOME_KEYRING',
+            ],
+            'conditions': [
+              ['linux_link_gnome_keyring==0', {
+                'defines': ['DLOPEN_GNOME_KEYRING'],
+              }],
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'libbrlapi',
+      'type': 'static_library',
+      'all_dependent_settings': {
+        'include_dirs': [
+          '<(SHARED_INTERMEDIATE_DIR)',
+        ],
+        'defines': [
+          'USE_BRLAPI',
+        ],
+        'conditions': [
+          ['linux_link_libbrlapi==1', {
+            'link_settings': {
+              'libraries': [
+                '-lbrlapi',
+              ],
+            }
+          }],
+        ],
+      },
+      'include_dirs': [
+        '../..',
+      ],
+      'hard_dependency': 1,
+      'actions': [
+        {
+          'variables': {
+            'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libbrlapi.h',
+            'output_cc': '<(INTERMEDIATE_DIR)/libbrlapi_loader.cc',
+            'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+          },
+          'action_name': 'generate_brlapi_loader',
+          'inputs': [
+            '<(generator)',
+          ],
+          'outputs': [
+            '<(output_h)',
+            '<(output_cc)',
+          ],
+          'action': ['python',
+                     '<(generator)',
+                     '--name', 'LibBrlapiLoader',
+                     '--output-h', '<(output_h)',
+                     '--output-cc', '<(output_cc)',
+                     '--header', '<brlapi.h>',
+                     '--link-directly=<(linux_link_libbrlapi)',
+                     'brlapi_getHandleSize',
+                     'brlapi_error_location',
+                     'brlapi_strerror',
+                     'brlapi__acceptKeys',
+                     'brlapi__openConnection',
+                     'brlapi__closeConnection',
+                     'brlapi__getDisplaySize',
+                     'brlapi__enterTtyModeWithPath',
+                     'brlapi__leaveTtyMode',
+                     'brlapi__writeDots',
+                     'brlapi__readKey',
+          ],
+          'message': 'Generating libbrlapi library loader',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    },
+    {
+      'target_name': 'libcap',
+      'type': 'none',
+      'link_settings': {
+        'libraries': [
+          '-lcap',
+        ],
+      },
+    },
+    {
+      'target_name': 'libpci',
+      'type': 'static_library',
+      'cflags': [
+        '<!@(<(pkg-config) --cflags libpci)',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(SHARED_INTERMEDIATE_DIR)',
+        ],
+        'conditions': [
+          ['linux_link_libpci==1', {
+            'link_settings': {
+              'ldflags': [
+                '<!@(<(pkg-config) --libs-only-L --libs-only-other libpci)',
+              ],
+              'libraries': [
+                '<!@(<(pkg-config) --libs-only-l libpci)',
+              ],
+            }
+          }],
+        ],
+      },
+      'include_dirs': [
+        '../..',
+      ],
+      'hard_dependency': 1,
+      'actions': [
+        {
+          'variables': {
+            'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libpci.h',
+            'output_cc': '<(INTERMEDIATE_DIR)/libpci_loader.cc',
+            'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+          },
+          'action_name': 'generate_libpci_loader',
+          'inputs': [
+            '<(generator)',
+          ],
+          'outputs': [
+            '<(output_h)',
+            '<(output_cc)',
+          ],
+          'action': ['python',
+                     '<(generator)',
+                     '--name', 'LibPciLoader',
+                     '--output-h', '<(output_h)',
+                     '--output-cc', '<(output_cc)',
+                     '--header', '<pci/pci.h>',
+                     # TODO(phajdan.jr): Report problem to pciutils project
+                     # and get it fixed so that we don't need --use-extern-c.
+                     '--use-extern-c',
+                     '--link-directly=<(linux_link_libpci)',
+                     'pci_alloc',
+                     'pci_init',
+                     'pci_cleanup',
+                     'pci_scan_bus',
+                     'pci_fill_info',
+                     'pci_lookup_name',
+          ],
+          'message': 'Generating libpci library loader',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    },
+    {
+      'target_name': 'libresolv',
+      'type': 'none',
+      'link_settings': {
+        'libraries': [
+          '-lresolv',
+        ],
+      },
+    },
+    {
+      # GN version: //third_party/speech-dispatcher
+      'target_name': 'libspeechd',
+      'type': 'static_library',
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(SHARED_INTERMEDIATE_DIR)',
+        ],
+        'conditions': [
+          ['linux_link_libspeechd==1', {
+            'link_settings': {
+              'libraries': [
+                '-lspeechd',
+              ],
+            }
+          }],
+        ],
+      },
+      'include_dirs': [
+        '../..',
+      ],
+      'hard_dependency': 1,
+      'actions': [
+        {
+          'variables': {
+            'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libspeechd.h',
+            'output_cc': '<(INTERMEDIATE_DIR)/libspeechd_loader.cc',
+            'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+
+            # speech-dispatcher >= 0.8 installs libspeechd.h into
+            # speech-dispatcher/libspeechd.h, whereas speech-dispatcher < 0.8
+            # puts libspeechd.h in the top-level include directory.
+            # Since we need to support both cases for now, we ship a copy of
+            # libspeechd.h in third_party/speech-dispatcher. If the user
+            # prefers to link against the system speech-dispatcher directly, the
+            # `libspeechd_h_prefix' variable can be passed to gyp with a value
+            # such as "speech-dispatcher/" that will be prepended to
+            # "libspeechd.h" in the #include directive.
+            # TODO(phajdan.jr): Once we do not need to support
+            # speech-dispatcher < 0.8 we can get rid of all this (including
+            # third_party/speech-dispatcher) and just include
+            # speech-dispatcher/libspeechd.h unconditionally.
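+            # For example: build/gyp_chromium -Dlibspeechd_h_prefix=speech-dispatcher/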
+            'libspeechd_h_prefix%': '',
+          },
+          'action_name': 'generate_libspeechd_loader',
+          'inputs': [
+            '<(generator)',
+          ],
+          'outputs': [
+            '<(output_h)',
+            '<(output_cc)',
+          ],
+          'action': ['python',
+                     '<(generator)',
+                     '--name', 'LibSpeechdLoader',
+                     '--output-h', '<(output_h)',
+                     '--output-cc', '<(output_cc)',
+                     '--header', '<<(libspeechd_h_prefix)libspeechd.h>',
+                     '--bundled-header',
+                     '"third_party/speech-dispatcher/libspeechd.h"',
+                     '--link-directly=<(linux_link_libspeechd)',
+                     'spd_open',
+                     'spd_say',
+                     'spd_stop',
+                     'spd_close',
+                     'spd_pause',
+                     'spd_resume',
+                     'spd_set_notification_on',
+                     'spd_set_voice_rate',
+                     'spd_set_voice_pitch',
+                     'spd_list_synthesis_voices',
+                     'spd_set_synthesis_voice',
+                     'spd_list_modules',
+                     'spd_set_output_module',
+          ],
+          'message': 'Generating libspeechd library loader',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    },
+    {
+      'target_name': 'pangocairo',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags pangocairo pangoft2)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other pangocairo pangoft2)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l pangocairo pangoft2)',
+            ],
+          },
+        }, {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(pkg-config --cflags pangocairo pangoft2)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other pangocairo pangoft2)',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l pangocairo pangoft2)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'ssl',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['use_openssl==1', {
+              'dependencies': [
+                '../../third_party/boringssl/boringssl.gyp:boringssl',
+              ],
+            }],
+            ['use_openssl==0', {
+              'dependencies': [
+                '../../net/third_party/nss/ssl.gyp:libssl',
+              ],
+              'direct_dependent_settings': {
+                'include_dirs+': [
+                  # We need our local copies of the libssl3 headers to come
+                  # before other includes, as we are shadowing system headers.
+                  '<(DEPTH)/net/third_party/nss/ssl',
+                ],
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags nss)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
+                ],
+                'libraries': [
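+                  # -lssl3 is filtered out because the bundled libssl target
+                  # (depended on above) is linked in its place.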
+                  '<!@(<(pkg-config) --libs-only-l nss | sed -e "s/-lssl3//")',
+                ],
+              },
+            }],
+            ['use_openssl==0 and clang==1', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  # There is a broken header guard in /usr/include/nss/secmod.h:
+                  # https://bugzilla.mozilla.org/show_bug.cgi?id=884072
+                  '-Wno-header-guard',
+                ],
+              },
+            }],
+          ]
+        }],
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/README b/build/linux/unbundle/README
new file mode 100644
index 0000000..d1b2a96
--- /dev/null
+++ b/build/linux/unbundle/README
@@ -0,0 +1,44 @@
+This directory contains files that make it possible to use system libraries.
+
+For more info please read the following:
+
+ - https://fedoraproject.org/wiki/Packaging:No_Bundled_Libraries
+ - https://wiki.gentoo.org/wiki/Why_not_bundle_dependencies
+ - http://www.debian.org/doc/debian-policy/ch-source.html#s-embeddedfiles
+
+For more Chromium-specific context please read
+http://spot.livejournal.com/312320.html .
+
+This directory is provided in the source tree to follow the above
+guidelines. It is a compromise that takes into account both Chromium
+developers, who want to avoid the perceived burden of more conditional
+code in gyp, and the expectations of the Open Source community, where
+using system-provided libraries is the norm.
+
+Usage:
+
+1. remove_bundled_libraries.py <preserved-directories>
+
+   For example: remove_bundled_libraries.py third_party/mesa
+
+   The script scans the sources looking for third_party directories.
+   Everything that is not explicitly preserved is removed (except for
+   gyp files), and the script fails if any directory passed on the
+   command line does not exist (to ensure the list is kept up to date).
+
+   This is intended to be used on sources extracted from a tarball,
+   not a repository.
+
+   NOTE: by default this will not remove anything (for safety). Pass the
+   --do-remove flag to actually remove files.
+
+2. replace_gyp_files.py <gyp-flags>
+
+   For example: replace_gyp_files.py -Duse_system_harfbuzz=1
+
+   The script ignores flags other than -D for convenience. This makes it
+   possible to collect all the options in a variable, e.g. ${myconf}, and
+   execute:
+
+   build/linux/unbundle/replace_gyp_files.py ${myconf}
+   build/gyp_chromium ${myconf}
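+
+   replace_gyp_files.py makes a .orig backup of each gyp file it replaces;
+   pass --undo to restore the original files from those backups.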
diff --git a/build/linux/unbundle/expat.gyp b/build/linux/unbundle/expat.gyp
new file mode 100644
index 0000000..030fb85
--- /dev/null
+++ b/build/linux/unbundle/expat.gyp
@@ -0,0 +1,17 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'expat',
+      'type': 'none',
+      'link_settings': {
+        'libraries': [
+          '-lexpat',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/ffmpeg.gyp b/build/linux/unbundle/ffmpeg.gyp
new file mode 100644
index 0000000..e3c3723
--- /dev/null
+++ b/build/linux/unbundle/ffmpeg.gyp
@@ -0,0 +1,54 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'ffmpeg',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libavcodec libavformat libavutil)',
+
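+          # compile_test.py builds each snippet below at gyp time and, if
+          # compilation fails, emits the corresponding --on-failure define so
+          # the build can omit the missing codec ID or API.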
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { return AV_CODEC_ID_OPUS; }" '
+              '--on-failure -DCHROMIUM_OMIT_AV_CODEC_ID_OPUS=1)',
+
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { return AV_CODEC_ID_VP9; }" '
+              '--on-failure -DCHROMIUM_OMIT_AV_CODEC_ID_VP9=1)',
+
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { return AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL; }" '
+              '--on-failure -DCHROMIUM_OMIT_AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL=1)',
+
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { struct AVFrame frame;\n'
+              'return av_frame_get_channels(&frame); }" '
+              '--on-failure -DCHROMIUM_NO_AVFRAME_CHANNELS=1)',
+        ],
+        'defines': [
+          '__STDC_CONSTANT_MACROS',
+          'USE_SYSTEM_FFMPEG',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libavcodec libavformat libavutil)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libavcodec libavformat libavutil)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/flac.gyp b/build/linux/unbundle/flac.gyp
new file mode 100644
index 0000000..9e4a664
--- /dev/null
+++ b/build/linux/unbundle/flac.gyp
@@ -0,0 +1,37 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libflac',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'include',
+        'header_filenames': [
+          'FLAC/callback.h',
+          'FLAC/metadata.h',
+          'FLAC/assert.h',
+          'FLAC/export.h',
+          'FLAC/format.h',
+          'FLAC/stream_decoder.h',
+          'FLAC/stream_encoder.h',
+          'FLAC/ordinals.h',
+          'FLAC/all.h',
+        ],
+      },
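+      # shim_headers.gypi generates forwarding headers for the filenames
+      # above, so that includes of the bundled header paths resolve to the
+      # system-installed copies.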
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other flac)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l flac)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/harfbuzz.gyp b/build/linux/unbundle/harfbuzz.gyp
new file mode 100644
index 0000000..3bc1744
--- /dev/null
+++ b/build/linux/unbundle/harfbuzz.gyp
@@ -0,0 +1,47 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # Check for the presence of the harfbuzz-icu library and use it if present.
+    'harfbuzz_libraries':
+        '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+        '--code "int main() { return 0; }" '
+        '--run-linker '
+        '--on-success "harfbuzz harfbuzz-icu" '
+        '--on-failure "harfbuzz" '
+        '-- -lharfbuzz-icu)',
+  },
+  'targets': [
+    {
+      'target_name': 'harfbuzz-ng',
+      'type': 'none',
+      'cflags': [
+        '<!@(pkg-config --cflags <(harfbuzz_libraries))',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags <(harfbuzz_libraries))',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other <(harfbuzz_libraries))',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l <(harfbuzz_libraries))',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'src',
+        'header_filenames': [
+          'hb.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/icu.gyp b/build/linux/unbundle/icu.gyp
new file mode 100644
index 0000000..16c36df
--- /dev/null
+++ b/build/linux/unbundle/icu.gyp
@@ -0,0 +1,248 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'icudata',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags icu-uc)',
+        ],
+        'defines': [
+          'U_USING_ICU_NAMESPACE=0',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other icu-uc)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l icu-uc)',
+        ],
+      },
+    },
+    {
+      'target_name': 'icui18n',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags icu-i18n)',
+        ],
+        'defines': [
+          'U_USING_ICU_NAMESPACE=0',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other icu-i18n)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l icu-i18n)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'source/i18n',
+        'header_filenames': [
+          # This list can easily be updated using the command below:
+          # find third_party/icu/source/i18n/unicode -iname '*.h' \
+          # -printf "'%p',\n" | \
+          # sed -e 's|third_party/icu/source/i18n/||' | sort -u
+          'unicode/basictz.h',
+          'unicode/bmsearch.h',
+          'unicode/bms.h',
+          'unicode/calendar.h',
+          'unicode/choicfmt.h',
+          'unicode/coleitr.h',
+          'unicode/colldata.h',
+          'unicode/coll.h',
+          'unicode/curramt.h',
+          'unicode/currpinf.h',
+          'unicode/currunit.h',
+          'unicode/datefmt.h',
+          'unicode/dcfmtsym.h',
+          'unicode/decimfmt.h',
+          'unicode/dtfmtsym.h',
+          'unicode/dtitvfmt.h',
+          'unicode/dtitvinf.h',
+          'unicode/dtptngen.h',
+          'unicode/dtrule.h',
+          'unicode/fieldpos.h',
+          'unicode/fmtable.h',
+          'unicode/format.h',
+          'unicode/fpositer.h',
+          'unicode/gregocal.h',
+          'unicode/locdspnm.h',
+          'unicode/measfmt.h',
+          'unicode/measunit.h',
+          'unicode/measure.h',
+          'unicode/msgfmt.h',
+          'unicode/numfmt.h',
+          'unicode/numsys.h',
+          'unicode/plurfmt.h',
+          'unicode/plurrule.h',
+          'unicode/rbnf.h',
+          'unicode/rbtz.h',
+          'unicode/regex.h',
+          'unicode/search.h',
+          'unicode/selfmt.h',
+          'unicode/simpletz.h',
+          'unicode/smpdtfmt.h',
+          'unicode/sortkey.h',
+          'unicode/stsearch.h',
+          'unicode/tblcoll.h',
+          'unicode/timezone.h',
+          'unicode/tmunit.h',
+          'unicode/tmutamt.h',
+          'unicode/tmutfmt.h',
+          'unicode/translit.h',
+          'unicode/tzrule.h',
+          'unicode/tztrans.h',
+          'unicode/ucal.h',
+          'unicode/ucoleitr.h',
+          'unicode/ucol.h',
+          'unicode/ucsdet.h',
+          'unicode/ucurr.h',
+          'unicode/udat.h',
+          'unicode/udatpg.h',
+          'unicode/uldnames.h',
+          'unicode/ulocdata.h',
+          'unicode/umsg.h',
+          'unicode/unirepl.h',
+          'unicode/unum.h',
+          'unicode/uregex.h',
+          'unicode/usearch.h',
+          'unicode/uspoof.h',
+          'unicode/utmscale.h',
+          'unicode/utrans.h',
+          'unicode/vtzone.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+    {
+      'target_name': 'icuuc',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags icu-uc)',
+        ],
+        'defines': [
+          'U_USING_ICU_NAMESPACE=0',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other icu-uc)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l icu-uc)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'source/common',
+        'header_filenames': [
+          # This list can easily be updated using the command below:
+          # find third_party/icu/source/common/unicode -iname '*.h' \
+          # -printf "'%p',\n" | \
+          # sed -e 's|third_party/icu/source/common/||' | sort -u
+          'unicode/brkiter.h',
+          'unicode/bytestream.h',
+          'unicode/caniter.h',
+          'unicode/chariter.h',
+          'unicode/dbbi.h',
+          'unicode/docmain.h',
+          'unicode/dtintrv.h',
+          'unicode/errorcode.h',
+          'unicode/icudataver.h',
+          'unicode/icuplug.h',
+          'unicode/idna.h',
+          'unicode/localpointer.h',
+          'unicode/locid.h',
+          'unicode/normalizer2.h',
+          'unicode/normlzr.h',
+          'unicode/pandroid.h',
+          'unicode/parseerr.h',
+          'unicode/parsepos.h',
+          'unicode/pfreebsd.h',
+          'unicode/plinux.h',
+          'unicode/pmac.h',
+          'unicode/popenbsd.h',
+          'unicode/ppalmos.h',
+          'unicode/ptypes.h',
+          'unicode/putil.h',
+          'unicode/pwin32.h',
+          'unicode/rbbi.h',
+          'unicode/rep.h',
+          'unicode/resbund.h',
+          'unicode/schriter.h',
+          'unicode/std_string.h',
+          'unicode/strenum.h',
+          'unicode/stringpiece.h',
+          'unicode/symtable.h',
+          'unicode/ubidi.h',
+          'unicode/ubrk.h',
+          'unicode/ucasemap.h',
+          'unicode/ucat.h',
+          'unicode/uchar.h',
+          'unicode/uchriter.h',
+          'unicode/uclean.h',
+          'unicode/ucnv_cb.h',
+          'unicode/ucnv_err.h',
+          'unicode/ucnv.h',
+          'unicode/ucnvsel.h',
+          'unicode/uconfig.h',
+          'unicode/udata.h',
+          'unicode/udeprctd.h',
+          'unicode/udraft.h',
+          'unicode/uenum.h',
+          'unicode/uidna.h',
+          'unicode/uintrnal.h',
+          'unicode/uiter.h',
+          'unicode/uloc.h',
+          'unicode/umachine.h',
+          'unicode/umisc.h',
+          'unicode/unifilt.h',
+          'unicode/unifunct.h',
+          'unicode/unimatch.h',
+          'unicode/uniset.h',
+          'unicode/unistr.h',
+          'unicode/unorm2.h',
+          'unicode/unorm.h',
+          'unicode/uobject.h',
+          'unicode/uobslete.h',
+          'unicode/urename.h',
+          'unicode/urep.h',
+          'unicode/ures.h',
+          'unicode/uscript.h',
+          'unicode/uset.h',
+          'unicode/usetiter.h',
+          'unicode/ushape.h',
+          'unicode/usprep.h',
+          'unicode/ustring.h',
+          'unicode/usystem.h',
+          'unicode/utext.h',
+          'unicode/utf16.h',
+          'unicode/utf32.h',
+          'unicode/utf8.h',
+          'unicode/utf.h',
+          'unicode/utf_old.h',
+          'unicode/utrace.h',
+          'unicode/utypeinfo.h',
+          'unicode/utypes.h',
+          'unicode/uvernum.h',
+          'unicode/uversion.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/jsoncpp.gyp b/build/linux/unbundle/jsoncpp.gyp
new file mode 100644
index 0000000..c397f64
--- /dev/null
+++ b/build/linux/unbundle/jsoncpp.gyp
@@ -0,0 +1,39 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'jsoncpp',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'source/include',
+        'header_filenames': [
+          'json/assertions.h',
+          'json/autolink.h',
+          'json/config.h',
+          'json/features.h',
+          'json/forwards.h',
+          'json/json.h',
+          'json/reader.h',
+          'json/value.h',
+          'json/writer.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '/usr/include/jsoncpp',
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          '-ljsoncpp',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libXNVCtrl.gyp b/build/linux/unbundle/libXNVCtrl.gyp
new file mode 100644
index 0000000..f076bdb
--- /dev/null
+++ b/build/linux/unbundle/libXNVCtrl.gyp
@@ -0,0 +1,35 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libXNVCtrl',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'NVCtrlLib.h',
+          'NVCtrl.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libXNVCtrl)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libXNVCtrl)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libXNVCtrl)',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libevent.gyp b/build/linux/unbundle/libevent.gyp
new file mode 100644
index 0000000..99d7435
--- /dev/null
+++ b/build/linux/unbundle/libevent.gyp
@@ -0,0 +1,27 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libevent',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'event.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-levent',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libjpeg.gyp b/build/linux/unbundle/libjpeg.gyp
new file mode 100644
index 0000000..f56e7aa
--- /dev/null
+++ b/build/linux/unbundle/libjpeg.gyp
@@ -0,0 +1,29 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libjpeg',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'defines': [
+          'USE_SYSTEM_LIBJPEG',
+        ],
+        'conditions': [
+          ['os_bsd==1', {
+            'include_dirs': [
+              '/usr/local/include',
+            ],
+          }],
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          '-ljpeg',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libpng.gyp b/build/linux/unbundle/libpng.gyp
new file mode 100644
index 0000000..d6933fc
--- /dev/null
+++ b/build/linux/unbundle/libpng.gyp
@@ -0,0 +1,38 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libpng',
+      'type': 'none',
+      'dependencies': [
+        '../zlib/zlib.gyp:zlib',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libpng)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libpng)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libpng)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'png.h',
+          'pngconf.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libusb.gyp b/build/linux/unbundle/libusb.gyp
new file mode 100644
index 0000000..1c18033
--- /dev/null
+++ b/build/linux/unbundle/libusb.gyp
@@ -0,0 +1,34 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libusb',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'src/libusb',
+        'header_filenames': [
+          'libusb.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libusb-1.0)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libusb-1.0)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libusb-1.0)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libvpx.gyp b/build/linux/unbundle/libvpx.gyp
new file mode 100644
index 0000000..cdcf6fa
--- /dev/null
+++ b/build/linux/unbundle/libvpx.gyp
@@ -0,0 +1,42 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      'target_name': 'libvpx',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags vpx)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'source/libvpx',
+        'header_filenames': [
+          'vpx/vpx_codec_impl_bottom.h',
+          'vpx/vpx_image.h',
+          'vpx/vpx_decoder.h',
+          'vpx/vp8.h',
+          'vpx/vpx_codec.h',
+          'vpx/vpx_codec_impl_top.h',
+          'vpx/vp8cx.h',
+          'vpx/vpx_integer.h',
+          'vpx/vp8dx.h',
+          'vpx/vpx_encoder.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other vpx)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l vpx)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libwebp.gyp b/build/linux/unbundle/libwebp.gyp
new file mode 100644
index 0000000..6dbce2e
--- /dev/null
+++ b/build/linux/unbundle/libwebp.gyp
@@ -0,0 +1,28 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libwebp',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'defines': [
+          'ENABLE_WEBP',
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          # Check for the presence of the webpdemux library and use it if present.
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+          '--code "int main() { return 0; }" '
+          '--run-linker '
+          '--on-success "-lwebp -lwebpdemux" '
+          '--on-failure "-lwebp" '
+          '-- -lwebpdemux)',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libxml.gyp b/build/linux/unbundle/libxml.gyp
new file mode 100644
index 0000000..bc4f9fc
--- /dev/null
+++ b/build/linux/unbundle/libxml.gyp
@@ -0,0 +1,38 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libxml',
+      'type': 'static_library',
+      'sources': [
+        'chromium/libxml_utils.h',
+        'chromium/libxml_utils.cc',
+      ],
+      'cflags': [
+        '<!@(pkg-config --cflags libxml-2.0)',
+      ],
+      'defines': [
+        'USE_SYSTEM_LIBXML',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libxml-2.0)',
+        ],
+        'defines': [
+          'USE_SYSTEM_LIBXML',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libxml-2.0)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libxml-2.0)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libxslt.gyp b/build/linux/unbundle/libxslt.gyp
new file mode 100644
index 0000000..f7f6bb9
--- /dev/null
+++ b/build/linux/unbundle/libxslt.gyp
@@ -0,0 +1,25 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libxslt',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libxslt)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libxslt)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libxslt)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/openssl.gyp b/build/linux/unbundle/openssl.gyp
new file mode 100644
index 0000000..d832ba7
--- /dev/null
+++ b/build/linux/unbundle/openssl.gyp
@@ -0,0 +1,25 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'openssl',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags openssl)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other openssl)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l openssl)',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/opus.gyp b/build/linux/unbundle/opus.gyp
new file mode 100644
index 0000000..e8c30ba
--- /dev/null
+++ b/build/linux/unbundle/opus.gyp
@@ -0,0 +1,38 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'opus',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags opus)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'src/include',
+        'header_filenames': [
+          'opus_custom.h',
+          'opus_defines.h',
+          'opus_multistream.h',
+          'opus_types.h',
+          'opus.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other opus)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l opus)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/protobuf.gyp b/build/linux/unbundle/protobuf.gyp
new file mode 100644
index 0000000..7bcd992
--- /dev/null
+++ b/build/linux/unbundle/protobuf.gyp
@@ -0,0 +1,149 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'protobuf_lite',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          # Use full protobuf, because vanilla protobuf doesn't have
+          # our custom patch to retain unknown fields in lite mode.
+          '<!@(pkg-config --cflags protobuf)',
+        ],
+        'defines': [
+          'USE_SYSTEM_PROTOBUF',
+
+          # This macro must be defined to suppress the use
+          # of dynamic_cast<>, which requires RTTI.
+          'GOOGLE_PROTOBUF_NO_RTTI',
+          'GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER',
+        ],
+      },
+      'link_settings': {
+        # Use full protobuf, because vanilla protobuf doesn't have
+        # our custom patch to retain unknown fields in lite mode.
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other protobuf)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l protobuf)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'src',
+        'header_filenames': [
+          # This list can easily be updated using the command below:
+          # find third_party/protobuf/src -iname '*.h' -printf "'%p',\n" | \
+          # sed -e 's|third_party/protobuf/src/||' | sort -u
+          'google/protobuf/compiler/code_generator.h',
+          'google/protobuf/compiler/command_line_interface.h',
+          'google/protobuf/compiler/cpp/cpp_enum_field.h',
+          'google/protobuf/compiler/cpp/cpp_enum.h',
+          'google/protobuf/compiler/cpp/cpp_extension.h',
+          'google/protobuf/compiler/cpp/cpp_field.h',
+          'google/protobuf/compiler/cpp/cpp_file.h',
+          'google/protobuf/compiler/cpp/cpp_generator.h',
+          'google/protobuf/compiler/cpp/cpp_helpers.h',
+          'google/protobuf/compiler/cpp/cpp_message_field.h',
+          'google/protobuf/compiler/cpp/cpp_message.h',
+          'google/protobuf/compiler/cpp/cpp_options.h',
+          'google/protobuf/compiler/cpp/cpp_primitive_field.h',
+          'google/protobuf/compiler/cpp/cpp_service.h',
+          'google/protobuf/compiler/cpp/cpp_string_field.h',
+          'google/protobuf/compiler/cpp/cpp_unittest.h',
+          'google/protobuf/compiler/importer.h',
+          'google/protobuf/compiler/java/java_doc_comment.h',
+          'google/protobuf/compiler/java/java_enum_field.h',
+          'google/protobuf/compiler/java/java_enum.h',
+          'google/protobuf/compiler/java/java_extension.h',
+          'google/protobuf/compiler/java/java_field.h',
+          'google/protobuf/compiler/java/java_file.h',
+          'google/protobuf/compiler/java/java_generator.h',
+          'google/protobuf/compiler/java/java_helpers.h',
+          'google/protobuf/compiler/java/java_message_field.h',
+          'google/protobuf/compiler/java/java_message.h',
+          'google/protobuf/compiler/java/java_primitive_field.h',
+          'google/protobuf/compiler/java/java_service.h',
+          'google/protobuf/compiler/java/java_string_field.h',
+          'google/protobuf/compiler/mock_code_generator.h',
+          'google/protobuf/compiler/package_info.h',
+          'google/protobuf/compiler/parser.h',
+          'google/protobuf/compiler/plugin.h',
+          'google/protobuf/compiler/plugin.pb.h',
+          'google/protobuf/compiler/python/python_generator.h',
+          'google/protobuf/compiler/subprocess.h',
+          'google/protobuf/compiler/zip_writer.h',
+          'google/protobuf/descriptor_database.h',
+          'google/protobuf/descriptor.h',
+          'google/protobuf/descriptor.pb.h',
+          'google/protobuf/dynamic_message.h',
+          'google/protobuf/extension_set.h',
+          'google/protobuf/generated_enum_reflection.h',
+          'google/protobuf/generated_message_reflection.h',
+          'google/protobuf/generated_message_util.h',
+          'google/protobuf/io/coded_stream.h',
+          'google/protobuf/io/coded_stream_inl.h',
+          'google/protobuf/io/gzip_stream.h',
+          'google/protobuf/io/package_info.h',
+          'google/protobuf/io/printer.h',
+          'google/protobuf/io/tokenizer.h',
+          'google/protobuf/io/zero_copy_stream.h',
+          'google/protobuf/io/zero_copy_stream_impl.h',
+          'google/protobuf/io/zero_copy_stream_impl_lite.h',
+          'google/protobuf/message.h',
+          'google/protobuf/message_lite.h',
+          'google/protobuf/package_info.h',
+          'google/protobuf/reflection_ops.h',
+          'google/protobuf/repeated_field.h',
+          'google/protobuf/service.h',
+          'google/protobuf/stubs/atomicops.h',
+          'google/protobuf/stubs/atomicops_internals_arm64_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_arm_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_arm_qnx.h',
+          'google/protobuf/stubs/atomicops_internals_atomicword_compat.h',
+          'google/protobuf/stubs/atomicops_internals_macosx.h',
+          'google/protobuf/stubs/atomicops_internals_mips_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_pnacl.h',
+          'google/protobuf/stubs/atomicops_internals_tsan.h',
+          'google/protobuf/stubs/atomicops_internals_x86_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_x86_msvc.h',
+          'google/protobuf/stubs/common.h',
+          'google/protobuf/stubs/hash.h',
+          'google/protobuf/stubs/map-util.h',
+          'google/protobuf/stubs/once.h',
+          'google/protobuf/stubs/platform_macros.h',
+          'google/protobuf/stubs/stl_util.h',
+          'google/protobuf/stubs/stringprintf.h',
+          'google/protobuf/stubs/strutil.h',
+          'google/protobuf/stubs/substitute.h',
+          'google/protobuf/stubs/template_util.h',
+          'google/protobuf/stubs/type_traits.h',
+          'google/protobuf/testing/file.h',
+          'google/protobuf/testing/googletest.h',
+          'google/protobuf/test_util.h',
+          'google/protobuf/test_util_lite.h',
+          'google/protobuf/text_format.h',
+          'google/protobuf/unknown_field_set.h',
+          'google/protobuf/wire_format.h',
+          'google/protobuf/wire_format_lite.h',
+          'google/protobuf/wire_format_lite_inl.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+    {
+      'target_name': 'protoc',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+    },
+    {
+      'target_name': 'py_proto',
+      'type': 'none',
+    },
+  ],
+}
diff --git a/build/linux/unbundle/re2.gyp b/build/linux/unbundle/re2.gyp
new file mode 100644
index 0000000..e2e567a
--- /dev/null
+++ b/build/linux/unbundle/re2.gyp
@@ -0,0 +1,37 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 're2',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          're2/filtered_re2.h',
+          're2/re2.h',
+          're2/set.h',
+          're2/stringpiece.h',
+          're2/variadic_function.h',
+        ],
+        'shim_generator_additional_args': [
+          # The Chromium copy of re2 is patched to rename POSIX to
+          # POSIX_SYNTAX because of collision issues that break the build.
+          # Upstream refuses to make changes:
+          # http://code.google.com/p/re2/issues/detail?id=73 .
+          '--define', 'POSIX=POSIX_SYNTAX',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lre2',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/remove_bundled_libraries.py b/build/linux/unbundle/remove_bundled_libraries.py
new file mode 100755
index 0000000..69e76f5
--- /dev/null
+++ b/build/linux/unbundle/remove_bundled_libraries.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Removes bundled libraries to make sure they are not used.
+
+See README for more details.
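+
+Example:
+  build/linux/unbundle/remove_bundled_libraries.py --do-remove third_party/mesa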
+"""
+
+
+import optparse
+import os.path
+import sys
+
+
+def DoMain(argv):
+  my_dirname = os.path.abspath(os.path.dirname(__file__))
+  source_tree_root = os.path.abspath(
+    os.path.join(my_dirname, '..', '..', '..'))
+
+  if os.path.join(source_tree_root, 'build', 'linux', 'unbundle') != my_dirname:
+    print ('Sanity check failed: please run this script from the ' +
+           'build/linux/unbundle directory.')
+    return 1
+
+  parser = optparse.OptionParser()
+  parser.add_option('--do-remove', action='store_true')
+
+  options, args = parser.parse_args(argv)
+
+  exclusion_used = {}
+  for exclusion in args:
+    exclusion_used[exclusion] = False
+
+  for root, dirs, files in os.walk(source_tree_root, topdown=False):
+    # Only look at paths which contain a "third_party" component
+    # (note that e.g. third_party.png doesn't count).
+    root_relpath = os.path.relpath(root, source_tree_root)
+    if 'third_party' not in root_relpath.split(os.sep):
+      continue
+
+    for f in files:
+      path = os.path.join(root, f)
+      relpath = os.path.relpath(path, source_tree_root)
+
+      excluded = False
+      for exclusion in args:
+        # Require precise exclusions. Find the right-most third_party
+        # component in the relative path; if there is more than one,
+        # ignore the exclusion if it is completely contained within the
+        # part before the right-most third_party component.
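+        # For example, an exclusion of third_party/foo does not also
+        # preserve third_party/foo/bar/third_party/baz; nested third_party
+        # directories need their own exclusion entries.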
+        split = relpath.rsplit(os.sep + 'third_party' + os.sep, 1)
+        if len(split) > 1 and split[0].startswith(exclusion):
+          continue
+
+        if relpath.startswith(exclusion):
+          # Multiple exclusions can match the same path. Go through all of them
+          # and mark each one as used.
+          exclusion_used[exclusion] = True
+          excluded = True
+      if excluded:
+        continue
+
+      # Deleting gyp files almost always leads to gyp failures.
+      # These files come from the Chromium project and can be replaced if needed.
+      if f.endswith('.gyp') or f.endswith('.gypi'):
+        continue
+
+      # Deleting .isolate files leads to gyp failures. They are usually
+      # not used by a distro build anyway.
+      # See http://www.chromium.org/developers/testing/isolated-testing
+      # for more info.
+      if f.endswith('.isolate'):
+        continue
+
+      if options.do_remove:
+        # Delete the file - the best way to ensure it's not used during the build.
+        os.remove(path)
+      else:
+        # By default just print paths that would be removed.
+        print path
+
+  exit_code = 0
+
+  # Fail if the exclusion list contains stale entries - this helps keep
+  # it up to date.
+  for exclusion, used in exclusion_used.iteritems():
+    if not used:
+      print '%s does not exist' % exclusion
+      exit_code = 1
+
+  if not options.do_remove:
+    print ('To actually remove files printed above, please pass ' +
+           '--do-remove flag.')
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(DoMain(sys.argv[1:]))
diff --git a/build/linux/unbundle/replace_gyp_files.py b/build/linux/unbundle/replace_gyp_files.py
new file mode 100755
index 0000000..a780fc5
--- /dev/null
+++ b/build/linux/unbundle/replace_gyp_files.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Replaces gyp files in tree with files from here that
+make the build use system libraries.
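+
+Example:
+  build/linux/unbundle/replace_gyp_files.py -Duse_system_harfbuzz=1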
+"""
+
+
+import optparse
+import os.path
+import shutil
+import sys
+
+
+REPLACEMENTS = {
+  'use_system_expat': 'third_party/expat/expat.gyp',
+  'use_system_ffmpeg': 'third_party/ffmpeg/ffmpeg.gyp',
+  'use_system_flac': 'third_party/flac/flac.gyp',
+  'use_system_harfbuzz': 'third_party/harfbuzz-ng/harfbuzz.gyp',
+  'use_system_icu': 'third_party/icu/icu.gyp',
+  'use_system_jsoncpp': 'third_party/jsoncpp/jsoncpp.gyp',
+  'use_system_libevent': 'third_party/libevent/libevent.gyp',
+  'use_system_libjpeg': 'third_party/libjpeg/libjpeg.gyp',
+  'use_system_libpng': 'third_party/libpng/libpng.gyp',
+  'use_system_libusb': 'third_party/libusb/libusb.gyp',
+  'use_system_libvpx': 'third_party/libvpx/libvpx.gyp',
+  'use_system_libwebp': 'third_party/libwebp/libwebp.gyp',
+  'use_system_libxml': 'third_party/libxml/libxml.gyp',
+  'use_system_libxnvctrl' : 'third_party/libXNVCtrl/libXNVCtrl.gyp',
+  'use_system_libxslt': 'third_party/libxslt/libxslt.gyp',
+  'use_system_openssl': 'third_party/boringssl/boringssl.gyp',
+  'use_system_opus': 'third_party/opus/opus.gyp',
+  'use_system_protobuf': 'third_party/protobuf/protobuf.gyp',
+  'use_system_re2': 'third_party/re2/re2.gyp',
+  'use_system_snappy': 'third_party/snappy/snappy.gyp',
+  'use_system_speex': 'third_party/speex/speex.gyp',
+  'use_system_sqlite': 'third_party/sqlite/sqlite.gyp',
+  'use_system_v8': 'v8/tools/gyp/v8.gyp',
+  'use_system_zlib': 'third_party/zlib/zlib.gyp',
+}
+
+
+def DoMain(argv):
+  my_dirname = os.path.dirname(__file__)
+  source_tree_root = os.path.abspath(
+    os.path.join(my_dirname, '..', '..', '..'))
+
+  parser = optparse.OptionParser()
+
+  # Accept arguments in gyp command-line syntax, so that the caller can
+  # re-use the same command line for this script and for gyp.
+  # Default to an empty list so the membership test below works even when no
+  # -D flags are passed.
+  parser.add_option('-D', dest='defines', action='append', default=[])
+
+  parser.add_option('--undo', action='store_true')
+
+  options, args = parser.parse_args(argv)
+
+  for flag, path in REPLACEMENTS.items():
+    if '%s=1' % flag not in options.defines:
+      continue
+
+    if options.undo:
+      # Restore original file, and also remove the backup.
+      # This is meant to restore the source tree to its original state.
+      os.rename(os.path.join(source_tree_root, path + '.orig'),
+                os.path.join(source_tree_root, path))
+    else:
+      # Create a backup copy for --undo.
+      shutil.copyfile(os.path.join(source_tree_root, path),
+                      os.path.join(source_tree_root, path + '.orig'))
+
+      # Copy the gyp file from directory of this script to target path.
+      shutil.copyfile(os.path.join(my_dirname, os.path.basename(path)),
+                      os.path.join(source_tree_root, path))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(DoMain(sys.argv))
diff --git a/build/linux/unbundle/snappy.gyp b/build/linux/unbundle/snappy.gyp
new file mode 100644
index 0000000..ab856ed
--- /dev/null
+++ b/build/linux/unbundle/snappy.gyp
@@ -0,0 +1,29 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'snappy',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'src',
+        'header_filenames': [
+          'snappy-c.h',
+          'snappy-sinksource.h',
+          'snappy-stubs-public.h',
+          'snappy.h',
+        ],
+      },
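+      # The variables above are inputs to shim_headers.gypi below, which
+      # generates shim headers so that includes of the listed bundled headers
+      # resolve to the system copies (a sketch of the mechanism; see
+      # build/shim_headers.gypi).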
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lsnappy',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/speex.gyp b/build/linux/unbundle/speex.gyp
new file mode 100644
index 0000000..75376c8
--- /dev/null
+++ b/build/linux/unbundle/speex.gyp
@@ -0,0 +1,45 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libspeex',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'include',
+        'header_filenames': [
+          'speex/speex_types.h',
+          'speex/speex_callbacks.h',
+          'speex/speex_config_types.h',
+          'speex/speex_stereo.h',
+          'speex/speex_echo.h',
+          'speex/speex_preprocess.h',
+          'speex/speex_jitter.h',
+          'speex/speex.h',
+          'speex/speex_resampler.h',
+          'speex/speex_buffer.h',
+          'speex/speex_header.h',
+          'speex/speex_bits.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
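+      # <!@(command) is gyp's list-valued command expansion: |command| runs at
+      # gyp time and its stdout is split on whitespace into list items, so
+      # each pkg-config flag below becomes a separate cflags/ldflags/libraries
+      # entry.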
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags speex)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other speex)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l speex)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/sqlite.gyp b/build/linux/unbundle/sqlite.gyp
new file mode 100644
index 0000000..918da92
--- /dev/null
+++ b/build/linux/unbundle/sqlite.gyp
@@ -0,0 +1,28 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'sqlite',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags sqlite3)',
+        ],
+        'defines': [
+          'USE_SYSTEM_SQLITE',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other sqlite3)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l sqlite3)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/v8.gyp b/build/linux/unbundle/v8.gyp
new file mode 100644
index 0000000..9b06347
--- /dev/null
+++ b/build/linux/unbundle/v8.gyp
@@ -0,0 +1,64 @@
+# Copyright 2013 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+#       notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+#       copyright notice, this list of conditions and the following
+#       disclaimer in the documentation and/or other materials provided
+#       with the distribution.
+#     * Neither the name of Google Inc. nor the names of its
+#       contributors may be used to endorse or promote products derived
+#       from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+{
+  'includes': ['../../build/toolchain.gypi', '../../build/features.gypi'],
+  'targets': [
+    {
+      'target_name': 'v8',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'variables': {
+        'headers_root_path': '../../include',
+        'header_filenames': [
+          'v8-debug.h',
+          'v8-preparser.h',
+          'v8-profiler.h',
+          'v8-testing.h',
+          'v8.h',
+          'v8stdint.h',
+        ],
+      },
+      'includes': [
+        '../../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lv8',
+        ],
+      },
+    },
+    {
+      'target_name': 'v8_shell',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'dependencies': [
+        'v8'
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/zlib.gyp b/build/linux/unbundle/zlib.gyp
new file mode 100644
index 0000000..0a85ff0
--- /dev/null
+++ b/build/linux/unbundle/zlib.gyp
@@ -0,0 +1,67 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'zlib',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'zlib.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'defines': [
+          'USE_SYSTEM_ZLIB',
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          '-lz',
+        ],
+      },
+    },
+    {
+      'target_name': 'minizip',
+      'type': 'static_library',
+      'all_dependent_settings': {
+        'defines': [
+          'USE_SYSTEM_MINIZIP',
+        ],
+      },
+      'defines': [
+        'USE_SYSTEM_MINIZIP',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lminizip',
+        ],
+      },
+    },
+    {
+      'target_name': 'zip',
+      'type': 'static_library',
+      'dependencies': [
+        'minizip',
+        '../../base/base.gyp:base',
+      ],
+      'include_dirs': [
+        '../..',
+      ],
+      'sources': [
+        'google/zip.cc',
+        'google/zip.h',
+        'google/zip_internal.cc',
+        'google/zip_internal.h',
+        'google/zip_reader.cc',
+        'google/zip_reader.h',
+      ],
+    },
+  ],
+}
diff --git a/build/mac/OWNERS b/build/mac/OWNERS
new file mode 100644
index 0000000..c56e89d
--- /dev/null
+++ b/build/mac/OWNERS
@@ -0,0 +1,2 @@
+mark@chromium.org
+thomasvl@chromium.org
diff --git a/build/mac/asan.gyp b/build/mac/asan.gyp
new file mode 100644
index 0000000..fabe910
--- /dev/null
+++ b/build/mac/asan.gyp
@@ -0,0 +1,53 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'asan_dynamic_runtime',
+      'type': 'none',
+      'variables': {
+        # Every target is going to depend on asan_dynamic_runtime, so allow
+        # this one to depend on itself.
+        'prune_self_dependency': 1,
+        # Path is relative to this GYP file.
+        'asan_rtl_mask_path':
+            '../../third_party/llvm-build/Release+Asserts/lib/clang/*/lib/darwin',
+        'asan_osx_dynamic':
+            '<(asan_rtl_mask_path)/libclang_rt.asan_osx_dynamic.dylib',
+        'asan_iossim_dynamic':
+            '<(asan_rtl_mask_path)/libclang_rt.asan_iossim_dynamic.dylib',
+      },
+      'conditions': [
+        ['OS=="mac"', {
+          'copies': [
+            {
+              'destination': '<(PRODUCT_DIR)',
+              'files': [
+                '<!(/bin/ls <(asan_osx_dynamic))',
+              ],
+            },
+          ],
+        }],
+        # ASan works with iOS simulator only, not bare-metal iOS.
+        ['OS=="ios" and target_arch=="ia32"', {
+          'toolsets': ['host', 'target'],
+          'copies': [
+            {
+              'destination': '<(PRODUCT_DIR)',
+              'target_conditions': [
+                ['_toolset=="host"', {
+                  'files': ['<!(/bin/ls <(asan_osx_dynamic))'],
+                }],
+                ['_toolset=="target"', {
+                  'files': ['<!(/bin/ls <(asan_iossim_dynamic))'],
+                }],
+              ],
+            },
+          ],
+        }],
+      ],
+    },
+  ],
+}
diff --git a/build/mac/change_mach_o_flags.py b/build/mac/change_mach_o_flags.py
new file mode 100755
index 0000000..c2aeaec
--- /dev/null
+++ b/build/mac/change_mach_o_flags.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Usage: change_mach_o_flags.py [--executable-heap] [--no-pie] <executablepath>
+
+Arranges for the executable at |executable_path| to have its data (heap)
+pages protected to prevent execution on Mac OS X 10.7 ("Lion"), and to have
+the PIE (position independent executable) bit set to enable ASLR (address
+space layout randomization). With --executable-heap or --no-pie, the
+respective bits are cleared instead of set, making the heap executable or
+disabling PIE/ASLR.
+
+This script is able to operate on thin (single-architecture) Mach-O files
+and fat (universal, multi-architecture) files. When operating on fat files,
+it will set or clear the bits for each architecture contained therein.
+
+NON-EXECUTABLE HEAP
+
+Traditionally in Mac OS X, 32-bit processes did not have data pages set to
+prohibit execution. Although user programs could call mprotect and
+mach_vm_protect to deny execution of code in data pages, the kernel would
+silently ignore such requests without updating the page tables, and the
+hardware would happily execute code on such pages. 64-bit processes were
+always given proper hardware protection of data pages. This behavior was
+controllable on a system-wide level via the vm.allow_data_exec sysctl, which
+is set by default to 1. The bit with value 1 (set by default) allows code
+execution on data pages for 32-bit processes, and the bit with value 2
+(clear by default) does the same for 64-bit processes.
+
+In Mac OS X 10.7, executables can "opt in" to having hardware protection
+against code execution on data pages applied. This is done by setting a new
+bit in the |flags| field of an executable's |mach_header|. When
+MH_NO_HEAP_EXECUTION is set, proper protections will be applied, regardless
+of the setting of vm.allow_data_exec. See xnu-1699.22.73/osfmk/vm/vm_map.c
+override_nx and xnu-1699.22.73/bsd/kern/mach_loader.c load_machfile.
+
+The Apple toolchain has been revised to set the MH_NO_HEAP_EXECUTION bit when
+producing executables, provided that -allow_heap_execute is not specified
+at link time. Only linkers shipping with Xcode 4.0 and later (ld64-123.2 and
+later) have this ability. See ld64-123.2.1/src/ld/Options.cpp
+Options::reconfigureDefaults() and
+ld64-123.2.1/src/ld/HeaderAndLoadCommands.hpp
+HeaderAndLoadCommandsAtom<A>::flags().
+
+This script sets the MH_NO_HEAP_EXECUTION bit on Mach-O executables. It is
+intended for use with executables produced by a linker that predates Apple's
+modifications to set this bit itself. It is also useful for setting this bit
+for non-i386 executables, including x86_64 executables. Apple's linker only
+sets it for 32-bit i386 executables, presumably under the assumption that
+the value of vm.allow_data_exec is set in stone. However, if someone were to
+change vm.allow_data_exec to 2 or 3, 64-bit x86_64 executables would run
+without hardware protection against code execution on data pages. This
+script can set the bit for x86_64 executables, guaranteeing that they run
+with appropriate protection even when vm.allow_data_exec has been tampered
+with.
+
+POSITION-INDEPENDENT EXECUTABLES/ADDRESS SPACE LAYOUT RANDOMIZATION
+
+This script sets or clears the MH_PIE bit in an executable's Mach-O header,
+enabling or disabling position independence on Mac OS X 10.5 and later.
+Processes running position-independent executables have varying levels of
+ASLR protection depending on the OS release. The main executable's load
+address, shared library load addresses, and the heap and stack base
+addresses may be randomized. Position-independent executables are produced
+by supplying the -pie flag to the linker (or defeated by supplying -no_pie).
+Executables linked with a deployment target of 10.7 or higher have PIE on
+by default.
+
+This script is never strictly needed during the build to enable PIE, as all
+linkers used are recent enough to support -pie. However, it's used to
+disable the PIE bit as needed on already-linked executables.
+"""
+
+import optparse
+import os
+import struct
+import sys
+
+
+# <mach-o/fat.h>
+FAT_MAGIC = 0xcafebabe
+FAT_CIGAM = 0xbebafeca
+
+# <mach-o/loader.h>
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+MH_EXECUTE = 0x2
+MH_PIE = 0x00200000
+MH_NO_HEAP_EXECUTION = 0x01000000
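+
+# For illustration: these are bit masks over the mach_header |flags| word.
+# For example, a header whose flags are 0x00200085 has MH_PIE set
+# (0x00200085 & MH_PIE != 0) and MH_NO_HEAP_EXECUTION clear.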
+
+
+class MachOError(Exception):
+  """A class for exceptions thrown by this module."""
+
+  pass
+
+
+def CheckedSeek(file, offset):
+  """Seeks the file-like object at |file| to offset |offset| and raises a
+  MachOError if anything funny happens."""
+
+  file.seek(offset, os.SEEK_SET)
+  new_offset = file.tell()
+  if new_offset != offset:
+    raise MachOError, \
+          'seek: expected offset %d, observed %d' % (offset, new_offset)
+
+
+def CheckedRead(file, count):
+  """Reads |count| bytes from the file-like |file| object, raising a
+  MachOError if any other number of bytes is read."""
+
+  bytes = file.read(count)
+  if len(bytes) != count:
+    raise MachOError, \
+          'read: expected length %d, observed %d' % (count, len(bytes))
+
+  return bytes
+
+
+def ReadUInt32(file, endian):
+  """Reads an unsinged 32-bit integer from the file-like |file| object,
+  treating it as having endianness specified by |endian| (per the |struct|
+  module), and returns it as a number. Raises a MachOError if the proper
+  length of data can't be read from |file|."""
+
+  bytes = CheckedRead(file, 4)
+
+  (uint32,) = struct.unpack(endian + 'I', bytes)
+  return uint32
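+
+# Illustration: on a big-endian file, ReadUInt32(file, '>') applied to the
+# bytes fe ed fa ce yields 0xfeedface (MH_MAGIC).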
+
+
+def ReadMachHeader(file, endian):
+  """Reads an entire |mach_header| structure (<mach-o/loader.h>) from the
+  file-like |file| object, treating it as having endianness specified by
+  |endian| (per the |struct| module), and returns a 7-tuple of its members
+  as numbers. Raises a MachOError if the proper length of data can't be read
+  from |file|."""
+
+  bytes = CheckedRead(file, 28)
+
+  magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = \
+      struct.unpack(endian + '7I', bytes)
+  return magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags
+
+
+def ReadFatArch(file):
+  """Reads an entire |fat_arch| structure (<mach-o/fat.h>) from the file-like
+  |file| object, treating it as having endianness specified by |endian|
+  (per the |struct| module), and returns a 5-tuple of its members as numbers.
+  Raises a MachOError if the proper length of data can't be read from
+  |file|."""
+
+  bytes = CheckedRead(file, 20)
+
+  cputype, cpusubtype, offset, size, align = struct.unpack('>5I', bytes)
+  return cputype, cpusubtype, offset, size, align
+
+
+def WriteUInt32(file, uint32, endian):
+  """Writes |uint32| as an unsinged 32-bit integer to the file-like |file|
+  object, treating it as having endianness specified by |endian| (per the
+  |struct| module)."""
+
+  bytes = struct.pack(endian + 'I', uint32)
+  assert len(bytes) == 4
+
+  file.write(bytes)
+
+
+def HandleMachOFile(file, options, offset=0):
+  """Seeks the file-like |file| object to |offset|, reads its |mach_header|,
+  and rewrites the header's |flags| field if appropriate. The header's
+  endianness is detected. Both 32-bit and 64-bit Mach-O headers are supported
+  (mach_header and mach_header_64). Raises MachOError if used on a header that
+  does not have a known magic number or is not of type MH_EXECUTE. The
+  MH_PIE and MH_NO_HEAP_EXECUTION bits are set or cleared in the |flags| field
+  according to |options| and written to |file| if any changes need to be made.
+  If already set or clear as specified by |options|, nothing is written."""
+
+  CheckedSeek(file, offset)
+  magic = ReadUInt32(file, '<')
+  if magic == MH_MAGIC or magic == MH_MAGIC_64:
+    endian = '<'
+  elif magic == MH_CIGAM or magic == MH_CIGAM_64:
+    endian = '>'
+  else:
+    raise MachOError, \
+          'Mach-O file at offset %d has an unknown magic number' % offset
+
+  CheckedSeek(file, offset)
+  magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = \
+      ReadMachHeader(file, endian)
+  assert magic == MH_MAGIC or magic == MH_MAGIC_64
+  if filetype != MH_EXECUTE:
+    raise MachOError, \
+          'Mach-O file at offset %d is type 0x%x, expected MH_EXECUTE' % \
+              (offset, filetype)
+
+  original_flags = flags
+
+  if options.no_heap_execution:
+    flags |= MH_NO_HEAP_EXECUTION
+  else:
+    flags &= ~MH_NO_HEAP_EXECUTION
+
+  if options.pie:
+    flags |= MH_PIE
+  else:
+    flags &= ~MH_PIE
+
+  if flags != original_flags:
+    CheckedSeek(file, offset + 24)
+    WriteUInt32(file, flags, endian)
+
+
+def HandleFatFile(file, options, fat_offset=0):
+  """Seeks the file-like |file| object to |offset| and loops over its
+  |fat_header| entries, calling HandleMachOFile for each."""
+
+  CheckedSeek(file, fat_offset)
+  magic = ReadUInt32(file, '>')
+  assert magic == FAT_MAGIC
+
+  nfat_arch = ReadUInt32(file, '>')
+
+  for index in xrange(0, nfat_arch):
+    cputype, cpusubtype, offset, size, align = ReadFatArch(file)
+    assert size >= 28
+
+    # HandleMachOFile will seek around. Come back here after calling it, in
+    # case it sought.
+    fat_arch_offset = file.tell()
+    HandleMachOFile(file, options, offset)
+    CheckedSeek(file, fat_arch_offset)
+
+
+def main(me, args):
+  parser = optparse.OptionParser('%prog [options] <executable_path>')
+  parser.add_option('--executable-heap', action='store_false',
+                    dest='no_heap_execution', default=True,
+                    help='Clear the MH_NO_HEAP_EXECUTION bit')
+  parser.add_option('--no-pie', action='store_false',
+                    dest='pie', default=True,
+                    help='Clear the MH_PIE bit')
+  (options, loose_args) = parser.parse_args(args)
+  if len(loose_args) != 1:
+    parser.print_usage()
+    return 1
+
+  executable_path = loose_args[0]
+  executable_file = open(executable_path, 'rb+')
+
+  magic = ReadUInt32(executable_file, '<')
+  if magic == FAT_CIGAM:
+    # Check FAT_CIGAM and not FAT_MAGIC because the read was little-endian.
+    HandleFatFile(executable_file, options)
+  elif magic == MH_MAGIC or magic == MH_CIGAM or \
+      magic == MH_MAGIC_64 or magic == MH_CIGAM_64:
+    HandleMachOFile(executable_file, options)
+  else:
+    raise MachOError, '%s is not a Mach-O or fat file' % executable_path
+
+  executable_file.close()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[0], sys.argv[1:]))
diff --git a/build/mac/change_mach_o_flags_from_xcode.sh b/build/mac/change_mach_o_flags_from_xcode.sh
new file mode 100755
index 0000000..1824f8d
--- /dev/null
+++ b/build/mac/change_mach_o_flags_from_xcode.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a small wrapper script around change_mach_o_flags.py allowing it to
+# be invoked easily from Xcode. change_mach_o_flags.py expects its arguments
+# on the command line, but Xcode puts its parameters in the environment.
+
+set -e
+
+exec "$(dirname "${0}")/change_mach_o_flags.py" \
+     "${@}" \
+     "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/build/mac/chrome_mac.croc b/build/mac/chrome_mac.croc
new file mode 100644
index 0000000..8cde00c
--- /dev/null
+++ b/build/mac/chrome_mac.croc
@@ -0,0 +1,36 @@
+# -*- python -*-
+# Crocodile config file for Chromium mac
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include chromeos-, linux-, or windows-specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|linux|win|views)(\\.|_)',
+      'include' : 0,
+    },
+    # Don't include ChromeOS dirs
+    {
+      'regexp' : '.*/chromeos/',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_mac\\.',
+      'group' : 'test',
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.m$',
+      'language' : 'ObjC',
+    },
+    {
+      'regexp' : '.*\\.mm$',
+      'language' : 'ObjC++',
+    },
+  ],
+}
diff --git a/build/mac/copy_asan_runtime_dylib.sh b/build/mac/copy_asan_runtime_dylib.sh
new file mode 100755
index 0000000..f221c4a
--- /dev/null
+++ b/build/mac/copy_asan_runtime_dylib.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# For app bundles built with ASan, copies the runtime lib
+# (libclang_rt.asan_osx_dynamic.dylib), on which their executables depend, from
+# the compiler installation path into the bundle and fixes the dylib's install
+# name in the binary to be relative to @executable_path.
+
+set -e
+
+BINARY="${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
+
+if [[ ! -f "$BINARY" ]]; then
+  # This is neither an .app bundle nor a standalone executable.
+  # Most likely the script has been invoked for a data bundle.
+  exit 0
+fi
+
+BINARY_DIR="$(dirname "${BINARY}")"
+
+# Find the link to the ASan runtime encoded in the binary.
+BUILTIN_DYLIB_PATH=$(otool -L "${BINARY}" | \
+    sed -Ene 's/^[[:blank:]]+(.*libclang_rt\.asan_.*_dynamic\.dylib).*$/\1/p')
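+# Example otool -L output line (path illustrative) matched by the sed above:
+#     /usr/lib/clang/lib/darwin/libclang_rt.asan_osx_dynamic.dylib (compatibility version 0.0.0, current version 0.0.0)
+# The sed keeps only the dylib path itself.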
+
+if [[ "${BUILTIN_DYLIB_PATH}" == *asan_iossim_dynamic* ]]; then
+  ASAN_DYLIB_NAME=libclang_rt.asan_iossim_dynamic.dylib
+elif [[ "${BUILTIN_DYLIB_PATH}" == *asan_osx_dynamic* ]]; then
+  ASAN_DYLIB_NAME=libclang_rt.asan_osx_dynamic.dylib
+fi
+
+if [[ -z "${BUILTIN_DYLIB_PATH}" ]]; then
+  echo "${BINARY} does not depend on the ASan runtime library!" >&2
+  exit 1
+fi
+
+# TODO(glider): this doesn't work if we set CC and CXX to override the default
+# Clang.
+ASAN_DYLIB=$(find \
+    "${BUILT_PRODUCTS_DIR}/../../third_party/llvm-build/Release+Asserts/lib/clang/" \
+    -type f -path "*${ASAN_DYLIB_NAME}")
+
+DYLIB_BASENAME=$(basename "${ASAN_DYLIB}")
+if [[ "${DYLIB_BASENAME}" != "${ASAN_DYLIB_NAME}" ]]; then
+  echo "basename(${ASAN_DYLIB}) != ${ASAN_DYLIB_NAME}" >&2
+  exit 1
+fi
+
+# Check whether the directory containing the executable binary is named
+# "MacOS". In this case we're building a full-fledged OSX app and will put
+# the runtime into appname.app/Contents/Libraries/. Otherwise this is probably
+# an iOS gtest app, and the ASan runtime is put next to the executable.
+UPPER_DIR=$(dirname "${BINARY_DIR}")
+if [ "${UPPER_DIR}" == "MacOS" ]; then
+  LIBRARIES_DIR="${UPPER_DIR}/Libraries"
+  mkdir -p "${LIBRARIES_DIR}"
+  NEW_LC_ID_DYLIB="@executable_path/../Libraries/${ASAN_DYLIB_NAME}"
+else
+  LIBRARIES_DIR="${BINARY_DIR}"
+  NEW_LC_ID_DYLIB="@executable_path/${ASAN_DYLIB_NAME}"
+fi
+
+cp "${ASAN_DYLIB}" "${LIBRARIES_DIR}"
+
+# Make LC_ID_DYLIB of the runtime copy point to its location.
+install_name_tool \
+    -id "${NEW_LC_ID_DYLIB}" \
+    "${LIBRARIES_DIR}/${ASAN_DYLIB_NAME}"
+
+# Fix the rpath to the runtime library recorded in the binary.
+install_name_tool \
+    -change "${BUILTIN_DYLIB_PATH}" \
+    "${NEW_LC_ID_DYLIB}" \
+    "${BINARY}"
diff --git a/build/mac/copy_framework_unversioned.sh b/build/mac/copy_framework_unversioned.sh
new file mode 100755
index 0000000..380cc90
--- /dev/null
+++ b/build/mac/copy_framework_unversioned.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Copies a framework to its new home, "unversioning" it.
+#
+# Normally, frameworks are versioned bundles.  The contents of a framework are
+# stored in a versioned directory within the bundle, and symbolic links
+# provide access to the actual code and resources.  See
+# http://developer.apple.com/mac/library/documentation/MacOSX/Conceptual/BPFrameworks/Concepts/FrameworkAnatomy.html
+#
+# The symbolic links usually found in frameworks create problems.  Symbolic
+# links are excluded from code signatures.  That means that it's possible to
+# remove or retarget a symbolic link within a framework without affecting the
+# seal.  In Chrome's case, the outer .app bundle contains a framework where
+# all application code and resources live.  In order for the signature on the
+# .app to be meaningful, it encompasses the framework.  Because framework
+# resources are accessed through the framework's symbolic links, this
+# arrangement results in a case where the resources can be altered without
+# affecting the .app signature's validity.
+#
+# Indirection through symbolic links also carries a runtime performance
+# penalty on open() operations, although open() typically completes so quickly
+# that this is not considered a major performance problem.
+#
+# To resolve these problems, the frameworks that ship within Chrome's .app
+# bundle are unversioned.  Unversioning is simple: instead of using the
+# original outer .framework directory as the framework that ships within the
+# .app, the inner versioned directory is used.  Instead of accessing bundled
+# resources through symbolic links, they are accessed directly.  In normal
+# situations, the only hard-coded use of the versioned directory is by dyld,
+# when loading the framework's code, but this is handled through a normal
+# Mach-O load command, and it is easy to adjust the load command to point to
+# the unversioned framework code rather than the versioned counterpart.
+#
+# The resulting framework bundles aren't strictly conforming, but they work
+# as well as normal versioned framework bundles.
+#
+# An option to skip running install_name_tool is available. By passing -I as
+# the first argument to this script, install_name_tool will be skipped. This
+# is only suitable for copied frameworks that will not be linked against, or
+# when install_name_tool will be run on any linker output when something is
+# linked against the copied framework. This option exists to allow signed
+# frameworks to pass through without subjecting them to any modifications that
+# would break their signatures.
+
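+# Example: if Foo.framework's Versions/Current symlink points at A
+# (illustrative names), then
+#   copy_framework_unversioned.sh Foo.framework /tmp/out
+# copies Foo.framework/Versions/A/ to /tmp/out/Foo.framework, whose dylib and
+# resources are then accessed directly rather than through symbolic links.
+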
+set -e
+
+RUN_INSTALL_NAME_TOOL=1
+if [ $# -eq 3 ] && [ "${1}" = "-I" ] ; then
+  shift
+  RUN_INSTALL_NAME_TOOL=
+fi
+
+if [ $# -ne 2 ] ; then
+  echo "usage: ${0} [-I] FRAMEWORK DESTINATION_DIR" >& 2
+  exit 1
+fi
+
+# FRAMEWORK should be a path to a versioned framework bundle, ending in
+# .framework.  DESTINATION_DIR is the directory that the unversioned framework
+# bundle will be copied to.
+
+FRAMEWORK="${1}"
+DESTINATION_DIR="${2}"
+
+FRAMEWORK_NAME="$(basename "${FRAMEWORK}")"
+if [ "${FRAMEWORK_NAME: -10}" != ".framework" ] ; then
+  echo "${0}: ${FRAMEWORK_NAME} does not end in .framework" >& 2
+  exit 1
+fi
+FRAMEWORK_NAME_NOEXT="${FRAMEWORK_NAME:0:$((${#FRAMEWORK_NAME} - 10))}"
+
+# Find the current version.
+VERSIONS="${FRAMEWORK}/Versions"
+CURRENT_VERSION_LINK="${VERSIONS}/Current"
+CURRENT_VERSION_ID="$(readlink "${CURRENT_VERSION_LINK}")"
+CURRENT_VERSION="${VERSIONS}/${CURRENT_VERSION_ID}"
+
+# Make sure that the framework's structure makes sense as a versioned bundle.
+if [ ! -e "${CURRENT_VERSION}/${FRAMEWORK_NAME_NOEXT}" ] ; then
+  echo "${0}: ${FRAMEWORK_NAME} does not contain a dylib" >& 2
+  exit 1
+fi
+
+DESTINATION="${DESTINATION_DIR}/${FRAMEWORK_NAME}"
+
+# Copy the versioned directory within the versioned framework to its
+# destination location.
+mkdir -p "${DESTINATION_DIR}"
+rsync -acC --delete --exclude Headers --exclude PrivateHeaders \
+    --include '*.so' "${CURRENT_VERSION}/" "${DESTINATION}"
+
+if [[ -n "${RUN_INSTALL_NAME_TOOL}" ]]; then
+  # Adjust the Mach-O LC_ID_DYLIB load command in the framework.  This does not
+  # change the LC_LOAD_DYLIB load commands in anything that may have already
+  # linked against the framework.  Not all frameworks will actually need this
+  # to be changed.  Some frameworks may already be built with the proper
+  # LC_ID_DYLIB for use as an unversioned framework.  Xcode users can do this
+  # by setting LD_DYLIB_INSTALL_NAME to
+  # $(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(WRAPPER_NAME)/$(PRODUCT_NAME)
+  # If invoking ld via gcc or g++, pass the desired path to -Wl,-install_name
+  # at link time.
+  FRAMEWORK_DYLIB="${DESTINATION}/${FRAMEWORK_NAME_NOEXT}"
+  LC_ID_DYLIB_OLD="$(otool -l "${FRAMEWORK_DYLIB}" |
+                         grep -A10 "^ *cmd LC_ID_DYLIB$" |
+                         grep -m1 "^ *name" |
+                         sed -Ee 's/^ *name (.*) \(offset [0-9]+\)$/\1/')"
+  VERSION_PATH="/Versions/${CURRENT_VERSION_ID}/${FRAMEWORK_NAME_NOEXT}"
+  LC_ID_DYLIB_NEW="$(echo "${LC_ID_DYLIB_OLD}" |
+                     sed -Ee "s%${VERSION_PATH}$%/${FRAMEWORK_NAME_NOEXT}%")"
+
+  if [ "${LC_ID_DYLIB_NEW}" != "${LC_ID_DYLIB_OLD}" ] ; then
+    install_name_tool -id "${LC_ID_DYLIB_NEW}" "${FRAMEWORK_DYLIB}"
+  fi
+fi
diff --git a/build/mac/edit_xibs.sh b/build/mac/edit_xibs.sh
new file mode 100755
index 0000000..b7b749e
--- /dev/null
+++ b/build/mac/edit_xibs.sh
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is a convenience to run GYP for /src/chrome/chrome_nibs.gyp
+# with the Xcode generator (since you likely build with ninja). Documentation:
+#   http://dev.chromium.org/developers/design-documents/mac-xib-files
+
+set -e
+
+RELSRC=$(dirname "$0")/../..
+SRC=$(cd "$RELSRC" && pwd)
+export PYTHONPATH="$PYTHONPATH:$SRC/build"
+export GYP_GENERATORS=xcode
+"$SRC/tools/gyp/gyp" -I"$SRC/build/common.gypi" "$SRC/chrome/chrome_nibs.gyp"
+echo "You can now edit XIB files in Xcode using:"
+echo "  $SRC/chrome/chrome_nibs.xcodeproj"
diff --git a/build/mac/find_sdk.py b/build/mac/find_sdk.py
new file mode 100755
index 0000000..0534766
--- /dev/null
+++ b/build/mac/find_sdk.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints the lowest locally available SDK version greater than or equal to a
+given minimum sdk version to standard output.
+
+Usage:
+  python find_sdk.py 10.6  # Ignores SDKs < 10.6
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+
+from optparse import OptionParser
+
+
+def parse_version(version_str):
+  """'10.6' => [10, 6]"""
+  return map(int, re.findall(r'(\d+)', version_str))
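+
+# Note: list comparison gives correct version ordering where string
+# comparison would not: parse_version('10.10') > parse_version('10.9')
+# because [10, 10] > [10, 9], even though '10.10' < '10.9' as strings.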
+
+
+def main():
+  parser = OptionParser()
+  parser.add_option("--verify",
+                    action="store_true", dest="verify", default=False,
+                    help="return the sdk argument and warn if it doesn't exist")
+  parser.add_option("--sdk_path",
+                    action="store", type="string", dest="sdk_path", default="",
+                    help="user-specified SDK path; bypasses verification")
+  parser.add_option("--print_sdk_path",
+                    action="store_true", dest="print_sdk_path", default=False,
+                    help="Additionaly print the path the SDK (appears first).")
+  (options, args) = parser.parse_args()
+  if len(args) != 1:
+    parser.error('Please specify a minimum SDK version, e.g. 10.6')
+  min_sdk_version = args[0]
+
+  job = subprocess.Popen(['xcode-select', '-print-path'],
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
+  out, err = job.communicate()
+  if job.returncode != 0:
+    print >> sys.stderr, out
+    print >> sys.stderr, err
+    raise Exception(('Error %d running xcode-select, you might have to run '
+      '|sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer| '
+      'if you are using Xcode 4.') % job.returncode)
+  # The Developer folder moved in Xcode 4.3.
+  xcode43_sdk_path = os.path.join(
+      out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs')
+  if os.path.isdir(xcode43_sdk_path):
+    sdk_dir = xcode43_sdk_path
+  else:
+    sdk_dir = os.path.join(out.rstrip(), 'SDKs')
+  sdks = [re.findall('^MacOSX(10\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)]
+  sdks = [s[0] for s in sdks if s]  # [['10.5'], ['10.6']] => ['10.5', '10.6']
+  sdks = [s for s in sdks  # ['10.5', '10.6'] => ['10.6']
+          if parse_version(s) >= parse_version(min_sdk_version)]
+  if not sdks:
+    raise Exception('No %s+ SDK found' % min_sdk_version)
+  best_sdk = sorted(sdks, key=parse_version)[0]
+
+  if options.verify and best_sdk != min_sdk_version and not options.sdk_path:
+    print >> sys.stderr, ''
+    print >> sys.stderr, '                                           vvvvvvv'
+    print >> sys.stderr, ''
+    print >> sys.stderr, \
+        'This build requires the %s SDK, but it was not found on your system.' \
+        % min_sdk_version
+    print >> sys.stderr, \
+        'Either install it, or explicitly set mac_sdk in your GYP_DEFINES.'
+    print >> sys.stderr, ''
+    print >> sys.stderr, '                                           ^^^^^^^'
+    print >> sys.stderr, ''
+    return min_sdk_version
+
+  if options.print_sdk_path:
+    print subprocess.check_output(['xcodebuild', '-version', '-sdk',
+                                   'macosx' + best_sdk, 'Path']).strip()
+
+  return best_sdk
+
+
+if __name__ == '__main__':
+  if sys.platform != 'darwin':
+    raise Exception("This script only runs on Mac")
+  print main()
diff --git a/build/mac/make_more_helpers.sh b/build/mac/make_more_helpers.sh
new file mode 100755
index 0000000..6f5c474
--- /dev/null
+++ b/build/mac/make_more_helpers.sh
@@ -0,0 +1,91 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: make_more_helpers.sh <directory_within_contents> <app_name>
+#
+# This script creates additional helper .app bundles for Chromium, based on
+# the existing helper .app bundle, changing their Mach-O header's flags to
+# enable and disable various features. Based on Chromium Helper.app, it will
+# create Chromium Helper EH.app, which has the MH_NO_HEAP_EXECUTION bit
+# cleared to support Chromium child processes that require an executable heap,
+# and Chromium Helper NP.app, which has the MH_PIE bit cleared to support
+# Chromium child processes that cannot tolerate ASLR.
+#
+# This script expects to be called from the chrome_exe target as a postbuild,
+# and operates directly within the built-up browser app's versioned directory.
+#
+# Each helper is adjusted by giving it the proper bundle name, renaming the
+# executable, adjusting several Info.plist keys, and changing the executable's
+# Mach-O flags.
+
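+# Example: invoked with a <directory_within_contents> of "Versions/A"
+# (illustrative) and an <app_name> of "Chromium", this copies
+# "Chromium Helper.app" found there to "Chromium Helper EH.app" and
+# "Chromium Helper NP.app", adjusting each copy as described above.
+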
+set -eu
+
+make_helper() {
+  local containing_dir="${1}"
+  local app_name="${2}"
+  local feature="${3}"
+  local flags="${4}"
+
+  local helper_name="${app_name} Helper"
+  local helper_stem="${containing_dir}/${helper_name}"
+  local original_helper="${helper_stem}.app"
+  if [[ ! -d "${original_helper}" ]]; then
+    echo "${0}: error: ${original_helper} is a required directory" >& 2
+    exit 1
+  fi
+  local original_helper_exe="${original_helper}/Contents/MacOS/${helper_name}"
+  if [[ ! -f "${original_helper_exe}" ]]; then
+    echo "${0}: error: ${original_helper_exe} is a required file" >& 2
+    exit 1
+  fi
+
+  local feature_helper="${helper_stem} ${feature}.app"
+
+  rsync -acC --delete --include '*.so' "${original_helper}/" "${feature_helper}"
+
+  local helper_feature="${helper_name} ${feature}"
+  local helper_feature_exe="${feature_helper}/Contents/MacOS/${helper_feature}"
+  mv "${feature_helper}/Contents/MacOS/${helper_name}" "${helper_feature_exe}"
+
+  local change_flags="$(dirname "${0}")/change_mach_o_flags.py"
+  "${change_flags}" ${flags} "${helper_feature_exe}"
+
+  local feature_info="${feature_helper}/Contents/Info"
+  local feature_info_plist="${feature_info}.plist"
+
+  defaults write "${feature_info}" "CFBundleDisplayName" "${helper_feature}"
+  defaults write "${feature_info}" "CFBundleExecutable" "${helper_feature}"
+
+  cfbundleid="$(defaults read "${feature_info}" "CFBundleIdentifier")"
+  feature_cfbundleid="${cfbundleid}.${feature}"
+  defaults write "${feature_info}" "CFBundleIdentifier" "${feature_cfbundleid}"
+
+  cfbundlename="$(defaults read "${feature_info}" "CFBundleName")"
+  feature_cfbundlename="${cfbundlename} ${feature}"
+  defaults write "${feature_info}" "CFBundleName" "${feature_cfbundlename}"
+
+  # As usual, defaults may have rewritten the plist in whatever format it
+  # prefers, so convert the Info.plist back to the expected XML format.
+  plutil -convert xml1 "${feature_info_plist}"
+
+  # `defaults` also changes the file permissions, so make the file
+  # world-readable again.
+  chmod a+r "${feature_info_plist}"
+}
+
+if [[ ${#} -ne 2 ]]; then
+  echo "usage: ${0} <directory_within_contents> <app_name>" >& 2
+  exit 1
+fi
+
+DIRECTORY_WITHIN_CONTENTS="${1}"
+APP_NAME="${2}"
+
+CONTENTS_DIR="${BUILT_PRODUCTS_DIR}/${CONTENTS_FOLDER_PATH}"
+CONTAINING_DIR="${CONTENTS_DIR}/${DIRECTORY_WITHIN_CONTENTS}"
+
+make_helper "${CONTAINING_DIR}" "${APP_NAME}" "EH" "--executable-heap"
+make_helper "${CONTAINING_DIR}" "${APP_NAME}" "NP" "--no-pie"
diff --git a/build/mac/strip_from_xcode b/build/mac/strip_from_xcode
new file mode 100755
index 0000000..c26b9fb
--- /dev/null
+++ b/build/mac/strip_from_xcode
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a handy wrapper script that figures out how to call the strip
+# utility (strip_save_dsym in this case), if it even needs to be called at all,
+# and then does it.  This script should be called by a post-link phase in
+# targets that might generate Mach-O executables, dynamic libraries, or
+# loadable bundles.
+#
+# An example "Strip If Needed" build phase placed after "Link Binary With
+# Libraries" would do:
+# exec "${XCODEPROJ_DEPTH}/build/mac/strip_from_xcode"
+
+if [ "${CONFIGURATION}" != "Release" ] ; then
+  # Only strip in release mode.
+  exit 0
+fi
+
+declare -a FLAGS
+
+# MACH_O_TYPE is not set for a command-line tool, so check PRODUCT_TYPE too.
+# Weird.
+if [ "${MACH_O_TYPE}" = "mh_execute" ] || \
+   [ "${PRODUCT_TYPE}" = "com.apple.product-type.tool" ] ; then
+  # Strip everything (no special flags).  No-op.
+  true
+elif [ "${MACH_O_TYPE}" = "mh_dylib" ] || \
+     [ "${MACH_O_TYPE}" = "mh_bundle" ]; then
+  # Strip debugging symbols and local symbols
+  FLAGS[${#FLAGS[@]}]=-S
+  FLAGS[${#FLAGS[@]}]=-x
+elif [ "${MACH_O_TYPE}" = "staticlib" ] ; then
+  # Don't strip static libraries.
+  exit 0
+else
+  # Warn, but don't treat this as an error.
+  echo "$0: warning: unrecognized MACH_O_TYPE ${MACH_O_TYPE}"
+  exit 0
+fi
+
+if [ -n "${STRIPFLAGS}" ] ; then
+  # Pick up the standard STRIPFLAGS Xcode setting, used for "Additional Strip
+  # Flags".
+  for stripflag in "${STRIPFLAGS}" ; do
+    FLAGS[${#FLAGS[@]}]="${stripflag}"
+  done
+fi
+
+if [ -n "${CHROMIUM_STRIP_SAVE_FILE}" ] ; then
+  # An Xcode project can communicate a file listing symbols to save by
+  # setting it in this environment variable as a build setting.  This isn't a
+  # standard Xcode setting.  It's used in preference to STRIPFLAGS to
+  # eliminate quoting ambiguity concerns.
+  FLAGS[${#FLAGS[@]}]=-s
+  FLAGS[${#FLAGS[@]}]="${CHROMIUM_STRIP_SAVE_FILE}"
+fi
+
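+# Example: for a dylib built in Release with CHROMIUM_STRIP_SAVE_FILE set to a
+# file "saved_symbols" (illustrative), the exec below effectively runs:
+#   strip_save_dsym -S -x -s saved_symbols "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
+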
+exec "$(dirname ${0})/strip_save_dsym" "${FLAGS[@]}" \
+     "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/build/mac/strip_save_dsym b/build/mac/strip_save_dsym
new file mode 100755
index 0000000..c9cf226
--- /dev/null
+++ b/build/mac/strip_save_dsym
@@ -0,0 +1,335 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: strip_save_dsym <whatever-arguments-you-would-pass-to-strip>
+#
+# strip_save_dsym is a wrapper around the standard strip utility.  Given an
+# input Mach-O file, strip_save_dsym will save a copy of the file in a "fake"
+# .dSYM bundle for debugging, and then call strip to strip the Mach-O file.
+# Note that the .dSYM file is a "fake" in that it's not a self-contained
+# .dSYM bundle, it just contains a copy of the original (unstripped) Mach-O
+# file, and therefore contains references to object files on the filesystem.
+# The generated .dSYM bundle is therefore unsuitable for debugging in the
+# absence of these .o files.
+#
+# If a .dSYM already exists and has a newer timestamp than the Mach-O file,
+# this utility does nothing.  That allows strip_save_dsym to be run on a file
+# that has already been stripped without trashing the .dSYM.
+#
+# Rationale: the "right" way to generate dSYM bundles, dsymutil, is incredibly
+# slow.  On the other hand, doing a file copy (which is really all that
+# dsymutil does) is comparatively fast.  Since we usually just want to strip
+# a release-mode executable but still be able to debug it, and we don't care
+# so much about generating a hermetic dSYM bundle, we'll prefer the file copy.
+# If a real dSYM is ever needed, it's still possible to create one by running
+# dsymutil and pointing it at the original Mach-O file inside the "fake"
+# bundle, provided that the object files are available.
+
+import errno
+import os
+import re
+import shutil
+import subprocess
+import sys
+import time
+
+# Returns a list of architectures contained in a Mach-O file.  The file can be
+# a universal (fat) file, in which case there will be one list element for
+# each contained architecture, or it can be a thin single-architecture Mach-O
+# file, in which case the list will contain a single element identifying the
+# architecture.  On error, returns an empty list.  Determines the architecture
+# list by calling file.
+def macho_archs(macho):
+  macho_types = ["executable",
+                 "dynamically linked shared library",
+                 "bundle"]
+  macho_types_re = "Mach-O (?:64-bit )?(?:" + "|".join(macho_types) + ")"
+
+  file_cmd = subprocess.Popen(["/usr/bin/file", "-b", "--", macho],
+                              stdout=subprocess.PIPE)
+
+  archs = []
+
+  type_line = file_cmd.stdout.readline()
+  type_match = re.match("^%s (.*)$" % macho_types_re, type_line)
+  if type_match:
+    return [type_match.group(1)]
+  else:
+    type_match = re.match("^Mach-O universal binary with (.*) architectures$",
+                          type_line)
+    if type_match:
+      for i in range(0, int(type_match.group(1))):
+        arch_line = file_cmd.stdout.readline()
+        arch_match = re.match(
+                     "^.* \(for architecture (.*)\):\t%s .*$" % macho_types_re,
+                     arch_line)
+        if arch_match:
+          archs.append(arch_match.group(1))
+
+  if file_cmd.wait() != 0:
+    archs = []
+
+  if len(archs) == 0:
+    print >> sys.stderr, "No architectures in %s" % macho
+
+  return archs
+
+# Returns a dictionary mapping architectures contained in the file as returned
+# by macho_archs to the LC_UUID load command for that architecture.
+# Architectures with no LC_UUID load command are omitted from the dictionary.
+# Determines the UUID value by calling otool.
+def macho_uuids(macho):
+  uuids = {}
+
+  archs = macho_archs(macho)
+  if len(archs) == 0:
+    return uuids
+
+  for arch in archs:
+    if arch == "":
+      continue
+
+    otool_cmd = subprocess.Popen(["/usr/bin/otool", "-arch", arch, "-l", "-",
+                                  macho],
+                                 stdout=subprocess.PIPE)
+    # state 0 is when nothing UUID-related has been seen yet.  State 1 is
+    # entered after a load command begins, but it may not be an LC_UUID load
+    # command.  States 2, 3, and 4 are intermediate states while reading an
+    # LC_UUID command.  State 5 is the terminal state for a successful LC_UUID
+    # read.  State 6 is the error state.
+    state = 0
+    uuid = ""
+    for otool_line in otool_cmd.stdout:
+      if state == 0:
+        if re.match("^Load command .*$", otool_line):
+          state = 1
+      elif state == 1:
+        if re.match("^     cmd LC_UUID$", otool_line):
+          state = 2
+        else:
+          state = 0
+      elif state == 2:
+        if re.match("^ cmdsize 24$", otool_line):
+          state = 3
+        else:
+          state = 6
+      elif state == 3:
+        # The UUID display format changed in the version of otool shipping
+        # with the Xcode 3.2.2 prerelease.  The new format is traditional:
+        #    uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+        # and with Xcode 3.2.6, the line is indented one more space:
+        #     uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+        # The old format, from otool in cctools-750 and older, breaks the
+        # UUID up into a sequence of bytes:
+        #    uuid 0x4d 0x71 0x35 0xb2 0x9c 0x56 0xc5 0xf5
+        #         0x5f 0x49 0xa9 0x94 0x25 0x8e 0x09 0x55
+        new_uuid_match = re.match("^ {3,4}uuid (.{8}-.{4}-.{4}-.{4}-.{12})$",
+                                  otool_line)
+        if new_uuid_match:
+          uuid = new_uuid_match.group(1)
+
+          # Skip state 4, there is no second line to read.
+          state = 5
+        else:
+          old_uuid_match = re.match("^   uuid 0x(..) 0x(..) 0x(..) 0x(..) "
+                                    "0x(..) 0x(..) 0x(..) 0x(..)$",
+                                    otool_line)
+          if old_uuid_match:
+            state = 4
+            uuid = old_uuid_match.group(1) + old_uuid_match.group(2) + \
+                   old_uuid_match.group(3) + old_uuid_match.group(4) + "-" + \
+                   old_uuid_match.group(5) + old_uuid_match.group(6) + "-" + \
+                   old_uuid_match.group(7) + old_uuid_match.group(8) + "-"
+          else:
+            state = 6
+      elif state == 4:
+        old_uuid_match = re.match("^        0x(..) 0x(..) 0x(..) 0x(..) "
+                                  "0x(..) 0x(..) 0x(..) 0x(..)$",
+                                  otool_line)
+        if old_uuid_match:
+          state = 5
+          uuid += old_uuid_match.group(1) + old_uuid_match.group(2) + "-" + \
+                  old_uuid_match.group(3) + old_uuid_match.group(4) + \
+                  old_uuid_match.group(5) + old_uuid_match.group(6) + \
+                  old_uuid_match.group(7) + old_uuid_match.group(8)
+        else:
+          state = 6
+
+    if otool_cmd.wait() != 0:
+      state = 6
+
+    if state == 5:
+      uuids[arch] = uuid.upper()
+
+  if len(uuids) == 0:
+    print >> sys.stderr, "No UUIDs in %s" % macho
+
+  return uuids
+
+# Given a path to a Mach-O file and possible information from the environment,
+# determines the desired path to the .dSYM.
+def dsym_path(macho):
+  # If building a bundle, the .dSYM should be placed next to the bundle.  Use
+  # WRAPPER_NAME to make this determination.  If called from xcodebuild,
+  # WRAPPER_NAME will be set to the name of the bundle.
+  dsym = ""
+  if "WRAPPER_NAME" in os.environ:
+    if "BUILT_PRODUCTS_DIR" in os.environ:
+      dsym = os.path.join(os.environ["BUILT_PRODUCTS_DIR"],
+                          os.environ["WRAPPER_NAME"])
+    else:
+      dsym = os.environ["WRAPPER_NAME"]
+  else:
+    dsym = macho
+
+  dsym += ".dSYM"
+
+  return dsym
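+
+# Example: with BUILT_PRODUCTS_DIR of /b/Release (illustrative) and
+# WRAPPER_NAME of Foo.framework, dsym_path returns
+# /b/Release/Foo.framework.dSYM; with WRAPPER_NAME unset it returns
+# <macho>.dSYM next to the input file.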
+
+# Creates a fake .dSYM bundle at dsym for macho, a Mach-O image with the
+# architectures and UUIDs specified by the uuids map.
+def make_fake_dsym(macho, dsym):
+  uuids = macho_uuids(macho)
+  if len(uuids) == 0:
+    return False
+
+  dwarf_dir = os.path.join(dsym, "Contents", "Resources", "DWARF")
+  dwarf_file = os.path.join(dwarf_dir, os.path.basename(macho))
+  try:
+    os.makedirs(dwarf_dir)
+  except OSError, (err, error_string):
+    if err != errno.EEXIST:
+      raise
+  shutil.copyfile(macho, dwarf_file)
+
+  # info_template is the same as what dsymutil would have written, with the
+  # addition of the fake_dsym key.
+  info_template = \
+'''<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+	<dict>
+		<key>CFBundleDevelopmentRegion</key>
+		<string>English</string>
+		<key>CFBundleIdentifier</key>
+		<string>com.apple.xcode.dsym.%(root_name)s</string>
+		<key>CFBundleInfoDictionaryVersion</key>
+		<string>6.0</string>
+		<key>CFBundlePackageType</key>
+		<string>dSYM</string>
+		<key>CFBundleSignature</key>
+		<string>????</string>
+		<key>CFBundleShortVersionString</key>
+		<string>1.0</string>
+		<key>CFBundleVersion</key>
+		<string>1</string>
+		<key>dSYM_UUID</key>
+		<dict>
+%(uuid_dict)s		</dict>
+		<key>fake_dsym</key>
+		<true/>
+	</dict>
+</plist>
+'''
+
+  root_name = os.path.basename(dsym)[:-5]  # whatever.dSYM without .dSYM
+  uuid_dict = ""
+  for arch in sorted(uuids):
+    uuid_dict += "\t\t\t<key>" + arch + "</key>\n"\
+                 "\t\t\t<string>" + uuids[arch] + "</string>\n"
+  info_dict = {
+    "root_name": root_name,
+    "uuid_dict": uuid_dict,
+  }
+  info_contents = info_template % info_dict
+  info_file = os.path.join(dsym, "Contents", "Info.plist")
+  info_fd = open(info_file, "w")
+  info_fd.write(info_contents)
+  info_fd.close()
+
+  return True
+
+# For a Mach-O file, determines where the .dSYM bundle should be located.  If
+# the bundle does not exist or has a modification time older than the Mach-O
+# file, calls make_fake_dsym to create a fake .dSYM bundle there, then strips
+# the Mach-O file and sets the modification time on the .dSYM bundle and Mach-O
+# file to be identical.
+def strip_and_make_fake_dsym(macho):
+  dsym = dsym_path(macho)
+  macho_stat = os.stat(macho)
+  dsym_stat = None
+  try:
+    dsym_stat = os.stat(dsym)
+  except OSError, (err, error_string):
+    if err != errno.ENOENT:
+      raise
+
+  if dsym_stat is None or dsym_stat.st_mtime < macho_stat.st_mtime:
+    # Make a .dSYM bundle
+    if not make_fake_dsym(macho, dsym):
+      return False
+
+    # Strip the Mach-O file
+    remove_dsym = True
+    try:
+      strip_cmdline = ['xcrun', 'strip'] + sys.argv[1:]
+      strip_cmd = subprocess.Popen(strip_cmdline)
+      if strip_cmd.wait() == 0:
+        remove_dsym = False
+    finally:
+      if remove_dsym:
+        shutil.rmtree(dsym)
+
+    # Update modification time on the Mach-O file and .dSYM bundle
+    now = time.time()
+    os.utime(macho, (now, now))
+    os.utime(dsym, (now, now))
+
+  return True
+
+def main(argv=None):
+  if argv is None:
+    argv = sys.argv
+
+  # This only supports operating on one file at a time.  Look at the arguments
+  # to strip to figure out what the source to be stripped is.  Arguments are
+  # processed in the same way that strip does, although to reduce complexity,
+  # this doesn't do all of the same checking as strip.  For example, strip
+  # has no -Z switch and would treat -Z on the command line as an error.  For
+  # the purposes of this script, that's fine.
+  macho = None
+  process_switches = True
+  ignore_argument = False
+  for arg in argv[1:]:
+    if ignore_argument:
+      ignore_argument = False
+      continue
+    if process_switches:
+      if arg == "-":
+        process_switches = False
+      # These strip switches accept an argument:
+      if arg in ["-s", "-R", "-d", "-o", "-arch"]:
+        ignore_argument = True
+      if arg[0] == "-":
+        continue
+    if macho is None:
+      macho = arg
+    else:
+      print >> sys.stderr, "Too many things to strip"
+      return 1
+
+  if macho is None:
+    print >> sys.stderr, "Nothing to strip"
+    return 1
+
+  if not strip_and_make_fake_dsym(macho):
+    return 1
+
+  return 0
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv))
diff --git a/build/mac/tweak_info_plist.py b/build/mac/tweak_info_plist.py
new file mode 100755
index 0000000..4a6c475
--- /dev/null
+++ b/build/mac/tweak_info_plist.py
@@ -0,0 +1,280 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Xcode supports build variable substitutions and CPP; sadly, that doesn't work
+# because:
+#
+# 1. Xcode wants to do the Info.plist work before it runs any build phases;
+#    this means if we were to generate a .h file for INFOPLIST_PREFIX_HEADER
+#    we'd have to put it in another target so it runs in time.
+# 2. Xcode also doesn't check to see if the header being used as a prefix for
+#    the Info.plist has changed.  So even if we updated it, it's only looking
+#    at the modtime of the info.plist to see if that's changed.
+#
+# So, we work around all of this by making a script build phase that will run
+# during the app build, and simply update the info.plist in place.  This way
+# by the time the app target is done, the info.plist is correct.
+#
+
+import optparse
+import os
+from os import environ as env
+import plistlib
+import re
+import subprocess
+import sys
+import tempfile
+
+TOP = os.path.join(env['SRCROOT'], '..')
+
+
+def _GetOutput(args):
+  """Runs a subprocess and waits for termination. Returns (stdout, returncode)
+  of the process. stderr is attached to the parent."""
+  proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+  (stdout, stderr) = proc.communicate()
+  return (stdout, proc.returncode)
+
+
+def _GetOutputNoError(args):
+  """Similar to _GetOutput() but ignores stderr. If there's an error launching
+  the child (like file not found), the exception will be caught and (None, 1)
+  will be returned to mimic quiet failure."""
+  try:
+    proc = subprocess.Popen(args, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+  except OSError:
+    return (None, 1)
+  (stdout, stderr) = proc.communicate()
+  return (stdout, proc.returncode)
+
+
+def _RemoveKeys(plist, *keys):
+  """Removes a varargs of keys from the plist."""
+  for key in keys:
+    try:
+      del plist[key]
+    except KeyError:
+      pass
+
+
+def _AddVersionKeys(plist, version=None):
+  """Adds the product version number into the plist. Returns True on success and
+  False on error. The error will be printed to stderr."""
+  if version:
+    match = re.match(r'\d+\.\d+\.(\d+\.\d+)$', version)
+    if not match:
+      print >>sys.stderr, 'Invalid version string specified: "%s"' % version
+      return False
+
+    full_version = match.group(0)
+    bundle_version = match.group(1)
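+    # For example, a hypothetical version of "38.0.2125.101" yields a
+    # full_version of "38.0.2125.101" and a bundle_version of "2125.101".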
+
+  else:
+    # Pull in the Chrome version number.
+    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+    VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
+
+    (stdout, retval1) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
+                                    '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'])
+    full_version = stdout.rstrip()
+
+    (stdout, retval2) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
+                                    '@BUILD@.@PATCH@'])
+    bundle_version = stdout.rstrip()
+
+    # If either of the two version commands finished with non-zero returncode,
+    # report the error up.
+    if retval1 or retval2:
+      return False
+
+  # Add public version info so "Get Info" works.
+  plist['CFBundleShortVersionString'] = full_version
+
+  # Honor the 429496.72.95 limit.  The maximum comes from splitting 2^32 - 1
+  # into 6, 2, 2 digits.  The limitation was present in Tiger; it may have
+  # been fixed in a later OS release, but that hasn't been tested (it's easy
+  # enough to find out with "lsregister -dump").
+  # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html
+  # BUILD will always be an increasing value, so BUILD.PATCH gives us
+  # something unique that meets what LS wants.
+  plist['CFBundleVersion'] = bundle_version
+
+  # Return with no error.
+  return True
+
+
+def _DoSCMKeys(plist, add_keys):
+  """Adds the SCM information, visible in about:version, to property list. If
+  |add_keys| is True, it will insert the keys, otherwise it will remove them."""
+  scm_revision = None
+  if add_keys:
+    # Pull in the Chrome revision number.
+    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+    LASTCHANGE_FILE = os.path.join(TOP, 'build/util/LASTCHANGE')
+    (stdout, retval) = _GetOutput([VERSION_TOOL, '-f', LASTCHANGE_FILE, '-t',
+                                  '@LASTCHANGE@'])
+    if retval:
+      return False
+    scm_revision = stdout.rstrip()
+
+  # Remove any existing key, then add the new revision if one was found.
+  _RemoveKeys(plist, 'SCMRevision')
+  if scm_revision is not None:
+    plist['SCMRevision'] = scm_revision
+  elif add_keys:
+    print >>sys.stderr, 'Could not determine SCM revision.  This may be OK.'
+
+  return True
+
+
+def _AddBreakpadKeys(plist, branding):
+  """Adds the Breakpad keys. This must be called AFTER _AddVersionKeys() and
+  also requires the |branding| argument."""
+  plist['BreakpadReportInterval'] = '3600'  # Deliberately a string.
+  plist['BreakpadProduct'] = '%s_Mac' % branding
+  plist['BreakpadProductDisplay'] = branding
+  plist['BreakpadVersion'] = plist['CFBundleShortVersionString']
+  # These are both deliberately strings and not boolean.
+  plist['BreakpadSendAndExit'] = 'YES'
+  plist['BreakpadSkipConfirm'] = 'YES'
+
+
+def _RemoveBreakpadKeys(plist):
+  """Removes any set Breakpad keys."""
+  _RemoveKeys(plist,
+      'BreakpadURL',
+      'BreakpadReportInterval',
+      'BreakpadProduct',
+      'BreakpadProductDisplay',
+      'BreakpadVersion',
+      'BreakpadSendAndExit',
+      'BreakpadSkipConfirm')
+
+
+def _TagSuffixes():
+  # Keep this list sorted in the order that tag suffix components are to
+  # appear in a tag value. That is to say, it should be sorted per ASCII.
+  components = ('32bit', 'full')
+  assert tuple(sorted(components)) == components
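+  # With the components above, the returned list is
+  # ['', '-32bit', '-full', '-32bit-full'].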
+
+  components_len = len(components)
+  combinations = 1 << components_len
+  tag_suffixes = []
+  for combination in xrange(0, combinations):
+    tag_suffix = ''
+    for component_index in xrange(0, components_len):
+      if combination & (1 << component_index):
+        tag_suffix += '-' + components[component_index]
+    tag_suffixes.append(tag_suffix)
+  return tag_suffixes
+
+
+def _AddKeystoneKeys(plist, bundle_identifier):
+  """Adds the Keystone keys. This must be called AFTER _AddVersionKeys() and
+  also requires the |bundle_identifier| argument (com.example.product)."""
+  plist['KSVersion'] = plist['CFBundleShortVersionString']
+  plist['KSProductID'] = bundle_identifier
+  plist['KSUpdateURL'] = 'https://tools.google.com/service/update2'
+
+  _RemoveKeys(plist, 'KSChannelID')
+  for tag_suffix in _TagSuffixes():
+    if tag_suffix:
+      plist['KSChannelID' + tag_suffix] = tag_suffix
+
+
+def _RemoveKeystoneKeys(plist):
+  """Removes any set Keystone keys."""
+  _RemoveKeys(plist,
+      'KSVersion',
+      'KSProductID',
+      'KSUpdateURL')
+
+  tag_keys = []
+  for tag_suffix in _TagSuffixes():
+    tag_keys.append('KSChannelID' + tag_suffix)
+  _RemoveKeys(plist, *tag_keys)
+
+
+def Main(argv):
+  parser = optparse.OptionParser('%prog [options]')
+  parser.add_option('--breakpad', dest='use_breakpad', action='store',
+      type='int', default=False, help='Enable Breakpad [1 or 0]')
+  parser.add_option('--breakpad_uploads', dest='breakpad_uploads',
+      action='store', type='int', default=False,
+      help='Enable Breakpad\'s uploading of crash dumps [1 or 0]')
+  parser.add_option('--keystone', dest='use_keystone', action='store',
+      type='int', default=False, help='Enable Keystone [1 or 0]')
+  parser.add_option('--scm', dest='add_scm_info', action='store', type='int',
+      default=True, help='Add SCM metadata [1 or 0]')
+  parser.add_option('--branding', dest='branding', action='store',
+      type='string', default=None, help='The branding of the binary')
+  parser.add_option('--bundle_id', dest='bundle_identifier',
+      action='store', type='string', default=None,
+      help='The bundle id of the binary')
+  parser.add_option('--version', dest='version', action='store', type='string',
+      default=None, help='The version string [major.minor.build.patch]')
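+
+  # A typical invocation from an Xcode script phase (hypothetical values)
+  # might look like:
+  #   tweak_info_plist.py --breakpad=1 --breakpad_uploads=0 --keystone=0 \
+  #       --scm=1 --branding=Chromium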
+  (options, args) = parser.parse_args(argv)
+
+  if len(args) > 0:
+    print >>sys.stderr, parser.get_usage()
+    return 1
+
+  # Read the plist into its parsed format.
+  DEST_INFO_PLIST = os.path.join(env['TARGET_BUILD_DIR'], env['INFOPLIST_PATH'])
+  plist = plistlib.readPlist(DEST_INFO_PLIST)
+
+  # Insert the product version.
+  if not _AddVersionKeys(plist, version=options.version):
+    return 2
+
+  # Add Breakpad if configured to do so.
+  if options.use_breakpad:
+    if options.branding is None:
+      print >>sys.stderr, 'Use of Breakpad requires branding.'
+      return 1
+    _AddBreakpadKeys(plist, options.branding)
+    if options.breakpad_uploads:
+      plist['BreakpadURL'] = 'https://clients2.google.com/cr/report'
+    else:
+      # This allows crash dumping to a file without uploading the
+      # dump, for testing purposes.  Breakpad does not recognise
+      # "none" as a special value, but this does stop crash dump
+      # uploading from happening.  We need to specify something
+      # because if "BreakpadURL" is not present, Breakpad will not
+      # register its crash handler and no crash dumping will occur.
+      plist['BreakpadURL'] = 'none'
+  else:
+    _RemoveBreakpadKeys(plist)
+
+  # Only add Keystone in Release builds.
+  if options.use_keystone and env['CONFIGURATION'] == 'Release':
+    if options.bundle_identifier is None:
+      print >>sys.stderr, 'Use of Keystone requires the bundle id.'
+      return 1
+    _AddKeystoneKeys(plist, options.bundle_identifier)
+  else:
+    _RemoveKeystoneKeys(plist)
+
+  # Adds or removes any SCM keys.
+  if not _DoSCMKeys(plist, options.add_scm_info):
+    return 3
+
+  # Now that all keys have been mutated, rewrite the file.
+  temp_info_plist = tempfile.NamedTemporaryFile()
+  plistlib.writePlist(plist, temp_info_plist.name)
+
+  # Info.plist will work perfectly well in any plist format, but traditionally
+  # applications use xml1 for this, so convert it to ensure that it's valid.
+  proc = subprocess.Popen(['plutil', '-convert', 'xml1', '-o', DEST_INFO_PLIST,
+                           temp_info_plist.name])
+  proc.wait()
+  return proc.returncode
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/build/mac/verify_no_objc.sh b/build/mac/verify_no_objc.sh
new file mode 100755
index 0000000..e18a5ea
--- /dev/null
+++ b/build/mac/verify_no_objc.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script makes sure that no __OBJC,__image_info section appears in the
+# executable file built by the Xcode target that runs the script. If such a
+# section appears, the script prints an error message and exits nonzero.
+#
+# Why is this important?
+#
+# On 10.5, there's a bug in CFBundlePreflightExecutable that causes it to
+# crash when operating in an executable that has not loaded at its default
+# address (that is, when it's a position-independent executable with the
+# MH_PIE bit set in its mach_header) and the executable has an
+# __OBJC,__image_info section. See http://crbug.com/88697.
+#
+# Chrome's main executables don't use any Objective-C at all, and don't need
+# to carry this section around. Not linking them as Objective-C when they
+# don't need it anyway saves about 4kB in the linked executable, although most
+# of that 4kB is just filled with zeroes.
+#
+# This script makes sure that nobody goofs and accidentally introduces these
+# sections into the main executables.
+
+set -eu
+
+executable="${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
+
+if xcrun otool -arch i386 -o "${executable}" | grep -q '^Contents.*section$'; \
+then
+  echo "${0}: ${executable} has an __OBJC,__image_info section" 2>&1
+  exit 1
+fi
+
+if [[ ${PIPESTATUS[0]} -ne 0 ]]; then
+  echo "${0}: otool failed" 2>&1
+  exit 1
+fi
+
+exit 0
diff --git a/build/nocompile.gypi b/build/nocompile.gypi
new file mode 100644
index 0000000..f9021ae
--- /dev/null
+++ b/build/nocompile.gypi
@@ -0,0 +1,96 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to create a unittest that
+# invokes a set of no-compile tests.  A no-compile test is a test that asserts
+# a particular construct will not compile.
+#
+# Also see:
+#   http://dev.chromium.org/developers/testing/no-compile-tests
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_module_nc_unittests',
+#   'type': 'executable',
+#   'sources': [
+#     'nc_testset_1.nc',
+#     'nc_testset_2.nc',
+#   ],
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The .nc files are C++ files that contain code we wish to assert will not
+# compile.  Each individual test case in the file should be put in its own
+# #ifdef section.  The expected output should be appended as a C++-style
+# comment containing a python list of regular expressions.  This line will
+# likely be longer than 80 characters. Specifying a precise expected output
+# is important so that random compile failures do not cause the test to pass.
+#
+# Example .nc file:
+#
+#   #if defined(TEST_NEEDS_SEMICOLON)  // [r"expected ',' or ';' at end of input"]
+#
+#   int a = 1
+#
+#   #elif defined(TEST_NEEDS_CAST)  // [r"invalid conversion from 'void*' to 'char*'"]
+#
+#   void* a = NULL;
+#   char* b = a;
+#
+#   #endif
+#
+# If we needed to disable TEST_NEEDS_SEMICOLON, we would change the defines to:
+#
+#   DISABLE_TEST_NEEDS_SEMICOLON
+#   TEST_NEEDS_CAST
+#
+# The lines above are parsed by a regexp so avoid getting creative with the
+# formatting or ifdef logic; it will likely just not work.
+#
+# Implementation notes:
+# The .nc files are actually processed by a python script which executes the
+# compiler and generates a .cc file containing a series of #error lines on
+# failure, or a set of trivially passing gunit TEST() functions on success.
+# This allows us to fail at the compile step when something goes wrong, and
+# know during the unittest run that the test was at least processed when
+# things go right.
+
+{
+  # TODO(awong): Disabled until http://crbug.com/105388 is resolved.
+  'sources/': [['exclude', '\\.nc$']],
+  'conditions': [
+    [ 'OS=="linux" and clang==0', {
+      'rules': [
+        {
+          'variables': {
+            'nocompile_driver': '<(DEPTH)/tools/nocompile_driver.py',
+            'nc_result_path': ('<(INTERMEDIATE_DIR)/<(module_dir)/'
+                               '<(RULE_INPUT_ROOT)_nc.cc'),
+          },
+          'rule_name': 'run_nocompile',
+          'extension': 'nc',
+          'inputs': [
+            '<(nocompile_driver)',
+          ],
+          'outputs': [
+            '<(nc_result_path)'
+          ],
+          'action': [
+            'python',
+            '<(nocompile_driver)',
+            '4',  # Number of compilers to invoke in parallel.
+            '<(RULE_INPUT_PATH)',
+            '-Wall -Werror -Wfatal-errors -I<(DEPTH)',
+            '<(nc_result_path)',
+          ],
+          'message': 'Generating no compile results for <(RULE_INPUT_PATH)',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    }, {
+      'sources/': [['exclude', '\\.nc$']]
+    }],  # 'OS=="linux" and clang==0'
+  ],
+}
+
diff --git a/build/precompile.cc b/build/precompile.cc
new file mode 100644
index 0000000..db1ef6d
--- /dev/null
+++ b/build/precompile.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header generator for Windows builds. No include is needed
+// in this file as the PCH include is forced via the "Forced Include File"
+// flag in the projects generated by GYP.
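+//
+// A rough MSVC equivalent of what GYP sets up (illustrative command lines,
+// not the actual generated settings):
+//   cl /c /Ycprecompile.h /FIprecompile.h precompile.cc   (create the PCH)
+//   cl /c /Yuprecompile.h /FIprecompile.h foo.cc          (use the PCH)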
diff --git a/build/precompile.h b/build/precompile.h
new file mode 100644
index 0000000..20ca73c
--- /dev/null
+++ b/build/precompile.h
@@ -0,0 +1,110 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header for Chromium project on Windows, not used by
+// other build configurations. Using precompiled headers speeds up the
+// build significantly, by around a quarter on VS 2010 on an HP Z600
+// with 12 GB of memory.
+//
+// Numeric comments beside includes are the number of times they were
+// included under src/chrome/browser on 2011/8/20, which was used as a
+// baseline for deciding what to include in the PCH. Includes without
+// a numeric comment are generally included at least 5 times. It may
+// be possible to tweak the speed of the build by commenting out or
+// removing some of the less frequently used headers.
+
+#if defined(BUILD_PRECOMPILE_H_)
+#error You shouldn't include the precompiled header file more than once.
+#endif
+
+#define BUILD_PRECOMPILE_H_
+
+#define _USE_MATH_DEFINES
+
+// The Windows header needs to come before almost all the other
+// Windows-specific headers.
+#include <Windows.h>
+#include <dwmapi.h>
+#include <shellapi.h>
+#include <wincrypt.h>  // 4
+#include <wtypes.h>  // 2
+
+// Defines in atlbase.h cause conflicts; if we could figure out how
+// this family of headers can be included in the PCH, it might speed
+// up the build as several of them are used frequently.
+/*
+#include <atlbase.h>
+#include <atlapp.h>
+#include <atlcom.h>
+#include <atlcrack.h>  // 2
+#include <atlctrls.h>  // 2
+#include <atlmisc.h>  // 2
+#include <atlsafe.h>  // 1
+#include <atltheme.h>  // 1
+#include <atlwin.h>  // 2
+*/
+
+// Objbase.h and other files that rely on it bring in [ #define
+// interface struct ] which can cause problems in a multi-platform
+// build like Chrome's. #undef-ing it does not work, as there are
+// currently 118 targets that break if we do this, so these headers are
+// left out of the precompiled header for now.
+//#include <commctrl.h>  // 2
+//#include <commdlg.h>  // 3
+//#include <cryptuiapi.h>  // 2
+//#include <Objbase.h>  // 2
+//#include <objidl.h>  // 1
+//#include <ole2.h>  // 1
+//#include <oleacc.h>  // 2
+//#include <oleauto.h>  // 1
+//#include <oleidl.h>  // 1
+//#include <propkey.h>  // 2
+//#include <propvarutil.h>  // 2
+//#include <pstore.h>  // 2
+//#include <shlguid.h>  // 1
+//#include <shlwapi.h>  // 1
+//#include <shobjidl.h>  // 4
+//#include <urlhist.h>  // 2
+
+// Caused other conflicts in addition to the 'interface' issue above.
+// #include <shlobj.h>
+
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>  // 4
+#include <math.h>
+#include <memory.h>  // 1
+#include <signal.h>
+#include <stdarg.h>  // 1
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>  // 4
+
+#include <algorithm>
+#include <bitset>  // 3
+#include <cmath>
+#include <cstddef>
+#include <cstdio>  // 3
+#include <cstdlib>  // 2
+#include <cstring>
+#include <deque>
+#include <fstream>  // 3
+#include <functional>
+#include <iomanip>  // 2
+#include <iosfwd>  // 2
+#include <iterator>
+#include <limits>
+#include <list>
+#include <map>
+#include <numeric>  // 2
+#include <ostream>
+#include <queue>
+#include <set>
+#include <sstream>
+#include <stack>
+#include <string>
+#include <utility>
+#include <vector>
diff --git a/build/protoc.gypi b/build/protoc.gypi
new file mode 100644
index 0000000..fafdf9d
--- /dev/null
+++ b/build/protoc.gypi
@@ -0,0 +1,123 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner. For Java-targets, see
+# protoc_java.gypi.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'type': 'static_library',
+#   'sources': [
+#     'foo.proto',
+#     'bar.proto',
+#   ],
+#   'variables': {
+#     # Optional, see below: 'proto_in_dir': '.'
+#     'proto_out_dir': 'dir/for/my_proto_lib'
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+# If necessary, you may add normal .cc files to the sources list, or add other
+# gyp dependencies.  The proto headers are guaranteed to be generated before
+# any source files, even within this target, are compiled.
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files.  If left out, it defaults to '.'.
+#
+# The 'proto_out_dir' variable specifies the path suffix that output
+# files are generated under.  Targets that gyp-depend on my_proto_lib
+# will be able to include the resulting proto headers with an include
+# like:
+#   #include "dir/for/my_proto_lib/foo.pb.h"
+#
+# If you need to add an EXPORT macro to a protobuf's c++ header, set the
+# 'cc_generator_options' variable with the value: 'dllexport_decl=FOO_EXPORT:'
+# e.g. 'dllexport_decl=BASE_EXPORT:'
+#
+# It is likely you also need to #include a file for the above EXPORT macro to
+# work. You can do so with the 'cc_include' variable.
+# e.g. 'base/base_export.h'
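+#
+# For example (hypothetical values):
+#   'variables': {
+#     'proto_out_dir': 'dir/for/my_proto_lib',
+#     'cc_generator_options': 'dllexport_decl=BASE_EXPORT:',
+#     'cc_include': 'base/base_export.h',
+#   },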
+#
+# Implementation notes:
+# A proto_out_dir of foo/bar produces
+#   <(SHARED_INTERMEDIATE_DIR)/protoc_out/foo/bar/{file1,file2}.pb.{cc,h}
+#   <(SHARED_INTERMEDIATE_DIR)/pyproto/foo/bar/{file1,file2}_pb2.py
+
+{
+  'variables': {
+    'protoc_wrapper': '<(DEPTH)/tools/protoc_wrapper/protoc_wrapper.py',
+    'cc_dir': '<(SHARED_INTERMEDIATE_DIR)/protoc_out/<(proto_out_dir)',
+    'py_dir': '<(PRODUCT_DIR)/pyproto/<(proto_out_dir)',
+    'cc_generator_options%': '',
+    'cc_include%': '',
+    'proto_in_dir%': '.',
+    'conditions': [
+      ['use_system_protobuf==0', {
+        'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
+      }, { # use_system_protobuf==1
+        'protoc': '<!(which protoc)',
+      }],
+    ],
+  },
+  'rules': [
+    {
+      'rule_name': 'genproto',
+      'extension': 'proto',
+      'inputs': [
+        '<(protoc_wrapper)',
+        '<(protoc)',
+      ],
+      'outputs': [
+        '<(py_dir)/<(RULE_INPUT_ROOT)_pb2.py',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.cc',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+      ],
+      'action': [
+        'python',
+        '<(protoc_wrapper)',
+        '--include',
+        '<(cc_include)',
+        '--protobuf',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+        # Using the --arg val form (instead of --arg=val) allows gyp's msvs rule
+        # generation to correct 'val' which is a path.
+        '--proto-in-dir','<(proto_in_dir)',
+        # Naively you'd use <(RULE_INPUT_PATH) here, but protoc requires
+        # --proto_path is a strict prefix of the path given as an argument.
+        '--proto-in-file','<(RULE_INPUT_ROOT)<(RULE_INPUT_EXT)',
+        '--use-system-protobuf=<(use_system_protobuf)',
+        '--',
+        '<(protoc)',
+        '--cpp_out', '<(cc_generator_options)<(cc_dir)',
+        '--python_out', '<(py_dir)',
+      ],
+      'message': 'Generating C++ and Python code from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'dependencies': [
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protoc#host',
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+    '<(DEPTH)',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+      '<(DEPTH)',
+    ]
+  },
+  'export_dependent_settings': [
+    # The generated headers reference headers within protobuf_lite,
+    # so dependencies must be able to find those headers too.
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/protoc_java.gypi b/build/protoc_java.gypi
new file mode 100644
index 0000000..9ed597b
--- /dev/null
+++ b/build/protoc_java.gypi
@@ -0,0 +1,82 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner. This is only to be included
+# for Java targets. When including this file, a .jar-file will be generated.
+# For other targets, see protoc.gypi.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'sources': [
+#     'foo.proto',
+#     'bar.proto',
+#   ],
+#   'variables': {
+#     'proto_in_dir': '.'
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files.  If left out, it defaults to '.'.
+#
+# The 'output_java_files' variable specifies a list of output files that will
+# be generated. It is based on the package and java_outer_classname fields in
+# the proto. All the values must be prefixed with >(java_out_dir), since that
+# is the root directory of all the output.
+#
+# Implementation notes:
+# A target_name of foo and proto-specified 'package' java.package.path produces:
+#   <(PRODUCT_DIR)/java_proto/foo/{java/package/path/}{Foo,Bar}.java
+# where Foo and Bar are taken from 'java_outer_classname' of the protos.
+#
+# How the .jar-file is created differs from how protoc is used for other
+# targets, and as such, this lives in its own file.
+
+{
+  'variables': {
+    'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)android_protoc<(EXECUTABLE_SUFFIX)',
+    'java_out_dir': '<(PRODUCT_DIR)/java_proto/<(_target_name)/src',
+    'proto_in_dir%': '.',
+    'stamp_file': '<(java_out_dir).stamp',
+    'script': '<(DEPTH)/build/protoc_java.py',
+
+    # The rest of the variables here are for the java.gypi include.
+    'java_in_dir': '<(DEPTH)/build/android/empty',
+    'generated_src_dirs': ['<(java_out_dir)'],
+    # Adding the |stamp_file| to |additional_input_paths| makes the actions in
+    # the include of java.gypi depend on the genproto_java action.
+    'additional_input_paths': ['<(stamp_file)'],
+  },
+  'actions': [
+    {
+      'action_name': 'genproto_java',
+      'inputs': [
+        '<(script)',
+        '<(protoc)',
+        '<@(_sources)',
+      ],
+      # We do not know the names of the generated files, so we use a stamp.
+      'outputs': [
+        '<(stamp_file)',
+      ],
+      'action': [
+        '<(script)',
+        '--protoc=<(protoc)',
+        '--proto-path=<(proto_in_dir)',
+        '--java-out-dir=<(java_out_dir)',
+        '--stamp=<(stamp_file)',
+        '<@(_sources)',
+      ],
+      'message': 'Generating Java code from protobuf files in <(proto_in_dir)',
+    },
+  ],
+  'dependencies': [
+    '<(DEPTH)/third_party/android_protobuf/android_protobuf.gyp:android_protoc#host',
+    '<(DEPTH)/third_party/android_protobuf/android_protobuf.gyp:protobuf_nano_javalib',
+  ],
+  'includes': [ 'java.gypi' ],
+}
diff --git a/build/protoc_java.py b/build/protoc_java.py
new file mode 100755
index 0000000..470667c
--- /dev/null
+++ b/build/protoc_java.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate java source files from protobuf files.
+
+This is a helper file for the genproto_java action in protoc_java.gypi.
+
+It performs the following steps:
+1. Deletes all old sources (ensures deleted classes are not part of new jars).
+2. Creates source directory.
+3. Generates Java files using protoc (output into either --java-out-dir or
+   --srcjar).
+4. Creates a new stamp file.
+"""
+
+import os
+import optparse
+import shutil
+import subprocess
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "android", "gyp"))
+from util import build_utils
+
+def main(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option("--protoc", help="Path to protoc binary.")
+  parser.add_option("--proto-path", help="Path to proto directory.")
+  parser.add_option("--java-out-dir",
+      help="Path to output directory for java files.")
+  parser.add_option("--srcjar", help="Path to output srcjar.")
+  parser.add_option("--stamp", help="File to touch on success.")
+  options, args = parser.parse_args(argv)
+
+  build_utils.CheckOptions(options, parser, ['protoc', 'proto_path'])
+  if not options.java_out_dir and not options.srcjar:
+    print 'One of --java-out-dir or --srcjar must be specified.'
+    return 1
+
+  with build_utils.TempDir() as temp_dir:
+    # Specify arguments to the generator.
+    generator_args = ['optional_field_style=reftypes',
+                      'store_unknown_fields=true']
+    out_arg = '--javanano_out=' + ','.join(generator_args) + ':' + temp_dir
+    # Generate Java files using protoc.
+    build_utils.CheckOutput(
+        [options.protoc, '--proto_path', options.proto_path, out_arg]
+        + args)
+
+    if options.java_out_dir:
+      build_utils.DeleteDirectory(options.java_out_dir)
+      shutil.copytree(temp_dir, options.java_out_dir)
+    else:
+      build_utils.ZipDir(options.srcjar, temp_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        args + [options.protoc] + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/release.gypi b/build/release.gypi
new file mode 100644
index 0000000..9b8b11d
--- /dev/null
+++ b/build/release.gypi
@@ -0,0 +1,29 @@
+{
+  'conditions': [
+    # Handle build types.
+    ['buildtype=="Dev"', {
+      'includes': ['internal/release_impl.gypi'],
+    }],
+    ['buildtype=="Dev" and incremental_chrome_dll==1', {
+      'msvs_settings': {
+        'VCLinkerTool': {
+          # Enable incremental linking and disable conflicting link options:
+          # http://msdn.microsoft.com/en-us/library/4khtbfyf.aspx
+          'LinkIncremental': '2',
+          'OptimizeReferences': '1',
+          'EnableCOMDATFolding': '1',
+          'Profile': 'false',
+        },
+      },
+    }],
+    ['buildtype=="Official"', {
+      'includes': ['internal/release_impl_official.gypi'],
+    }],
+    # TODO(bradnelson): may also need:
+    #     checksenabled
+    #     coverage
+    #     dom_stats
+    #     pgo_instrument
+    #     pgo_optimize
+  ],
+}
diff --git a/build/repack_action.gypi b/build/repack_action.gypi
new file mode 100644
index 0000000..04b982a
--- /dev/null
+++ b/build/repack_action.gypi
@@ -0,0 +1,31 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit repack in a
+# consistent manner. To use this the following variables need to be
+# defined:
+#   pak_inputs: list: paths of pak files that need to be combined.
+#   pak_output: string: the output pak file path.
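+#
+# A hypothetical action using this file might look like:
+#   {
+#     'action_name': 'repack_my_resources',
+#     'variables': {
+#       'pak_inputs': ['foo.pak', 'bar.pak'],
+#       'pak_output': '<(PRODUCT_DIR)/my_resources.pak',
+#     },
+#     'includes': ['repack_action.gypi'],
+#   },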
+
+{
+  # GYP version: //tools/grit/repack.gni
+  'variables': {
+    'repack_path': '<(DEPTH)/tools/grit/grit/format/repack.py',
+    'repack_options%': [],
+  },
+  'inputs': [
+    '<(repack_path)',
+    '<@(pak_inputs)',
+  ],
+  'outputs': [
+    '<(pak_output)'
+  ],
+  'action': [
+    'python',
+    '<(repack_path)',
+    '<@(repack_options)',
+    '<(pak_output)',
+    '<@(pak_inputs)',
+  ],
+}
diff --git a/build/sanitize-mac-build-log.sed b/build/sanitize-mac-build-log.sed
new file mode 100644
index 0000000..b4111c7
--- /dev/null
+++ b/build/sanitize-mac-build-log.sed
@@ -0,0 +1,33 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Mac build log into something readable.
+
+# Drop uninformative lines.
+/^distcc/d
+/^Check dependencies/d
+/^    setenv /d
+/^    cd /d
+/^make: Nothing to be done/d
+/^$/d
+
+# Xcode prints a short "compiling foobar.o" line followed by the lengthy
+# full command line.  These deletions drop the command line.
+\|^    /Developer/usr/bin/|d
+\|^    /Developer/Library/PrivateFrameworks/DevToolsCore\.framework/|d
+\|^    /Developer/Library/Xcode/Plug-ins/CoreBuildTasks\.xcplugin/|d
+
+# Drop any goma command lines as well.
+\|^    .*/gomacc |d
+
+# And, if you've overridden something from your own bin directory, remove those
+# full command lines, too.
+\|^    /Users/[^/]*/bin/|d
+
+# There's already a nice note for bindings, don't need the command line.
+\|^python scripts/rule_binding\.py|d
+
+# Shorten the "compiling foobar.o" line.
+s|^Distributed-CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
+s|^CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
diff --git a/build/sanitize-mac-build-log.sh b/build/sanitize-mac-build-log.sh
new file mode 100755
index 0000000..df5a7af
--- /dev/null
+++ b/build/sanitize-mac-build-log.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
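+#
+# Reads a raw build log on stdin and writes the reduced log to stdout, e.g.
+# (illustrative file names):
+#   sanitize-mac-build-log.sh < build-log.txt > build-log-clean.txt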
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/build/sanitize-win-build-log.sed b/build/sanitize-win-build-log.sed
new file mode 100644
index 0000000..c18e664
--- /dev/null
+++ b/build/sanitize-win-build-log.sed
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Windows build log into something
+# machine-parsable.
+
+# Drop uninformative lines.
+/The operation completed successfully\./d
+
+# Drop parallelization indicators on lines.
+s/^[0-9]+>//
+
+# Shorten bindings generation lines
+s/^.*"python".*idl_compiler\.py".*("[^"]+\.idl").*$/  idl_compiler \1/
diff --git a/build/sanitize-win-build-log.sh b/build/sanitize-win-build-log.sh
new file mode 100755
index 0000000..df5a7af
--- /dev/null
+++ b/build/sanitize-win-build-log.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
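+#
+# Reads a raw build log on stdin and writes the reduced log to stdout, e.g.
+# (illustrative file names):
+#   sanitize-win-build-log.sh < build-log.txt > build-log-clean.txt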
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/build/sanitizers/OWNERS b/build/sanitizers/OWNERS
new file mode 100644
index 0000000..10a3e3b
--- /dev/null
+++ b/build/sanitizers/OWNERS
@@ -0,0 +1,2 @@
+glider@chromium.org
+per-file tsan_suppressions.cc=*
diff --git a/build/sanitizers/sanitizer_options.cc b/build/sanitizers/sanitizer_options.cc
new file mode 100644
index 0000000..1e92625
--- /dev/null
+++ b/build/sanitizers/sanitizer_options.cc
@@ -0,0 +1,138 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file contains the default options for various compiler-based dynamic
+// tools.
+
+#include "build/build_config.h"
+
+#if defined(ADDRESS_SANITIZER) && defined(OS_MACOSX)
+#include <crt_externs.h>  // for _NSGetArgc, _NSGetArgv
+#include <string.h>
+#endif  // ADDRESS_SANITIZER && OS_MACOSX
+
+// Functions returning default options are declared weak in the tools' runtime
+// libraries. To make the linker pick the strong replacements for those
+// functions from this module, we explicitly force its inclusion by passing
+// -Wl,-u_sanitizer_options_link_helper
+extern "C"
+void _sanitizer_options_link_helper() { }
+
+#if defined(ADDRESS_SANITIZER)
+// Default options for AddressSanitizer in various configurations:
+//   strict_memcmp=0 - disable the strict memcmp() checking
+//     (http://crbug.com/178677 and http://crbug.com/178404).
+//   malloc_context_size=5 - limit the size of stack traces collected by ASan
+//     for each malloc/free to 5 frames. These stack traces tend to accumulate
+//     very fast in applications using JIT (v8 in Chrome's case), see
+//     https://code.google.com/p/address-sanitizer/issues/detail?id=177
+//   symbolize=false - disable the in-process symbolization, which isn't 100%
+//     compatible with the existing sandboxes and doesn't make much sense for
+//     stripped official binaries.
+//   legacy_pthread_cond=1 - run in the libpthread 2.2.5 compatibility mode to
+//     work around libGL.so using the obsolete API, see
+//     http://crbug.com/341805. This may break if pthread_cond_t objects are
+//     accessed by both instrumented and non-instrumented binaries (e.g. if
+//     they reside in shared memory). This option is going to be deprecated in
+//     upstream AddressSanitizer and must not be used anywhere except the
+//     official builds.
+//   replace_intrin=0 - do not intercept memcpy(), memmove() and memset() to
+//     work around http://crbug.com/162461 (ASan report in OpenCL on Mac).
+//   check_printf=1 - check the memory accesses to printf (and other formatted
+//     output routines) arguments.
+//   use_sigaltstack=1 - handle signals on an alternate signal stack. Useful
+//     for stack overflow detection.
+//   strip_path_prefix=Release/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports
+//     (if symbolize=true, which is set when running with LeakSanitizer).
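+//
+// Note that flags passed in the ASAN_OPTIONS environment variable take
+// precedence over the defaults returned by __asan_default_options() below,
+// so individual runs can still override these settings, e.g. with
+// ASAN_OPTIONS=symbolize=true.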
+#if defined(OS_LINUX)
+#if defined(GOOGLE_CHROME_BUILD)
+// Default AddressSanitizer options for the official build. These do not affect
+// tests on buildbots (which don't set GOOGLE_CHROME_BUILD) or non-official
+// Chromium builds.
+const char kAsanDefaultOptions[] =
+    "legacy_pthread_cond=1 malloc_context_size=5 strict_memcmp=0 "
+    "symbolize=false check_printf=1 use_sigaltstack=1 detect_leaks=0 "
+    "strip_path_prefix=Release/../../ ";
+#else
+// Default AddressSanitizer options for buildbots and non-official builds.
+const char *kAsanDefaultOptions =
+    "strict_memcmp=0 symbolize=false check_printf=1 use_sigaltstack=1 "
+    "detect_leaks=0 strip_path_prefix=Release/../../ ";
+#endif  // GOOGLE_CHROME_BUILD
+
+#elif defined(OS_MACOSX)
+const char *kAsanDefaultOptions =
+    "strict_memcmp=0 replace_intrin=0 check_printf=1 use_sigaltstack=1 "
+    "strip_path_prefix=Release/../../ ";
+static const char kNaClDefaultOptions[] = "handle_segv=0";
+static const char kNaClFlag[] = "--type=nacl-loader";
+#endif  // OS_LINUX
+
+#if defined(OS_LINUX) || defined(OS_MACOSX)
+extern "C"
+__attribute__((no_sanitize_address))
+__attribute__((visibility("default")))
+// The function isn't referenced from the executable itself. Make sure it isn't
+// stripped by the linker.
+__attribute__((used))
+const char *__asan_default_options() {
+#if defined(OS_MACOSX)
+  char*** argvp = _NSGetArgv();
+  int* argcp = _NSGetArgc();
+  if (!argvp || !argcp) return kAsanDefaultOptions;
+  char** argv = *argvp;
+  int argc = *argcp;
+  for (int i = 0; i < argc; ++i) {
+    if (strcmp(argv[i], kNaClFlag) == 0) {
+      return kNaClDefaultOptions;
+    }
+  }
+#endif
+  return kAsanDefaultOptions;
+}
+#endif  // OS_LINUX || OS_MACOSX
+#endif  // ADDRESS_SANITIZER
+
+#if defined(THREAD_SANITIZER) && defined(OS_LINUX)
+// Default options for ThreadSanitizer in various configurations:
+//   detect_deadlocks=1 - enable deadlock (lock inversion) detection.
+//   second_deadlock_stack=1 - more verbose deadlock reports.
+//   report_signal_unsafe=0 - do not report async-signal-unsafe functions
+//     called from signal handlers.
+//   report_thread_leaks=0 - do not report unjoined threads at the end of
+//     the program execution.
+//   print_suppressions=1 - print the list of matched suppressions.
+//   history_size=7 - make the history buffer proportional to 2^7 (the maximum
+//     value) to keep more stack traces.
+//   strip_path_prefix=Release/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+const char kTsanDefaultOptions[] =
+    "detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 "
+    "report_thread_leaks=0 print_suppressions=1 history_size=7 "
+    "strip_path_prefix=Release/../../ ";
+
+extern "C"
+__attribute__((no_sanitize_thread))
+__attribute__((visibility("default")))
+// The function isn't referenced from the executable itself. Make sure it isn't
+// stripped by the linker.
+__attribute__((used))
+const char *__tsan_default_options() {
+  return kTsanDefaultOptions;
+}
+
+extern "C" char kTSanDefaultSuppressions[];
+
+extern "C"
+__attribute__((no_sanitize_thread))
+__attribute__((visibility("default")))
+// The function isn't referenced from the executable itself. Make sure it isn't
+// stripped by the linker.
+__attribute__((used))
+const char *__tsan_default_suppressions() {
+  return kTSanDefaultSuppressions;
+}
+
+#endif  // THREAD_SANITIZER && OS_LINUX
diff --git a/build/sanitizers/sanitizers.gyp b/build/sanitizers/sanitizers.gyp
new file mode 100644
index 0000000..d971d6d
--- /dev/null
+++ b/build/sanitizers/sanitizers.gyp
@@ -0,0 +1,61 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'sanitizer_options',
+      'type': 'static_library',
+      'toolsets': ['host', 'target'],
+      'variables': {
+         # Every target is going to depend on sanitizer_options, so allow
+         # this one to depend on itself.
+         'prune_self_dependency': 1,
+         # Do not let 'none' targets depend on this one, they don't need to.
+         'link_dependency': 1,
+       },
+      'sources': [
+        'sanitizer_options.cc',
+      ],
+      'include_dirs': [
+        '../..',
+      ],
+      # Some targets may want to opt out of ASan, TSan and MSan and link
+      # without the corresponding runtime libraries. We drop the libc++
+      # dependency and omit the compiler flags to avoid bringing instrumented
+      # code to those targets.
+      'conditions': [
+        ['use_custom_libcxx==1', {
+          'dependencies!': [
+            '../../third_party/libc++/libc++.gyp:libcxx_proxy',
+          ],
+        }],
+        ['tsan==1', {
+          'sources': [
+            'tsan_suppressions.cc',
+          ],
+        }],
+      ],
+      'cflags/': [
+        ['exclude', '-fsanitize='],
+        ['exclude', '-fsanitize-'],
+      ],
+      'direct_dependent_settings': {
+        'ldflags': [
+          '-Wl,-u_sanitizer_options_link_helper',
+        ],
+        'target_conditions': [
+          ['_type=="executable"', {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-Wl,-u,__sanitizer_options_link_helper',
+              ],
+            },
+          }],
+        ],
+      },
+    },
+  ],
+}
+
diff --git a/build/sanitizers/tsan_suppressions.cc b/build/sanitizers/tsan_suppressions.cc
new file mode 100644
index 0000000..aa4b7c7
--- /dev/null
+++ b/build/sanitizers/tsan_suppressions.cc
@@ -0,0 +1,310 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for ThreadSanitizer.
+// You can also pass additional suppressions via TSAN_OPTIONS:
+// TSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for more info.
+
+#if defined(THREAD_SANITIZER)
+
+// Please make sure the code below declares a single string variable,
+// kTSanDefaultSuppressions, that contains the TSan suppressions delimited by
+// newlines. See http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for the instructions on writing suppressions.
+char kTSanDefaultSuppressions[] =
+// False positives in libflashplayer.so and libglib.so. Since we don't
+// instrument them, we cannot reason about the synchronization in them.
+"race:libflashplayer.so\n"
+"race:libglib*.so\n"
+
+// Intentional race in ToolsSanityTest.DataRace in base_unittests.
+"race:base/tools_sanity_unittest.cc\n"
+
+// Data race on WatchdogCounter [test-only].
+"race:base/threading/watchdog_unittest.cc\n"
+
+// Races in libevent, http://crbug.com/23244.
+"race:libevent/event.c\n"
+
+// http://crbug.com/46840.
+"race:base::HistogramSamples::IncreaseSum\n"
+"race:base::Histogram::Add\n"
+"race:base::HistogramSamples::Add\n"
+
+// http://crbug.com/84094.
+"race:sqlite3StatusSet\n"
+"race:pcache1EnforceMaxPage\n"
+"race:pcache1AllocPage\n"
+
+// http://crbug.com/102327.
+// Test-only race, won't fix.
+"race:tracked_objects::ThreadData::ShutdownSingleThreadedCleanup\n"
+
+// http://crbug.com/115540
+"race:*GetCurrentThreadIdentifier\n"
+
+// http://crbug.com/120808
+"race:base/threading/watchdog.cc\n"
+
+// http://crbug.com/157586
+"race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n"
+
+// http://crbug.com/158718
+"race:third_party/ffmpeg/libavcodec/pthread.c\n"
+"race:third_party/ffmpeg/libavcodec/pthread_frame.c\n"
+"race:third_party/ffmpeg/libavcodec/vp8.c\n"
+"race:third_party/ffmpeg/libavutil/mem.c\n"
+"race:*HashFrameForTesting\n"
+"race:third_party/ffmpeg/libavcodec/h264pred.c\n"
+"race:media::ReleaseData\n"
+
+// http://crbug.com/158922
+"race:third_party/libvpx/source/libvpx/vp8/encoder/*\n"
+
+// http://crbug.com/189177
+"race:thread_manager\n"
+"race:v8::Locker::Initialize\n"
+
+// http://crbug.com/223352
+"race:uprv_malloc_52\n"
+"race:uprv_realloc_52\n"
+
+// http://crbug.com/239359
+"race:media::TestInputCallback::OnData\n"
+
+// http://crbug.com/244368
+"race:skia::BeginPlatformPaint\n"
+
+// http://crbug.com/244385
+"race:unixTempFileDir\n"
+
+// http://crbug.com/244755
+"race:v8::internal::Zone::NewExpand\n"
+"race:TooLateToEnableNow\n"
+"race:adjust_segment_bytes_allocated\n"
+
+// http://crbug.com/244774
+"race:webrtc::RTPReceiver::ProcessBitrate\n"
+"race:webrtc::RTPSender::ProcessBitrate\n"
+"race:webrtc::VideoCodingModuleImpl::Decode\n"
+"race:webrtc::RTPSender::SendOutgoingData\n"
+"race:webrtc::VP8EncoderImpl::GetEncodedPartitions\n"
+"race:webrtc::VP8EncoderImpl::Encode\n"
+"race:webrtc::ViEEncoder::DeliverFrame\n"
+"race:webrtc::vcm::VideoReceiver::Decode\n"
+"race:webrtc::VCMReceiver::FrameForDecoding\n"
+"race:*trace_event_unique_catstatic*\n"
+
+// http://crbug.com/244856
+"race:AutoPulseLock\n"
+
+// http://crbug.com/246968
+"race:webrtc::VideoCodingModuleImpl::RegisterPacketRequestCallback\n"
+
+// http://crbug.com/246970
+"race:webrtc::EventPosix::StartTimer\n"
+
+// http://crbug.com/246974
+"race:content::GpuWatchdogThread::CheckArmed\n"
+
+// http://crbug.com/257396
+"race:base::debug::TraceEventTestFixture_TraceSamplingScope_Test::TestBody\n"
+
+// http://crbug.com/258479
+"race:SamplingStateScope\n"
+"race:g_trace_state\n"
+
+// http://crbug.com/258499
+"race:third_party/skia/include/core/SkRefCnt.h\n"
+
+// http://crbug.com/268924
+"race:base::g_power_monitor\n"
+"race:base::PowerMonitor::PowerMonitor\n"
+"race:base::PowerMonitor::AddObserver\n"
+"race:base::PowerMonitor::RemoveObserver\n"
+"race:base::PowerMonitor::IsOnBatteryPower\n"
+
+// http://crbug.com/268941
+"race:tracked_objects::ThreadData::tls_index_\n"
+
+// http://crbug.com/270037
+"race:gLibCleanupFunctions\n"
+
+// http://crbug.com/272095
+"race:base::g_top_manager\n"
+
+// http://crbug.com/272987
+"race:webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>::set_enabled\n"
+
+// http://crbug.com/273047
+"race:base::*::g_lazy_tls_ptr\n"
+"race:IPC::SyncChannel::ReceivedSyncMsgQueue::lazy_tls_ptr_\n"
+
+// http://crbug.com/280466
+"race:content::WebRtcAudioCapturer::SetCapturerSource\n"
+
+// http://crbug.com/285242
+"race:media::PulseAudioOutputStream::SetVolume\n"
+
+// http://crbug.com/290964
+"race:PostponeInterruptsScope\n"
+"race:v8::internal::StackGuard::RequestInstallCode\n"
+
+// http://crbug.com/296883
+"race:net::URLFetcherCore::Stop\n"
+
+// http://crbug.com/308590
+"race:CustomThreadWatcher::~CustomThreadWatcher\n"
+
+// http://crbug.com/310851
+"race:net::ProxyResolverV8Tracing::Job::~Job\n"
+
+// http://crbug.com/313726
+"race:CallbackWasCalled\n"
+
+// http://crbug.com/327330
+"race:PrepareTextureMailbox\n"
+"race:cc::LayerTreeHost::PaintLayerContents\n"
+
+// http://crbug.com/328804
+"race:v8::internal::Heap::SetStackLimits\n"
+"race:ScavengePointer\n"
+
+// http://crbug.com/328826
+"race:gLCDOrder\n"
+"race:gLCDOrientation\n"
+
+// http://crbug.com/328868
+"race:PR_Lock\n"
+
+// http://crbug.com/329225
+"race:blink::currentTimeFunction\n"
+
+// http://crbug.com/329460
+"race:extensions::InfoMap::AddExtension\n"
+
+// http://crbug.com/333244
+"race:content::"
+    "VideoCaptureImplTest::MockVideoCaptureImpl::~MockVideoCaptureImpl\n"
+
+// http://crbug.com/333871
+"race:v8::internal::Interface::NewValue()::value_interface\n"
+"race:v8::internal::IsMinusZero(double)::minus_zero\n"
+"race:v8::internal::FastCloneShallowObjectStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::KeyedLoadStubCompiler::registers\n"
+"race:v8::internal::KeyedStoreStubCompiler::registers()::registers\n"
+"race:v8::internal::KeyedLoadFastElementStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::KeyedStoreFastElementStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::LoadStubCompiler::registers\n"
+"race:v8::internal::StoreStubCompiler::registers\n"
+"race:v8::internal::HValue::LoopWeight\n"
+
+// http://crbug.com/334140
+"race:CommandLine::HasSwitch\n"
+"race:CommandLine::current_process_commandline_\n"
+"race:CommandLine::GetSwitchValueASCII\n"
+
+// http://crbug.com/338675
+"race:blink::s_platform\n"
+"race:content::"
+    "RendererWebKitPlatformSupportImpl::~RendererWebKitPlatformSupportImpl\n"
+
+// http://crbug.com/345240
+"race:WTF::s_shutdown\n"
+
+// http://crbug.com/345245
+"race:jingle_glue::JingleThreadWrapper::~JingleThreadWrapper\n"
+"race:webrtc::voe::Channel::UpdatePacketDelay\n"
+"race:webrtc::voe::Channel::GetDelayEstimate\n"
+"race:webrtc::VCMCodecDataBase::DeregisterReceiveCodec\n"
+"race:webrtc::GainControlImpl::set_stream_analog_level\n"
+
+// http://crbug.com/345618
+"race:WebCore::AudioDestinationNode::render\n"
+
+// http://crbug.com/345624
+"race:media::DataSource::set_host\n"
+
+// http://crbug.com/347534
+"race:v8::internal::V8::TearDown\n"
+
+// http://crbug.com/347538
+"race:sctp_timer_start\n"
+
+// http://crbug.com/347548
+"race:cricket::WebRtcVideoMediaChannel::MaybeResetVieSendCodec\n"
+"race:cricket::WebRtcVideoMediaChannel::SetSendCodec\n"
+
+// http://crbug.com/347553
+"race:blink::WebString::reset\n"
+
+// http://crbug.com/348511
+"race:webrtc::acm1::AudioCodingModuleImpl::PlayoutData10Ms\n"
+
+// http://crbug.com/348982
+"race:cricket::P2PTransportChannel::OnConnectionDestroyed\n"
+"race:cricket::P2PTransportChannel::AddConnection\n"
+
+// http://crbug.com/348984
+"race:sctp_express_handle_sack\n"
+"race:system_base_info\n"
+
+// http://crbug.com/363999
+"race:v8::internal::EnterDebugger::*EnterDebugger\n"
+
+// http://crbug.com/364006
+"race:gfx::ImageFamily::~ImageFamily\n"
+
+// http://crbug.com/364014
+"race:WTF::Latin1Encoding()::globalLatin1Encoding\n"
+
+// https://code.google.com/p/v8/issues/detail?id=3143
+"race:v8::internal::FLAG_track_double_fields\n"
+
+// https://crbug.com/369257
+// TODO(mtklein): annotate properly and remove suppressions.
+"race:SandboxIPCHandler::HandleFontMatchRequest\n"
+"race:SkFontConfigInterfaceDirect::matchFamilyName\n"
+"race:SkFontConfigInterface::GetSingletonDirectInterface\n"
+"race:FcStrStaticName\n"
+
+// http://crbug.com/372807
+"deadlock:net::X509Certificate::CreateCertificateListFromBytes\n"
+"deadlock:net::X509Certificate::CreateFromBytes\n"
+"deadlock:net::SSLClientSocketNSS::Core::DoHandshakeLoop\n"
+
+// http://crbug.com/374135
+"race:media::AlsaWrapper::PcmWritei\n"
+
+// False positive in libc's tzset_internal, http://crbug.com/379738.
+"race:tzset_internal\n"
+
+// http://crbug.com/380554
+"deadlock:g_type_add_interface_static\n"
+
+// http://crbug.com/386385
+"race:content::AppCacheStorageImpl::DatabaseTask::CallRunCompleted\n"
+
+// http://crbug.com/388730
+"race:g_next_user_script_id\n"
+
+// http://crbug.com/389098
+"race:webrtc::RtpToNtpMs\n"
+"race:webrtc::UpdateRtcpList\n"
+"race:webrtc::RemoteNtpTimeEstimator::Estimate\n"
+"race:webrtc::voe::TransmitMixer::EnableStereoChannelSwapping\n"
+
+// http://crbug.com/397022
+"deadlock:"
+"base::debug::TraceEventTestFixture_ThreadOnceBlocking_Test::TestBody\n"
+
+// http://crbug.com/415472
+"deadlock:base::debug::TraceLog::GetCategoryGroupEnabled\n"
+
+// End of suppressions.
+;  // Please keep this semicolon.
+
+#endif  // THREAD_SANITIZER
diff --git a/build/secondary/testing/BUILD.gn b/build/secondary/testing/BUILD.gn
new file mode 100644
index 0000000..2cafa68
--- /dev/null
+++ b/build/secondary/testing/BUILD.gn
@@ -0,0 +1,11 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+source_set("gmock_mutant") {
+  sources = [
+    "gmock_mutant.h",  # gMock helpers
+  ]
+
+  deps = [ "//base" ]
+}
diff --git a/build/secondary/testing/gmock/BUILD.gn b/build/secondary/testing/gmock/BUILD.gn
new file mode 100644
index 0000000..a0dbad7
--- /dev/null
+++ b/build/secondary/testing/gmock/BUILD.gn
@@ -0,0 +1,49 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("gmock_config") {
+  # Gmock headers need to be able to find themselves.
+  include_dirs = [ "include" ]
+}
+
+static_library("gmock") {
+  # TODO http://crbug.com/412064 enable this flag all the time.
+  testonly = !is_component_build
+  sources = [
+    # Sources based on files in r173 of gmock.
+    "include/gmock/gmock-actions.h",
+    "include/gmock/gmock-cardinalities.h",
+    "include/gmock/gmock-generated-actions.h",
+    "include/gmock/gmock-generated-function-mockers.h",
+    "include/gmock/gmock-generated-matchers.h",
+    "include/gmock/gmock-generated-nice-strict.h",
+    "include/gmock/gmock-matchers.h",
+    "include/gmock/gmock-spec-builders.h",
+    "include/gmock/gmock.h",
+    "include/gmock/internal/gmock-generated-internal-utils.h",
+    "include/gmock/internal/gmock-internal-utils.h",
+    "include/gmock/internal/gmock-port.h",
+    #"src/gmock-all.cc",  # Not needed by our build.
+    "src/gmock-cardinalities.cc",
+    "src/gmock-internal-utils.cc",
+    "src/gmock-matchers.cc",
+    "src/gmock-spec-builders.cc",
+    "src/gmock.cc",
+  ]
+
+  # This project includes some stuff from gtest's guts.
+  include_dirs = [ "../gtest/include" ]
+
+  public_configs = [
+    ":gmock_config",
+    "//testing/gtest:gtest_config",
+  ]
+}
+
+static_library("gmock_main") {
+  # TODO http://crbug.com/412064 enable this flag all the time.
+  testonly = !is_component_build
+  sources = [ "src/gmock_main.cc" ]
+  deps = [ ":gmock" ]
+}
diff --git a/build/secondary/testing/gtest/BUILD.gn b/build/secondary/testing/gtest/BUILD.gn
new file mode 100644
index 0000000..f50afb6
--- /dev/null
+++ b/build/secondary/testing/gtest/BUILD.gn
@@ -0,0 +1,120 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("gtest_config") {
+  visibility = [
+    ":*",
+    "//testing/gmock:*",  # gmock also shares this config.
+  ]
+
+  defines = [
+    # In order to allow regex matches in gtest to be shared between Windows
+    # and other systems, we tell gtest to always use its internal engine.
+    "GTEST_HAS_POSIX_RE=0",
+
+    # Chrome doesn't support or require C++11 yet.
+    "GTEST_LANG_CXX11=0",
+  ]
+
+  # Gtest headers need to be able to find themselves.
+  include_dirs = [ "include" ]
+
+  if (is_win) {
+    cflags = [ "/wd4800" ]  # Forcing value to bool (performance warning).
+  }
+
+  if (is_posix) {
+    defines += [
+      # gtest isn't able to figure out when RTTI is disabled for gcc
+      # versions older than 4.3.2, and assumes it's enabled.  Our Mac
+      # and Linux builds disable RTTI, and cannot guarantee that the
+      # compiler will be 4.3.2 or newer.  The Mac, for example, uses
+      # 4.2.1 as that is the latest available on that platform.  gtest
+      # must be instructed that RTTI is disabled here, and for any
+      # direct dependents that might include gtest headers.
+      "GTEST_HAS_RTTI=0",
+    ]
+  }
+
+  if (is_android) {
+    defines += [
+      # We want gtest features that use tr1::tuple, but we currently
+      # don't support the variadic templates used by libstdc++'s
+      # implementation. gtest supports this scenario by providing its
+      # own implementation but we must opt in to it.
+      "GTEST_USE_OWN_TR1_TUPLE=1",
+
+      # GTEST_USE_OWN_TR1_TUPLE only works if GTEST_HAS_TR1_TUPLE is set.
+      # gtest r625 made it so that GTEST_HAS_TR1_TUPLE is set to 0
+      # automatically on android, so it has to be set explicitly here.
+      "GTEST_HAS_TR1_TUPLE=1",
+    ]
+  }
+}
+
+config("gtest_direct_config") {
+  visibility = [ ":*" ]
+  defines = [ "UNIT_TEST" ]
+}
+
+static_library("gtest") {
+  # TODO(http://crbug.com/412064): Enable this flag all the time.
+  testonly = !is_component_build
+  sources = [
+    "include/gtest/gtest-death-test.h",
+    "include/gtest/gtest-message.h",
+    "include/gtest/gtest-param-test.h",
+    "include/gtest/gtest-printers.h",
+    "include/gtest/gtest-spi.h",
+    "include/gtest/gtest-test-part.h",
+    "include/gtest/gtest-typed-test.h",
+    "include/gtest/gtest.h",
+    "include/gtest/gtest_pred_impl.h",
+    "include/gtest/internal/gtest-death-test-internal.h",
+    "include/gtest/internal/gtest-filepath.h",
+    "include/gtest/internal/gtest-internal.h",
+    "include/gtest/internal/gtest-linked_ptr.h",
+    "include/gtest/internal/gtest-param-util-generated.h",
+    "include/gtest/internal/gtest-param-util.h",
+    "include/gtest/internal/gtest-port.h",
+    "include/gtest/internal/gtest-string.h",
+    "include/gtest/internal/gtest-tuple.h",
+    "include/gtest/internal/gtest-type-util.h",
+    #"gtest/src/gtest-all.cc",  # Not needed by our build.
+    "src/gtest-death-test.cc",
+    "src/gtest-filepath.cc",
+    "src/gtest-internal-inl.h",
+    "src/gtest-port.cc",
+    "src/gtest-printers.cc",
+    "src/gtest-test-part.cc",
+    "src/gtest-typed-test.cc",
+    "src/gtest.cc",
+    "../multiprocess_func_list.cc",
+    "../multiprocess_func_list.h",
+    "../platform_test.h",
+  ]
+
+  if (is_mac) {
+    sources += [
+      "../gtest_mac.h",
+      "../gtest_mac.mm",
+      "../platform_test_mac.mm",
+    ]
+  }
+
+  include_dirs = [ "." ]
+
+  all_dependent_configs = [ ":gtest_config" ]
+  public_configs = [ ":gtest_direct_config" ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+}
+
+source_set("gtest_main") {
+  # TODO(http://crbug.com/412064): Enable this flag all the time.
+  testonly = !is_component_build
+  sources = [ "src/gtest_main.cc" ]
+  deps = [ ":gtest" ]
+}
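Note the split above: gtest_config is attached via all_dependent_configs, so its defines and include paths reach every target that transitively depends on ":gtest", while gtest_direct_config (which defines UNIT_TEST) is a public_config and reaches direct dependents only. The same pattern in isolation, with illustrative names:

    config("for_everyone") {
      defines = [ "SEEN_TRANSITIVELY" ]
    }
    config("for_direct_deps") {
      defines = [ "SEEN_BY_DIRECT_DEPENDENTS" ]
    }
    static_library("lib") {
      sources = [ "lib.cc" ]
      # Propagates to all transitive dependents of ":lib".
      all_dependent_configs = [ ":for_everyone" ]
      # Propagates to direct dependents (and onward only via public_deps).
      public_configs = [ ":for_direct_deps" ]
    }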
diff --git a/build/secondary/third_party/android_tools/BUILD.gn b/build/secondary/third_party/android_tools/BUILD.gn
new file mode 100644
index 0000000..14e6c07
--- /dev/null
+++ b/build/secondary/third_party/android_tools/BUILD.gn
@@ -0,0 +1,63 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+config("cpu_features_include") {
+  include_dirs = [ "ndk/sources/android/cpufeatures" ]
+}
+
+# This is the GN version of
+# //build/android/cpufeatures.gypi:cpufeatures
+source_set("cpu_features") {
+  sources = [ "ndk/sources/android/cpufeatures/cpu-features.c" ]
+  public_configs = [ ":cpu_features_include" ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+}
+
+android_java_prebuilt("android_gcm_java") {
+  jar_path = "$android_sdk_root/extras/google/gcm/gcm-client/dist/gcm.jar"
+}
+
+android_java_prebuilt("uiautomator_java") {
+  jar_path = "$android_sdk/uiautomator.jar"
+}
+
+android_java_prebuilt("android_support_v13_java") {
+  jar_path = "$android_sdk_root/extras/android/support/v13/android-support-v13.jar"
+}
+
+android_resources("android_support_v7_appcompat_resources") {
+  v14_verify_only = true
+  resource_dirs = [
+    "$android_sdk_root/extras/android/support/v7/appcompat/res"
+  ]
+  custom_package = "android.support.v7.appcompat"
+}
+
+android_java_prebuilt("android_support_v7_appcompat_java") {
+  deps = [ ":android_support_v7_appcompat_resources" ]
+  jar_path = "$android_sdk_root/extras/android/support/v7/appcompat/libs/android-support-v7-appcompat.jar"
+}
+
+android_resources("android_support_v7_mediarouter_resources") {
+  v14_verify_only = true
+  resource_dirs = [
+    "$android_sdk_root/extras/android/support/v7/mediarouter/res"
+  ]
+  deps = [
+    ":android_support_v7_appcompat_resources",
+  ]
+  custom_package = "android.support.v7.mediarouter"
+}
+
+android_java_prebuilt("android_support_v7_mediarouter_java") {
+  deps = [
+    ":android_support_v7_mediarouter_resources",
+    ":android_support_v7_appcompat_java",
+  ]
+  jar_path = "$android_sdk_root/extras/android/support/v7/mediarouter/libs/android-support-v7-mediarouter.jar"
+}
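The cpu_features target wraps the NDK's cpufeatures helper and forwards its include directory through public_configs, so native dependents can include the header with no extra configuration. A hypothetical consumer (names illustrative):

    source_set("arm_capability_check") {
      sources = [ "arm_capability_check.cc" ]  # May #include "cpu-features.h".
      deps = [ "//third_party/android_tools:cpu_features" ]
    }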
diff --git a/build/secondary/third_party/cacheinvalidation/BUILD.gn b/build/secondary/third_party/cacheinvalidation/BUILD.gn
new file mode 100644
index 0000000..53d8472
--- /dev/null
+++ b/build/secondary/third_party/cacheinvalidation/BUILD.gn
@@ -0,0 +1,144 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("cacheinvalidation_config") {
+  include_dirs = [
+    "overrides",
+    "src",
+  ]
+}
+
+static_library("cacheinvalidation") {
+  sources = [
+    "overrides/google/cacheinvalidation/deps/callback.h",
+    "overrides/google/cacheinvalidation/deps/gmock.h",
+    "overrides/google/cacheinvalidation/deps/googletest.h",
+    "overrides/google/cacheinvalidation/deps/logging.h",
+    "overrides/google/cacheinvalidation/deps/mutex.h",
+    "overrides/google/cacheinvalidation/deps/random.h",
+    "overrides/google/cacheinvalidation/deps/random.cc",
+    "overrides/google/cacheinvalidation/deps/sha1-digest-function.h",
+    "overrides/google/cacheinvalidation/deps/scoped_ptr.h",
+    "overrides/google/cacheinvalidation/deps/stl-namespace.h",
+    "overrides/google/cacheinvalidation/deps/string_util.h",
+    "overrides/google/cacheinvalidation/deps/time.h",
+    "src/google/cacheinvalidation/deps/digest-function.h",
+    "src/google/cacheinvalidation/impl/basic-system-resources.cc",
+    "src/google/cacheinvalidation/impl/basic-system-resources.h",
+    "src/google/cacheinvalidation/impl/checking-invalidation-listener.cc",
+    "src/google/cacheinvalidation/impl/checking-invalidation-listener.h",
+    "src/google/cacheinvalidation/impl/client-protocol-namespace-fix.h",
+    "src/google/cacheinvalidation/impl/constants.cc",
+    "src/google/cacheinvalidation/impl/constants.h",
+    "src/google/cacheinvalidation/impl/digest-store.h",
+    "src/google/cacheinvalidation/impl/exponential-backoff-delay-generator.cc",
+    "src/google/cacheinvalidation/impl/exponential-backoff-delay-generator.h",
+    "src/google/cacheinvalidation/impl/invalidation-client-core.cc",
+    "src/google/cacheinvalidation/impl/invalidation-client-core.h",
+    "src/google/cacheinvalidation/impl/invalidation-client-factory.cc",
+    "src/google/cacheinvalidation/impl/invalidation-client-impl.cc",
+    "src/google/cacheinvalidation/impl/invalidation-client-impl.h",
+    "src/google/cacheinvalidation/impl/invalidation-client-util.h",
+    "src/google/cacheinvalidation/impl/log-macro.h",
+    "src/google/cacheinvalidation/impl/object-id-digest-utils.cc",
+    "src/google/cacheinvalidation/impl/object-id-digest-utils.h",
+    "src/google/cacheinvalidation/impl/persistence-utils.cc",
+    "src/google/cacheinvalidation/impl/persistence-utils.h",
+    "src/google/cacheinvalidation/impl/proto-converter.cc",
+    "src/google/cacheinvalidation/impl/proto-converter.h",
+    "src/google/cacheinvalidation/impl/proto-helpers.h",
+    "src/google/cacheinvalidation/impl/proto-helpers.cc",
+    "src/google/cacheinvalidation/impl/protocol-handler.cc",
+    "src/google/cacheinvalidation/impl/protocol-handler.h",
+    "src/google/cacheinvalidation/impl/recurring-task.cc",
+    "src/google/cacheinvalidation/impl/recurring-task.h",
+    "src/google/cacheinvalidation/impl/registration-manager.cc",
+    "src/google/cacheinvalidation/impl/registration-manager.h",
+    "src/google/cacheinvalidation/impl/repeated-field-namespace-fix.h",
+    "src/google/cacheinvalidation/impl/run-state.h",
+    "src/google/cacheinvalidation/impl/safe-storage.cc",
+    "src/google/cacheinvalidation/impl/safe-storage.h",
+    "src/google/cacheinvalidation/impl/simple-registration-store.cc",
+    "src/google/cacheinvalidation/impl/simple-registration-store.h",
+    "src/google/cacheinvalidation/impl/smearer.h",
+    "src/google/cacheinvalidation/impl/statistics.cc",
+    "src/google/cacheinvalidation/impl/statistics.h",
+    "src/google/cacheinvalidation/impl/throttle.cc",
+    "src/google/cacheinvalidation/impl/throttle.h",
+    "src/google/cacheinvalidation/impl/ticl-message-validator.cc",
+    "src/google/cacheinvalidation/impl/ticl-message-validator.h",
+    "src/google/cacheinvalidation/include/invalidation-client.h",
+    "src/google/cacheinvalidation/include/invalidation-client-factory.h",
+    "src/google/cacheinvalidation/include/invalidation-listener.h",
+    "src/google/cacheinvalidation/include/system-resources.h",
+    "src/google/cacheinvalidation/include/types.h",
+  ]
+
+  public_configs = [ ":cacheinvalidation_config" ]
+
+  deps = [
+    "src/google/cacheinvalidation:cacheinvalidation_proto_cpp",
+    "//base",
+  ]
+
+  if (is_win) {
+    # TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
+    cflags = [ "/wd4267" ]
+  }
+}
+
+test("cacheinvalidation_unittests") {
+  sources = [
+    "src/google/cacheinvalidation/test/deterministic-scheduler.cc",
+    "src/google/cacheinvalidation/test/deterministic-scheduler.h",
+    "src/google/cacheinvalidation/test/test-logger.cc",
+    "src/google/cacheinvalidation/test/test-logger.h",
+    "src/google/cacheinvalidation/test/test-utils.cc",
+    "src/google/cacheinvalidation/test/test-utils.h",
+    "src/google/cacheinvalidation/impl/invalidation-client-impl_test.cc",
+    "src/google/cacheinvalidation/impl/protocol-handler_test.cc",
+    "src/google/cacheinvalidation/impl/recurring-task_test.cc",
+    "src/google/cacheinvalidation/impl/throttle_test.cc",
+  ]
+
+  deps = [
+    ":cacheinvalidation",
+    "src/google/cacheinvalidation:cacheinvalidation_proto_cpp",
+    "//base",
+    "//base/test:run_all_unittests",
+    "//testing/gmock",
+    "//testing/gtest",
+  ]
+}
+
+# TODO(GYP) Test isolation stuff.
+if (is_android) {
+  import("//build/config/android/rules.gni")
+
+  # GYP: //third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_proto_java
+  proto_java_library("cacheinvalidation_proto_java") {
+    proto_path = "src/proto"
+    sources = [
+      "$proto_path/android_channel.proto",
+      "$proto_path/android_listener.proto",
+      "$proto_path/android_service.proto",
+      "$proto_path/channel_common.proto",
+      "$proto_path/client.proto",
+      "$proto_path/client_protocol.proto",
+      "$proto_path/java_client.proto",
+      "$proto_path/types.proto",
+    ]
+  }
+
+  # GYP: //third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_javalib
+  android_library("cacheinvalidation_javalib") {
+    deps = [
+      ":cacheinvalidation_proto_java",
+      "//third_party/android_protobuf:protobuf_nano_javalib",
+      "//third_party/android_tools:android_gcm_java",
+    ]
+
+    DEPRECATED_java_in_dir = "src/java"
+  }
+}
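The proto_java_library target above sets proto_path once and reuses it as "$proto_path/..." in every source entry; GN expands scope variables inside strings, which keeps the repeated prefix out of the list. The same idiom in miniature (illustrative names):

    proto_java_library("example_proto_java") {
      proto_path = "src/proto"
      sources = [
        "$proto_path/a.proto",  # Expands to "src/proto/a.proto".
        "$proto_path/b.proto",
      ]
    }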
diff --git a/build/secondary/third_party/cacheinvalidation/src/google/cacheinvalidation/BUILD.gn b/build/secondary/third_party/cacheinvalidation/src/google/cacheinvalidation/BUILD.gn
new file mode 100644
index 0000000..2dc1b99
--- /dev/null
+++ b/build/secondary/third_party/cacheinvalidation/src/google/cacheinvalidation/BUILD.gn
@@ -0,0 +1,28 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/protobuf/proto_library.gni")
+
+proto_library("cacheinvalidation_proto_cpp") {
+  # Depend on cacheinvalidation instead.
+  visibility = [ "//third_party/cacheinvalidation/*" ]
+
+  sources = [
+    "client.proto",
+    "client_gateway.proto",
+    "client_protocol.proto",
+    "client_test_internal.proto",
+    "types.proto",
+  ]
+
+  if (!is_android) {
+    sources += [
+      "android_channel.proto",
+      "channel_common.proto",
+    ]
+  }
+
+  proto_out_dir = "google/cacheinvalidation"
+}
+
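With proto_out_dir set to "google/cacheinvalidation", the generated C++ for these .proto files lands under that path, and the visibility list restricts dependents to //third_party/cacheinvalidation. A sketch of an allowed dependent (target name illustrative; per the visibility rule it would have to live under that directory):

    source_set("uses_invalidation_protos") {
      sources = [ "uses_invalidation_protos.cc" ]
      # Generated headers are included as, e.g.,
      # "google/cacheinvalidation/client_protocol.pb.h".
      deps = [ ":cacheinvalidation_proto_cpp" ]
    }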
diff --git a/build/secondary/third_party/flac/BUILD.gn b/build/secondary/third_party/flac/BUILD.gn
new file mode 100644
index 0000000..8a498bb
--- /dev/null
+++ b/build/secondary/third_party/flac/BUILD.gn
@@ -0,0 +1,76 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("flac_config") {
+  defines = [ "FLAC__NO_DLL" ]
+}
+
+static_library("flac") {
+  sources = [
+    "include/FLAC/all.h",
+    "include/FLAC/assert.h",
+    "include/FLAC/callback.h",
+    "include/FLAC/export.h",
+    "include/FLAC/format.h",
+    "include/FLAC/metadata.h",
+    "include/FLAC/ordinals.h",
+    "include/FLAC/stream_decoder.h",
+    "include/FLAC/stream_encoder.h",
+    "include/share/alloc.h",
+    "src/libFLAC/alloc.c",
+    "src/libFLAC/bitmath.c",
+    "src/libFLAC/bitreader.c",
+    "src/libFLAC/bitwriter.c",
+    "src/libFLAC/cpu.c",
+    "src/libFLAC/crc.c",
+    "src/libFLAC/fixed.c",
+    "src/libFLAC/float.c",
+    "src/libFLAC/format.c",
+    "src/libFLAC/lpc.c",
+    "src/libFLAC/md5.c",
+    "src/libFLAC/memory.c",
+    "src/libFLAC/stream_decoder.c",
+    "src/libFLAC/stream_encoder.c",
+    "src/libFLAC/stream_encoder_framing.c",
+    "src/libFLAC/window.c",
+    "src/libFLAC/include/private/all.h",
+    "src/libFLAC/include/private/bitmath.h",
+    "src/libFLAC/include/private/bitreader.h",
+    "src/libFLAC/include/private/bitwriter.h",
+    "src/libFLAC/include/private/cpu.h",
+    "src/libFLAC/include/private/crc.h",
+    "src/libFLAC/include/private/fixed.h",
+    "src/libFLAC/include/private/float.h",
+    "src/libFLAC/include/private/format.h",
+    "src/libFLAC/include/private/lpc.h",
+    "src/libFLAC/include/private/md5.h",
+    "src/libFLAC/include/private/memory.h",
+    "src/libFLAC/include/private/metadata.h",
+    "src/libFLAC/include/private/stream_encoder_framing.h",
+    "src/libFLAC/include/private/window.h",
+    "src/libFLAC/include/protected/all.h",
+    "src/libFLAC/include/protected/stream_decoder.h",
+    "src/libFLAC/include/protected/stream_encoder.h",
+  ]
+
+  defines = [
+    "FLAC__OVERFLOW_DETECT",
+    "VERSION=\"1.2.1\"",
+  ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+  public_configs = [ ":flac_config" ]
+
+  include_dirs = [
+    "include",
+    "src/libFLAC/include",
+  ]
+
+  if (is_clang) {
+    # libflac converts between FLAC__StreamDecoderState and
+    # FLAC__StreamDecoderInitStatus a lot in stream_decoder.c.
+    cflags = [ "-Wno-conversion" ]
+  }
+}
diff --git a/build/secondary/third_party/freetype/BUILD.gn b/build/secondary/third_party/freetype/BUILD.gn
new file mode 100644
index 0000000..2b96239
--- /dev/null
+++ b/build/secondary/third_party/freetype/BUILD.gn
@@ -0,0 +1,52 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_android, "This library is only used on Android")
+
+config("freetype_config") {
+  include_dirs = [ "include" ]
+}
+
+source_set("freetype") {
+  sources = [
+    # The following files are not sorted alphabetically, but in the
+    # same order as in Android.mk to ease maintenance.
+    "src/base/ftbbox.c",
+    "src/base/ftbitmap.c",
+    "src/base/ftfstype.c",
+    "src/base/ftglyph.c",
+    "src/base/ftlcdfil.c",
+    "src/base/ftstroke.c",
+    "src/base/fttype1.c",
+    "src/base/ftxf86.c",
+    "src/base/ftbase.c",
+    "src/base/ftsystem.c",
+    "src/base/ftinit.c",
+    "src/base/ftgasp.c",
+    "src/raster/raster.c",
+    "src/sfnt/sfnt.c",
+    "src/smooth/smooth.c",
+    "src/autofit/autofit.c",
+    "src/truetype/truetype.c",
+    "src/cff/cff.c",
+    "src/psnames/psnames.c",
+    "src/pshinter/pshinter.c",
+  ]
+
+  defines = [
+    "FT2_BUILD_LIBRARY",
+    "DARWIN_NO_CARBON",
+  ]
+
+  include_dirs = [
+    "build",
+  ]
+
+  public_configs = [ ":freetype_config" ]
+
+  deps = [
+    "//third_party/libpng",
+    "//third_party/zlib",
+  ]
+}
diff --git a/build/secondary/third_party/icu/BUILD.gn b/build/secondary/third_party/icu/BUILD.gn
new file mode 100644
index 0000000..865a5e4
--- /dev/null
+++ b/build/secondary/third_party/icu/BUILD.gn
@@ -0,0 +1,511 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/icu/config.gni")
+
+# Meta target that includes both icuuc and icui18n. Most targets want both.
+# You can depend on them individually if you need to.
+group("icu") {
+  deps = [
+    ":icui18n",
+    ":icuuc",
+  ]
+}
+
+# Shared config used by ICU and all dependents.
+config("icu_config") {
+  defines = [
+    "U_USING_ICU_NAMESPACE=0",
+    "U_ENABLE_DYLOAD=0",
+  ]
+
+  if (component_mode != "shared_library") {
+    defines += [ "U_STATIC_IMPLEMENTATION" ]
+  }
+
+  include_dirs = [
+    "source/common",
+    "source/i18n",
+  ]
+}
+
+# Config used only by ICU code.
+config("icu_code") {
+  cflags = []
+  if (is_win) {
+    # Disable some compiler warnings.
+    cflags += [
+      "/wd4005",  # Macro redefinition.
+      "/wd4068",  # Unknown pragmas.
+      "/wd4267",  # Conversion from size_t on 64-bits.
+      "/wd4996",  # Deprecated functions.
+    ]
+  } else if (is_linux) {
+    cflags += [
+      # Since ICU internally uses its own deprecated APIs, don't
+      # complain about it.
+      "-Wno-deprecated-declarations",
+      "-Wno-unused-function",
+    ]
+  }
+  if (is_clang) {
+    cflags += [
+      "-Wno-deprecated-declarations",
+      "-Wno-logical-op-parentheses",
+      "-Wno-tautological-compare",
+      "-Wno-switch",
+    ]
+  }
+}
+
+component("icui18n") {
+  sources = [
+    "source/i18n/alphaindex.cpp",
+    "source/i18n/anytrans.cpp",
+    "source/i18n/astro.cpp",
+    "source/i18n/basictz.cpp",
+    "source/i18n/bocsu.cpp",
+    "source/i18n/brktrans.cpp",
+    "source/i18n/buddhcal.cpp",
+    "source/i18n/calendar.cpp",
+    "source/i18n/casetrn.cpp",
+    "source/i18n/cecal.cpp",
+    "source/i18n/chnsecal.cpp",
+    "source/i18n/choicfmt.cpp",
+    "source/i18n/coleitr.cpp",
+    "source/i18n/coll.cpp",
+    "source/i18n/compactdecimalformat.cpp",
+    "source/i18n/coptccal.cpp",
+    "source/i18n/cpdtrans.cpp",
+    "source/i18n/csdetect.cpp",
+    "source/i18n/csmatch.cpp",
+    "source/i18n/csr2022.cpp",
+    "source/i18n/csrecog.cpp",
+    "source/i18n/csrmbcs.cpp",
+    "source/i18n/csrsbcs.cpp",
+    "source/i18n/csrucode.cpp",
+    "source/i18n/csrutf8.cpp",
+    "source/i18n/curramt.cpp",
+    "source/i18n/currfmt.cpp",
+    "source/i18n/currpinf.cpp",
+    "source/i18n/currunit.cpp",
+    "source/i18n/dangical.cpp",
+    "source/i18n/datefmt.cpp",
+    "source/i18n/dcfmtsym.cpp",
+    "source/i18n/decContext.c",
+    "source/i18n/decfmtst.cpp",
+    "source/i18n/decimfmt.cpp",
+    "source/i18n/decNumber.c",
+    "source/i18n/digitlst.cpp",
+    "source/i18n/dtfmtsym.cpp",
+    "source/i18n/dtitvfmt.cpp",
+    "source/i18n/dtitvinf.cpp",
+    "source/i18n/dtptngen.cpp",
+    "source/i18n/dtrule.cpp",
+    "source/i18n/esctrn.cpp",
+    "source/i18n/ethpccal.cpp",
+    "source/i18n/fmtable_cnv.cpp",
+    "source/i18n/fmtable.cpp",
+    "source/i18n/format.cpp",
+    "source/i18n/fphdlimp.cpp",
+    "source/i18n/fpositer.cpp",
+    "source/i18n/funcrepl.cpp",
+    "source/i18n/gender.cpp",
+    "source/i18n/gregocal.cpp",
+    "source/i18n/gregoimp.cpp",
+    "source/i18n/hebrwcal.cpp",
+    "source/i18n/identifier_info.cpp",
+    "source/i18n/indiancal.cpp",
+    "source/i18n/inputext.cpp",
+    "source/i18n/islamcal.cpp",
+    "source/i18n/japancal.cpp",
+    "source/i18n/locdspnm.cpp",
+    "source/i18n/measfmt.cpp",
+    "source/i18n/measure.cpp",
+    "source/i18n/msgfmt.cpp",
+    "source/i18n/name2uni.cpp",
+    "source/i18n/nfrs.cpp",
+    "source/i18n/nfrule.cpp",
+    "source/i18n/nfsubs.cpp",
+    "source/i18n/nortrans.cpp",
+    "source/i18n/nultrans.cpp",
+    "source/i18n/numfmt.cpp",
+    "source/i18n/numsys.cpp",
+    "source/i18n/olsontz.cpp",
+    "source/i18n/persncal.cpp",
+    "source/i18n/plurfmt.cpp",
+    "source/i18n/plurrule.cpp",
+    "source/i18n/quant.cpp",
+    "source/i18n/rbnf.cpp",
+    "source/i18n/rbt.cpp",
+    "source/i18n/rbt_data.cpp",
+    "source/i18n/rbt_pars.cpp",
+    "source/i18n/rbt_rule.cpp",
+    "source/i18n/rbt_set.cpp",
+    "source/i18n/rbtz.cpp",
+    "source/i18n/regexcmp.cpp",
+    "source/i18n/regeximp.cpp",
+    "source/i18n/regexst.cpp",
+    "source/i18n/regextxt.cpp",
+    "source/i18n/region.cpp",
+    "source/i18n/reldtfmt.cpp",
+    "source/i18n/rematch.cpp",
+    "source/i18n/remtrans.cpp",
+    "source/i18n/repattrn.cpp",
+    "source/i18n/scriptset.cpp",
+    "source/i18n/search.cpp",
+    "source/i18n/selfmt.cpp",
+    "source/i18n/simpletz.cpp",
+    "source/i18n/smpdtfmt.cpp",
+    "source/i18n/smpdtfst.cpp",
+    "source/i18n/sortkey.cpp",
+    "source/i18n/strmatch.cpp",
+    "source/i18n/strrepl.cpp",
+    "source/i18n/stsearch.cpp",
+    "source/i18n/taiwncal.cpp",
+    "source/i18n/tblcoll.cpp",
+    "source/i18n/timezone.cpp",
+    "source/i18n/titletrn.cpp",
+    "source/i18n/tmunit.cpp",
+    "source/i18n/tmutamt.cpp",
+    "source/i18n/tmutfmt.cpp",
+    "source/i18n/tolowtrn.cpp",
+    "source/i18n/toupptrn.cpp",
+    "source/i18n/translit.cpp",
+    "source/i18n/transreg.cpp",
+    "source/i18n/tridpars.cpp",
+    "source/i18n/tzfmt.cpp",
+    "source/i18n/tzgnames.cpp",
+    "source/i18n/tznames.cpp",
+    "source/i18n/tznames_impl.cpp",
+    "source/i18n/tzrule.cpp",
+    "source/i18n/tztrans.cpp",
+    "source/i18n/ucal.cpp",
+    "source/i18n/ucln_in.c",
+    "source/i18n/ucol_bld.cpp",
+    "source/i18n/ucol_cnt.cpp",
+    "source/i18n/ucol.cpp",
+    "source/i18n/ucoleitr.cpp",
+    "source/i18n/ucol_elm.cpp",
+    "source/i18n/ucol_res.cpp",
+    "source/i18n/ucol_sit.cpp",
+    "source/i18n/ucol_tok.cpp",
+    "source/i18n/ucol_wgt.cpp",
+    "source/i18n/ucsdet.cpp",
+    "source/i18n/ucurr.cpp",
+    "source/i18n/udat.cpp",
+    "source/i18n/udateintervalformat.cpp",
+    "source/i18n/udatpg.cpp",
+    "source/i18n/ulocdata.c",
+    "source/i18n/umsg.cpp",
+    "source/i18n/unesctrn.cpp",
+    "source/i18n/uni2name.cpp",
+    "source/i18n/unum.cpp",
+    "source/i18n/unumsys.cpp",
+    "source/i18n/upluralrules.cpp",
+    "source/i18n/uregexc.cpp",
+    "source/i18n/uregex.cpp",
+    "source/i18n/uregion.cpp",
+    "source/i18n/usearch.cpp",
+    "source/i18n/uspoof_build.cpp",
+    "source/i18n/uspoof_conf.cpp",
+    "source/i18n/uspoof.cpp",
+    "source/i18n/uspoof_impl.cpp",
+    "source/i18n/uspoof_wsconf.cpp",
+    "source/i18n/utmscale.c",
+    "source/i18n/utrans.cpp",
+    "source/i18n/vtzone.cpp",
+    "source/i18n/vzone.cpp",
+    "source/i18n/windtfmt.cpp",
+    "source/i18n/winnmfmt.cpp",
+    "source/i18n/wintzimpl.cpp",
+    "source/i18n/zonemeta.cpp",
+    "source/i18n/zrule.cpp",
+    "source/i18n/ztrans.cpp",
+  ]
+  defines = [
+    "U_I18N_IMPLEMENTATION",
+  ]
+  deps = [
+    ":icuuc",
+  ]
+
+  # ICU uses RTTI; replace the default "no rtti" config.
+  configs -= [
+    "//build/config/compiler:no_rtti",  # ICU uses RTTI.
+    "//build/config/compiler:chromium_code",
+  ]
+  configs += [
+    "//build/config/compiler:rtti",
+    "//build/config/compiler:no_chromium_code",
+  ]
+
+  configs += [ ":icu_code" ]
+  public_configs = [ ":icu_config" ]
+
+  cflags = []
+  if (is_android || is_linux) {
+    cflags += [
+      # ICU uses its own deprecated functions.
+      "-Wno-deprecated-declarations",
+    ]
+  }
+  if (is_clang) {
+    # uspoof.h has a U_NAMESPACE_USE macro. That's a bug;
+    # the header should use U_NAMESPACE_BEGIN instead.
+    # http://bugs.icu-project.org/trac/ticket/9054
+    configs -= [ "//build/config/clang:extra_warnings" ]
+
+    cflags += [
+      "-Wno-header-hygiene",
+      # Looks like a real issue, see http://crbug.com/114660
+      "-Wno-return-type-c-linkage",
+    ]
+  }
+}
+
+component("icuuc") {
+  sources = [
+    "source/common/appendable.cpp",
+    "source/common/bmpset.cpp",
+    "source/common/brkeng.cpp",
+    "source/common/brkiter.cpp",
+    "source/common/bytestream.cpp",
+    "source/common/bytestriebuilder.cpp",
+    "source/common/bytestrie.cpp",
+    "source/common/bytestrieiterator.cpp",
+    "source/common/caniter.cpp",
+    "source/common/chariter.cpp",
+    "source/common/charstr.cpp",
+    "source/common/cmemory.c",
+    "source/common/cstring.c",
+    "source/common/cwchar.c",
+    "source/common/dictbe.cpp",
+    "source/common/dictionarydata.cpp",
+    "source/common/dtintrv.cpp",
+    "source/common/errorcode.cpp",
+    "source/common/filterednormalizer2.cpp",
+    "source/common/icudataver.c",
+    "source/common/icuplug.c",
+    "source/common/listformatter.cpp",
+    "source/common/locavailable.cpp",
+    "source/common/locbased.cpp",
+    "source/common/locdispnames.cpp",
+    "source/common/locid.cpp",
+    "source/common/loclikely.cpp",
+    "source/common/locmap.c",
+    "source/common/locresdata.cpp",
+    "source/common/locutil.cpp",
+    "source/common/messagepattern.cpp",
+    "source/common/normalizer2.cpp",
+    "source/common/normalizer2impl.cpp",
+    "source/common/normlzr.cpp",
+    "source/common/parsepos.cpp",
+    "source/common/patternprops.cpp",
+    "source/common/propname.cpp",
+    "source/common/propsvec.c",
+    "source/common/punycode.cpp",
+    "source/common/putil.cpp",
+    "source/common/rbbi.cpp",
+    "source/common/rbbidata.cpp",
+    "source/common/rbbinode.cpp",
+    "source/common/rbbirb.cpp",
+    "source/common/rbbiscan.cpp",
+    "source/common/rbbisetb.cpp",
+    "source/common/rbbistbl.cpp",
+    "source/common/rbbitblb.cpp",
+    "source/common/resbund_cnv.cpp",
+    "source/common/resbund.cpp",
+    "source/common/ruleiter.cpp",
+    "source/common/schriter.cpp",
+    "source/common/serv.cpp",
+    "source/common/servlk.cpp",
+    "source/common/servlkf.cpp",
+    "source/common/servls.cpp",
+    "source/common/servnotf.cpp",
+    "source/common/servrbf.cpp",
+    "source/common/servslkf.cpp",
+    "source/common/stringpiece.cpp",
+    "source/common/stringtriebuilder.cpp",
+    "source/common/uarrsort.c",
+    "source/common/ubidi.c",
+    "source/common/ubidiln.c",
+    "source/common/ubidi_props.c",
+    "source/common/ubidiwrt.c",
+    "source/common/ubrk.cpp",
+    "source/common/ucase.cpp",
+    "source/common/ucasemap.cpp",
+    "source/common/ucasemap_titlecase_brkiter.cpp",
+    "source/common/ucat.c",
+    "source/common/uchar.c",
+    "source/common/ucharstriebuilder.cpp",
+    "source/common/ucharstrie.cpp",
+    "source/common/ucharstrieiterator.cpp",
+    "source/common/uchriter.cpp",
+    "source/common/ucln_cmn.c",
+    "source/common/ucmndata.c",
+    "source/common/ucnv2022.cpp",
+    "source/common/ucnv_bld.cpp",
+    "source/common/ucnvbocu.cpp",
+    "source/common/ucnv.c",
+    "source/common/ucnv_cb.c",
+    "source/common/ucnv_cnv.c",
+    "source/common/ucnv_ct.c",
+    "source/common/ucnvdisp.c",
+    "source/common/ucnv_err.c",
+    "source/common/ucnv_ext.cpp",
+    "source/common/ucnvhz.c",
+    "source/common/ucnv_io.cpp",
+    "source/common/ucnvisci.c",
+    "source/common/ucnvlat1.c",
+    "source/common/ucnv_lmb.c",
+    "source/common/ucnvmbcs.c",
+    "source/common/ucnvscsu.c",
+    "source/common/ucnvsel.cpp",
+    "source/common/ucnv_set.c",
+    "source/common/ucnv_u16.c",
+    "source/common/ucnv_u32.c",
+    "source/common/ucnv_u7.c",
+    "source/common/ucnv_u8.c",
+    "source/common/ucol_swp.cpp",
+    "source/common/udata.cpp",
+    "source/common/udatamem.c",
+    "source/common/udataswp.c",
+    "source/common/uenum.c",
+    "source/common/uhash.c",
+    "source/common/uhash_us.cpp",
+    "source/common/uidna.cpp",
+    "source/common/uinit.cpp",
+    "source/common/uinvchar.c",
+    "source/common/uiter.cpp",
+    "source/common/ulist.c",
+    "source/common/uloc.cpp",
+    "source/common/uloc_tag.c",
+    "source/common/umapfile.c",
+    "source/common/umath.c",
+    "source/common/umutex.cpp",
+    "source/common/unames.cpp",
+    "source/common/unifilt.cpp",
+    "source/common/unifunct.cpp",
+    "source/common/uniset_closure.cpp",
+    "source/common/uniset.cpp",
+    "source/common/uniset_props.cpp",
+    "source/common/unisetspan.cpp",
+    "source/common/unistr_case.cpp",
+    "source/common/unistr_case_locale.cpp",
+    "source/common/unistr_cnv.cpp",
+    "source/common/unistr.cpp",
+    "source/common/unistr_props.cpp",
+    "source/common/unistr_titlecase_brkiter.cpp",
+    "source/common/unormcmp.cpp",
+    "source/common/unorm.cpp",
+    "source/common/unorm_it.c",
+    "source/common/uobject.cpp",
+    "source/common/uprops.cpp",
+    "source/common/uresbund.cpp",
+    "source/common/ures_cnv.c",
+    "source/common/uresdata.c",
+    "source/common/usc_impl.c",
+    "source/common/uscript.c",
+    "source/common/uscript_props.cpp",
+    "source/common/uset.cpp",
+    "source/common/usetiter.cpp",
+    "source/common/uset_props.cpp",
+    "source/common/ushape.cpp",
+    "source/common/usprep.cpp",
+    "source/common/ustack.cpp",
+    "source/common/ustrcase.cpp",
+    "source/common/ustrcase_locale.cpp",
+    "source/common/ustr_cnv.c",
+    "source/common/ustrenum.cpp",
+    "source/common/ustrfmt.c",
+    "source/common/ustring.cpp",
+    "source/common/ustr_titlecase_brkiter.cpp",
+    "source/common/ustrtrns.cpp",
+    "source/common/ustr_wcs.cpp",
+    "source/common/utext.cpp",
+    "source/common/utf_impl.c",
+    "source/common/util.cpp",
+    "source/common/util_props.cpp",
+    "source/common/utrace.c",
+    "source/common/utrie2_builder.cpp",
+    "source/common/utrie2.cpp",
+    "source/common/utrie.cpp",
+    "source/common/uts46.cpp",
+    "source/common/utypes.c",
+    "source/common/uvector.cpp",
+    "source/common/uvectr32.cpp",
+    "source/common/uvectr64.cpp",
+    "source/common/wintz.c",
+  ]
+  defines = [
+    "U_COMMON_IMPLEMENTATION",
+  ]
+  deps = [
+    ":icudata",
+  ]
+  configs += [ ":icu_code" ]
+
+  configs -= [
+    "//build/config/compiler:no_rtti",  # ICU uses RTTI.
+    "//build/config/compiler:chromium_code",
+  ]
+  configs += [
+    "//build/config/compiler:rtti",
+    "//build/config/compiler:no_chromium_code",
+  ]
+
+  public_configs = [ ":icu_config" ]
+
+  if (is_win || icu_use_data_file) {
+    sources += [ "source/stubdata/stubdata.c" ]
+  }
+}
+
+# TODO(GYP) support use_system_icu.
+if (icu_use_data_file) {
+  if (is_ios) {
+    # TODO(GYP): Support mac resource bundle shown below.
+    # 'link_settings': {
+    #   'mac_bundle_resources': [
+    #     'source/data/in/icudtl.dat',
+    #   ],
+    # }
+  } else {
+    copy("icudata") {
+      if (is_android) {
+        sources = [ "android/icudtl.dat" ]
+      } else {
+        sources = [ "source/data/in/icudtl.dat" ]
+      }
+
+      outputs = [ "$root_out_dir/icudtl.dat" ]
+    }
+  }
+} else {
+  if (is_win) {
+    # On Windows the target DLL is pre-built so just use a copy rule.
+    copy("icudata") {
+      sources = [ "windows/icudt.dll" ]
+      outputs = [ "$root_out_dir/icudt.dll" ]
+    }
+  } else {
+    source_set("icudata") {
+      # These are hand-generated, but will do for now.
+      #
+      # TODO(GYP): Gyp has considerations here for QNX and for the host toolchain
+      #  that have not been ported over.
+      if (is_linux) {
+        sources = [ "linux/icudtl_dat.S" ]
+      } else if (is_mac) {
+        sources = [ "mac/icudtl_dat.S" ]
+      } else if (is_android) {
+        sources = [ "android/icudtl_dat.S" ]
+      } else {
+        assert(false, "No icu data for this platform")
+      }
+      defines = [ "U_HIDE_DATA_SYMBOL" ]
+    }
+  }
+}
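The group("icu") at the top of this file is the intended entry point: a single dep pulls in both icui18n and icuuc, and icu_config propagates the U_* defines plus the source/common and source/i18n include paths. A hypothetical consumer (target name illustrative):

    executable("icu_smoke_test") {
      sources = [ "icu_smoke_test.cc" ]
      deps = [ "//third_party/icu" ]
    }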
diff --git a/build/secondary/third_party/icu/config.gni b/build/secondary/third_party/icu/config.gni
new file mode 100644
index 0000000..9c389de
--- /dev/null
+++ b/build/secondary/third_party/icu/config.gni
@@ -0,0 +1,16 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Tells icu to load an external data file rather than rely on the icudata
+  # being linked directly into the binary.
+  #
+  # This flag is a bit confusing. As of this writing, icu.gyp sets the value to
+  # 0 but common.gypi sets the value to 1 for most platforms (and the 1 takes
+  # precedence).
+  #
+  # TODO(GYP) We'll probably need to enhance this logic to set the value to
+  # true or false in similar circumstances.
+  icu_use_data_file = true
+}
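Because icu_use_data_file is declared inside declare_args(), it can be flipped per build directory instead of being edited here. Assuming the standard gn args workflow, the override is one line in the build directory's args.gn:

    # args.gn (opened with `gn args out/Default`):
    # Link the ICU data into the binary rather than shipping icudtl.dat.
    icu_use_data_file = false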
diff --git a/build/secondary/third_party/leveldatabase/BUILD.gn b/build/secondary/third_party/leveldatabase/BUILD.gn
new file mode 100644
index 0000000..ffe9fa6
--- /dev/null
+++ b/build/secondary/third_party/leveldatabase/BUILD.gn
@@ -0,0 +1,339 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Snappy is a compression library we use.
+# TODO(brettw) It's not clear why this needs to be parameterized.
+use_snappy = true
+
+defines = [ "LEVELDB_PLATFORM_CHROMIUM=1" ]
+
+config("leveldatabase_config") {
+  include_dirs = [
+    ".",
+    "src",
+    "src/include",
+  ]
+  if (is_win) {
+    include_dirs += [ "src/port/win" ]
+  }
+}
+
+static_library("leveldatabase") {
+  sources = [
+    "env_chromium.cc",
+    "env_chromium.h",
+    "env_chromium_stdio.cc",
+    "env_chromium_stdio.h",
+    "env_idb.h",
+    "port/port_chromium.cc",
+    "port/port_chromium.h",
+    "src/db/builder.cc",
+    "src/db/builder.h",
+    "src/db/db_impl.cc",
+    "src/db/db_impl.h",
+    "src/db/db_iter.cc",
+    "src/db/db_iter.h",
+    "src/db/filename.cc",
+    "src/db/filename.h",
+    "src/db/dbformat.cc",
+    "src/db/dbformat.h",
+    "src/db/log_format.h",
+    "src/db/log_reader.cc",
+    "src/db/log_reader.h",
+    "src/db/log_writer.cc",
+    "src/db/log_writer.h",
+    "src/db/memtable.cc",
+    "src/db/memtable.h",
+    "src/db/repair.cc",
+    "src/db/skiplist.h",
+    "src/db/snapshot.h",
+    "src/db/table_cache.cc",
+    "src/db/table_cache.h",
+    "src/db/version_edit.cc",
+    "src/db/version_edit.h",
+    "src/db/version_set.cc",
+    "src/db/version_set.h",
+    "src/db/write_batch.cc",
+    "src/db/write_batch_internal.h",
+    "src/helpers/memenv/memenv.cc",
+    "src/helpers/memenv/memenv.h",
+    "src/include/leveldb/cache.h",
+    "src/include/leveldb/comparator.h",
+    "src/include/leveldb/db.h",
+    "src/include/leveldb/env.h",
+    "src/include/leveldb/filter_policy.h",
+    "src/include/leveldb/iterator.h",
+    "src/include/leveldb/options.h",
+    "src/include/leveldb/slice.h",
+    "src/include/leveldb/status.h",
+    "src/include/leveldb/table.h",
+    "src/include/leveldb/table_builder.h",
+    "src/include/leveldb/write_batch.h",
+    "src/port/port.h",
+    "src/port/port_example.h",
+    #"src/port/port_posix.cc",  # We use the chromium port instead of this.
+    #"src/port/port_posix.h",
+    "src/table/block.cc",
+    "src/table/block.h",
+    "src/table/block_builder.cc",
+    "src/table/block_builder.h",
+    "src/table/filter_block.cc",
+    "src/table/filter_block.h",
+    "src/table/format.cc",
+    "src/table/format.h",
+    "src/table/iterator.cc",
+    "src/table/iterator_wrapper.h",
+    "src/table/merger.cc",
+    "src/table/merger.h",
+    "src/table/table.cc",
+    "src/table/table_builder.cc",
+    "src/table/two_level_iterator.cc",
+    "src/table/two_level_iterator.h",
+    "src/util/arena.cc",
+    "src/util/arena.h",
+    "src/util/bloom.cc",
+    "src/util/cache.cc",
+    "src/util/coding.cc",
+    "src/util/coding.h",
+    "src/util/comparator.cc",
+    "src/util/crc32c.cc",
+    "src/util/crc32c.h",
+    "src/util/env.cc",
+    "src/util/filter_policy.cc",
+    "src/util/hash.cc",
+    "src/util/hash.h",
+    "src/util/logging.cc",
+    "src/util/logging.h",
+    "src/util/mutexlock.h",
+    "src/util/options.cc",
+    "src/util/random.h",
+    "src/util/status.cc",
+  ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+
+  public_configs = [ ":leveldatabase_config" ]
+
+  deps = [
+    "//base",
+    "//base/third_party/dynamic_annotations",
+    "//third_party/re2",
+  ]
+
+  if (use_snappy) {
+    defines += [ "USE_SNAPPY=1" ]
+    deps += [ "//third_party/snappy" ]
+  }
+}
+
+if (!is_android) {
+  test("env_chromium_unittests") {
+    sources = [
+      "env_chromium_unittest.cc",
+    ]
+    deps = [
+      ":leveldatabase",
+      "//base/test:test_support",
+      "//testing/gtest",
+    ]
+  }
+
+  static_library("leveldb_testutil") {
+    sources = [
+      "src/util/histogram.cc",
+      "src/util/histogram.h",
+      "src/util/testharness.cc",
+      "src/util/testharness.h",
+      "src/util/testutil.cc",
+      "src/util/testutil.h",
+    ]
+
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+
+    public_deps = [
+      ":leveldatabase",
+    ]
+    deps = [
+      "//base",
+    ]
+  }
+
+  test("leveldb_arena_test") {
+    sources = [
+      "src/util/arena_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_bloom_test") {
+    sources = [
+      "src/util/bloom_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_cache_test") {
+    sources = [
+      "src/util/cache_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_corruption_test") {
+    sources = [
+      "src/db/corruption_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_crc32c_test") {
+    sources = [
+      "src/util/crc32c_test.cc"
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_db_bench") {
+    sources = [
+      "src/db/db_bench.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_db_test") {
+    sources = [
+      "src/db/db_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_dbformat_test") {
+    sources = [
+      "src/db/dbformat_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_env_test") {
+    sources = [
+      "src/util/env_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_filename_test") {
+    sources = [
+      "src/db/filename_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_filter_block_test") {
+    sources = [
+      "src/table/filter_block_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_log_test") {
+    sources = [
+      "src/db/log_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_skiplist_test") {
+    sources = [
+      "src/db/skiplist_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_table_test") {
+    sources = [
+      "src/table/table_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_version_edit_test") {
+    sources = [
+      "src/db/version_edit_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+
+  test("leveldb_write_batch_test") {
+    sources = [
+      "src/db/write_batch_test.cc",
+    ]
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    deps = [
+      ":leveldb_testutil",
+    ]
+  }
+}
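One subtlety above: defines = [ "LEVELDB_PLATFORM_CHROMIUM=1" ] is set at file scope, and GN makes file-scope variables visible inside the target blocks defined below it, which is why the static_library can later write defines += [ "USE_SNAPPY=1" ] without first declaring the list. The scoping rule in miniature (illustrative names):

    defines = [ "COMMON=1" ]      # File scope; inherited by targets below.

    static_library("example") {
      sources = [ "example.cc" ]
      defines += [ "EXTRA=1" ]    # Appends to the inherited list.
    }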
diff --git a/build/secondary/third_party/libjpeg_turbo/BUILD.gn b/build/secondary/third_party/libjpeg_turbo/BUILD.gn
new file mode 100644
index 0000000..4dbca97
--- /dev/null
+++ b/build/secondary/third_party/libjpeg_turbo/BUILD.gn
@@ -0,0 +1,219 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Do not use the targets in this file unless you need a particular libjpeg
+# implementation. Use the meta target //third_party:jpeg instead.
+
+if (cpu_arch == "arm") {
+  import("//build/config/arm.gni")
+}
+
+if (cpu_arch == "x86" || cpu_arch == "x64") {
+
+import("//third_party/yasm/yasm_assemble.gni")
+
+yasm_assemble("simd_asm") {
+  defines = []
+
+  if (cpu_arch == "x86") {
+    sources = [
+      "simd/jccolmmx.asm",
+      "simd/jccolss2.asm",
+      "simd/jcgrammx.asm",
+      "simd/jcgrass2.asm",
+      "simd/jcqnt3dn.asm",
+      "simd/jcqntmmx.asm",
+      "simd/jcqnts2f.asm",
+      "simd/jcqnts2i.asm",
+      "simd/jcqntsse.asm",
+      "simd/jcsammmx.asm",
+      "simd/jcsamss2.asm",
+      "simd/jdcolmmx.asm",
+      "simd/jdcolss2.asm",
+      "simd/jdmermmx.asm",
+      "simd/jdmerss2.asm",
+      "simd/jdsammmx.asm",
+      "simd/jdsamss2.asm",
+      "simd/jf3dnflt.asm",
+      "simd/jfmmxfst.asm",
+      "simd/jfmmxint.asm",
+      "simd/jfss2fst.asm",
+      "simd/jfss2int.asm",
+      "simd/jfsseflt.asm",
+      "simd/ji3dnflt.asm",
+      "simd/jimmxfst.asm",
+      "simd/jimmxint.asm",
+      "simd/jimmxred.asm",
+      "simd/jiss2flt.asm",
+      "simd/jiss2fst.asm",
+      "simd/jiss2int.asm",
+      "simd/jiss2red.asm",
+      "simd/jisseflt.asm",
+      "simd/jsimdcpu.asm",
+    ]
+    defines += [
+      "__x86__",
+    ]
+  } else if (cpu_arch == "x64") {
+    sources = [
+      "simd/jccolss2-64.asm",
+      "simd/jcgrass2-64.asm",
+      "simd/jcqnts2f-64.asm",
+      "simd/jcqnts2i-64.asm",
+      "simd/jcsamss2-64.asm",
+      "simd/jdcolss2-64.asm",
+      "simd/jdmerss2-64.asm",
+      "simd/jdsamss2-64.asm",
+      "simd/jfss2fst-64.asm",
+      "simd/jfss2int-64.asm",
+      "simd/jfsseflt-64.asm",
+      "simd/jiss2flt-64.asm",
+      "simd/jiss2fst-64.asm",
+      "simd/jiss2int-64.asm",
+      "simd/jiss2red-64.asm",
+    ]
+    defines += [
+      "__x86_64__",
+    ]
+  }
+
+  if (is_win) {
+    defines += [
+      "MSVC",
+    ]
+    include_dirs = [ "win" ]
+    if (cpu_arch == "x86") {
+      defines += [
+        "WIN32",
+      ]
+    } else {
+      defines += [
+        "WIN64",
+      ]
+    }
+  } else if (is_mac) {
+    defines += [
+      "MACHO",
+    ]
+    include_dirs = [ "mac" ]
+  } else if (is_linux) {
+    defines += [
+      "ELF",
+    ]
+    include_dirs = [ "linux" ]
+  }
+}
+
+}
+
+source_set("simd") {
+  if (cpu_arch == "x86") {
+    deps = [ ":simd_asm" ]
+    sources = [
+      "simd/jsimd_i386.c",
+    ]
+    if (is_win) {
+      cflags = [ "/wd4245" ]
+    }
+  } else if (cpu_arch == "x64") {
+    deps = [ ":simd_asm" ]
+    sources = [
+      "simd/jsimd_x86_64.c",
+    ]
+  } else if (cpu_arch == "arm" && arm_version >= 7 &&
+             (arm_use_neon || arm_optionally_use_neon)) {
+    sources = [
+      "simd/jsimd_arm.c",
+      "simd/jsimd_arm_neon.S",
+    ]
+  } else {
+    sources = [ "jsimd_none.c" ]
+  }
+}
+
+config("libjpeg_config") {
+  include_dirs = [ "." ]
+}
+
+source_set("libjpeg") {
+  sources = [
+    "jcapimin.c",
+    "jcapistd.c",
+    "jccoefct.c",
+    "jccolor.c",
+    "jcdctmgr.c",
+    "jchuff.c",
+    "jchuff.h",
+    "jcinit.c",
+    "jcmainct.c",
+    "jcmarker.c",
+    "jcmaster.c",
+    "jcomapi.c",
+    "jconfig.h",
+    "jcparam.c",
+    "jcphuff.c",
+    "jcprepct.c",
+    "jcsample.c",
+    "jdapimin.c",
+    "jdapistd.c",
+    "jdatadst.c",
+    "jdatasrc.c",
+    "jdcoefct.c",
+    "jdcolor.c",
+    "jdct.h",
+    "jddctmgr.c",
+    "jdhuff.c",
+    "jdhuff.h",
+    "jdinput.c",
+    "jdmainct.c",
+    "jdmarker.c",
+    "jdmaster.c",
+    "jdmerge.c",
+    "jdphuff.c",
+    "jdpostct.c",
+    "jdsample.c",
+    "jerror.c",
+    "jerror.h",
+    "jfdctflt.c",
+    "jfdctfst.c",
+    "jfdctint.c",
+    "jidctflt.c",
+    "jidctfst.c",
+    "jidctint.c",
+    "jidctred.c",
+    "jinclude.h",
+    "jmemmgr.c",
+    "jmemnobs.c",
+    "jmemsys.h",
+    "jmorecfg.h",
+    "jpegint.h",
+    "jpeglib.h",
+    "jpeglibmangler.h",
+    "jquant1.c",
+    "jquant2.c",
+    "jutils.c",
+    "jversion.h",
+  ]
+
+  defines = [
+    "WITH_SIMD",
+    "MOTION_JPEG_SUPPORTED",
+    "NO_GETENV",
+  ]
+
+  configs += [ ":libjpeg_config" ]
+
+  public_configs = [ ":libjpeg_config" ]
+
+  # MemorySanitizer doesn't support assembly code, so keep it disabled in
+  # MSan builds for now.
+  # TODO: Enable on Linux when .asm files are recognized.
+  if (is_msan || is_linux) {
+    sources += [ "jsimd_none.c" ]
+  } else {
+    deps = [ ":simd" ]
+  }
+
+  # TODO(GYP): Compile the .asm files with YASM as GYP does.
+}
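The header comment steers callers to a meta target rather than to these implementation targets. The usual shape of such a forwarder is a group that picks an implementation behind a stable label; a sketch, assuming a use_system_libjpeg build argument (both the flag and the system-library label here are assumptions for illustration):

    group("jpeg") {
      if (use_system_libjpeg) {
        deps = [ "//third_party/libjpeg:libjpeg" ]  # Assumed system wrapper.
      } else {
        deps = [ "//third_party/libjpeg_turbo:libjpeg" ]
      }
    }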
diff --git a/build/secondary/third_party/libsrtp/BUILD.gn b/build/secondary/third_party/libsrtp/BUILD.gn
new file mode 100644
index 0000000..b0e0a91
--- /dev/null
+++ b/build/secondary/third_party/libsrtp/BUILD.gn
@@ -0,0 +1,310 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  use_system_libsrtp = false
+}
+
+config("libsrtp_config") {
+  defines = [
+    "HAVE_STDLIB_H",
+    "HAVE_STRING_H",
+  ]
+
+  include_dirs = [
+    "config",
+    "srtp/include",
+    "srtp/crypto/include",
+  ]
+
+  if (is_posix) {
+    defines += [
+      "HAVE_INT16_T",
+      "HAVE_INT32_T",
+      "HAVE_INT8_T",
+      "HAVE_UINT16_T",
+      "HAVE_UINT32_T",
+      "HAVE_UINT64_T",
+      "HAVE_UINT8_T",
+      "HAVE_STDINT_H",
+      "HAVE_INTTYPES_H",
+      "HAVE_NETINET_IN_H",
+      "INLINE=inline",
+    ]
+  }
+
+  if (is_win) {
+    defines += [
+      "INLINE=__inline",
+      "HAVE_BYTESWAP_METHODS_H",
+      # All Windows architectures are this way.
+      "SIZEOF_UNSIGNED_LONG=4",
+      "SIZEOF_UNSIGNED_LONG_LONG=8",
+    ]
+  }
+
+  if (cpu_arch == "x64" || cpu_arch == "x86" || cpu_arch == "arm") {
+    defines += [
+      # TODO(leozwang): CPU_RISC doesn't work properly on the android/arm
+      # platform for unknown reasons; the root cause still needs to be
+      # investigated. CPU_RISC is used for optimization only, and CPU_CISC
+      # should work just fine. It has been tested on android/arm with the
+      # srtp test applications and libjingle.
+      "CPU_CISC",
+    ]
+  }
+
+  if (cpu_arch == "mipsel") {
+    defines += [ "CPU_RISC" ]
+  }
+}
+
+config("system_libsrtp_config") {
+  defines = [ "USE_SYSTEM_LIBSRTP" ]
+  include_dirs = [ "/usr/include/srtp" ]
+}
+
+if (use_system_libsrtp) {
+  group("libsrtp") {
+    public_configs = [ ":libsrtp_config", ":system_libsrtp_config" ]
+    libs = [ "-lsrtp" ]
+  }
+} else {
+  static_library("libsrtp") {
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    public_configs = [ ":libsrtp_config" ]
+
+    sources = [
+      # includes
+      "srtp/include/ekt.h",
+      "srtp/include/getopt_s.h",
+      "srtp/include/rtp.h",
+      "srtp/include/rtp_priv.h",
+      "srtp/include/srtp.h",
+      "srtp/include/srtp_priv.h",
+      "srtp/include/ut_sim.h",
+
+      # headers
+      "srtp/crypto/include/aes_cbc.h",
+      "srtp/crypto/include/aes.h",
+      "srtp/crypto/include/aes_icm.h",
+      "srtp/crypto/include/alloc.h",
+      "srtp/crypto/include/auth.h",
+      "srtp/crypto/include/cipher.h",
+      "srtp/crypto/include/cryptoalg.h",
+      "srtp/crypto/include/crypto.h",
+      "srtp/crypto/include/crypto_kernel.h",
+      "srtp/crypto/include/crypto_math.h",
+      "srtp/crypto/include/crypto_types.h",
+      "srtp/crypto/include/datatypes.h",
+      "srtp/crypto/include/err.h",
+      "srtp/crypto/include/gf2_8.h",
+      "srtp/crypto/include/hmac.h",
+      "srtp/crypto/include/integers.h",
+      "srtp/crypto/include/kernel_compat.h",
+      "srtp/crypto/include/key.h",
+      "srtp/crypto/include/null_auth.h",
+      "srtp/crypto/include/null_cipher.h",
+      "srtp/crypto/include/prng.h",
+      "srtp/crypto/include/rand_source.h",
+      "srtp/crypto/include/rdb.h",
+      "srtp/crypto/include/rdbx.h",
+      "srtp/crypto/include/sha1.h",
+      "srtp/crypto/include/stat.h",
+      "srtp/crypto/include/xfm.h",
+
+      # sources
+      "srtp/srtp/ekt.c",
+      "srtp/srtp/srtp.c",
+
+      "srtp/crypto/cipher/aes.c",
+      "srtp/crypto/cipher/aes_cbc.c",
+      "srtp/crypto/cipher/aes_icm.c",
+      "srtp/crypto/cipher/cipher.c",
+      "srtp/crypto/cipher/null_cipher.c",
+      "srtp/crypto/hash/auth.c",
+      "srtp/crypto/hash/hmac.c",
+      "srtp/crypto/hash/null_auth.c",
+      "srtp/crypto/hash/sha1.c",
+      "srtp/crypto/kernel/alloc.c",
+      "srtp/crypto/kernel/crypto_kernel.c",
+      "srtp/crypto/kernel/err.c",
+      "srtp/crypto/kernel/key.c",
+      "srtp/crypto/math/datatypes.c",
+      "srtp/crypto/math/gf2_8.c",
+      "srtp/crypto/math/stat.c",
+      "srtp/crypto/replay/rdb.c",
+      "srtp/crypto/replay/rdbx.c",
+      "srtp/crypto/replay/ut_sim.c",
+      "srtp/crypto/rng/ctr_prng.c",
+      "srtp/crypto/rng/prng.c",
+      "srtp/crypto/rng/rand_source.c",
+    ]
+
+    if (is_clang) {
+      cflags = [ "-Wno-implicit-function-declaration" ]
+    }
+  }
+
+  # TODO(GYP): A bunch of these tests don't compile (in gyp either). They're
+  # not very broken, so they could probably be made to work if that's useful.
+  if (!is_win) {
+    executable("rdbx_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/include/getopt_s.h",
+        "srtp/test/getopt_s.c",
+        "srtp/test/rdbx_driver.c",
+      ]
+    }
+
+    executable("srtp_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/include/getopt_s.h",
+        "srtp/include/srtp_priv.h",
+        "srtp/test/getopt_s.c",
+        "srtp/test/srtp_driver.c",
+      ]
+    }
+
+    executable("roc_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/crypto/include/rdbx.h",
+        "srtp/include/ut_sim.h",
+        "srtp/test/roc_driver.c",
+      ]
+    }
+
+    executable("replay_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/crypto/include/rdbx.h",
+        "srtp/include/ut_sim.h",
+        "srtp/test/replay_driver.c",
+      ]
+    }
+
+    executable("rtpw") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/include/getopt_s.h",
+        "srtp/include/rtp.h",
+        "srtp/include/srtp.h",
+        "srtp/crypto/include/datatypes.h",
+        "srtp/test/getopt_s.c",
+        "srtp/test/rtp.c",
+        "srtp/test/rtpw.c",
+      ]
+      if (is_android) {
+        defines = [ "HAVE_SYS_SOCKET_H" ]
+      }
+      if (is_clang) {
+        cflags = [ "-Wno-implicit-function-declaration" ]
+      }
+    }
+
+    executable("srtp_test_cipher_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/crypto/test/cipher_driver.c",
+      ]
+    }
+
+    executable("srtp_test_datatypes_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/crypto/test/datatypes_driver.c",
+      ]
+    }
+
+    executable("srtp_test_stat_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/crypto/test/stat_driver.c",
+      ]
+    }
+
+    executable("srtp_test_sha1_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/crypto/test/sha1_driver.c",
+      ]
+    }
+
+    executable("srtp_test_kernel_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/crypto/test/kernel_driver.c",
+      ]
+    }
+
+    executable("srtp_test_aes_calc") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/crypto/test/aes_calc.c",
+      ]
+    }
+
+    executable("srtp_test_rand_gen") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/crypto/test/rand_gen.c",
+      ]
+    }
+
+    executable("srtp_test_env") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [ ":libsrtp" ]
+      sources = [
+        "srtp/crypto/test/env.c",
+      ]
+    }
+
+    group("srtp_runtest") {
+      deps = [
+        ":rdbx_driver",
+        ":srtp_driver",
+        ":roc_driver",
+        ":replay_driver",
+        ":rtpw",
+        ":srtp_test_cipher_driver",
+        ":srtp_test_datatypes_driver",
+        ":srtp_test_stat_driver",
+        ":srtp_test_sha1_driver",
+        ":srtp_test_kernel_driver",
+        ":srtp_test_aes_calc",
+        ":srtp_test_rand_gen",
+        ":srtp_test_env",
+      ]
+    }
+  }
+}
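The declare_args() toggle above keeps the label stable: consumers always depend on //third_party/libsrtp, and use_system_libsrtp decides whether that resolves to the bundled static library or the thin group around the system library. A hypothetical consumer that works the same either way:

    source_set("rtp_transport") {
      sources = [ "rtp_transport.cc" ]
      deps = [ "//third_party/libsrtp" ]
    }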
diff --git a/build/secondary/third_party/nss/BUILD.gn b/build/secondary/third_party/nss/BUILD.gn
new file mode 100644
index 0000000..15f80b3
--- /dev/null
+++ b/build/secondary/third_party/nss/BUILD.gn
@@ -0,0 +1,1209 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+if (is_linux) {
+  # This is a dependency on NSS with no libssl. On Linux we use a built-in SSL
+  # library together with the system NSS libraries. Non-Linux platforms that
+  # use NSS use the hermetic copy in //third_party/nss.
+  #
+  # Generally you should depend on //crypto:platform instead of using this
+  # config since that will properly pick up NSS or OpenSSL depending on
+  # platform and build config.
+  pkg_config("system_nss_no_ssl_config") {
+    packages = [ "nss" ]
+    extra_args = [ "-v", "-lssl3" ]
+  }
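+
+  # A minimal sketch of the preferred pattern: a hypothetical target depends
+  # on the platform abstraction instead of this config, and the right crypto
+  # backend (NSS or OpenSSL) is picked automatically:
+  #   source_set("my_crypto_user") {
+  #     deps = [ "//crypto:platform" ]
+  #   }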
+} else {
+  include_nss_root_certs = is_ios
+  include_nss_libpkix = is_ios
+
+  config("nspr_config") {
+    defines = [ "NO_NSPR_10_SUPPORT" ]
+    include_dirs = [
+      "nspr/pr/include",
+      "nspr/lib/ds",
+      "nspr/lib/libc/include",
+    ]
+
+    if (component_mode != "shared_library") {
+      defines += [ "NSPR_STATIC" ]
+    }
+  }
+
+  component("nspr") {
+    output_name = "crnspr"
+    sources = [
+      "nspr/lib/ds/plarena.c",
+      "nspr/lib/ds/plarena.h",
+      "nspr/lib/ds/plarenas.h",
+      "nspr/lib/ds/plhash.c",
+      "nspr/lib/ds/plhash.h",
+      "nspr/lib/libc/include/plbase64.h",
+      "nspr/lib/libc/include/plerror.h",
+      "nspr/lib/libc/include/plgetopt.h",
+      "nspr/lib/libc/include/plstr.h",
+      "nspr/lib/libc/src/base64.c",
+      "nspr/lib/libc/src/plerror.c",
+      "nspr/lib/libc/src/plgetopt.c",
+      "nspr/lib/libc/src/strcase.c",
+      "nspr/lib/libc/src/strcat.c",
+      "nspr/lib/libc/src/strchr.c",
+      "nspr/lib/libc/src/strcmp.c",
+      "nspr/lib/libc/src/strcpy.c",
+      "nspr/lib/libc/src/strdup.c",
+      "nspr/lib/libc/src/strlen.c",
+      "nspr/lib/libc/src/strpbrk.c",
+      "nspr/lib/libc/src/strstr.c",
+      "nspr/lib/libc/src/strtok.c",
+      "nspr/pr/include/md/prosdep.h",
+      "nspr/pr/include/md/_darwin.cfg",
+      "nspr/pr/include/md/_darwin.h",
+      "nspr/pr/include/md/_pcos.h",
+      "nspr/pr/include/md/_pth.h",
+      "nspr/pr/include/md/_unixos.h",
+      "nspr/pr/include/md/_unix_errors.h",
+      "nspr/pr/include/md/_win32_errors.h",
+      "nspr/pr/include/md/_win95.cfg",
+      "nspr/pr/include/md/_win95.h",
+      "nspr/pr/include/nspr.h",
+      "nspr/pr/include/obsolete/pralarm.h",
+      "nspr/pr/include/obsolete/probslet.h",
+      "nspr/pr/include/obsolete/protypes.h",
+      "nspr/pr/include/obsolete/prsem.h",
+      "nspr/pr/include/pratom.h",
+      "nspr/pr/include/prbit.h",
+      "nspr/pr/include/prclist.h",
+      "nspr/pr/include/prcmon.h",
+      "nspr/pr/include/prcountr.h",
+      "nspr/pr/include/prcpucfg.h",
+      "nspr/pr/include/prcvar.h",
+      "nspr/pr/include/prdtoa.h",
+      "nspr/pr/include/prenv.h",
+      "nspr/pr/include/prerr.h",
+      "nspr/pr/include/prerror.h",
+      "nspr/pr/include/prinet.h",
+      "nspr/pr/include/prinit.h",
+      "nspr/pr/include/prinrval.h",
+      "nspr/pr/include/prio.h",
+      "nspr/pr/include/pripcsem.h",
+      "nspr/pr/include/private/pprio.h",
+      "nspr/pr/include/private/pprmwait.h",
+      "nspr/pr/include/private/pprthred.h",
+      "nspr/pr/include/private/primpl.h",
+      "nspr/pr/include/private/prpriv.h",
+      "nspr/pr/include/prlink.h",
+      "nspr/pr/include/prlock.h",
+      "nspr/pr/include/prlog.h",
+      "nspr/pr/include/prlong.h",
+      "nspr/pr/include/prmem.h",
+      "nspr/pr/include/prmon.h",
+      "nspr/pr/include/prmwait.h",
+      "nspr/pr/include/prnetdb.h",
+      "nspr/pr/include/prolock.h",
+      "nspr/pr/include/prpdce.h",
+      "nspr/pr/include/prprf.h",
+      "nspr/pr/include/prproces.h",
+      "nspr/pr/include/prrng.h",
+      "nspr/pr/include/prrwlock.h",
+      "nspr/pr/include/prshm.h",
+      "nspr/pr/include/prshma.h",
+      "nspr/pr/include/prsystem.h",
+      "nspr/pr/include/prthread.h",
+      "nspr/pr/include/prtime.h",
+      "nspr/pr/include/prtpool.h",
+      "nspr/pr/include/prtrace.h",
+      "nspr/pr/include/prtypes.h",
+      "nspr/pr/include/prvrsion.h",
+      "nspr/pr/include/prwin16.h",
+      "nspr/pr/src/io/prdir.c",
+      "nspr/pr/src/io/prfdcach.c",
+      "nspr/pr/src/io/prfile.c",
+      "nspr/pr/src/io/prio.c",
+      "nspr/pr/src/io/priometh.c",
+      "nspr/pr/src/io/pripv6.c",
+      "nspr/pr/src/io/prlayer.c",
+      "nspr/pr/src/io/prlog.c",
+      "nspr/pr/src/io/prmapopt.c",
+      "nspr/pr/src/io/prmmap.c",
+      "nspr/pr/src/io/prmwait.c",
+      "nspr/pr/src/io/prpolevt.c",
+      "nspr/pr/src/io/prprf.c",
+      "nspr/pr/src/io/prscanf.c",
+      "nspr/pr/src/io/prsocket.c",
+      "nspr/pr/src/io/prstdio.c",
+      "nspr/pr/src/linking/prlink.c",
+      "nspr/pr/src/malloc/prmalloc.c",
+      "nspr/pr/src/malloc/prmem.c",
+      "nspr/pr/src/md/prosdep.c",
+      "nspr/pr/src/md/unix/darwin.c",
+      "nspr/pr/src/md/unix/os_Darwin.s",
+      "nspr/pr/src/md/unix/unix.c",
+      "nspr/pr/src/md/unix/unix_errors.c",
+      "nspr/pr/src/md/unix/uxproces.c",
+      "nspr/pr/src/md/unix/uxrng.c",
+      "nspr/pr/src/md/unix/uxshm.c",
+      "nspr/pr/src/md/unix/uxwrap.c",
+      "nspr/pr/src/md/windows/ntgc.c",
+      "nspr/pr/src/md/windows/ntinrval.c",
+      "nspr/pr/src/md/windows/ntmisc.c",
+      "nspr/pr/src/md/windows/ntsec.c",
+      "nspr/pr/src/md/windows/ntsem.c",
+      "nspr/pr/src/md/windows/w32ipcsem.c",
+      "nspr/pr/src/md/windows/w32poll.c",
+      "nspr/pr/src/md/windows/w32rng.c",
+      "nspr/pr/src/md/windows/w32shm.c",
+      "nspr/pr/src/md/windows/w95cv.c",
+      "nspr/pr/src/md/windows/w95dllmain.c",
+      "nspr/pr/src/md/windows/w95io.c",
+      "nspr/pr/src/md/windows/w95sock.c",
+      "nspr/pr/src/md/windows/w95thred.c",
+      "nspr/pr/src/md/windows/win32_errors.c",
+      "nspr/pr/src/memory/prseg.c",
+      "nspr/pr/src/memory/prshm.c",
+      "nspr/pr/src/memory/prshma.c",
+      "nspr/pr/src/misc/pralarm.c",
+      "nspr/pr/src/misc/pratom.c",
+      "nspr/pr/src/misc/praton.c",
+      "nspr/pr/src/misc/prcountr.c",
+      "nspr/pr/src/misc/prdtoa.c",
+      "nspr/pr/src/misc/prenv.c",
+      "nspr/pr/src/misc/prerr.c",
+      "nspr/pr/src/misc/prerror.c",
+      "nspr/pr/src/misc/prerrortable.c",
+      "nspr/pr/src/misc/prinit.c",
+      "nspr/pr/src/misc/prinrval.c",
+      "nspr/pr/src/misc/pripc.c",
+      "nspr/pr/src/misc/pripcsem.c",
+      "nspr/pr/src/misc/prlog2.c",
+      "nspr/pr/src/misc/prlong.c",
+      "nspr/pr/src/misc/prnetdb.c",
+      "nspr/pr/src/misc/prolock.c",
+      "nspr/pr/src/misc/prrng.c",
+      "nspr/pr/src/misc/prsystem.c",
+      "nspr/pr/src/misc/prthinfo.c",
+      "nspr/pr/src/misc/prtime.c",
+      "nspr/pr/src/misc/prtpool.c",
+      "nspr/pr/src/misc/prtrace.c",
+      "nspr/pr/src/pthreads/ptio.c",
+      "nspr/pr/src/pthreads/ptmisc.c",
+      "nspr/pr/src/pthreads/ptsynch.c",
+      "nspr/pr/src/pthreads/ptthread.c",
+      "nspr/pr/src/threads/combined/prucpu.c",
+      "nspr/pr/src/threads/combined/prucv.c",
+      "nspr/pr/src/threads/combined/prulock.c",
+      "nspr/pr/src/threads/combined/prustack.c",
+      "nspr/pr/src/threads/combined/pruthr.c",
+      "nspr/pr/src/threads/prcmon.c",
+      "nspr/pr/src/threads/prcthr.c",
+      "nspr/pr/src/threads/prdump.c",
+      "nspr/pr/src/threads/prmon.c",
+      "nspr/pr/src/threads/prrwlock.c",
+      "nspr/pr/src/threads/prsem.c",
+      "nspr/pr/src/threads/prtpd.c",
+    ]
+
+    public_configs = [ ":nspr_config" ]
+
+    configs -= [
+      "//build/config/compiler:chromium_code",
+    ]
+    if (is_win) {
+      configs -= [
+        "//build/config/win:unicode",  # Requires 8-bit mode.
+        "//build/config/win:lean_and_mean",  # Won"t compile with lean and mean.
+      ]
+    }
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+
+    cflags = []
+    defines = [
+      "_NSPR_BUILD_",
+      "FORCE_PR_LOG",
+    ]
+
+    include_dirs = [
+      "nspr/pr/include/private",
+    ]
+
+    if (is_win) {
+      cflags = [
+        "/wd4554",  # Check precidence.
+        "/wd4267",  # Conversion from size_t to "type".
+      ]
+      defines += [
+        "XP_PC",
+        "WIN32",
+        "WIN95",
+        "_PR_GLOBAL_THREADS_ONLY",
+        "_CRT_SECURE_NO_WARNINGS",
+      ]
+    } else {
+      sources -= [
+        "nspr/pr/src/md/windows/ntgc.c",
+        "nspr/pr/src/md/windows/ntinrval.c",
+        "nspr/pr/src/md/windows/ntmisc.c",
+        "nspr/pr/src/md/windows/ntsec.c",
+        "nspr/pr/src/md/windows/ntsem.c",
+        "nspr/pr/src/md/windows/w32ipcsem.c",
+        "nspr/pr/src/md/windows/w32poll.c",
+        "nspr/pr/src/md/windows/w32rng.c",
+        "nspr/pr/src/md/windows/w32shm.c",
+        "nspr/pr/src/md/windows/w95cv.c",
+        "nspr/pr/src/md/windows/w95dllmain.c",
+        "nspr/pr/src/md/windows/w95io.c",
+        "nspr/pr/src/md/windows/w95sock.c",
+        "nspr/pr/src/md/windows/w95thred.c",
+        "nspr/pr/src/md/windows/win32_errors.c",
+        "nspr/pr/src/threads/combined/prucpu.c",
+        "nspr/pr/src/threads/combined/prucv.c",
+        "nspr/pr/src/threads/combined/prulock.c",
+        "nspr/pr/src/threads/combined/prustack.c",
+        "nspr/pr/src/threads/combined/pruthr.c",
+      ]
+    }
+
+    if (!is_posix) {
+      sources -= [
+        "nspr/pr/src/md/unix/darwin.c",
+        "nspr/pr/src/md/unix/os_Darwin.s",
+        "nspr/pr/src/md/unix/unix.c",
+        "nspr/pr/src/md/unix/unix_errors.c",
+        "nspr/pr/src/md/unix/uxproces.c",
+        "nspr/pr/src/md/unix/uxrng.c",
+        "nspr/pr/src/md/unix/uxshm.c",
+        "nspr/pr/src/md/unix/uxwrap.c",
+        "nspr/pr/src/pthreads/ptio.c",
+        "nspr/pr/src/pthreads/ptmisc.c",
+        "nspr/pr/src/pthreads/ptsynch.c",
+        "nspr/pr/src/pthreads/ptthread.c",
+      ]
+    }
+
+    if (cpu_arch == "x86") {
+      defines += [ "_X86_" ]
+    } else if (cpu_arch == "x64") {
+      defines += [ "_AMD64_" ]
+    }
+
+    if (is_mac || is_ios) {
+      sources -= [
+        "nspr/pr/src/io/prdir.c",
+        "nspr/pr/src/io/prfile.c",
+        "nspr/pr/src/io/prio.c",
+        "nspr/pr/src/io/prsocket.c",
+        "nspr/pr/src/misc/pripcsem.c",
+        "nspr/pr/src/threads/prcthr.c",
+        "nspr/pr/src/threads/prdump.c",
+        "nspr/pr/src/threads/prmon.c",
+        "nspr/pr/src/threads/prsem.c",
+      ]
+      defines += [
+        "XP_UNIX",
+        "DARWIN",
+        "XP_MACOSX",
+        "_PR_PTHREADS",
+        "HAVE_BSD_FLOCK",
+        "HAVE_DLADDR",
+        "HAVE_LCHOWN",
+        "HAVE_SOCKLEN_T",
+        "HAVE_STRERROR",
+      ]
+    }
+
+    if (is_mac) {
+      defines += [
+        "HAVE_CRT_EXTERNS_H",
+      ]
+      libs = [
+        "CoreFoundation.framework",
+        "CoreServices.framework",
+      ]
+    }
+
+    if (is_clang) {
+      cflags += [
+        # nspr uses a bunch of deprecated functions (NSLinkModule etc) in
+        # prlink.c on mac.
+        "-Wno-deprecated-declarations",
+        # nspr passes "const char*" through "void*".
+        "-Wno-incompatible-pointer-types",
+        # nspr passes "int*" through "unsigned int*".
+        "-Wno-pointer-sign",
+      ]
+    }
+  }
+
+  component("nss") {
+    output_name = "crnss"
+    sources = [
+      # Ensure at least one object file is produced, so that MSVC does not
+      # warn when creating the static/shared library. See the note for
+      # the "nssckbi" target for why the "nss" target was split this way.
+      "nss/lib/nss/nssver.c",
+    ]
+
+    public_deps = [ ":nss_static" ]
+
+    if (include_nss_root_certs) {
+      public_deps += [ ":nssckbi" ]
+    }
+
+    if (component_mode == "shared_library") {
+      if (is_mac) {
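+        # -all_load makes the Mac linker pull every object out of the static
+        # archives so the NSS symbols survive into the shared component.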
+        ldflags = [ "-all_load" ]
+      } else if (is_win) {
+        # Pass the def file to the linker.
+        ldflags = [
+          "/DEF:" + rebase_path("nss/exports_win.def", root_build_dir)
+        ]
+      }
+    }
+  }
+
+  config("nssckbi_config") {
+    include_dirs = [ "nss/lib/ckfw/builtins" ]
+  }
+
+  # This is really more of a pseudo-target to work around the fact that
+  # a single static_library target cannot contain two object files of the
+  # same name (hash.o / hash.obj). Logically, this is part of the
+  # "nss_static" target. By separating it out, it creates a possible
+  # circular dependency between "nss_static" and "nssckbi" when
+  # "exclude_nss_root_certs" is not specified, as "nss_static" depends on
+  # the "builtinsC_GetFunctionList" exported by this target. This is an
+  # artifact of how NSS is being statically built, which is not an
+  # officially supported configuration - normally, "nssckbi.dll/so" would
+  # depend on libnss3.dll/so, and the higher layer caller would instruct
+  # libnss3.dll to dynamically load nssckbi.dll, breaking the circle.
+  #
+  # TODO(rsleevi): http://crbug.com/128134 - Break the circular dependency
+  # without requiring nssckbi to be built as a shared library.
+  source_set("nssckbi") {
+    visibility = [ ":nss" ]  # This target is internal implementation detail.
+
+    sources = [
+      "nss/lib/ckfw/builtins/anchor.c",
+      "nss/lib/ckfw/builtins/bfind.c",
+      "nss/lib/ckfw/builtins/binst.c",
+      "nss/lib/ckfw/builtins/bobject.c",
+      "nss/lib/ckfw/builtins/bsession.c",
+      "nss/lib/ckfw/builtins/bslot.c",
+      "nss/lib/ckfw/builtins/btoken.c",
+      "nss/lib/ckfw/builtins/builtins.h",
+      "nss/lib/ckfw/builtins/certdata.c",
+      "nss/lib/ckfw/builtins/ckbiver.c",
+      "nss/lib/ckfw/builtins/constants.c",
+      "nss/lib/ckfw/builtins/nssckbi.h",
+      "nss/lib/ckfw/ck.h",
+      "nss/lib/ckfw/ckfw.h",
+      "nss/lib/ckfw/ckfwm.h",
+      "nss/lib/ckfw/ckfwtm.h",
+      "nss/lib/ckfw/ckmd.h",
+      "nss/lib/ckfw/ckt.h",
+      "nss/lib/ckfw/crypto.c",
+      "nss/lib/ckfw/find.c",
+      "nss/lib/ckfw/hash.c",
+      "nss/lib/ckfw/instance.c",
+      "nss/lib/ckfw/mechanism.c",
+      "nss/lib/ckfw/mutex.c",
+      "nss/lib/ckfw/nssck.api",
+      "nss/lib/ckfw/nssckepv.h",
+      "nss/lib/ckfw/nssckft.h",
+      "nss/lib/ckfw/nssckfw.h",
+      "nss/lib/ckfw/nssckfwc.h",
+      "nss/lib/ckfw/nssckfwt.h",
+      "nss/lib/ckfw/nssckg.h",
+      "nss/lib/ckfw/nssckmdt.h",
+      "nss/lib/ckfw/nssckt.h",
+      "nss/lib/ckfw/object.c",
+      "nss/lib/ckfw/session.c",
+      "nss/lib/ckfw/sessobj.c",
+      "nss/lib/ckfw/slot.c",
+      "nss/lib/ckfw/token.c",
+      "nss/lib/ckfw/wrap.c",
+    ]
+
+    configs -= [
+      "//build/config/compiler:chromium_code"
+    ]
+
+    if (is_win) {
+      configs -= [
+        "//build/config/win:unicode",  # Requires 8-bit mode.
+      ]
+    }
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+
+    include_dirs = [ "nss/lib/ckfw" ]
+    public_configs = [ ":nssckbi_config" ]
+
+    public_deps = [
+      ":nss_static",
+    ]
+  }
+
+  config("nss_static_config") {
+    defines = [
+      "NSS_STATIC",
+      "NSS_USE_STATIC_LIBS",
+      "USE_UTIL_DIRECTLY",
+    ]
+    if (is_win) {
+      defines += [ "_WINDOWS" ]
+    }
+    include_dirs = [
+      "nspr/pr/include",
+      "nspr/lib/ds",
+      "nspr/lib/libc/include",
+      "nss/lib/base",
+      "nss/lib/certdb",
+      "nss/lib/certhigh",
+      "nss/lib/cryptohi",
+      "nss/lib/dev",
+      "nss/lib/freebl",
+      "nss/lib/freebl/ecl",
+      "nss/lib/nss",
+      "nss/lib/pk11wrap",
+      "nss/lib/pkcs7",
+      "nss/lib/pki",
+      "nss/lib/smime",
+      "nss/lib/softoken",
+      "nss/lib/util",
+    ]
+  }
+
+  if (is_win && cpu_arch == "x86") {
+    source_set("nss_static_avx") {
+      sources = [
+        "nss/lib/freebl/intel-gcm-wrap.c",
+        "nss/lib/freebl/intel-gcm-x86-masm.asm",
+        "nss/lib/freebl/intel-gcm.h",
+      ]
+      defines = [
+        "_WINDOWS",
+        "_X86_",
+        "INTEL_GCM",
+        "MP_API_COMPATIBLE",
+        "MP_ASSEMBLY_DIV_2DX1D",
+        "MP_ASSEMBLY_MULTIPLY",
+        "MP_ASSEMBLY_SQUARE",
+        "MP_NO_MP_WORD",
+        "MP_USE_UINT_DIGIT",
+        "NSS_DISABLE_DBM",
+        "NSS_STATIC",
+        "NSS_USE_STATIC_LIBS",
+        "NSS_X86",
+        "NSS_X86_OR_X64",
+        "RIJNDAEL_INCLUDE_TABLES",
+        "SHLIB_PREFIX=\"\"",
+        "SHLIB_SUFFIX=\"dll\"",
+        "SHLIB_VERSION=\"3\"",
+        "SOFTOKEN_LIB_NAME=\"softokn3.dll\"",
+        "SOFTOKEN_SHLIB_VERSION=\"3\"",
+        "USE_HW_AES",
+        "USE_UTIL_DIRECTLY",
+        "WIN32",
+        "WIN95",
+        "XP_PC",
+      ]
+      include_dirs = [
+        "nspr/pr/include",
+        "nspr/lib/ds",
+        "nspr/lib/libc/include",
+        "nss/lib/freebl/ecl",
+        "nss/lib/util",
+      ]
+    }
+  }
+
+  source_set("nss_static") {
+    visibility = [ ":*" ]  # Internal implementation detail.
+
+    sources = [
+      "nss/lib/base/arena.c",
+      "nss/lib/base/base.h",
+      "nss/lib/base/baset.h",
+      "nss/lib/base/error.c",
+      "nss/lib/base/errorval.c",
+      "nss/lib/base/hash.c",
+      "nss/lib/base/hashops.c",
+      "nss/lib/base/item.c",
+      "nss/lib/base/libc.c",
+      "nss/lib/base/list.c",
+      "nss/lib/base/nssbase.h",
+      "nss/lib/base/nssbaset.h",
+      "nss/lib/base/nssutf8.c",
+      "nss/lib/base/tracker.c",
+      "nss/lib/certdb/alg1485.c",
+      "nss/lib/certdb/cert.h",
+      "nss/lib/certdb/certdb.c",
+      "nss/lib/certdb/certdb.h",
+      "nss/lib/certdb/certi.h",
+      "nss/lib/certdb/certt.h",
+      "nss/lib/certdb/certv3.c",
+      "nss/lib/certdb/certxutl.c",
+      "nss/lib/certdb/certxutl.h",
+      "nss/lib/certdb/crl.c",
+      "nss/lib/certdb/genname.c",
+      "nss/lib/certdb/genname.h",
+      "nss/lib/certdb/polcyxtn.c",
+      "nss/lib/certdb/secname.c",
+      "nss/lib/certdb/stanpcertdb.c",
+      "nss/lib/certdb/xauthkid.c",
+      "nss/lib/certdb/xbsconst.c",
+      "nss/lib/certdb/xconst.c",
+      "nss/lib/certdb/xconst.h",
+      "nss/lib/certhigh/certhigh.c",
+      "nss/lib/certhigh/certhtml.c",
+      "nss/lib/certhigh/certreq.c",
+      "nss/lib/certhigh/certvfy.c",
+      "nss/lib/certhigh/crlv2.c",
+      "nss/lib/certhigh/ocsp.c",
+      "nss/lib/certhigh/ocsp.h",
+      "nss/lib/certhigh/ocspi.h",
+      "nss/lib/certhigh/ocspsig.c",
+      "nss/lib/certhigh/ocspt.h",
+      "nss/lib/certhigh/ocspti.h",
+      "nss/lib/certhigh/xcrldist.c",
+      "nss/lib/cryptohi/cryptohi.h",
+      "nss/lib/cryptohi/cryptoht.h",
+      "nss/lib/cryptohi/dsautil.c",
+      "nss/lib/cryptohi/key.h",
+      "nss/lib/cryptohi/keyhi.h",
+      "nss/lib/cryptohi/keyi.h",
+      "nss/lib/cryptohi/keyt.h",
+      "nss/lib/cryptohi/keythi.h",
+      "nss/lib/cryptohi/sechash.c",
+      "nss/lib/cryptohi/sechash.h",
+      "nss/lib/cryptohi/seckey.c",
+      "nss/lib/cryptohi/secsign.c",
+      "nss/lib/cryptohi/secvfy.c",
+      "nss/lib/dev/ckhelper.c",
+      "nss/lib/dev/ckhelper.h",
+      "nss/lib/dev/dev.h",
+      "nss/lib/dev/devm.h",
+      "nss/lib/dev/devslot.c",
+      "nss/lib/dev/devt.h",
+      "nss/lib/dev/devtm.h",
+      "nss/lib/dev/devtoken.c",
+      "nss/lib/dev/devutil.c",
+      "nss/lib/dev/nssdev.h",
+      "nss/lib/dev/nssdevt.h",
+      "nss/lib/freebl/aeskeywrap.c",
+      "nss/lib/freebl/alg2268.c",
+      "nss/lib/freebl/alghmac.c",
+      "nss/lib/freebl/alghmac.h",
+      "nss/lib/freebl/arcfive.c",
+      "nss/lib/freebl/arcfour.c",
+      "nss/lib/freebl/blapi.h",
+      "nss/lib/freebl/blapii.h",
+      "nss/lib/freebl/blapit.h",
+      "nss/lib/freebl/camellia.c",
+      "nss/lib/freebl/camellia.h",
+      "nss/lib/freebl/chacha20/chacha20.c",
+      "nss/lib/freebl/chacha20/chacha20.h",
+      "nss/lib/freebl/chacha20/chacha20_vec.c",
+      "nss/lib/freebl/chacha20poly1305.c",
+      "nss/lib/freebl/chacha20poly1305.h",
+      "nss/lib/freebl/ctr.c",
+      "nss/lib/freebl/ctr.h",
+      "nss/lib/freebl/cts.c",
+      "nss/lib/freebl/cts.h",
+      "nss/lib/freebl/des.c",
+      "nss/lib/freebl/des.h",
+      "nss/lib/freebl/desblapi.c",
+      "nss/lib/freebl/dh.c",
+      "nss/lib/freebl/drbg.c",
+      "nss/lib/freebl/dsa.c",
+      "nss/lib/freebl/ec.c",
+      "nss/lib/freebl/ec.h",
+      "nss/lib/freebl/ecdecode.c",
+      "nss/lib/freebl/ecl/ec2.h",
+      "nss/lib/freebl/ecl/ecl-curve.h",
+      "nss/lib/freebl/ecl/ecl-exp.h",
+      "nss/lib/freebl/ecl/ecl-priv.h",
+      "nss/lib/freebl/ecl/ecl.c",
+      "nss/lib/freebl/ecl/ecl.h",
+      "nss/lib/freebl/ecl/ecl_curve.c",
+      "nss/lib/freebl/ecl/ecl_gf.c",
+      "nss/lib/freebl/ecl/ecl_mult.c",
+      "nss/lib/freebl/ecl/ecp.h",
+      "nss/lib/freebl/ecl/ecp_256.c",
+      "nss/lib/freebl/ecl/ecp_256_32.c",
+      "nss/lib/freebl/ecl/ecp_384.c",
+      "nss/lib/freebl/ecl/ecp_521.c",
+      "nss/lib/freebl/ecl/ecp_aff.c",
+      "nss/lib/freebl/ecl/ecp_jac.c",
+      "nss/lib/freebl/ecl/ecp_jm.c",
+      "nss/lib/freebl/ecl/ecp_mont.c",
+      "nss/lib/freebl/ecl/ec_naf.c",
+      "nss/lib/freebl/gcm.c",
+      "nss/lib/freebl/gcm.h",
+      "nss/lib/freebl/intel-aes-x86-masm.asm",
+      "nss/lib/freebl/intel-aes.h",
+      "nss/lib/freebl/hmacct.c",
+      "nss/lib/freebl/hmacct.h",
+      "nss/lib/freebl/jpake.c",
+      "nss/lib/freebl/md2.c",
+      "nss/lib/freebl/md5.c",
+      "nss/lib/freebl/mpi/logtab.h",
+      "nss/lib/freebl/mpi/mpcpucache.c",
+      "nss/lib/freebl/mpi/mpi-config.h",
+      "nss/lib/freebl/mpi/mpi-priv.h",
+      "nss/lib/freebl/mpi/mpi.c",
+      "nss/lib/freebl/mpi/mpi.h",
+      "nss/lib/freebl/mpi/mpi_amd64.c",
+      "nss/lib/freebl/mpi/mpi_arm.c",
+      "nss/lib/freebl/mpi/mpi_arm_mac.c",
+      "nss/lib/freebl/mpi/mpi_x86_asm.c",
+      "nss/lib/freebl/mpi/mplogic.c",
+      "nss/lib/freebl/mpi/mplogic.h",
+      "nss/lib/freebl/mpi/mpmontg.c",
+      "nss/lib/freebl/mpi/mpprime.c",
+      "nss/lib/freebl/mpi/mpprime.h",
+      "nss/lib/freebl/mpi/mp_gf2m-priv.h",
+      "nss/lib/freebl/mpi/mp_gf2m.c",
+      "nss/lib/freebl/mpi/mp_gf2m.h",
+      "nss/lib/freebl/mpi/primes.c",
+      "nss/lib/freebl/nss_build_config_mac.h",
+      "nss/lib/freebl/poly1305/poly1305-donna-x64-sse2-incremental-source.c",
+      "nss/lib/freebl/poly1305/poly1305.c",
+      "nss/lib/freebl/poly1305/poly1305.h",
+      "nss/lib/freebl/pqg.c",
+      "nss/lib/freebl/pqg.h",
+      "nss/lib/freebl/rawhash.c",
+      "nss/lib/freebl/rijndael.c",
+      "nss/lib/freebl/rijndael.h",
+      "nss/lib/freebl/rijndael32.tab",
+      "nss/lib/freebl/rsa.c",
+      "nss/lib/freebl/rsapkcs.c",
+      "nss/lib/freebl/secmpi.h",
+      "nss/lib/freebl/secrng.h",
+      "nss/lib/freebl/seed.c",
+      "nss/lib/freebl/seed.h",
+      "nss/lib/freebl/sha256.h",
+      "nss/lib/freebl/sha512.c",
+      "nss/lib/freebl/sha_fast.c",
+      "nss/lib/freebl/sha_fast.h",
+      "nss/lib/freebl/shsign.h",
+      "nss/lib/freebl/shvfy.c",
+      "nss/lib/freebl/sysrand.c",
+      "nss/lib/freebl/tlsprfalg.c",
+      "nss/lib/freebl/unix_rand.c",
+      "nss/lib/freebl/win_rand.c",
+      "nss/lib/nss/nss.h",
+      "nss/lib/nss/nssinit.c",
+      "nss/lib/nss/nssrenam.h",
+      "nss/lib/nss/utilwrap.c",
+      "nss/lib/pk11wrap/debug_module.c",
+      "nss/lib/pk11wrap/dev3hack.c",
+      "nss/lib/pk11wrap/dev3hack.h",
+      "nss/lib/pk11wrap/pk11akey.c",
+      "nss/lib/pk11wrap/pk11auth.c",
+      "nss/lib/pk11wrap/pk11cert.c",
+      "nss/lib/pk11wrap/pk11cxt.c",
+      "nss/lib/pk11wrap/pk11err.c",
+      "nss/lib/pk11wrap/pk11func.h",
+      "nss/lib/pk11wrap/pk11kea.c",
+      "nss/lib/pk11wrap/pk11list.c",
+      "nss/lib/pk11wrap/pk11load.c",
+      "nss/lib/pk11wrap/pk11mech.c",
+      "nss/lib/pk11wrap/pk11merge.c",
+      "nss/lib/pk11wrap/pk11nobj.c",
+      "nss/lib/pk11wrap/pk11obj.c",
+      "nss/lib/pk11wrap/pk11pars.c",
+      "nss/lib/pk11wrap/pk11pbe.c",
+      "nss/lib/pk11wrap/pk11pk12.c",
+      "nss/lib/pk11wrap/pk11pqg.c",
+      "nss/lib/pk11wrap/pk11pqg.h",
+      "nss/lib/pk11wrap/pk11priv.h",
+      "nss/lib/pk11wrap/pk11pub.h",
+      "nss/lib/pk11wrap/pk11sdr.c",
+      "nss/lib/pk11wrap/pk11sdr.h",
+      "nss/lib/pk11wrap/pk11skey.c",
+      "nss/lib/pk11wrap/pk11slot.c",
+      "nss/lib/pk11wrap/pk11util.c",
+      "nss/lib/pk11wrap/secmod.h",
+      "nss/lib/pk11wrap/secmodi.h",
+      "nss/lib/pk11wrap/secmodt.h",
+      "nss/lib/pk11wrap/secmodti.h",
+      "nss/lib/pk11wrap/secpkcs5.h",
+      "nss/lib/pkcs7/certread.c",
+      "nss/lib/pkcs7/p7common.c",
+      "nss/lib/pkcs7/p7create.c",
+      "nss/lib/pkcs7/p7decode.c",
+      "nss/lib/pkcs7/p7encode.c",
+      "nss/lib/pkcs7/p7local.c",
+      "nss/lib/pkcs7/p7local.h",
+      "nss/lib/pkcs7/pkcs7t.h",
+      "nss/lib/pkcs7/secmime.c",
+      "nss/lib/pkcs7/secmime.h",
+      "nss/lib/pkcs7/secpkcs7.h",
+      "nss/lib/pki/asymmkey.c",
+      "nss/lib/pki/certdecode.c",
+      "nss/lib/pki/certificate.c",
+      "nss/lib/pki/cryptocontext.c",
+      "nss/lib/pki/nsspki.h",
+      "nss/lib/pki/nsspkit.h",
+      "nss/lib/pki/pki.h",
+      "nss/lib/pki/pki3hack.c",
+      "nss/lib/pki/pki3hack.h",
+      "nss/lib/pki/pkibase.c",
+      "nss/lib/pki/pkim.h",
+      "nss/lib/pki/pkistore.c",
+      "nss/lib/pki/pkistore.h",
+      "nss/lib/pki/pkit.h",
+      "nss/lib/pki/pkitm.h",
+      "nss/lib/pki/symmkey.c",
+      "nss/lib/pki/tdcache.c",
+      "nss/lib/pki/trustdomain.c",
+      "nss/lib/smime/cms.h",
+      "nss/lib/smime/cmslocal.h",
+      "nss/lib/smime/cmsreclist.h",
+      "nss/lib/smime/cmst.h",
+      "nss/lib/smime/smime.h",
+      "nss/lib/softoken/fipsaudt.c",
+      "nss/lib/softoken/fipstest.c",
+      "nss/lib/softoken/fipstokn.c",
+      "nss/lib/softoken/jpakesftk.c",
+      "nss/lib/softoken/lgglue.c",
+      "nss/lib/softoken/lgglue.h",
+      "nss/lib/softoken/lowkey.c",
+      "nss/lib/softoken/lowkeyi.h",
+      "nss/lib/softoken/lowkeyti.h",
+      "nss/lib/softoken/lowpbe.c",
+      "nss/lib/softoken/lowpbe.h",
+      "nss/lib/softoken/padbuf.c",
+      "nss/lib/softoken/pkcs11.c",
+      "nss/lib/softoken/pkcs11c.c",
+      "nss/lib/softoken/pkcs11i.h",
+      "nss/lib/softoken/pkcs11ni.h",
+      "nss/lib/softoken/pkcs11u.c",
+      "nss/lib/softoken/sdb.c",
+      "nss/lib/softoken/sdb.h",
+      "nss/lib/softoken/sftkdb.c",
+      "nss/lib/softoken/sftkdb.h",
+      "nss/lib/softoken/sftkdbt.h",
+      "nss/lib/softoken/sftkdbti.h",
+      "nss/lib/softoken/sftkhmac.c",
+      "nss/lib/softoken/sftkpars.c",
+      "nss/lib/softoken/sftkpars.h",
+      "nss/lib/softoken/sftkpwd.c",
+      "nss/lib/softoken/softkver.c",
+      "nss/lib/softoken/softkver.h",
+      "nss/lib/softoken/softoken.h",
+      "nss/lib/softoken/softoknt.h",
+      "nss/lib/softoken/tlsprf.c",
+      "nss/lib/ssl/sslerr.h",
+      "nss/lib/util/SECerrs.h",
+      "nss/lib/util/base64.h",
+      "nss/lib/util/ciferfam.h",
+      "nss/lib/util/derdec.c",
+      "nss/lib/util/derenc.c",
+      "nss/lib/util/dersubr.c",
+      "nss/lib/util/dertime.c",
+      "nss/lib/util/errstrs.c",
+      "nss/lib/util/hasht.h",
+      "nss/lib/util/nssb64.h",
+      "nss/lib/util/nssb64d.c",
+      "nss/lib/util/nssb64e.c",
+      "nss/lib/util/nssb64t.h",
+      "nss/lib/util/nssilckt.h",
+      "nss/lib/util/nssilock.c",
+      "nss/lib/util/nssilock.h",
+      "nss/lib/util/nsslocks.h",
+      "nss/lib/util/nssrwlk.c",
+      "nss/lib/util/nssrwlk.h",
+      "nss/lib/util/nssrwlkt.h",
+      "nss/lib/util/nssutil.h",
+      "nss/lib/util/oidstring.c",
+      "nss/lib/util/pkcs11.h",
+      "nss/lib/util/pkcs11f.h",
+      "nss/lib/util/pkcs11n.h",
+      "nss/lib/util/pkcs11p.h",
+      "nss/lib/util/pkcs11t.h",
+      "nss/lib/util/pkcs11u.h",
+      "nss/lib/util/portreg.c",
+      "nss/lib/util/portreg.h",
+      "nss/lib/util/quickder.c",
+      "nss/lib/util/secalgid.c",
+      "nss/lib/util/secasn1.h",
+      "nss/lib/util/secasn1d.c",
+      "nss/lib/util/secasn1e.c",
+      "nss/lib/util/secasn1t.h",
+      "nss/lib/util/secasn1u.c",
+      "nss/lib/util/seccomon.h",
+      "nss/lib/util/secder.h",
+      "nss/lib/util/secdert.h",
+      "nss/lib/util/secdig.c",
+      "nss/lib/util/secdig.h",
+      "nss/lib/util/secdigt.h",
+      "nss/lib/util/secerr.h",
+      "nss/lib/util/secitem.c",
+      "nss/lib/util/secitem.h",
+      "nss/lib/util/secoid.c",
+      "nss/lib/util/secoid.h",
+      "nss/lib/util/secoidt.h",
+      "nss/lib/util/secport.c",
+      "nss/lib/util/secport.h",
+      "nss/lib/util/sectime.c",
+      "nss/lib/util/templates.c",
+      "nss/lib/util/utf8.c",
+      "nss/lib/util/utilmod.c",
+      "nss/lib/util/utilmodt.h",
+      "nss/lib/util/utilpars.c",
+      "nss/lib/util/utilpars.h",
+      "nss/lib/util/utilparst.h",
+      "nss/lib/util/utilrename.h",
+    ]
+
+    sources -= [
+      # mpi_arm.c is included by mpi_arm_mac.c.
+      # NOTE: mpi_arm.c can be used directly on Linux. mpi_arm.c will need
+      # to be excluded conditionally if we start to build NSS on Linux.
+      "nss/lib/freebl/mpi/mpi_arm.c",
+      # primes.c is included by mpprime.c.
+      "nss/lib/freebl/mpi/primes.c",
+      # unix_rand.c and win_rand.c are included by sysrand.c.
+      "nss/lib/freebl/unix_rand.c",
+      "nss/lib/freebl/win_rand.c",
+      # debug_module.c is included by pk11load.c.
+      "nss/lib/pk11wrap/debug_module.c",
+    ]
+
+    configs -= [
+      "//build/config/compiler:chromium_code"
+    ]
+    if (is_win) {
+      configs -= [
+        "//build/config/win:unicode",  # Requires 8-bit mode.
+      ]
+    }
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    public_configs = [ ":nss_static_config" ]
+
+    cflags = []
+
+    # Only need the defines and includes not in nss_static_config.
+    defines = [
+      "MP_API_COMPATIBLE",
+      "NSS_DISABLE_DBM",
+      "RIJNDAEL_INCLUDE_TABLES",
+      "SHLIB_VERSION=\"3\"",
+      "SOFTOKEN_SHLIB_VERSION=\"3\"",
+    ]
+    include_dirs = [
+      "nss/lib/freebl/mpi",
+      "nss/lib/ssl",
+    ]
+
+    if (is_win) {
+      cflags += [
+        "/wd4101",  # Unreferenced local variable.
+        "/wd4267",  # Conversion from size_t to "type".
+      ]
+    }
+
+    if (include_nss_libpkix) {
+      sources += [
+        "nss/lib/certhigh/certvfypkix.c",
+        "nss/lib/certhigh/certvfypkixprint.c",
+        "nss/lib/libpkix/include/pkix.h",
+        "nss/lib/libpkix/include/pkix_certsel.h",
+        "nss/lib/libpkix/include/pkix_certstore.h",
+        "nss/lib/libpkix/include/pkix_checker.h",
+        "nss/lib/libpkix/include/pkix_crlsel.h",
+        "nss/lib/libpkix/include/pkix_errorstrings.h",
+        "nss/lib/libpkix/include/pkix_params.h",
+        "nss/lib/libpkix/include/pkix_pl_pki.h",
+        "nss/lib/libpkix/include/pkix_pl_system.h",
+        "nss/lib/libpkix/include/pkix_results.h",
+        "nss/lib/libpkix/include/pkix_revchecker.h",
+        "nss/lib/libpkix/include/pkix_sample_modules.h",
+        "nss/lib/libpkix/include/pkix_util.h",
+        "nss/lib/libpkix/include/pkixt.h",
+        "nss/lib/libpkix/pkix/certsel/pkix_certselector.c",
+        "nss/lib/libpkix/pkix/certsel/pkix_certselector.h",
+        "nss/lib/libpkix/pkix/certsel/pkix_comcertselparams.c",
+        "nss/lib/libpkix/pkix/certsel/pkix_comcertselparams.h",
+        "nss/lib/libpkix/pkix/checker/pkix_basicconstraintschecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_basicconstraintschecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_certchainchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_certchainchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_crlchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_crlchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_ekuchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_ekuchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_expirationchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_expirationchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_namechainingchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_namechainingchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_nameconstraintschecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_nameconstraintschecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_ocspchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_ocspchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_policychecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_policychecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationmethod.c",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationmethod.h",
+        "nss/lib/libpkix/pkix/checker/pkix_signaturechecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_signaturechecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_targetcertchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_targetcertchecker.h",
+        "nss/lib/libpkix/pkix/crlsel/pkix_comcrlselparams.c",
+        "nss/lib/libpkix/pkix/crlsel/pkix_comcrlselparams.h",
+        "nss/lib/libpkix/pkix/crlsel/pkix_crlselector.c",
+        "nss/lib/libpkix/pkix/crlsel/pkix_crlselector.h",
+        "nss/lib/libpkix/pkix/params/pkix_procparams.c",
+        "nss/lib/libpkix/pkix/params/pkix_procparams.h",
+        "nss/lib/libpkix/pkix/params/pkix_resourcelimits.c",
+        "nss/lib/libpkix/pkix/params/pkix_resourcelimits.h",
+        "nss/lib/libpkix/pkix/params/pkix_trustanchor.c",
+        "nss/lib/libpkix/pkix/params/pkix_trustanchor.h",
+        "nss/lib/libpkix/pkix/params/pkix_valparams.c",
+        "nss/lib/libpkix/pkix/params/pkix_valparams.h",
+        "nss/lib/libpkix/pkix/results/pkix_buildresult.c",
+        "nss/lib/libpkix/pkix/results/pkix_buildresult.h",
+        "nss/lib/libpkix/pkix/results/pkix_policynode.c",
+        "nss/lib/libpkix/pkix/results/pkix_policynode.h",
+        "nss/lib/libpkix/pkix/results/pkix_valresult.c",
+        "nss/lib/libpkix/pkix/results/pkix_valresult.h",
+        "nss/lib/libpkix/pkix/results/pkix_verifynode.c",
+        "nss/lib/libpkix/pkix/results/pkix_verifynode.h",
+        "nss/lib/libpkix/pkix/store/pkix_store.c",
+        "nss/lib/libpkix/pkix/store/pkix_store.h",
+        "nss/lib/libpkix/pkix/top/pkix_build.c",
+        "nss/lib/libpkix/pkix/top/pkix_build.h",
+        "nss/lib/libpkix/pkix/top/pkix_lifecycle.c",
+        "nss/lib/libpkix/pkix/top/pkix_lifecycle.h",
+        "nss/lib/libpkix/pkix/top/pkix_validate.c",
+        "nss/lib/libpkix/pkix/top/pkix_validate.h",
+        "nss/lib/libpkix/pkix/util/pkix_error.c",
+        "nss/lib/libpkix/pkix/util/pkix_error.h",
+        "nss/lib/libpkix/pkix/util/pkix_errpaths.c",
+        "nss/lib/libpkix/pkix/util/pkix_list.c",
+        "nss/lib/libpkix/pkix/util/pkix_list.h",
+        "nss/lib/libpkix/pkix/util/pkix_logger.c",
+        "nss/lib/libpkix/pkix/util/pkix_logger.h",
+        "nss/lib/libpkix/pkix/util/pkix_tools.c",
+        "nss/lib/libpkix/pkix/util/pkix_tools.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_aiamgr.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_aiamgr.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_colcertstore.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_colcertstore.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpcertstore.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpcertstore.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpdefaultclient.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpdefaultclient.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_nsscontext.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_nsscontext.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_pk11certstore.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_pk11certstore.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_socket.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_socket.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_basicconstraints.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_basicconstraints.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_cert.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_cert.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyinfo.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyinfo.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicymap.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicymap.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyqualifier.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyqualifier.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crl.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crl.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crldp.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crldp.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crlentry.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crlentry.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_date.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_date.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_generalname.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_generalname.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_infoaccess.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_infoaccess.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_nameconstraints.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_nameconstraints.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspcertid.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspcertid.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocsprequest.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocsprequest.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspresponse.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspresponse.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_publickey.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_publickey.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_x500name.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_x500name.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bigint.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bigint.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bytearray.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bytearray.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_common.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_common.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_error.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_hashtable.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_hashtable.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_lifecycle.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_lifecycle.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mem.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mem.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_monitorlock.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_monitorlock.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mutex.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mutex.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_object.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_object.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_oid.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_oid.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_primhash.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_primhash.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_rwlock.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_rwlock.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_string.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_string.h",
+      ]
+
+      # Disable the LDAP code in libpkix.
+      defines += [ "NSS_PKIX_NO_LDAP" ]
+
+      include_dirs += [
+        "nss/lib/libpkix/include",
+        "nss/lib/libpkix/pkix/certsel",
+        "nss/lib/libpkix/pkix/checker",
+        "nss/lib/libpkix/pkix/crlsel",
+        "nss/lib/libpkix/pkix/params",
+        "nss/lib/libpkix/pkix/results",
+        "nss/lib/libpkix/pkix/store",
+        "nss/lib/libpkix/pkix/top",
+        "nss/lib/libpkix/pkix/util",
+        "nss/lib/libpkix/pkix_pl_nss/module",
+        "nss/lib/libpkix/pkix_pl_nss/pki",
+        "nss/lib/libpkix/pkix_pl_nss/system",
+      ]
+    } else {
+      defines += [ "NSS_DISABLE_LIBPKIX" ]
+    }
+
+    if (!include_nss_root_certs) {
+      defines += [ "NSS_DISABLE_ROOT_CERTS" ]
+    }
+
+    if (cpu_arch == "x64" && !is_win) {
+      sources -= [
+        "nss/lib/freebl/chacha20/chacha20.c",
+        "nss/lib/freebl/poly1305/poly1305.c",
+      ]
+    } else {
+      sources -= [
+        "nss/lib/freebl/chacha20/chacha20_vec.c",
+        "nss/lib/freebl/poly1305/poly1305-donna-x64-sse2-incremental-source.c",
+      ]
+    }
+
+    if (is_mac || is_ios) {
+      sources -= [
+        "nss/lib/freebl/mpi/mpi_amd64.c",
+      ]
+      cflags += [
+        "-include",
+        rebase_path("//third_party/nss/nss/lib/freebl/nss_build_config_mac.h",
+                    root_build_dir),
+      ]
+      defines += [
+        "XP_UNIX",
+        "DARWIN",
+        "HAVE_STRERROR",
+        "HAVE_BSD_FLOCK",
+        "SHLIB_SUFFIX=\"dylib\"",
+        "SHLIB_PREFIX=\"lib\"",
+        "SOFTOKEN_LIB_NAME=\"libsoftokn3.dylib\"",
+      ]
+
+      configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]
+    } else {
+      # Not Mac/iOS.
+      sources -= [ "nss/lib/freebl/mpi/mpi_arm_mac.c" ]
+    }
+
+    if (is_win) {
+      defines += [
+        "SHLIB_SUFFIX=\"dll\"",
+        "SHLIB_PREFIX=\"\"",
+        "SOFTOKEN_LIB_NAME=\"softokn3.dll\"",
+        "XP_PC",
+        "WIN32",
+        "WIN95",
+      ]
+
+      if (cpu_arch == "x86") {
+        sources -= [ "nss/lib/freebl/mpi/mpi_amd64.c" ]
+        defines += [
+          "NSS_X86_OR_X64",
+          "NSS_X86",
+          "_X86_",
+          "MP_ASSEMBLY_MULTIPLY",
+          "MP_ASSEMBLY_SQUARE",
+          "MP_ASSEMBLY_DIV_2DX1D",
+          "MP_USE_UINT_DIGIT",
+          "MP_NO_MP_WORD",
+          "USE_HW_AES",
+          "INTEL_GCM",
+        ]
+      } else if (cpu_arch == "x64") {
+        sources -= [ "nss/lib/freebl/mpi/mpi_x86_asm.c" ]
+        defines += [
+          "NSS_USE_64",
+          "NSS_X86_OR_X64",
+          "NSS_X64",
+          "_AMD64_",
+          "MP_CHAR_STORE_SLOW",
+          "MP_IS_LITTLE_ENDIAN",
+          "WIN64",
+        ]
+      }
+    } else {
+      # Not Windows.
+      sources -= [
+        # mpi_x86_asm.c contains MSVC inline assembly code.
+        "nss/lib/freebl/mpi/mpi_x86_asm.c",
+      ]
+    }
+
+    if (is_clang) {
+      cflags += [
+        # nss doesn"t explicitly cast between different enum types.
+        "-Wno-conversion",
+        # nss passes "const char*" through "void*".
+        "-Wno-incompatible-pointer-types",
+        # nss prefers `a && b || c` over `(a && b) || c`.
+        "-Wno-logical-op-parentheses",
+        # nss doesn"t use exhaustive switches on enums
+        "-Wno-switch",
+        # nss has some `unsigned < 0` checks.
+        "-Wno-tautological-compare",
+      ]
+    }
+
+    public_deps = [
+      ":nspr",
+    ]
+    deps = [
+      ":nspr",
+      "//third_party/sqlite",
+    ]
+
+    if (is_win && cpu_arch == "x86") {
+      deps += [ ":nss_static_avx" ]
+    }
+  }
+}  # Windows/Mac/iOS.
+
diff --git a/build/secondary/third_party/openmax_dl/dl/BUILD.gn b/build/secondary/third_party/openmax_dl/dl/BUILD.gn
new file mode 100644
index 0000000..067be17
--- /dev/null
+++ b/build/secondary/third_party/openmax_dl/dl/BUILD.gn
@@ -0,0 +1,232 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Override this value to build with small float FFT tables
+  openmax_big_float_fft = true
+}
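+
+# The default above builds the big float FFT tables. To build the small
+# tables instead, override the arg at gen time (a sketch, assuming an
+# out/Default build directory):
+#   gn gen out/Default --args="openmax_big_float_fft=false"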
+
+config("dl_config") {
+  include_dirs = [ ".." ]
+}
+
+# GYP: third_party/openmax_dl/dl/dl.gyp:openmax_dl
+source_set("dl") {
+  public_configs = [ ":dl_config" ]
+  sources = [
+    "api/omxtypes.h",
+    "sp/api/omxSP.h",
+    "sp/src/armSP_FFT_F32TwiddleTable.c",
+  ]
+
+  cflags = []
+  deps = []
+  defines = []
+
+  if (openmax_big_float_fft) {
+    defines += [
+      "BIG_FFT_TABLE",
+    ]
+  }
+
+  if (cpu_arch == "arm" || cpu_arch == "arm64") {
+    sources += [
+      # Common files that are used by both arm and arm64 code.
+      "api/arm/armOMX.h",
+      "api/arm/omxtypes_s.h",
+      "sp/api/armSP.h",
+      "sp/src/arm/armSP_FFT_S32TwiddleTable.c",
+      "sp/src/arm/omxSP_FFTGetBufSize_C_FC32.c",
+      "sp/src/arm/omxSP_FFTGetBufSize_C_SC32.c",
+      "sp/src/arm/omxSP_FFTGetBufSize_R_F32.c",
+      "sp/src/arm/omxSP_FFTGetBufSize_R_S32.c",
+      "sp/src/arm/omxSP_FFTInit_C_FC32.c",
+      "sp/src/arm/omxSP_FFTInit_R_F32.c",
+    ]
+  }
+
+  if (cpu_arch == "arm") {
+    configs -= [ "//build/config/compiler:compiler_arm_fpu" ]
+    cflags += [
+      "-mfpu=neon"
+    ]
+
+    deps += [
+      ":openmax_dl_armv7"
+    ]
+
+    sources += [
+      # Common files that are used by both the NEON and non-NEON code.
+      "api/armCOMM_s.h",
+      "sp/src/arm/omxSP_FFTGetBufSize_C_SC16.c",
+      "sp/src/arm/omxSP_FFTGetBufSize_R_S16.c",
+      "sp/src/arm/omxSP_FFTGetBufSize_R_S16S32.c",
+      "sp/src/arm/omxSP_FFTInit_C_SC16.c",
+      "sp/src/arm/omxSP_FFTInit_C_SC32.c",
+      "sp/src/arm/omxSP_FFTInit_R_S16.c",
+      "sp/src/arm/omxSP_FFTInit_R_S16S32.c",
+      "sp/src/arm/omxSP_FFTInit_R_S32.c",
+
+      # Complex 32-bit fixed-point FFT.
+      "sp/src/arm/neon/armSP_FFT_CToC_SC32_Radix2_fs_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC32_Radix2_ls_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC32_Radix2_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC32_Radix4_fs_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC32_Radix4_ls_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC32_Radix4_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC32_Radix8_fs_unsafe_s.S",
+      "sp/src/arm/neon/omxSP_FFTFwd_CToC_SC32_Sfs_s.S",
+      "sp/src/arm/neon/omxSP_FFTInv_CToC_SC32_Sfs_s.S",
+      # Real 32-bit fixed-point FFT
+      "sp/src/arm/neon/armSP_FFTInv_CCSToR_S32_preTwiddleRadix2_unsafe_s.S",
+      "sp/src/arm/neon/omxSP_FFTFwd_RToCCS_S32_Sfs_s.S",
+      "sp/src/arm/neon/omxSP_FFTInv_CCSToR_S32_Sfs_s.S",
+      # Complex 16-bit fixed-point FFT
+      "sp/src/arm/neon/armSP_FFTInv_CCSToR_S16_preTwiddleRadix2_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC16_Radix2_fs_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC16_Radix2_ls_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC16_Radix2_ps_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC16_Radix2_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC16_Radix4_fs_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC16_Radix4_ls_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC16_Radix4_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_SC16_Radix8_fs_unsafe_s.S",
+      "sp/src/arm/neon/omxSP_FFTFwd_CToC_SC16_Sfs_s.S",
+      "sp/src/arm/neon/omxSP_FFTInv_CToC_SC16_Sfs_s.S",
+      # Real 16-bit fixed-point FFT
+      "sp/src/arm/neon/omxSP_FFTFwd_RToCCS_S16_Sfs_s.S",
+      "sp/src/arm/neon/omxSP_FFTInv_CCSToR_S16_Sfs_s.S",
+      "sp/src/arm/neon/omxSP_FFTFwd_RToCCS_S16S32_Sfs_s.S",
+      "sp/src/arm/neon/omxSP_FFTInv_CCSToR_S32S16_Sfs_s.S",
+      # Complex floating-point FFT
+      "sp/src/arm/neon/armSP_FFT_CToC_FC32_Radix2_fs_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_FC32_Radix2_ls_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_FC32_Radix2_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_FC32_Radix4_fs_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_FC32_Radix4_ls_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_FC32_Radix4_unsafe_s.S",
+      "sp/src/arm/neon/armSP_FFT_CToC_FC32_Radix8_fs_unsafe_s.S",
+      "sp/src/arm/neon/omxSP_FFTFwd_CToC_FC32_Sfs_s.S",
+      "sp/src/arm/neon/omxSP_FFTInv_CToC_FC32_Sfs_s.S",
+      # Real floating-point FFT
+      "sp/src/arm/neon/armSP_FFTInv_CCSToR_F32_preTwiddleRadix2_unsafe_s.S",
+      "sp/src/arm/neon/omxSP_FFTFwd_RToCCS_F32_Sfs_s.S",
+      "sp/src/arm/neon/omxSP_FFTInv_CCSToR_F32_Sfs_s.S",
+    ]
+  }
+
+  if (cpu_arch == "ia32" || cpu_arch == "x64") {
+    cflags += [
+      "-msse2"
+    ]
+
+    sources += [
+      # Real 32-bit floating-point FFT.
+      "sp/api/x86SP.h",
+      "sp/src/x86/omxSP_FFTFwd_RToCCS_F32_Sfs.c",
+      "sp/src/x86/omxSP_FFTGetBufSize_R_F32.c",
+      "sp/src/x86/omxSP_FFTInit_R_F32.c",
+      "sp/src/x86/omxSP_FFTInv_CCSToR_F32_Sfs.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Fwd_Radix2_fs.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Fwd_Radix2_ls.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Fwd_Radix2_ls_sse.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Fwd_Radix2_ms.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Fwd_Radix4_fs.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Fwd_Radix4_fs_sse.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Fwd_Radix4_ls.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Fwd_Radix4_ls_sse.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Fwd_Radix4_ms.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Fwd_Radix4_ms_sse.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Inv_Radix2_fs.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Inv_Radix2_ls.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Inv_Radix2_ls_sse.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Inv_Radix2_ms.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Inv_Radix4_fs.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Inv_Radix4_fs_sse.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Inv_Radix4_ls.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Inv_Radix4_ls_sse.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Inv_Radix4_ms.c",
+      "sp/src/x86/x86SP_FFT_CToC_FC32_Inv_Radix4_ms_sse.c",
+      "sp/src/x86/x86SP_FFT_F32_radix2_kernel.c",
+      "sp/src/x86/x86SP_FFT_F32_radix4_kernel.c",
+      "sp/src/x86/x86SP_SSE_Math.h",
+    ]
+  }
+  if (cpu_arch == "arm64") {
+    sources += [
+      "api/arm/arm64COMM_s.h",
+
+      # Complex floating-point FFT
+      "sp/src/arm/arm64/armSP_FFT_CToC_FC32_Radix2_fs_s.S",
+      "sp/src/arm/arm64/armSP_FFT_CToC_FC32_Radix2_ls_s.S",
+      "sp/src/arm/arm64/armSP_FFT_CToC_FC32_Radix2_s.S",
+      "sp/src/arm/arm64/armSP_FFT_CToC_FC32_Radix4_fs_s.S",
+      "sp/src/arm/arm64/armSP_FFT_CToC_FC32_Radix4_ls_s.S",
+      "sp/src/arm/arm64/armSP_FFT_CToC_FC32_Radix4_s.S",
+      "sp/src/arm/arm64/armSP_FFT_CToC_FC32_Radix8_fs_s.S",
+      "sp/src/arm/arm64/omxSP_FFTInv_CToC_FC32.c",
+      "sp/src/arm/arm64/omxSP_FFTFwd_CToC_FC32.c",
+      # Real floating-point FFT
+      "sp/src/arm/arm64/armSP_FFTInv_CCSToR_F32_preTwiddleRadix2_s.S",
+      "sp/src/arm/arm64/omxSP_FFTFwd_RToCCS_F32.c",
+      "sp/src/arm/arm64/ComplexToRealFixup.S",
+      "sp/src/arm/arm64/omxSP_FFTInv_CCSToR_F32.c",
+    ]
+  }
+  if (cpu_arch == "mipsel") {
+    cflags += [
+      "-std=c99",
+    ]
+    sources -= [
+      "sp/src/armSP_FFT_F32TwiddleTable.c",
+    ]
+
+    sources += [
+      "sp/api/mipsSP.h",
+      "sp/src/mips/mips_FFTFwd_RToCCS_F32_complex.c",
+      "sp/src/mips/mips_FFTFwd_RToCCS_F32_real.c",
+      "sp/src/mips/mips_FFTInv_CCSToR_F32_complex.c",
+      "sp/src/mips/mips_FFTInv_CCSToR_F32_real.c",
+      "sp/src/mips/omxSP_FFT_F32TwiddleTable.c",
+      "sp/src/mips/omxSP_FFTFwd_RToCCS_F32_Sfs.c",
+      "sp/src/mips/omxSP_FFTGetBufSize_R_F32.c",
+      "sp/src/mips/omxSP_FFTInit_R_F32.c",
+      "sp/src/mips/omxSP_FFTInv_CCSToR_F32_Sfs.c",
+    ]
+  }
+}
+
+if (cpu_arch == "arm") {
+  # GYP: third_party/openmax_dl/dl/dl.gyp:openmax_dl
+  # Non-NEON implementation of FFT. This library is NOT
+  # standalone. Applications must link with openmax_dl.
+  source_set("openmax_dl_armv7") {
+    configs += [ ":dl_config" ]
+    deps = [ "//third_party/android_tools:cpu_features" ]
+    visibility = [ ":*" ]
+
+    #TODO(GYP):
+    #'cflags!': [
+    #'-mfpu=neon',
+    #],
+
+    libs = [ "log" ]
+
+    sources = [
+      # Detection routine
+      "sp/src/arm/detect.c",
+      # Complex floating-point FFT
+      "sp/src/arm/armv7/armSP_FFT_CToC_FC32_Radix2_fs_unsafe_s.S",
+      "sp/src/arm/armv7/armSP_FFT_CToC_FC32_Radix4_fs_unsafe_s.S",
+      "sp/src/arm/armv7/armSP_FFT_CToC_FC32_Radix4_unsafe_s.S",
+      "sp/src/arm/armv7/armSP_FFT_CToC_FC32_Radix8_fs_unsafe_s.S",
+      "sp/src/arm/armv7/omxSP_FFTInv_CToC_FC32_Sfs_s.S",
+      "sp/src/arm/armv7/omxSP_FFTFwd_CToC_FC32_Sfs_s.S",
+      # Real floating-point FFT
+      "sp/src/arm/armv7/armSP_FFTInv_CCSToR_F32_preTwiddleRadix2_unsafe_s.S",
+      "sp/src/arm/armv7/omxSP_FFTFwd_RToCCS_F32_Sfs_s.S",
+      "sp/src/arm/armv7/omxSP_FFTInv_CCSToR_F32_Sfs_s.S",
+    ]
+  }
+}
diff --git a/build/secondary/third_party/sfntly/BUILD.gn b/build/secondary/third_party/sfntly/BUILD.gn
new file mode 100644
index 0000000..74107af
--- /dev/null
+++ b/build/secondary/third_party/sfntly/BUILD.gn
@@ -0,0 +1,129 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+static_library("sfntly") {
+  sources = [
+    "cpp/src/sfntly/data/byte_array.cc",
+    "cpp/src/sfntly/data/byte_array.h",
+    "cpp/src/sfntly/data/font_data.cc",
+    "cpp/src/sfntly/data/font_data.h",
+    "cpp/src/sfntly/data/font_input_stream.cc",
+    "cpp/src/sfntly/data/font_input_stream.h",
+    "cpp/src/sfntly/data/font_output_stream.cc",
+    "cpp/src/sfntly/data/font_output_stream.h",
+    "cpp/src/sfntly/data/growable_memory_byte_array.cc",
+    "cpp/src/sfntly/data/growable_memory_byte_array.h",
+    "cpp/src/sfntly/data/memory_byte_array.cc",
+    "cpp/src/sfntly/data/memory_byte_array.h",
+    "cpp/src/sfntly/data/readable_font_data.cc",
+    "cpp/src/sfntly/data/readable_font_data.h",
+    "cpp/src/sfntly/data/writable_font_data.cc",
+    "cpp/src/sfntly/data/writable_font_data.h",
+    "cpp/src/sfntly/font.cc",
+    "cpp/src/sfntly/font.h",
+    "cpp/src/sfntly/font_factory.cc",
+    "cpp/src/sfntly/font_factory.h",
+    "cpp/src/sfntly/math/fixed1616.h",
+    "cpp/src/sfntly/math/font_math.h",
+    "cpp/src/sfntly/port/atomic.h",
+    "cpp/src/sfntly/port/config.h",
+    "cpp/src/sfntly/port/endian.h",
+    "cpp/src/sfntly/port/exception_type.h",
+    "cpp/src/sfntly/port/file_input_stream.cc",
+    "cpp/src/sfntly/port/file_input_stream.h",
+    "cpp/src/sfntly/port/input_stream.h",
+    "cpp/src/sfntly/port/lock.cc",
+    "cpp/src/sfntly/port/lock.h",
+    "cpp/src/sfntly/port/memory_input_stream.cc",
+    "cpp/src/sfntly/port/memory_input_stream.h",
+    "cpp/src/sfntly/port/memory_output_stream.cc",
+    "cpp/src/sfntly/port/memory_output_stream.h",
+    "cpp/src/sfntly/port/output_stream.h",
+    "cpp/src/sfntly/port/refcount.h",
+    "cpp/src/sfntly/port/type.h",
+    "cpp/src/sfntly/table/bitmap/big_glyph_metrics.cc",
+    "cpp/src/sfntly/table/bitmap/big_glyph_metrics.h",
+    "cpp/src/sfntly/table/bitmap/bitmap_glyph.cc",
+    "cpp/src/sfntly/table/bitmap/bitmap_glyph.h",
+    "cpp/src/sfntly/table/bitmap/bitmap_glyph_info.cc",
+    "cpp/src/sfntly/table/bitmap/bitmap_glyph_info.h",
+    "cpp/src/sfntly/table/bitmap/bitmap_size_table.cc",
+    "cpp/src/sfntly/table/bitmap/bitmap_size_table.h",
+    "cpp/src/sfntly/table/bitmap/composite_bitmap_glyph.cc",
+    "cpp/src/sfntly/table/bitmap/composite_bitmap_glyph.h",
+    "cpp/src/sfntly/table/bitmap/ebdt_table.cc",
+    "cpp/src/sfntly/table/bitmap/ebdt_table.h",
+    "cpp/src/sfntly/table/bitmap/eblc_table.cc",
+    "cpp/src/sfntly/table/bitmap/eblc_table.h",
+    "cpp/src/sfntly/table/bitmap/ebsc_table.cc",
+    "cpp/src/sfntly/table/bitmap/ebsc_table.h",
+    "cpp/src/sfntly/table/bitmap/glyph_metrics.cc",
+    "cpp/src/sfntly/table/bitmap/glyph_metrics.h",
+    "cpp/src/sfntly/table/bitmap/index_sub_table.cc",
+    "cpp/src/sfntly/table/bitmap/index_sub_table.h",
+    "cpp/src/sfntly/table/bitmap/index_sub_table_format1.cc",
+    "cpp/src/sfntly/table/bitmap/index_sub_table_format1.h",
+    "cpp/src/sfntly/table/bitmap/index_sub_table_format2.cc",
+    "cpp/src/sfntly/table/bitmap/index_sub_table_format2.h",
+    "cpp/src/sfntly/table/bitmap/index_sub_table_format3.cc",
+    "cpp/src/sfntly/table/bitmap/index_sub_table_format3.h",
+    "cpp/src/sfntly/table/bitmap/index_sub_table_format4.cc",
+    "cpp/src/sfntly/table/bitmap/index_sub_table_format4.h",
+    "cpp/src/sfntly/table/bitmap/index_sub_table_format5.cc",
+    "cpp/src/sfntly/table/bitmap/index_sub_table_format5.h",
+    "cpp/src/sfntly/table/bitmap/simple_bitmap_glyph.cc",
+    "cpp/src/sfntly/table/bitmap/simple_bitmap_glyph.h",
+    "cpp/src/sfntly/table/bitmap/small_glyph_metrics.cc",
+    "cpp/src/sfntly/table/bitmap/small_glyph_metrics.h",
+    "cpp/src/sfntly/table/byte_array_table_builder.cc",
+    "cpp/src/sfntly/table/byte_array_table_builder.h",
+    "cpp/src/sfntly/table/core/cmap_table.cc",
+    "cpp/src/sfntly/table/core/cmap_table.h",
+    "cpp/src/sfntly/table/core/font_header_table.cc",
+    "cpp/src/sfntly/table/core/font_header_table.h",
+    "cpp/src/sfntly/table/core/horizontal_device_metrics_table.cc",
+    "cpp/src/sfntly/table/core/horizontal_device_metrics_table.h",
+    "cpp/src/sfntly/table/core/horizontal_header_table.cc",
+    "cpp/src/sfntly/table/core/horizontal_header_table.h",
+    "cpp/src/sfntly/table/core/horizontal_metrics_table.cc",
+    "cpp/src/sfntly/table/core/horizontal_metrics_table.h",
+    "cpp/src/sfntly/table/core/maximum_profile_table.cc",
+    "cpp/src/sfntly/table/core/maximum_profile_table.h",
+    "cpp/src/sfntly/table/core/name_table.cc",
+    "cpp/src/sfntly/table/core/name_table.h",
+    "cpp/src/sfntly/table/core/os2_table.cc",
+    "cpp/src/sfntly/table/core/os2_table.h",
+    "cpp/src/sfntly/table/font_data_table.cc",
+    "cpp/src/sfntly/table/font_data_table.h",
+    "cpp/src/sfntly/table/generic_table_builder.cc",
+    "cpp/src/sfntly/table/generic_table_builder.h",
+    "cpp/src/sfntly/table/header.cc",
+    "cpp/src/sfntly/table/header.h",
+    "cpp/src/sfntly/table/subtable.cc",
+    "cpp/src/sfntly/table/subtable.h",
+    "cpp/src/sfntly/table/subtable_container_table.h",
+    "cpp/src/sfntly/table/table.cc",
+    "cpp/src/sfntly/table/table.h",
+    "cpp/src/sfntly/table/table_based_table_builder.cc",
+    "cpp/src/sfntly/table/table_based_table_builder.h",
+    "cpp/src/sfntly/table/truetype/glyph_table.cc",
+    "cpp/src/sfntly/table/truetype/glyph_table.h",
+    "cpp/src/sfntly/table/truetype/loca_table.cc",
+    "cpp/src/sfntly/table/truetype/loca_table.h",
+    "cpp/src/sfntly/tag.cc",
+    "cpp/src/sfntly/tag.h",
+    "cpp/src/sample/chromium/font_subsetter.cc",
+    "cpp/src/sample/chromium/font_subsetter.h",
+    "cpp/src/sample/chromium/subsetter_impl.cc",
+    "cpp/src/sample/chromium/subsetter_impl.h",
+  ]
+
+  defines = [ "SFNTLY_NO_EXCEPTION" ]
+  include_dirs = [ "cpp/src" ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+
+  deps = [ "//third_party/icu:icuuc" ]
+}
diff --git a/build/secondary/third_party/trace-viewer/BUILD.gn b/build/secondary/third_party/trace-viewer/BUILD.gn
new file mode 100644
index 0000000..2fbc73a
--- /dev/null
+++ b/build/secondary/third_party/trace-viewer/BUILD.gn
@@ -0,0 +1,320 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+tracing_html_files = [
+  "trace_viewer/about_tracing/profiling_view.html",
+  "trace_viewer/tracing/record_selection_dialog.html",
+  "trace_viewer/tracing/sampling_summary_side_panel.html",
+  "trace_viewer/tracing/time_summary_side_panel.html",
+  "trace_viewer/tracing/input_latency_side_panel.html",
+  "trace_viewer/tracing/timeline_view.html",
+  "trace_viewer/tracing/analysis/cpu_slice_view.html",
+  "trace_viewer/tracing/analysis/thread_time_slice_view.html",
+  "trace_viewer/tracing/find_control.html",
+  "third_party/tvcm/src/tvcm/unittest/html_test_results.html",
+  "third_party/tvcm/src/tvcm/unittest/interactive_test_runner.html",
+  "third_party/tvcm/src/tvcm/unittest/module_test_case_runner.html",
+  "third_party/tvcm/src/tvcm/ui/chart_base.html",
+  "third_party/tvcm/src/tvcm/ui/mouse_mode_selector.html",
+  "third_party/tvcm/src/tvcm/ui/overlay.html",
+  "third_party/tvcm/src/tvcm/ui/quad_stack_view.html",
+  "trace_viewer/cc/picture_debugger.html",
+]
+tracing_css_files = [
+  "trace_viewer/about_tracing/common.css",
+  "trace_viewer/cc/layer_picker.css",
+  "trace_viewer/cc/layer_tree_host_impl_view.css",
+  "trace_viewer/cc/layer_tree_quad_stack_view.css",
+  "trace_viewer/cc/layer_view.css",
+  "trace_viewer/cc/picture_debugger.css",
+  "trace_viewer/cc/picture_ops_chart_summary_view.css",
+  "trace_viewer/cc/picture_ops_chart_view.css",
+  "trace_viewer/cc/picture_ops_list_view.css",
+  "trace_viewer/cc/picture_view.css",
+  "trace_viewer/cc/raster_task_slice_view.css",
+  "trace_viewer/gpu/state_view.css",
+  "trace_viewer/system_stats/system_stats_instance_track.css",
+  "trace_viewer/system_stats/system_stats_snapshot_view.css",
+  "trace_viewer/tcmalloc/heap_instance_track.css",
+  "trace_viewer/tcmalloc/tcmalloc_instance_view.css",
+  "trace_viewer/tcmalloc/tcmalloc_snapshot_view.css",
+  "trace_viewer/tracing/analysis/analysis_link.css",
+  "trace_viewer/tracing/analysis/analysis_results.css",
+  "trace_viewer/tracing/analysis/analysis_view.css",
+  "trace_viewer/tracing/analysis/analyze_slices.css",
+  "trace_viewer/tracing/analysis/default_object_view.css",
+  "trace_viewer/tracing/analysis/generic_object_view.css",
+  "trace_viewer/tracing/timeline_track_view.css",
+  "trace_viewer/tracing/timeline_view.css",
+  "trace_viewer/tracing/timeline_view_side_panel.css",
+  "trace_viewer/tracing/tracks/counter_track.css",
+  "trace_viewer/tracing/tracks/drawing_container.css",
+  "trace_viewer/tracing/tracks/heading_track.css",
+  "trace_viewer/tracing/tracks/object_instance_track.css",
+  "trace_viewer/tracing/tracks/process_track_base.css",
+  "trace_viewer/tracing/tracks/ruler_track.css",
+  "trace_viewer/tracing/tracks/slice_track.css",
+  "trace_viewer/tracing/tracks/spacing_track.css",
+  "trace_viewer/tracing/tracks/stacked_bars_track.css",
+  "trace_viewer/tracing/tracks/thread_track.css",
+  "trace_viewer/tracing/tracks/trace_model_track.css",
+  "trace_viewer/tracing/tracks/track.css",
+  "third_party/tvcm/src/tvcm/unittest/common.css",
+  "third_party/tvcm/src/tvcm/ui/common.css",
+  "third_party/tvcm/src/tvcm/ui/bar_chart.css",
+  "third_party/tvcm/src/tvcm/ui/drag_handle.css",
+  "third_party/tvcm/src/tvcm/ui/info_bar.css",
+  "third_party/tvcm/src/tvcm/ui/line_chart.css",
+  "third_party/tvcm/src/tvcm/ui/list_and_associated_view.css",
+  "third_party/tvcm/src/tvcm/ui/list_view.css",
+  "third_party/tvcm/src/tvcm/ui/mouse_mode_selector.css",
+  "third_party/tvcm/src/tvcm/ui/pie_chart.css",
+  "third_party/tvcm/src/tvcm/ui/quad_stack_view.css",
+  "third_party/tvcm/src/tvcm/ui/sortable_table.css",
+  "third_party/tvcm/src/tvcm/ui/sunburst_chart.css",
+  "third_party/tvcm/src/tvcm/ui/tool_button.css",
+]
+tracing_js_files = [
+  "trace_viewer/about_tracing/__init__.js",
+  "trace_viewer/about_tracing/features.js",
+  "trace_viewer/about_tracing/mock_request_handler.js",
+  "trace_viewer/about_tracing/profiling_view.js",
+  "trace_viewer/about_tracing/tracing_ui_client.js",
+  "trace_viewer/cc/__init__.js",
+  "trace_viewer/cc/constants.js",
+  "trace_viewer/cc/debug_colors.js",
+  "trace_viewer/cc/layer_impl.js",
+  "trace_viewer/cc/layer_picker.js",
+  "trace_viewer/cc/layer_tree_host_impl.js",
+  "trace_viewer/cc/layer_tree_host_impl_view.js",
+  "trace_viewer/cc/layer_tree_impl.js",
+  "trace_viewer/cc/layer_tree_quad_stack_view.js",
+  "trace_viewer/cc/layer_view.js",
+  "trace_viewer/cc/picture.js",
+  "trace_viewer/cc/picture_as_image_data.js",
+  "trace_viewer/cc/picture_debugger.js",
+  "trace_viewer/cc/picture_ops_chart_summary_view.js",
+  "trace_viewer/cc/picture_ops_chart_view.js",
+  "trace_viewer/cc/picture_ops_list_view.js",
+  "trace_viewer/cc/picture_view.js",
+  "trace_viewer/cc/raster_task_slice_view.js",
+  "trace_viewer/cc/region.js",
+  "trace_viewer/cc/render_pass.js",
+  "trace_viewer/cc/selection.js",
+  "trace_viewer/cc/tile.js",
+  "trace_viewer/cc/tile_coverage_rect.js",
+  "trace_viewer/cc/tile_view.js",
+  "trace_viewer/cc/util.js",
+  "trace_viewer/gpu/__init__.js",
+  "trace_viewer/gpu/state.js",
+  "trace_viewer/gpu/state_view.js",
+  "trace_viewer/system_stats/__init__.js",
+  "trace_viewer/system_stats/system_stats_instance_track.js",
+  "trace_viewer/system_stats/system_stats_snapshot.js",
+  "trace_viewer/system_stats/system_stats_snapshot_view.js",
+  "trace_viewer/tcmalloc/__init__.js",
+  "trace_viewer/tcmalloc/heap.js",
+  "trace_viewer/tcmalloc/heap_instance_track.js",
+  "trace_viewer/tcmalloc/tcmalloc_instance_view.js",
+  "trace_viewer/tcmalloc/tcmalloc_snapshot_view.js",
+  "trace_viewer/tracing/analysis/analysis_link.js",
+  "trace_viewer/tracing/analysis/analysis_results.js",
+  "trace_viewer/tracing/analysis/analysis_view.js",
+  "trace_viewer/tracing/analysis/analyze_counters.js",
+  "trace_viewer/tracing/analysis/analyze_selection.js",
+  "trace_viewer/tracing/analysis/analyze_slices.js",
+  "trace_viewer/tracing/analysis/cpu_slice_view.js",
+  "trace_viewer/tracing/analysis/default_object_view.js",
+  "trace_viewer/tracing/analysis/generic_object_view.js",
+  "trace_viewer/tracing/analysis/object_instance_view.js",
+  "trace_viewer/tracing/analysis/object_snapshot_view.js",
+  "trace_viewer/tracing/analysis/slice_view.js",
+  "trace_viewer/tracing/analysis/stub_analysis_results.js",
+  "trace_viewer/tracing/analysis/stub_analysis_table.js",
+  "trace_viewer/tracing/analysis/thread_time_slice_view.js",
+  "trace_viewer/tracing/analysis/util.js",
+  "trace_viewer/tracing/color_scheme.js",
+  "trace_viewer/tracing/constants.js",
+  "trace_viewer/tracing/draw_helpers.js",
+  "trace_viewer/tracing/elided_cache.js",
+  "trace_viewer/tracing/fast_rect_renderer.js",
+  "trace_viewer/tracing/filter.js",
+  "trace_viewer/tracing/find_control.js",
+  "trace_viewer/tracing/importer/__init__.js",
+  "trace_viewer/tracing/importer/gzip_importer.js",
+  "trace_viewer/tracing/importer/importer.js",
+  "trace_viewer/tracing/importer/simple_line_reader.js",
+  "trace_viewer/tracing/importer/linux_perf/android_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/bus_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/clock_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/cpufreq_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/disk_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/drm_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/exynos_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/gesture_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/i915_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/kfunc_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/mali_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/parser.js",
+  "trace_viewer/tracing/importer/linux_perf/power_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/sched_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/sync_parser.js",
+  "trace_viewer/tracing/importer/linux_perf/workqueue_parser.js",
+  "trace_viewer/tracing/importer/linux_perf_importer.js",
+  "trace_viewer/tracing/importer/task.js",
+  "trace_viewer/tracing/importer/timeline_stream_importer.js",
+  "trace_viewer/tracing/importer/trace2html_importer.js",
+  "trace_viewer/tracing/importer/trace_event_importer.js",
+  "trace_viewer/tracing/importer/etw/parser.js",
+  "trace_viewer/tracing/importer/etw/eventtrace_parser.js",
+  "trace_viewer/tracing/importer/etw/process_parser.js",
+  "trace_viewer/tracing/importer/etw/thread_parser.js",
+  "trace_viewer/tracing/importer/etw_importer.js",
+  "trace_viewer/tracing/importer/v8/codemap.js",
+  "trace_viewer/tracing/importer/v8/log_reader.js",
+  "trace_viewer/tracing/importer/v8/splaytree.js",
+  "trace_viewer/tracing/importer/v8_log_importer.js",
+  "trace_viewer/tracing/importer/zip_importer.js",
+  "trace_viewer/tracing/record_selection_dialog.js",
+  "trace_viewer/tracing/sampling_summary_side_panel.js",
+  "trace_viewer/tracing/selection.js",
+  "trace_viewer/tracing/standalone_timeline_view.js",
+  "trace_viewer/tracing/test_utils.js",
+  "trace_viewer/tracing/time_summary_side_panel.js",
+  "trace_viewer/tracing/input_latency_side_panel.js",
+  "trace_viewer/tracing/timeline_display_transform.js",
+  "trace_viewer/tracing/timeline_display_transform_animations.js",
+  "trace_viewer/tracing/timeline_interest_range.js",
+  "trace_viewer/tracing/timeline_track_view.js",
+  "trace_viewer/tracing/timeline_view.js",
+  "trace_viewer/tracing/timeline_view_side_panel.js",
+  "trace_viewer/tracing/timeline_viewport.js",
+  "trace_viewer/tracing/timing_tool.js",
+  "trace_viewer/tracing/trace_model.js",
+  "trace_viewer/tracing/trace_model/async_slice.js",
+  "trace_viewer/tracing/trace_model/async_slice_group.js",
+  "trace_viewer/tracing/trace_model/counter.js",
+  "trace_viewer/tracing/trace_model/counter_sample.js",
+  "trace_viewer/tracing/trace_model/counter_series.js",
+  "trace_viewer/tracing/trace_model/cpu.js",
+  "trace_viewer/tracing/trace_model/event.js",
+  "trace_viewer/tracing/trace_model/flow_event.js",
+  "trace_viewer/tracing/trace_model/instant_event.js",
+  "trace_viewer/tracing/trace_model/kernel.js",
+  "trace_viewer/tracing/trace_model/object_collection.js",
+  "trace_viewer/tracing/trace_model/object_instance.js",
+  "trace_viewer/tracing/trace_model/object_snapshot.js",
+  "trace_viewer/tracing/trace_model/process.js",
+  "trace_viewer/tracing/trace_model/process_base.js",
+  "trace_viewer/tracing/trace_model/sample.js",
+  "trace_viewer/tracing/trace_model/stack_frame.js",
+  "trace_viewer/tracing/trace_model/slice.js",
+  "trace_viewer/tracing/trace_model/slice_group.js",
+  "trace_viewer/tracing/trace_model/thread.js",
+  "trace_viewer/tracing/trace_model/time_to_object_instance_map.js",
+  "trace_viewer/tracing/trace_model/timed_event.js",
+  "trace_viewer/tracing/trace_model_settings.js",
+  "trace_viewer/tracing/tracks/async_slice_group_track.js",
+  "trace_viewer/tracing/tracks/container_track.js",
+  "trace_viewer/tracing/tracks/counter_track.js",
+  "trace_viewer/tracing/tracks/cpu_track.js",
+  "trace_viewer/tracing/tracks/drawing_container.js",
+  "trace_viewer/tracing/tracks/heading_track.js",
+  "trace_viewer/tracing/tracks/kernel_track.js",
+  "trace_viewer/tracing/tracks/object_instance_track.js",
+  "trace_viewer/tracing/tracks/process_track.js",
+  "trace_viewer/tracing/tracks/process_track_base.js",
+  "trace_viewer/tracing/tracks/ruler_track.js",
+  "trace_viewer/tracing/tracks/slice_group_track.js",
+  "trace_viewer/tracing/tracks/slice_track.js",
+  "trace_viewer/tracing/tracks/spacing_track.js",
+  "trace_viewer/tracing/tracks/stacked_bars_track.js",
+  "trace_viewer/tracing/tracks/thread_track.js",
+  "trace_viewer/tracing/tracks/trace_model_track.js",
+  "trace_viewer/tracing/tracks/track.js",
+  "third_party/tvcm/src/tvcm/__init__.js",
+  "third_party/tvcm/src/tvcm/base64.js",
+  "third_party/tvcm/src/tvcm/bbox2.js",
+  "third_party/tvcm/src/tvcm/color.js",
+  "third_party/tvcm/src/tvcm/event_target.js",
+  "third_party/tvcm/src/tvcm/events.js",
+  "third_party/tvcm/src/tvcm/gl_matrix.js",
+  "third_party/tvcm/src/tvcm/guid.js",
+  "third_party/tvcm/src/tvcm/interval_tree.js",
+  "third_party/tvcm/src/tvcm/iteration_helpers.js",
+  "third_party/tvcm/src/tvcm/key_event_manager.js",
+  "third_party/tvcm/src/tvcm/measuring_stick.js",
+  "third_party/tvcm/src/tvcm/polymer.js",
+  "third_party/tvcm/src/tvcm/promise.js",
+  "third_party/tvcm/src/tvcm/properties.js",
+  "third_party/tvcm/src/tvcm/quad.js",
+  "third_party/tvcm/src/tvcm/raf.js",
+  "third_party/tvcm/src/tvcm/range.js",
+  "third_party/tvcm/src/tvcm/rect.js",
+  "third_party/tvcm/src/tvcm/settings.js",
+  "third_party/tvcm/src/tvcm/sorted_array_utils.js",
+  "third_party/tvcm/src/tvcm/statistics.js",
+  "third_party/tvcm/src/tvcm/unittest/__init__.js",
+  "third_party/tvcm/src/tvcm/unittest/assertions.js",
+  "third_party/tvcm/src/tvcm/unittest/constants.js",
+  "third_party/tvcm/src/tvcm/unittest/html_test_results.js",
+  "third_party/tvcm/src/tvcm/unittest/interactive_test_runner.js",
+  "third_party/tvcm/src/tvcm/unittest/suite_loader.js",
+  "third_party/tvcm/src/tvcm/unittest/test_case.js",
+  "third_party/tvcm/src/tvcm/unittest/test_error.js",
+  "third_party/tvcm/src/tvcm/unittest/test_runner.js",
+  "third_party/tvcm/src/tvcm/unittest/test_suite.js",
+  "third_party/tvcm/src/tvcm/unittest/text_test_results.js",
+  "third_party/tvcm/src/tvcm/utils.js",
+  "third_party/tvcm/src/tvcm/ui/__init__.js",
+  "third_party/tvcm/src/tvcm/ui/animation.js",
+  "third_party/tvcm/src/tvcm/ui/animation_controller.js",
+  "third_party/tvcm/src/tvcm/ui/bar_chart.js",
+  "third_party/tvcm/src/tvcm/ui/camera.js",
+  "third_party/tvcm/src/tvcm/ui/chart_base.js",
+  "third_party/tvcm/src/tvcm/ui/color_scheme.js",
+  "third_party/tvcm/src/tvcm/ui/container_that_decorates_its_children.js",
+  "third_party/tvcm/src/tvcm/ui/d3.js",
+  "third_party/tvcm/src/tvcm/ui/dom_helpers.js",
+  "third_party/tvcm/src/tvcm/ui/drag_handle.js",
+  "third_party/tvcm/src/tvcm/ui/info_bar.js",
+  "third_party/tvcm/src/tvcm/ui/line_chart.js",
+  "third_party/tvcm/src/tvcm/ui/list_and_associated_view.js",
+  "third_party/tvcm/src/tvcm/ui/list_view.js",
+  "third_party/tvcm/src/tvcm/ui/mouse_mode_selector.js",
+  "third_party/tvcm/src/tvcm/ui/mouse_tracker.js",
+  "third_party/tvcm/src/tvcm/ui/overlay.js",
+  "third_party/tvcm/src/tvcm/ui/pie_chart.js",
+  "third_party/tvcm/src/tvcm/ui/quad_stack_view.js",
+  "third_party/tvcm/src/tvcm/ui/sortable_table.js",
+  "third_party/tvcm/src/tvcm/ui/sunburst_chart.js",
+]
+tracing_img_files = [
+  "trace_viewer/images/checkerboard.png",
+  "trace_viewer/images/collapse.png",
+  "trace_viewer/images/expand.png",
+  "third_party/tvcm/src/tvcm/images/chrome-left.png",
+  "third_party/tvcm/src/tvcm/images/chrome-right.png",
+  "third_party/tvcm/src/tvcm/images/chrome-mid.png",
+  "third_party/tvcm/src/tvcm/images/ui-states.png",
+]
+
+# TODO: ideally these outputs would go into the target_gen_dir, but this
+# requires some changes to the scripts that process them.
+output_resource_dir = "$root_gen_dir/content/browser/tracing"
+
+action("generate_about_tracing") {
+  script = "trace_viewer/build/generate_about_tracing_contents"
+
+  inputs = tracing_html_files + tracing_css_files + tracing_js_files +
+    tracing_img_files
+  outputs = [
+    "$output_resource_dir/about_tracing.js",
+    "$output_resource_dir/about_tracing.html",
+  ]
+
+  args = [
+    "--outdir", rebase_path(output_resource_dir, root_build_dir),
+  ]
+}
diff --git a/build/secondary/tools/grit/BUILD.gn b/build/secondary/tools/grit/BUILD.gn
new file mode 100644
index 0000000..f954537
--- /dev/null
+++ b/build/secondary/tools/grit/BUILD.gn
@@ -0,0 +1,23 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This target creates a stamp file that depends on all the sources in the grit
+# directory. By depending on this, a target can force itself to be rebuilt if
+# grit itself changes.
+action("grit_sources") {
+  depfile = "$target_out_dir/grit_sources.d"
+  script = "//build/secondary/tools/grit/stamp_grit_sources.py"
+
+  inputs = [ "grit.py" ]
+
+  # Note that we can't call this "grit_sources.stamp" because that file is
+  # implicitly created by GN for script actions.
+  outputs = [ "$target_out_dir/grit_sources.script.stamp" ]
+
+  args = [
+    rebase_path("//tools/grit", root_build_dir),
+    rebase_path(outputs[0], root_build_dir),
+    rebase_path(depfile, root_build_dir)
+  ]
+}
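+
+# A minimal usage sketch (the target name "my_resources" is illustrative):
+# a grit-driven action can force itself to rebuild whenever grit changes by
+# depending on the stamp target above, e.g.
+#
+#   action("my_resources") {
+#     script = "//tools/grit/grit.py"
+#     ...
+#     deps = [ "//tools/grit:grit_sources" ]
+#   }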
diff --git a/build/secondary/tools/grit/grit_rule.gni b/build/secondary/tools/grit/grit_rule.gni
new file mode 100644
index 0000000..5103d73
--- /dev/null
+++ b/build/secondary/tools/grit/grit_rule.gni
@@ -0,0 +1,338 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Instantiate grit. This will produce an action target that runs grit, and a
+# source set that compiles the generated .cc files.
+#
+# Parameters
+#
+#   source (required)
+#       Path to .grd file.
+#
+#   outputs (required)
+#       List of outputs from grit, relative to the target_gen_dir. If supplied,
+#       a call to Grit to compute the outputs can be skipped, which makes
+#       GN run faster. Grit will verify at build time that this list is correct
+#       and will fail if there is a mismatch between the outputs specified by
+#       the .grd file and the outputs list here.
+#
+#       To get this list, you can look in the .grd file for
+#       <output filename="..." and put those filenames here. The base directory
+#       of the list in Grit and the output list specified in the GN grit target
+#       are the same (the target_gen_dir) so you can generally copy the names
+#       exactly.
+#
+#       To get the list of outputs programmatically, run:
+#           python tools/grit/grit_info.py --outputs . path/to/your.grd
+#       And strip the leading "./" from the output files.
+#
+#   defines (optional)
+#       Extra defines to pass to grit (on top of the global grit_defines list).
+#
+#   grit_flags (optional)
+#       List of strings containing extra command-line flags to pass to Grit.
+#
+#   resource_ids (optional)
+#       Path to a grit "firstidsfile". Default is
+#       //tools/gritsettings/resource_ids. Set to "" to use the value specified
+#       in the <grit> nodes of the processed files.
+#
+#   output_dir (optional)
+#       Directory for generated files. If you specify this, you will often
+#       want to specify output_name if the target name is not particularly
+#       unique, since this can cause files from multiple grit targets to
+#       overwrite each other.
+#
+#   output_name (optional)
+#       Provide an alternate base name for the generated files, like the .d
+#       files. Normally these are based on the target name and go in the
+#       output_dir, but if multiple targets with the same name end up in
+#       the same output_dir, they can collide.
+#
+#   use_qualified_include (optional)
+#       If set, output_dir is not added to include_dirs.
+#
+#   deps  (optional)
+#   visibility  (optional)
+#       Normal meaning.
+#
+# Example
+#
+#   grit("my_resources") {
+#     # Source and outputs are required.
+#     source = "myfile.grd"
+#     outputs = [
+#       "foo_strings.h",
+#       "foo_strings.pak",
+#     ]
+#
+#     grit_flags = [ "-E", "foo=bar" ]  # Optional extra flags.
+#     # You can also put deps here if the grit source depends on generated
+#     # files.
+#   }
+import ("//build/config/crypto.gni")
+import ("//build/config/features.gni")
+import ("//build/config/ui.gni")
+
+grit_defines = []
+
+# Mac and iOS want Title Case strings.
+use_titlecase_in_grd_files = is_mac || is_ios
+if (use_titlecase_in_grd_files) {
+  grit_defines += [ "-D", "use_titlecase" ]
+}
+
+if (is_chrome_branded) {
+  grit_defines += [
+    "-D", "_google_chrome",
+    "-E", "CHROMIUM_BUILD=google_chrome",
+  ]
+} else {
+  grit_defines += [
+    "-D", "_chromium",
+    "-E", "CHROMIUM_BUILD=chromium",
+  ]
+}
+
+if (is_chromeos) {
+  grit_defines += [
+    "-D", "chromeos",
+    "-D", "scale_factors=2x"
+  ]
+}
+
+if (is_desktop_linux) {
+  grit_defines += [ "-D", "desktop_linux" ]
+}
+
+if (toolkit_views) {
+  grit_defines += [ "-D", "toolkit_views" ]
+}
+
+if (use_aura) {
+  grit_defines += [ "-D", "use_aura" ]
+}
+
+if (use_ash) {
+  grit_defines += [ "-D", "use_ash" ]
+}
+
+if (use_nss_certs) {
+  grit_defines += [ "-D", "use_nss" ]
+}
+
+if (use_ozone) {
+  grit_defines += [ "-D", "use_ozone" ]
+}
+
+if (enable_image_loader_extension) {
+  grit_defines += [ "-D", "image_loader_extension" ]
+}
+
+if (enable_remoting) {
+  grit_defines += [ "-D", "remoting" ]
+}
+
+if (is_android) {
+  grit_defines += [
+    "-t", "android",
+    "-E", "ANDROID_JAVA_TAGGED_ONLY=true",
+  ]
+}
+
+if (is_mac || is_ios) {
+  grit_defines += [ "-D", "scale_factors=2x" ]
+}
+
+if (is_ios) {
+  grit_defines += [
+    "-t", "ios",
+    # iOS uses a whitelist to filter resources.
+    "-w", rebase_path("//build/ios/grit_whitelist.txt", root_build_dir),
+  ]
+}
+
+if (enable_extensions) {
+  grit_defines += [ "-D", "enable_extensions" ]
+}
+if (enable_plugins) {
+  grit_defines += [ "-D", "enable_plugins" ]
+}
+if (printing_mode != 0) {
+  grit_defines += [ "-D", "enable_printing" ]
+  if (printing_mode == 1) {
+    grit_defines += [ "-D", "enable_full_printing" ]
+  }
+}
+if (enable_themes) {
+  grit_defines += [ "-D", "enable_themes" ]
+}
+if (enable_app_list) {
+  grit_defines += [ "-D", "enable_app_list" ]
+}
+if (enable_settings_app) {
+  grit_defines += [ "-D", "enable_settings_app" ]
+}
+if (enable_google_now) {
+  grit_defines += [ "-D", "enable_google_now" ]
+}
+# Note: use_concatenated_impulse_responses is omitted. It is never used and
+# should probably be removed from the GYP build.
+if (enable_webrtc) {
+  grit_defines += [ "-D", "enable_webrtc" ]
+}
+# Note: enable_hangout_services_extension is omitted. It is never set in the
+# GYP build. Need to figure out what it's for.
+if (enable_task_manager) {
+  grit_defines += [ "-D", "enable_task_manager" ]
+}
+if (enable_notifications) {
+  grit_defines += [ "-D", "enable_notifications" ]
+}
+if (enable_wifi_bootstrapping) {
+  grit_defines += [ "-D", "enable_wifi_bootstrapping" ]
+}
+if (enable_service_discovery) {
+  grit_defines += [ "-D", "enable_service_discovery" ]
+}
+
+grit_resource_id_file = "//tools/gritsettings/resource_ids"
+grit_info_script = "//tools/grit/grit_info.py"
+
+template("grit") {
+  assert(defined(invoker.source),
+         "\"source\" must be defined for the grit template $target_name")
+
+  if (defined(invoker.resource_ids)) {
+    resource_ids = invoker.resource_ids
+  } else {
+    resource_ids = grit_resource_id_file
+  }
+
+  if (defined(invoker.output_dir)) {
+    output_dir = invoker.output_dir
+  } else {
+    output_dir = target_gen_dir
+  }
+
+  if (defined(invoker.output_name)) {
+    grit_output_name = invoker.output_name
+  } else {
+    grit_output_name = target_name
+  }
+
+  # These are all passed as arguments to the script so have to be relative to
+  # the build directory.
+  if (resource_ids != "") {
+    resource_ids = rebase_path(resource_ids, root_build_dir)
+  }
+  rebased_output_dir = rebase_path(output_dir, root_build_dir)
+  source_path = rebase_path(invoker.source, root_build_dir)
+
+  if (defined(invoker.grit_flags)) {
+    grit_flags = invoker.grit_flags
+  } else {
+    grit_flags = []  # These are optional so default to empty list.
+  }
+
+  grit_inputs = [ invoker.source ]
+
+  assert_files_flags = []
+
+  # We want to make sure the declared outputs actually match what Grit is
+  # writing. We write the list to a file (some of the output lists are long
+  # enough to not fit on a Windows command line) and ask Grit to verify those
+  # are the actual outputs at runtime.
+  asserted_list_file =
+      "$target_out_dir/${grit_output_name}_expected_outputs.txt"
+  write_file(asserted_list_file,
+             rebase_path(invoker.outputs, root_build_dir, output_dir))
+  assert_files_flags += [
+    "--assert-file-list=" + rebase_path(asserted_list_file, root_build_dir),
+  ]
+  grit_outputs = get_path_info(
+      rebase_path(invoker.outputs, ".", output_dir),
+      "abspath")
+
+  # The config and the action below get this visibility so only the generated
+  # source set can depend on them. The variable "target_name" will get
+  # overwritten inside the inner targets, so we need to compute it here.
+  target_visibility = [ ":$target_name" ]
+
+  # The current grit setup makes a file in $output_dir/grit/foo.h that
+  # the source code expects to include via "grit/foo.h". It would be nice to
+  # change this to including absolute paths relative to the root gen directory
+  # (like "mycomponent/foo.h"). This config sets up the include path.
+  grit_config = target_name + "_grit_config"
+  config(grit_config) {
+    if (!defined(invoker.use_qualified_include) ||
+        !invoker.use_qualified_include) {
+      include_dirs = [ output_dir ]
+    }
+    visibility = target_visibility
+  }
+
+  grit_custom_target = target_name + "_grit"
+  action(grit_custom_target) {
+    script = "//tools/grit/grit.py"
+    inputs = grit_inputs
+    outputs = grit_outputs
+    depfile = "$output_dir/${grit_output_name}.d"
+
+    args = [
+      "-i", source_path, "build",
+    ]
+    if (resource_ids != "") {
+      args += [ "-f", resource_ids ]
+    }
+    args += [
+      "-o", rebased_output_dir,
+      "--depdir", ".",
+      "--depfile", rebase_path(depfile, root_build_dir),
+    ] + grit_defines
+
+    # Add extra defines with -D flags.
+    if (defined(invoker.defines)) {
+      foreach (i, invoker.defines) {
+        args += [ "-D", i ]
+      }
+    }
+
+    args += grit_flags + assert_files_flags
+
+    if (defined(invoker.visibility)) {
+      # This needs to include both what the invoker specified (since they
+      # probably include generated headers from this target), as well as the
+      # generated source set (since there's no guarantee that the visibility
+      # specified by the invoker includes our target).
+      #
+      # Only define visibility at all if the invoker specified it. Otherwise,
+      # we want to keep the public "no visibility specified" default.
+      visibility = target_visibility + invoker.visibility
+    }
+
+    deps = [ "//tools/grit:grit_sources" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  # This is the thing that people actually link with; it must be named the
+  # same as the argument the template was invoked with.
+  source_set(target_name) {
+    # Since we generate a file, we need to be run before the targets that
+    # depend on us.
+    sources = grit_outputs
+
+    # Deps set on the template invocation will go on the grit script running
+    # target rather than this library.
+    deps = [ ":$grit_custom_target" ]
+    public_configs = [ ":$grit_config" ]
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    output_name = grit_output_name
+  }
+}
diff --git a/build/secondary/tools/grit/repack.gni b/build/secondary/tools/grit/repack.gni
new file mode 100644
index 0000000..cba1732
--- /dev/null
+++ b/build/secondary/tools/grit/repack.gni
@@ -0,0 +1,45 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file defines a template to invoke grit repack in a consistent manner.
+#
+# Parameters:
+#   sources  [required]
+#       List of pak files that need to be combined.
+#
+#   output  [required]
+#       File name (single string) of the output file.
+#
+#   repack_options  [optional]
+#       List of extra arguments to pass.
+#
+#   deps  [optional]
+#   visibility  [optional]
+#       Normal meaning.
+template("repack") {
+  action(target_name) {
+    assert(defined(invoker.sources), "Need sources for $target_name")
+    assert(defined(invoker.output), "Need output for $target_name")
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//tools/grit/grit/format/repack.py"
+
+    inputs = invoker.sources
+    outputs = [ invoker.output ]
+
+    args = []
+    if (defined(invoker.repack_options)) {
+      args += invoker.repack_options
+    }
+    args += [ rebase_path(invoker.output, root_build_dir) ]
+    args += rebase_path(invoker.sources, root_build_dir)
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
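+
+# A minimal usage sketch (target names and .pak paths are illustrative, not
+# taken from this repository):
+#
+#   repack("combined_resources") {
+#     sources = [
+#       "$root_gen_dir/foo/foo_resources.pak",
+#       "$root_gen_dir/bar/bar_resources.pak",
+#     ]
+#     output = "$root_gen_dir/combined_resources.pak"
+#     deps = [ "//foo:resources", "//bar:resources" ]
+#   }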
diff --git a/build/secondary/tools/grit/stamp_grit_sources.py b/build/secondary/tools/grit/stamp_grit_sources.py
new file mode 100644
index 0000000..d43d4b8
--- /dev/null
+++ b/build/secondary/tools/grit/stamp_grit_sources.py
@@ -0,0 +1,55 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script enumerates the files in the given directory, writing an empty
+# stamp file and a .d file listing the inputs required to make the stamp. This
+# allows us to dynamically depend on the grit sources without enumerating the
+# grit directory for every invocation of grit (which is what adding the source
+# files to every .grd file's .d file would entail) or shelling out to grit
+# synchronously during GN execution to get the list (which would be slow).
+#
+# Usage:
+#    stamp_grit_sources.py <directory> <stamp-file> <.d-file>
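+#
+# The .d file is written in Make dependency syntax: the stamp as the target,
+# followed by every grit .py source as a prerequisite. An illustrative (not
+# verbatim) example:
+#
+#    gen/grit_sources.script.stamp: tools/grit/grit.py tools/grit/grit/...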
+
+import os
+import sys
+
+def GritSourceFiles(grit_root_dir):
+  files = []
+  for root, _, filenames in os.walk(grit_root_dir):
+    grit_src = [os.path.join(root, f) for f in filenames
+                if f.endswith('.py') and not f.endswith('_unittest.py')]
+    files.extend(grit_src)
+  files = [f.replace('\\', '/') for f in files]
+  return sorted(files)
+
+
+def WriteDepFile(dep_file, stamp_file, source_files):
+  with open(dep_file, "w") as f:
+    f.write(stamp_file)
+    f.write(": ")
+    f.write(' '.join(source_files))
+
+
+def WriteStampFile(stamp_file):
+  with open(stamp_file, "w"):
+    pass
+
+
+def main(argv):
+  if len(argv) != 4:
+    print "Error: expecting 3 args."
+    return 1
+
+  grit_root_dir = argv[1]
+  stamp_file = argv[2]
+  dep_file = argv[3]
+
+  WriteStampFile(stamp_file)
+  WriteDepFile(dep_file, stamp_file, GritSourceFiles(grit_root_dir))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/set_clang_warning_flags.gypi b/build/set_clang_warning_flags.gypi
new file mode 100644
index 0000000..f6d7aea
--- /dev/null
+++ b/build/set_clang_warning_flags.gypi
@@ -0,0 +1,58 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included to set clang-specific compiler flags.
+# To use this, the following variables can be defined:
+#   clang_warning_flags:       list: Compiler flags to pass to clang.
+#   clang_warning_flags_unset: list: Compiler flags to not pass to clang.
+#
+# Only use this in third-party code. In chromium_code, fix your code to not
+# warn instead!
+#
+# Note that the gypi file is included in target_defaults, so it does not need
+# to be explicitly included.
+#
+# Warning flags set by this will be used on all platforms. If you want to set
+# warning flags on only some platforms, you have to do so manually.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_target',
+#   'variables': {
+#     'clang_warning_flags': ['-Wno-awesome-warning'],
+#     'clang_warning_flags_unset': ['-Wpreviously-set-flag'],
+#   }
+# }
+
+{
+  'variables': {
+    'clang_warning_flags_unset%': [],  # Provide a default value.
+  },
+  'conditions': [
+    ['clang==1', {
+      # This uses >@ instead of <@ to also see clang_warning_flags set in
+      # targets directly, not just the clang_warning_flags in target_defaults.
+      'cflags': [ '>@(clang_warning_flags)' ],
+      'cflags!': [ '>@(clang_warning_flags_unset)' ],
+      'xcode_settings': {
+        'WARNING_CFLAGS': ['>@(clang_warning_flags)'],
+        'WARNING_CFLAGS!': ['>@(clang_warning_flags_unset)'],
+      },
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'AdditionalOptions': [ '>@(clang_warning_flags)' ],
+          'AdditionalOptions!': [ '>@(clang_warning_flags_unset)' ],
+        },
+      },
+    }],
+    ['clang==0 and host_clang==1', {
+      'target_conditions': [
+        ['_toolset=="host"', {
+          'cflags': [ '>@(clang_warning_flags)' ],
+          'cflags!': [ '>@(clang_warning_flags_unset)' ],
+        }],
+      ],
+    }],
+  ],
+}
diff --git a/build/shim_headers.gypi b/build/shim_headers.gypi
new file mode 100644
index 0000000..56d8d3a
--- /dev/null
+++ b/build/shim_headers.gypi
@@ -0,0 +1,60 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to handle shim headers
+# in a consistent manner. To use this, the following variables need to be
+# defined:
+#   headers_root_path: string: path to directory containing headers
+#   header_filenames: list: list of header file names
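+#
+# A hypothetical including target (all names and paths are illustrative):
+#   {
+#     'target_name': 'libfoo_shim',
+#     'type': 'none',
+#     'variables': {
+#       'headers_root_path': '../third_party/libfoo/include',
+#       'header_filenames': [ 'foo.h' ],
+#     },
+#     'includes': [ '../build/shim_headers.gypi' ],
+#   }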
+
+{
+  'variables': {
+    'shim_headers_path': '<(SHARED_INTERMEDIATE_DIR)/shim_headers/<(_target_name)/<(_toolset)',
+    'shim_generator_additional_args%': [],
+  },
+  'include_dirs++': [
+    '<(shim_headers_path)',
+  ],
+  'all_dependent_settings': {
+    # Repeating this with different numbers of plusses is unfortunately required
+    # to make sure that even if this include is inside nested conditions/etc, it
+    # still gets inserted at the beginning of the include_dirs list. See
+    # http://crbug.com/263818 for details.
+    'include_dirs+++': [
+      '<(shim_headers_path)',
+    ],
+    'include_dirs++++': [
+      '<(shim_headers_path)',
+    ],
+    'include_dirs+++++': [
+      '<(shim_headers_path)',
+    ],
+  },
+  'actions': [
+    {
+      'variables': {
+        'generator_path': '<(DEPTH)/tools/generate_shim_headers/generate_shim_headers.py',
+        'generator_args': [
+          '--headers-root', '<(headers_root_path)',
+          '--output-directory', '<(shim_headers_path)',
+          '<@(shim_generator_additional_args)',
+          '<@(header_filenames)',
+        ],
+      },
+      'action_name': 'generate_<(_target_name)_shim_headers',
+      'inputs': [
+        '<(generator_path)',
+      ],
+      'outputs': [
+        '<!@pymod_do_main(generate_shim_headers <@(generator_args) --outputs)',
+      ],
+      'action': ['python',
+                 '<(generator_path)',
+                 '<@(generator_args)',
+                 '--generate',
+      ],
+      'message': 'Generating <(_target_name) shim headers',
+    },
+  ],
+}
diff --git a/build/slave/OWNERS b/build/slave/OWNERS
new file mode 100644
index 0000000..c367f57
--- /dev/null
+++ b/build/slave/OWNERS
@@ -0,0 +1,24 @@
+set noparent
+agable@chromium.org
+agable@google.com
+bevc@chromium.org
+bevc@google.com
+cmp@chromium.org
+cmp@google.com
+dpranke@chromium.org
+iannucci@chromium.org
+iannucci@google.com
+ilevy@chromium.org
+ilevy@google.com
+johnw@chromium.org
+johnw@google.com
+maruel@chromium.org
+maruel@google.com
+mmoss@chromium.org
+mmoss@google.com
+pschmidt@chromium.org
+pschmidt@google.com
+szager@chromium.org
+szager@google.com
+xusydoc@chromium.org
+xusydoc@google.com
diff --git a/build/slave/README b/build/slave/README
new file mode 100644
index 0000000..e3718b2
--- /dev/null
+++ b/build/slave/README
@@ -0,0 +1,8 @@
+This directory contains configuration information for the build system.
+
+* Under recipes, the build system should use only this directory as an
+  entry point into src/.
+
+* Scripts in this directory must not import from outside this directory or
+  shell out to scripts outside this directory.
diff --git a/build/some.gyp b/build/some.gyp
new file mode 100644
index 0000000..44a1dd5
--- /dev/null
+++ b/build/some.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      'target_name': 'some',
+      'type': 'none',
+      'dependencies': [
+        # This file is intended to be locally modified. List the targets you use
+        # regularly. The generated some.sln will contains projects for only
+        # those targets and the targets they are transitively dependent on. This
+        # can result in a solution that loads and unloads faster in Visual
+        # Studio.
+        #
+        # Tip: Create a dummy CL to hold your local edits to this file, so they
+        # don't accidentally get added to another CL that you are editing.
+        #
+        # Example:
+        # '../chrome/chrome.gyp:chrome',
+      ],
+    },
+  ],
+}
diff --git a/build/symlink.py b/build/symlink.py
new file mode 100755
index 0000000..aade2f8
--- /dev/null
+++ b/build/symlink.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Make a symlink and optionally touch a file (to handle dependencies)."""
+
+
+import errno
+import optparse
+import os.path
+import sys
+
+
+def Main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-f', '--force', action='store_true')
+  parser.add_option('--touch')
+
+  options, args = parser.parse_args(argv[1:])
+  if len(args) < 2:
+    parser.error('at least two arguments required.')
+
+  target = args[-1]
+  sources = args[:-1]
+  for s in sources:
+    t = os.path.join(target, os.path.basename(s))
+    try:
+      os.symlink(s, t)
+    except OSError, e:
+      if e.errno == errno.EEXIST and options.force:
+        os.remove(t)
+        os.symlink(s, t)
+      else:
+        raise
+
+
+  if options.touch:
+    with open(options.touch, 'w') as f:
+      pass
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv))
diff --git a/build/temp_gyp/README.chromium b/build/temp_gyp/README.chromium
new file mode 100644
index 0000000..8045d61
--- /dev/null
+++ b/build/temp_gyp/README.chromium
@@ -0,0 +1,3 @@
+This directory will be removed once the files in it are committed upstream and
+Chromium imports an upstream revision with these files.  Contact mark for
+details.
diff --git a/build/temp_gyp/pdfsqueeze.gyp b/build/temp_gyp/pdfsqueeze.gyp
new file mode 100644
index 0000000..2b3b1ff
--- /dev/null
+++ b/build/temp_gyp/pdfsqueeze.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'pdfsqueeze',
+      'type': 'executable',
+      'sources': [
+        '../../third_party/pdfsqueeze/pdfsqueeze.m',
+      ],
+      'defines': [
+        # Use defines to map the full path names that will be used for
+        # the vars into the short forms expected by pdfsqueeze.m.
+        '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter=ApplyGenericRGB_qfilter',
+        '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter_len=ApplyGenericRGB_qfilter_len',
+      ],
+      'include_dirs': [
+        '<(INTERMEDIATE_DIR)',
+      ],
+      'libraries': [
+        '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+        '$(SDKROOT)/System/Library/Frameworks/Quartz.framework',
+      ],
+      'actions': [
+        {
+          'action_name': 'Generate inline filter data',
+          'inputs': [
+            '../../third_party/pdfsqueeze/ApplyGenericRGB.qfilter',
+          ],
+          'outputs': [
+            '<(INTERMEDIATE_DIR)/ApplyGenericRGB.h',
+          ],
+          'action': ['xxd', '-i', '<@(_inputs)', '<@(_outputs)'],
+        },
+      ],
+    },
+  ],
+}
diff --git a/build/toolchain/OWNERS b/build/toolchain/OWNERS
new file mode 100644
index 0000000..9b79b9a
--- /dev/null
+++ b/build/toolchain/OWNERS
@@ -0,0 +1,2 @@
+set noparent
+brettw@chromium.org
diff --git a/build/toolchain/android/BUILD.gn b/build/toolchain/android/BUILD.gn
new file mode 100644
index 0000000..9ae2218
--- /dev/null
+++ b/build/toolchain/android/BUILD.gn
@@ -0,0 +1,100 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")  # Imports android/config.gni.
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+# The Android GCC toolchains share most of the same parameters, so we have this
+# wrapper around gcc_toolchain to avoid duplication of logic.
+#
+# Parameters:
+#  - android_ndk_sysroot
+#      Sysroot for this architecture.
+#  - android_ndk_lib_dir
+#      Subdirectory inside of android_ndk_sysroot where libs go.
+#  - tool_prefix
+#      Prefix to be added to the tool names.
+#  - toolchain_cpu_arch
+#      Same as gcc_toolchain
+template("android_gcc_toolchain") {
+  gcc_toolchain(target_name) {
+    # Make our manually injected libs relative to the build dir.
+    android_ndk_lib = rebase_path(
+      invoker.android_ndk_sysroot + "/" + invoker.android_ndk_lib_dir,
+      root_build_dir)
+
+    libs_section_prefix = "$android_ndk_lib/crtbegin_dynamic.o"
+    libs_section_postfix = "$android_ndk_lib/crtend_android.o"
+
+    solink_libs_section_prefix = "$android_ndk_lib/crtbegin_so.o"
+    solink_libs_section_postfix = "$android_ndk_lib/crtend_so.o"
+
+    # The tools should be run relative to the build dir.
+    tool_prefix = rebase_path(invoker.tool_prefix, root_build_dir)
+
+    if (use_goma) {
+      goma_prefix = "$goma_dir/gomacc "
+    } else {
+      goma_prefix = ""
+    }
+
+    cc = goma_prefix + tool_prefix + "gcc"
+    cxx = goma_prefix + tool_prefix + "g++"
+    ar = tool_prefix + "ar"
+    ld = cxx
+
+    toolchain_os = "android"
+    toolchain_cpu_arch = invoker.toolchain_cpu_arch
+
+    # We make the assumption that the gcc_toolchain will produce a soname with
+    # the following definition.
+    soname = "{{target_output_name}}{{output_extension}}"
+
+    stripped_soname = "lib.stripped/${soname}"
+    temp_stripped_soname = "${stripped_soname}.tmp"
+
+    android_strip = "${tool_prefix}strip"
+
+    mkdir_command = "mkdir -p lib.stripped"
+    strip_command = "$android_strip --strip-unneeded -o $temp_stripped_soname $soname"
+    replace_command = "if ! cmp -s $temp_stripped_soname $stripped_soname; then mv $temp_stripped_soname $stripped_soname; fi"
+    postsolink = "$mkdir_command && $strip_command && $replace_command"
+    solink_outputs = [ stripped_soname ]
+
+    # We make the assumption that the gcc_toolchain will produce an exe with
+    # the following definition.
+    exe = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"
+    stripped_exe = "exe.stripped/$exe"
+    mkdir_command = "mkdir -p exe.stripped"
+    strip_command = "$android_strip --strip-unneeded -o $stripped_exe $exe"
+    postlink = "$mkdir_command && $strip_command"
+    link_outputs = [ stripped_exe ]
+  }
+}
+
+android_gcc_toolchain("x86") {
+  android_ndk_sysroot = "$android_ndk_root/$x86_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib"
+
+  tool_prefix = "$x86_android_toolchain_root/bin/i686-linux-android-"
+  toolchain_cpu_arch = "x86"
+}
+
+android_gcc_toolchain("arm") {
+  android_ndk_sysroot = "$android_ndk_root/$arm_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib"
+
+  tool_prefix = "$arm_android_toolchain_root/bin/arm-linux-androideabi-"
+  toolchain_cpu_arch = "arm"
+}
+
+android_gcc_toolchain("mipsel") {
+  android_ndk_sysroot = "$android_ndk_root/$mips_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib"
+
+  tool_prefix = "$mips_android_toolchain_root/bin/mipsel-linux-android-"
+  toolchain_cpu_arch = "mipsel"
+}
diff --git a/build/toolchain/clang.gni b/build/toolchain/clang.gni
new file mode 100644
index 0000000..c680384
--- /dev/null
+++ b/build/toolchain/clang.gni
@@ -0,0 +1,9 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Enable the optional type profiler in Clang, which will tag heap allocations
+  # with the allocation type.
+  use_clang_type_profiler = false
+}
diff --git a/build/toolchain/cros/BUILD.gn b/build/toolchain/cros/BUILD.gn
new file mode 100644
index 0000000..d360f72
--- /dev/null
+++ b/build/toolchain/cros/BUILD.gn
@@ -0,0 +1,35 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+declare_args() {
+  # The CrOS build system supports many different kinds of targets across
+  # many different architectures. Bringing your own toolchain is also supported,
+  # so it's actually impossible to enumerate all toolchains for all targets
+  # as GN toolchain specifications.
+  # These arguments provide a mechanism for specifying your CC, CXX and AR at
+  # buildfile-generation time, allowing the CrOS build system to always use
+  # the right tools for the current target.
+  cros_target_cc = ""
+  cros_target_cxx = ""
+  cros_target_ar = ""
+}
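+
+# These are normally supplied when generating the build files, e.g.
+# (illustrative values):
+#   gn gen out_cros --args='cros_target_cc="gcc" cros_target_cxx="g++" cros_target_ar="ar"'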
+
+gcc_toolchain("target") {
+  assert(cros_target_cc != "", "Must provide target CC.")
+  assert(cros_target_cxx != "", "Must provide target CXX.")
+  assert(cros_target_ar != "", "Must provide target AR.")
+
+  cc = "${cros_target_cc}"
+  cxx = "${cros_target_cxx}"
+
+  ar = "${cros_target_ar}"
+  ld = cxx
+
+  toolchain_cpu_arch = "${cpu_arch}"
+  toolchain_os = "linux"
+  is_clang = is_clang
+}
diff --git a/build/toolchain/gcc_toolchain.gni b/build/toolchain/gcc_toolchain.gni
new file mode 100644
index 0000000..e415459
--- /dev/null
+++ b/build/toolchain/gcc_toolchain.gni
@@ -0,0 +1,215 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This value will be inherited in the toolchain below.
+concurrent_links = exec_script("get_concurrent_links.py", [], "value")
+
+# This template defines a toolchain for something that works like gcc
+# (including clang).
+#
+# It requires the following variables specifying the executables to run:
+#  - cc
+#  - cxx
+#  - ar
+#  - ld
+# and the following, which are used in the toolchain_args:
+#  - toolchain_cpu_arch  (What "cpu_arch" should be set to when invoking a
+#                         build using this toolchain.)
+#  - toolchain_os  (What "os" should be set to when invoking a build using this
+#                   toolchain.)
+#
+# Optional parameters:
+#  - libs_section_prefix
+#  - libs_section_postfix
+#      The contents of these strings, if specified, will be placed around
+#      the libs section of the linker line. It allows one to inject libraries
+#      at the beginning and end for all targets in a toolchain.
+#  - solink_libs_section_prefix
+#  - solink_libs_section_postfix
+#      Same as libs_section_{pre,post}fix except used for solink instead of link.
+#  - postsolink
+#      The content of this string, if specified, will be appended to the solink
+#      command.
+#  - deps
+#      Just forwarded to the toolchain definition.
+#  - is_clang
+template("gcc_toolchain") {
+  toolchain(target_name) {
+    assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
+    assert(defined(invoker.cxx), "gcc_toolchain() must specify a \"cxx\" value")
+    assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
+    assert(defined(invoker.ld), "gcc_toolchain() must specify a \"ld\" value")
+    assert(defined(invoker.toolchain_cpu_arch),
+           "gcc_toolchain() must specify a \"toolchain_cpu_arch\"")
+    assert(defined(invoker.toolchain_os),
+           "gcc_toolchain() must specify a \"toolchain_os\"")
+
+    # We can't do string interpolation ($ in strings) on things with dots in
+    # them. To allow us to use $cc below, for example, we create copies of
+    # these values in our scope.
+    cc = invoker.cc
+    cxx = invoker.cxx
+    ar = invoker.ar
+    ld = invoker.ld
+
+    # Bring these into our scope for string interpolation with default values.
+    if (defined(invoker.libs_section_prefix)) {
+      libs_section_prefix = invoker.libs_section_prefix
+    } else {
+      libs_section_prefix = ""
+    }
+
+    if (defined(invoker.libs_section_postfix)) {
+      libs_section_postfix = invoker.libs_section_postfix
+    } else {
+      libs_section_postfix = ""
+    }
+
+    if (defined(invoker.solink_libs_section_prefix)) {
+      solink_libs_section_prefix = invoker.solink_libs_section_prefix
+    } else {
+      solink_libs_section_prefix = ""
+    }
+
+    if (defined(invoker.solink_libs_section_postfix)) {
+      solink_libs_section_postfix = invoker.solink_libs_section_postfix
+    } else {
+      solink_libs_section_postfix = ""
+    }
+
+    # These library switches can apply to all tools below.
+    lib_switch = "-l"
+    lib_dir_switch = "-L"
+
+    tool("cc") {
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CC {{output}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+      ]
+    }
+
+    tool("cxx") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CXX {{output}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+      ]
+    }
+
+    tool("asm") {
+      # For GCC we can just use the C compiler to compile assembly.
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "ASM {{output}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+      ]
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+      command = "rm -f {{output}} && $ar rcs {{output}} @$rspfile"
+      description = "AR {{output}}"
+      rspfile_content = "{{inputs}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}{{output_extension}}"
+      ]
+      default_output_extension = ".a"
+      output_prefix = "lib"
+    }
+
+    tool("solink") {
+      soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
+      sofile = "{{root_out_dir}}/$soname"  # Possibly including toolchain dir.
+      rspfile = sofile + ".rsp"
+
+      # These variables are not built into GN but are helpers that implement
+      # (1) linking to produce a .so, (2) extracting the symbols from that file
+      # to a temporary file, (3) if the temporary file has differences from the
+      # existing .TOC file, overwrite it, otherwise, don't change it.
+      tocfile = sofile + ".TOC"
+      temporary_tocname = sofile + ".tmp"
+      link_command = "$ld -shared {{ldflags}} -o $sofile -Wl,-soname=$soname @$rspfile"
+      toc_command = "{ readelf -d $sofile | grep SONAME ; nm -gD -f p $sofile | cut -f1-2 -d' '; } > $temporary_tocname"
+      replace_command = "if ! cmp -s $temporary_tocname $tocfile; then mv $temporary_tocname $tocfile; fi"
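+
+      # An illustrative (not verbatim) .TOC for a library libfoo.so: the
+      # SONAME line from readelf, then one "name type" pair per exported
+      # dynamic symbol, e.g.
+      #    0x000000000000000e (SONAME)  Library soname: [libfoo.so]
+      #    FooMain T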
+
+      command = "$link_command && $toc_command && $replace_command"
+      if (defined(invoker.postsolink)) {
+        command += " && " + invoker.postsolink
+      }
+      rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
+
+      description = "SOLINK $sofile"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_extension = ".so"
+
+      output_prefix = "lib"
+
+      # Since the above commands only update the .TOC file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # Tell GN about the output files. It will link to the sofile but use the
+      # tocfile for dependency management.
+      outputs = [
+        sofile,
+        tocfile,
+      ]
+      if (defined(invoker.solink_outputs)) {
+        outputs += invoker.solink_outputs
+      }
+      link_output = sofile
+      depend_output = tocfile
+    }
+
+    tool("link") {
+      outfile = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"
+      rspfile = "$outfile.rsp"
+      command = "$ld {{ldflags}} -o $outfile -Wl,--start-group @$rspfile {{solibs}} -Wl,--end-group $libs_section_prefix {{libs}} $libs_section_postfix"
+      if (defined(invoker.postlink)) {
+        command += " && " + invoker.postlink
+      }
+      description = "LINK $outfile"
+      rspfile_content = "{{inputs}}"
+      outputs = [ outfile ]
+      if (defined(invoker.link_outputs)) {
+        outputs += invoker.link_outputs
+      }
+    }
+
+    tool("stamp") {
+      command = "touch {{output}}"
+      description = "STAMP {{output}}"
+    }
+
+    tool("copy") {
+      command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+      description = "COPY {{source}} {{output}}"
+    }
+
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    toolchain_args() {
+      cpu_arch = invoker.toolchain_cpu_arch
+      os = invoker.toolchain_os
+      if (defined(invoker.is_clang)) {
+        is_clang = invoker.is_clang
+      }
+    }
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
diff --git a/build/toolchain/get_concurrent_links.py b/build/toolchain/get_concurrent_links.py
new file mode 100644
index 0000000..629d67d
--- /dev/null
+++ b/build/toolchain/get_concurrent_links.py
@@ -0,0 +1,64 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script computes the number of concurrent links we want to run in the build
+# as a function of machine spec. It's based on GetDefaultConcurrentLinks in GYP.
+
+import os
+import re
+import subprocess
+import sys
+
+def GetDefaultConcurrentLinks():
+  # Inherit the legacy environment variable for people that have set it in GYP.
+  pool_size = int(os.getenv('GYP_LINK_CONCURRENCY', 0))
+  if pool_size:
+    return pool_size
+
+  if sys.platform in ('win32', 'cygwin'):
+    import ctypes
+
+    class MEMORYSTATUSEX(ctypes.Structure):
+      _fields_ = [
+        ("dwLength", ctypes.c_ulong),
+        ("dwMemoryLoad", ctypes.c_ulong),
+        ("ullTotalPhys", ctypes.c_ulonglong),
+        ("ullAvailPhys", ctypes.c_ulonglong),
+        ("ullTotalPageFile", ctypes.c_ulonglong),
+        ("ullAvailPageFile", ctypes.c_ulonglong),
+        ("ullTotalVirtual", ctypes.c_ulonglong),
+        ("ullAvailVirtual", ctypes.c_ulonglong),
+        ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+      ]
+
+    stat = MEMORYSTATUSEX()
+    stat.dwLength = ctypes.sizeof(stat)
+    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+
+    mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30)))  # total / 4GB
+    hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+    return min(mem_limit, hard_cap)
+  elif sys.platform.startswith('linux'):
+    if os.path.exists("/proc/meminfo"):
+      with open("/proc/meminfo") as meminfo:
+        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+        for line in meminfo:
+          match = memtotal_re.match(line)
+          if not match:
+            continue
+          # Allow 8GB per link on Linux because Gold is quite memory hungry.
+          return max(1, int(match.group(1)) / (8 * (2 ** 20)))
+    return 1
+  elif sys.platform == 'darwin':
+    try:
+      avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+      # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
+      # 4GB per ld process allows for some more bloat.
+      return max(1, avail_bytes / (4 * (2 ** 30)))  # total / 4GB
+    except:
+      return 1
+  else:
+    # TODO(scottmg): Implement this for other platforms.
+    return 1
+
+print GetDefaultConcurrentLinks()
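+
+# Worked example (hypothetical machine): a Linux host with 32GB of RAM reports
+# a MemTotal of roughly 33554432 kB in /proc/meminfo, so this prints
+# 33554432 / (8 * 2 ** 20) = 4 concurrent links. Setting GYP_LINK_CONCURRENCY
+# in the environment overrides the computed value on any platform.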
diff --git a/build/toolchain/goma.gni b/build/toolchain/goma.gni
new file mode 100644
index 0000000..c0f4cf2
--- /dev/null
+++ b/build/toolchain/goma.gni
@@ -0,0 +1,22 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of Goma.
+#
+# This is currently designed to match the GYP build exactly, so as not to break
+# people during the transition.
+
+declare_args() {
+  # Set to true to enable distributed compilation using Goma.
+  use_goma = false
+
+  # Set the default value based on the platform.
+  if (is_win) {
+    # Absolute directory containing the Goma source code.
+    goma_dir = "C:\goma\goma-win"
+  } else {
+    # Absolute directory containing the Goma source code.
+    goma_dir = getenv("HOME") + "/goma"
+  }
+}
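+
+# Example (hypothetical paths): to enable Goma from a non-default location,
+# the build could be generated with something like
+#   gn gen out/Release --args='use_goma=true goma_dir="/path/to/goma"'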
diff --git a/build/toolchain/linux/BUILD.gn b/build/toolchain/linux/BUILD.gn
new file mode 100644
index 0000000..a98ea53
--- /dev/null
+++ b/build/toolchain/linux/BUILD.gn
@@ -0,0 +1,99 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+import("//build/toolchain/goma.gni")
+
+if (use_goma) {
+  goma_prefix = "$goma_dir/gomacc "
+} else {
+  goma_prefix = ""
+}
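+
+# With use_goma enabled, the compiler commands below expand to e.g.
+# "$goma_dir/gomacc gcc"; with it disabled they are plain "gcc".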
+
+gcc_toolchain("arm") {
+  cc = "${goma_prefix}arm-linux-gnueabi-gcc"
+  cxx = "${goma_prefix}arm-linux-gnueabi-g++"
+
+  ar = "arm-linux-gnueabi-ar"
+  ld = cxx
+
+  toolchain_cpu_arch = "arm"
+  toolchain_os = "linux"
+  is_clang = false
+}
+
+gcc_toolchain("clang_x86") {
+  if (use_clang_type_profiler) {
+    prefix = rebase_path("//third_party/llvm-allocated-type/Linux_ia32/bin",
+                         root_build_dir)
+  } else {
+    prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                         root_build_dir)
+  }
+  cc = "${goma_prefix}$prefix/clang"
+  cxx = "${goma_prefix}$prefix/clang++"
+
+  ar = "ar"
+  ld = cxx
+
+  toolchain_cpu_arch = "x86"
+  toolchain_os = "linux"
+  is_clang = true
+}
+
+gcc_toolchain("x86") {
+  cc = "${goma_prefix}gcc"
+  cxx = "$goma_prefix}g++"
+
+  ar = "ar"
+  ld = cxx
+
+  toolchain_cpu_arch = "x86"
+  toolchain_os = "linux"
+  is_clang = false
+}
+
+gcc_toolchain("clang_x64") {
+  if (use_clang_type_profiler) {
+    prefix = rebase_path("//third_party/llvm-allocated-type/Linux_x64/bin",
+                         root_build_dir)
+  } else {
+    prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                         root_build_dir)
+  }
+  cc = "${goma_prefix}$prefix/clang"
+  cxx = "${goma_prefix}$prefix/clang++"
+
+  ar = "ar"
+  ld = cxx
+
+  toolchain_cpu_arch = "x64"
+  toolchain_os = "linux"
+  is_clang = true
+}
+
+gcc_toolchain("x64") {
+  cc = "${goma_prefix}gcc"
+  cxx = "${goma_prefix}g++"
+
+  ar = "ar"
+  ld = cxx
+
+  toolchain_cpu_arch = "x64"
+  toolchain_os = "linux"
+  is_clang = false
+}
+
+gcc_toolchain("mipsel") {
+  cc = "mipsel-linux-gnu-gcc"
+  cxx = "mipsel-linux-gnu-g++"
+  ar = "mipsel-linux-gnu-ar"
+  ld = cxx
+
+  toolchain_cpu_arch = "mipsel"
+  toolchain_os = "linux"
+  is_clang = false
+}
diff --git a/build/toolchain/mac/BUILD.gn b/build/toolchain/mac/BUILD.gn
new file mode 100644
index 0000000..b1cd36b
--- /dev/null
+++ b/build/toolchain/mac/BUILD.gn
@@ -0,0 +1,201 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(brettw) Use "gcc_toolchain.gni" like the Linux toolchains. This requires
+# some enhancements since the commands on Mac are slightly different than on
+# Linux.
+
+import("../goma.gni")
+
+# Should only be running on Mac or iOS.
+assert(is_mac || is_ios)
+
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/goma.gni")
+
+if (is_clang) {
+  cc = rebase_path("//third_party/llvm-build/Release+Asserts/bin/clang",
+                   root_build_dir)
+  cxx = rebase_path("//third_party/llvm-build/Release+Asserts/bin/clang++",
+                    root_build_dir)
+} else {
+  cc = "gcc"
+  cxx = "g++"
+}
+ld = cxx
+
+# This will copy the gyp-mac-tool to the build directory. We pass in the source
+# file of the mac tool.
+gyp_mac_tool_source =
+  rebase_path("//tools/gyp/pylib/gyp/mac_tool.py", root_build_dir)
+exec_script("setup_toolchain.py", [ gyp_mac_tool_source ])
+
+# Shared toolchain definition. Invocations should set toolchain_os to set the
+# build args in this definition.
+template("mac_clang_toolchain") {
+  toolchain(target_name) {
+    assert(defined(invoker.cc),
+           "mac_clang_toolchain() must specify a \"cc\" value")
+    assert(defined(invoker.cxx),
+           "mac_clang_toolchain() must specify a \"cxx\" value")
+    assert(defined(invoker.ld),
+           "mac_clang_toolchain() must specify a \"ld\" value")
+    assert(defined(invoker.toolchain_os),
+           "mac_clang_toolchain() must specify a \"toolchain_os\"")
+
+    # We can't do string interpolation ($ in strings) on things with dots in
+    # them. To allow us to use $cc below, for example, we create copies of
+    # these values in our scope.
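+    # (For example, "$cc" below interpolates as expected, while "$invoker.cc"
+    # would not be parsed as a scope access inside a string.)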
+    cc = invoker.cc
+    cxx = invoker.cxx
+    ld = invoker.ld
+
+    # Make these apply to all tools below.
+    lib_switch = "-l"
+    lib_dir_switch = "-L"
+
+    tool("cc") {
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CC {{output}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+      ]
+    }
+
+    tool("cxx") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CXX {{output}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+      ]
+    }
+
+    tool("asm") {
+      # For GCC we can just use the C compiler to compile assembly.
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "ASM {{output}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+      ]
+    }
+
+    tool("objc") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} {{cflags_objc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "OBJC {{output}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+      ]
+    }
+
+    tool("objcxx") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} {{cflags_objcc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "OBJCXX {{output}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+      ]
+    }
+
+    tool("alink") {
+      command = "rm -f {{output}} && ./gyp-mac-tool filter-libtool libtool -static -o {{output}} {{inputs}}"
+      description = "LIBTOOL-STATIC {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}{{output_extension}}"
+      ]
+      default_output_extension = ".a"
+      output_prefix = "lib"
+    }
+
+    tool("solink") {
+      dylib = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"  # e.g. "./libfoo.dylib"
+      rspfile = dylib + ".rsp"
+
+      # These variables are not built into GN but are helpers that implement
+      # (1) linking to produce a .dylib, (2) extracting the symbols from that
+      # file to a temporary file, (3) if the temporary file has differences from
+      # the existing .TOC file, overwrite it; otherwise, don't change it.
+      #
+      # As a special case, if the library reexports symbols from other dynamic
+      # libraries, we always update the .TOC and skip the temporary file and
+      # diffing steps, since that library always needs to be re-linked.
+      tocname = dylib + ".TOC"
+      temporary_tocname = dylib + ".tmp"
+
+      does_reexport_command = "[ ! -e $dylib -o ! -e $tocname ] || otool -l $dylib | grep -q LC_REEXPORT_DYLIB"
+      link_command = "$ld -shared {{ldflags}} -o $dylib -Wl,-filelist,$rspfile {{solibs}} {{libs}}"
+      replace_command = "if ! cmp -s $temporary_tocname $tocname; then mv $temporary_tocname $tocname"
+      extract_toc_command = "{ otool -l $dylib | grep LC_ID_DYLIB -A 5; nm -gP $dylib | cut -f1-2 -d' ' | grep -v U\$\$; true; }"
+
+      command = "if $does_reexport_command ; then $link_command && $extract_toc_command > $tocname; else $link_command && $extract_toc_command > $temporary_tocname && $replace_command ; fi; fi"
+
+      rspfile_content = "{{inputs_newline}}"
+
+      description = "SOLINK {{output}}"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_extension = ".dylib"
+
+      output_prefix = "lib"
+
+      # Since the above commands only update the .TOC file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # Tell GN about the output files. It will link to the dylib but use the
+      # tocname for dependency management.
+      outputs = [
+        dylib,
+        tocname,
+      ]
+      link_output = dylib
+      depend_output = tocname
+    }
+
+    tool("link") {
+      outfile = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"
+      rspfile = "$outfile.rsp"
+      command = "$ld {{ldflags}} -o $outfile -Wl,-filelist,$rspfile {{solibs}} {{libs}}"
+      description = "LINK $outfile"
+      rspfile_content = "{{inputs_newline}}"
+      outputs = [ outfile ]
+    }
+
+    tool("stamp") {
+      command = "touch {{output}}"
+      description = "STAMP {{output}}"
+    }
+
+    tool("copy") {
+      command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+      description = "COPY {{source}} {{output}}"
+    }
+
+    toolchain_args() {
+      os = invoker.toolchain_os
+    }
+  }
+}
+
+# Toolchain representing the target build (either mac or iOS).
+mac_clang_toolchain("clang") {
+  toolchain_os = os
+}
+
+# This toolchain provides a way for iOS target compiles to reference targets
+# compiled for the host system. It just overrides the OS back to "mac".
+mac_clang_toolchain("host_clang") {
+  toolchain_os = "mac"
+}
diff --git a/build/toolchain/mac/setup_toolchain.py b/build/toolchain/mac/setup_toolchain.py
new file mode 100644
index 0000000..431078f
--- /dev/null
+++ b/build/toolchain/mac/setup_toolchain.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import stat
+import sys
+
+def CopyTool(source_path):
+  """Copies the given tool to the current directory, including a warning not
+  to edit it."""
+  with open(source_path) as source_file:
+    tool_source = source_file.readlines()
+
+  # Add header and write it out to the current directory (which should be the
+  # root build dir).
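+  # The original first line is kept first so the tool's shebang (if any) stays
+  # on line 1 and the do-not-edit warning becomes line 2.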
+  out_path = 'gyp-mac-tool'
+  with open(out_path, 'w') as tool_file:
+    tool_file.write(''.join([tool_source[0],
+                             '# Generated by setup_toolchain.py do not edit.\n']
+                            + tool_source[1:]))
+  st = os.stat(out_path)
+  os.chmod(out_path, st.st_mode | stat.S_IEXEC)
+
+# Find the tool source, it's the first argument, and copy it.
+if len(sys.argv) != 2:
+  print "Need one argument (mac_tool source path)."
+  sys.exit(1)
+CopyTool(sys.argv[1])
diff --git a/build/toolchain/nacl/BUILD.gn b/build/toolchain/nacl/BUILD.gn
new file mode 100644
index 0000000..8c76f5a
--- /dev/null
+++ b/build/toolchain/nacl/BUILD.gn
@@ -0,0 +1,61 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+toolchain("x86_newlib") {
+  toolprefix = "gen/sdk/toolchain/linux_x86_newlib/bin/x86_64-nacl-"
+  cc = toolprefix + "gcc"
+  cxx = toolprefix + "g++"
+  ld = toolprefix + "g++"
+
+  tool("cc") {
+    command = "$cc -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_c -c \$in -o \$out"
+    description = "CC(NaCl x86 Newlib) \$out"
+    depfile = "\$out.d"
+    depsformat = "gcc"
+  }
+  tool("cxx") {
+    # cflags_pch_cc
+    command = "$cxx -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_cc -c \$in -o \$out"
+    description = "CXX(NaCl x86 Newlib) \$out"
+    depfile = "\$out.d"
+    depsformat = "gcc"
+  }
+  tool("alink") {
+    command = "rm -f \$out && ${toolprefix}ar rcs \$out \$in"
+    description = "AR(NaCl x86 Newlib) \$out"
+  }
+  tool("solink") {
+    command = "if [ ! -e \$lib -o ! -e \${lib}.TOC ]; then $ld -shared \$ldflags -o \$lib -Wl,-soname=\$soname -Wl,--whole-archive \$in \$solibs -Wl,--no-whole-archive \$libs && { readelf -d \${lib} | grep SONAME ; nm -gD -f p \${lib} | cut -f1-2 -d' '; } > \${lib}.TOC; else $ld -shared \$ldflags -o \$lib -Wl,-soname=\$soname -Wl,--whole-archive \$in \$solibs -Wl,--no-whole-archive \$libs && { readelf -d \${lib} | grep SONAME ; nm -gD -f p \${lib} | cut -f1-2 -d' '; } > \${lib}.tmp && if ! cmp -s \${lib}.tmp \${lib}.TOC; then mv \${lib}.tmp \${lib}.TOC ; fi; fi"
+    description = "SOLINK(NaCl x86 Newlib) \$lib"
+    #pool = "link_pool"
+    restat = "1"
+  }
+  tool("link") {
+    command = "$ld \$ldflags -o \$out -Wl,--start-group \$in \$solibs -Wl,--end-group \$libs"
+    description = "LINK(NaCl x86 Newlib) \$out"
+    #pool = "link_pool"
+  }
+
+  if (is_win) {
+    tool("stamp") {
+      command = "$python_path gyp-win-tool stamp \$out"
+      description = "STAMP \$out"
+    }
+  } else {
+    tool("stamp") {
+      command = "touch \$out"
+      description = "STAMP \$out"
+    }
+  }
+
+  toolchain_args() {
+    # Override the default OS detection. The build config will set the is_*
+    # flags accordingly.
+    os = "nacl"
+
+    # Component build not supported in NaCl, since it does not support shared
+    # libraries.
+    is_component_build = false
+  }
+}
diff --git a/build/toolchain/win/BUILD.gn b/build/toolchain/win/BUILD.gn
new file mode 100644
index 0000000..a9fab7c
--- /dev/null
+++ b/build/toolchain/win/BUILD.gn
@@ -0,0 +1,176 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/goma.gni")
+
+# Should only be running on Windows.
+assert(is_win)
+
+# Setup the Visual Studio state.
+#
+# Its arguments are the VS path and the compiler wrapper tool. It will write
+# "environment.x86" and "environment.x64" to the build directory.
+gyp_win_tool_path = rebase_path("//tools/gyp/pylib/gyp/win_tool.py",
+                                root_build_dir)
+exec_script("setup_toolchain.py",
+            [ visual_studio_path, gyp_win_tool_path, windows_sdk_path ])
+
+# This value will be inherited in the toolchain below.
+concurrent_links = exec_script("../get_concurrent_links.py", [], "value")
+
+# Parameters:
+#  cpu_arch: cpu_arch to pass as a build arg
+#  environment: File name of environment file.
+#  force_win64 (optional): value for this build arg.
+template("msvc_toolchain") {
+  if (defined(invoker.concurrent_links)) {
+    concurrent_links = invoker.concurrent_links
+  }
+
+  env = invoker.environment
+
+  toolchain(target_name) {
+    # Make these apply to all tools below.
+    lib_switch = ""
+    lib_dir_switch = "/LIBPATH:"
+
+    tool("cc") {
+      rspfile = "{{output}}.rsp"
+      pdbname = "{{target_out_dir}}/{{target_output_name}}_c.pdb"
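+      # "ninja -t msvc -e $env" runs cl.exe with the environment block written
+      # by setup_toolchain.py; dependency information is recovered from the
+      # /showIncludes output (hence depsformat = "msvc").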
+      command = "ninja -t msvc -e $env -- cl.exe /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd$pdbname"
+      depsformat = "msvc"
+      description = "CC {{output}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.obj",
+      ]
+      rspfile_content = "{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}"
+    }
+
+    tool("cxx") {
+      rspfile = "{{output}}.rsp"
+      # The PDB name needs to be different between C and C++ compiled files.
+      pdbname = "{{target_out_dir}}/{{target_output_name}}_cc.pdb"
+      command = "ninja -t msvc -e $env -- cl.exe /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd$pdbname"
+      depsformat = "msvc"
+      description = "CXX {{output}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.obj",
+      ]
+      rspfile_content = "{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}"
+    }
+
+    tool("rc") {
+      command = "$python_path gyp-win-tool rc-wrapper $env rc.exe {{defines}} {{include_dirs}} /fo{{output}} {{source}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.res",
+      ]
+      description = "RC {{output}}"
+    }
+
+    tool("asm") {
+      # TODO(brettw): "/safeseh" assembler argument is hardcoded here. Extract
+      # assembler flags to a variable like cflags. crbug.com/418613
+      command = "$python_path gyp-win-tool asm-wrapper $env ml.exe {{defines}} {{include_dirs}} /safeseh /c /Fo {{output}} {{source}}"
+      description = "ASM {{output}}"
+      outputs = [
+        "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.obj",
+      ]
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+      command = "$python_path gyp-win-tool link-wrapper $env False lib.exe /nologo /ignore:4221 /OUT:{{output}} @$rspfile"
+      description = "LIB {{output}}"
+      outputs = [
+        # Ignore {{output_extension}} and always use .lib, there's no reason to
+        # allow targets to override this extension on Windows.
+        "{{target_out_dir}}/{{target_output_name}}.lib",
+      ]
+      default_output_extension = ".lib"
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}}"
+    }
+
+    tool("solink") {
+      dllname = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"  # e.g. foo.dll
+      libname = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.lib"  # e.g. foo.dll.lib
+      rspfile = "${dllname}.rsp"
+
+      link_command = "$python_path gyp-win-tool link-wrapper $env False link.exe /nologo /IMPLIB:$libname /DLL /OUT:$dllname /PDB:${dllname}.pdb @$rspfile"
+
+      # TODO(brettw) support manifests
+      #manifest_command = "$python_path gyp-win-tool manifest-wrapper $env mt.exe -nologo -manifest $manifests -out:${dllname}.manifest"
+      #command = "cmd /c $link_command && $manifest_command"
+      command = link_command
+
+      default_output_extension = ".dll"
+      description = "LINK(DLL) {{output}}"
+      outputs = [
+        dllname,
+        libname,
+      ]
+      link_output = libname
+      depend_output = libname
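+      # Both point at the import library: dependents link against the .lib
+      # rather than against the .dll itself.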
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+    }
+
+    tool("link") {
+      rspfile = "{{output}}.rsp"
+
+      link_command = "$python_path gyp-win-tool link-wrapper $env False link.exe /nologo /OUT:{{output}} /PDB:{{output}}.pdb @$rspfile"
+
+      # TODO(brettw) support manifests
+      #manifest_command = "$python_path gyp-win-tool manifest-wrapper $env mt.exe -nologo -manifest $manifests -out:{{output}}.manifest"
+      #command = "cmd /c $link_command && $manifest_command"
+      command = link_command
+
+      default_output_extension = ".exe"
+      description = "LINK {{output}}"
+      outputs = [
+        "{{root_out_dir}}/{{target_output_name}}{{output_extension}}",
+      ]
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}}"
+    }
+
+    tool("stamp") {
+      command = "$python_path gyp-win-tool stamp {{output}}"
+      description = "STAMP {{output}}"
+    }
+
+    tool("copy") {
+      command = "$python_path gyp-win-tool recursive-mirror {{source}} {{output}}"
+      description = "COPY {{source}} {{output}}"
+    }
+
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    toolchain_args() {
+      cpu_arch = invoker.cpu_arch
+
+      # Normally the build config resets the CPU architecture to 32-bits. Setting
+      # this flag overrides that behavior.
+      if (defined(invoker.force_win64)) {
+        force_win64 = invoker.force_win64
+      }
+    }
+  }
+}
+
+msvc_toolchain("32") {
+  environment = "environment.x86"
+  cpu_arch = "x64"
+}
+
+msvc_toolchain("64") {
+  environment = "environment.x64"
+  cpu_arch = "x64"
+  force_win64 = true
+}
diff --git a/build/toolchain/win/midl.gni b/build/toolchain/win/midl.gni
new file mode 100644
index 0000000..7f068d0
--- /dev/null
+++ b/build/toolchain/win/midl.gni
@@ -0,0 +1,100 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win)
+
+import("//build/config/win/visual_studio_version.gni")
+
+# This template defines a rule to invoke the MS IDL compiler.
+#
+# Parameters
+#
+#   sources
+#       List of .idl files to process.
+#
+#   out_dir (optional)
+#       Directory to write the generated files to. Defaults to target_gen_dir.
+#
+#   visibility (optional)
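+#
+# Example (hypothetical target):
+#
+#   midl("chrome_tab_idl") {
+#     sources = [ "chrome_tab.idl" ]
+#   }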
+
+template("midl") {
+  action_name = "${target_name}_idl_action"
+  source_set_name = target_name
+
+  assert(defined(invoker.sources), "sources must be defined for $target_name")
+
+  if (defined(invoker.out_dir)) {
+    out_dir = invoker.out_dir
+  } else {
+    out_dir = target_gen_dir
+  }
+
+  header_file = "{{source_name_part}}.h"
+  dlldata_file = "{{source_name_part}}.dlldata.c"
+  interface_identifier_file = "{{source_name_part}}_i.c"
+  proxy_file = "{{source_name_part}}_p.c"
+  type_library_file = "{{source_name_part}}.tlb"
+
+  action_foreach(action_name) {
+    visibility = [ ":$source_set_name" ]
+
+    # This functionality is handled by the win-tool because the GYP build has
+    # MIDL support built-in.
+    # TODO(brettw) move this to a separate MIDL wrapper script for better
+    # clarity once GYP support is not needed.
+    script = "$root_build_dir/gyp-win-tool"
+
+    sources = invoker.sources
+
+    # Note that .tlb is not included in the outputs as it is not always
+    # generated depending on the content of the input idl file.
+    outputs = [
+      "$out_dir/$header_file",
+      "$out_dir/$dlldata_file",
+      "$out_dir/$interface_identifier_file",
+      "$out_dir/$proxy_file",
+    ]
+
+    if (cpu_arch == "x86") {
+      win_tool_arch = "environment.x86"
+      idl_target_platform = "win32"
+    } else if (cpu_arch == "x64") {
+      win_tool_arch = "environment.x64"
+      idl_target_platform = "x64"
+    } else {
+      assert(false, "Need environment for this arch")
+    }
+
+    args = [
+      "midl-wrapper", win_tool_arch,
+      rebase_path(out_dir, root_build_dir),
+      type_library_file,
+      header_file,
+      dlldata_file,
+      interface_identifier_file,
+      proxy_file,
+      "{{source}}",
+      "/char", "signed",
+      "/env", idl_target_platform,
+      "/Oicf",
+    ]
+
+    foreach(include, system_include_dirs) {
+      args += [ "/I", include ]
+    }
+  }
+
+  source_set(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    # We only compile the IID files from the IDL tool rather than all outputs.
+    sources = process_file_template(
+        invoker.sources,
+        [ "$out_dir/$interface_identifier_file" ])
+
+    deps = [ ":$action_name" ]
+  }
+}
diff --git a/build/toolchain/win/setup_toolchain.py b/build/toolchain/win/setup_toolchain.py
new file mode 100644
index 0000000..5e292ab
--- /dev/null
+++ b/build/toolchain/win/setup_toolchain.py
@@ -0,0 +1,108 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import errno
+import os
+import re
+import subprocess
+import sys
+
+"""
+Copies the given "win tool" (which the toolchain uses to wrap compiler
+invocations) and the environment blocks for the 32-bit and 64-bit builds on
+Windows to the build directory.
+
+The arguments are the visual studio install location and the location of the
+win tool. The script assumes that the root build directory is the current dir
+and the files will be written to the current directory.
+"""
+
+
+def ExtractImportantEnvironment():
+  """Extracts environment variables required for the toolchain from the
+  current environment."""
+  envvars_to_save = (
+      'goma_.*',  # TODO(scottmg): This is ugly, but needed for goma.
+      'include',  # Needed by midl compiler.
+      'path',
+      'pathext',
+      'systemroot',
+      'temp',
+      'tmp',
+      )
+  result = {}
+  for envvar in envvars_to_save:
+    if envvar in os.environ:
+      envvar = envvar.lower()
+      if envvar == 'path':
+        # Our own rules (for running gyp-win-tool) and other actions in
+        # Chromium rely on python being in the path. Add the path to this
+        # python here so that if it's not in the path when ninja is run
+        # later, python will still be found.
+        result[envvar.upper()] = os.path.dirname(sys.executable) + \
+            os.pathsep + os.environ[envvar]
+      else:
+        result[envvar.upper()] = os.environ[envvar]
+  for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
+    if required not in result:
+      raise Exception('Environment variable "%s" '
+                      'required to be set to valid path' % required)
+  return result
+
+
+def FormatAsEnvironmentBlock(envvar_dict):
+  """Format as an 'environment block' directly suitable for CreateProcess.
+  Briefly this is a list of key=value\0, terminated by an additional \0. See
+  CreateProcess documentation for more details."""
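+  # For example (hypothetical input), {'TMP': 'C:\\t'} becomes 'TMP=C:\\t\0\0'.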
+  block = ''
+  nul = '\0'
+  for key, value in envvar_dict.iteritems():
+    block += key + '=' + value + nul
+  block += nul
+  return block
+
+
+def CopyTool(source_path):
+  """Copies the given tool to the current directory, including a warning not
+  to edit it."""
+  with open(source_path) as source_file:
+    tool_source = source_file.readlines()
+
+  # Add header and write it out to the current directory (which should be the
+  # root build dir).
+  with open("gyp-win-tool", 'w') as tool_file:
+    tool_file.write(''.join([tool_source[0],
+                             '# Generated by setup_toolchain.py do not edit.\n']
+                            + tool_source[1:]))
+
+if len(sys.argv) != 4:
+  print('Usage: setup_toolchain.py '
+        '<visual studio path> <win tool path> <win sdk path>')
+  sys.exit(2)
+vs_path = sys.argv[1]
+tool_source = sys.argv[2]
+win_sdk_path = sys.argv[3]
+
+CopyTool(tool_source)
+
+important_env_vars = ExtractImportantEnvironment()
+path = important_env_vars["PATH"].split(";")
+
+# Add 32-bit compiler path to the beginning and write the block.
+path32 = [os.path.join(vs_path, "VC\\BIN")] + \
+         [os.path.join(win_sdk_path, "bin\\x86")] + \
+         path
+important_env_vars["PATH"] = ";".join(path32)
+environ = FormatAsEnvironmentBlock(important_env_vars)
+with open('environment.x86', 'wb') as env_file:
+  env_file.write(environ)
+
+# Add 64-bit compiler path to the beginning and write the block.
+path64 = [os.path.join(vs_path, "VC\\BIN\\amd64")] + \
+         [os.path.join(win_sdk_path, "bin\\x64")] + \
+         path
+important_env_vars["PATH"] = ";".join(path64)
+environ = FormatAsEnvironmentBlock(important_env_vars)
+with open('environment.x64', 'wb') as env_file:
+  env_file.write(environ)
diff --git a/build/tree_truth.sh b/build/tree_truth.sh
new file mode 100755
index 0000000..03d0523
--- /dev/null
+++ b/build/tree_truth.sh
@@ -0,0 +1,101 @@
+#!/bin/bash
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Script for printing recent commits in a buildbot run.
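+#
+# Usage: tree_truth.sh <chrome src dir> <project1> [<project2> ...]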
+
+# Return the sha1 of the given tag.  If not present, return "".
+# $1: path to repo
+# $2: tag name
+tt_sha1_for_tag() {
+  oneline=$(cd $1 && git log -1 $2 --format='%H' 2>/dev/null)
+  if [ $? -eq 0 ] ; then
+    echo $oneline
+  fi
+}
+
+# Return the sha1 of HEAD, or ""
+# $1: path to repo
+tt_sha1_for_head() {
+  ( cd $1 && git log HEAD -n1 --format='%H' | cat )
+}
+
+# For the given repo, set tag to HEAD.
+# $1: path to repo
+# $2: tag name
+tt_tag_head() {
+  ( cd $1 && git tag -f $2 )
+}
+
+# For the given repo, delete the tag.
+# $1: path to repo
+# $2: tag name
+tt_delete_tag() {
+  ( cd $1 && git tag -d $2 )
+}
+
+# For the given repo, set tag to "three commits ago" (for testing).
+# $1: path to repo
+# $2: tag name
+tt_tag_three_ago() {
+  local sh=$(cd $1 && git log --pretty=oneline -n 3 | tail -1 | awk '{print $1}')
+  ( cd $1 && git tag -f $2 $sh )
+}
+
+# List the commits between the given tag and HEAD.
+# If the tag does not exist, only list the last few.
+# If the tag is at HEAD, list nothing.
+# Output format has distinct build steps for repos with changes.
+# $1: path to repo
+# $2: tag name
+# $3: simple/short repo name to use for display
+tt_list_commits() {
+  local tag_sha1=$(tt_sha1_for_tag $1 $2)
+  local head_sha1=$(tt_sha1_for_head $1)
+  local display_name=$(echo $3 | sed 's#/#_#g')
+  if [ "${tag_sha1}" = "${head_sha1}" ] ; then
+    return
+  fi
+  if [ "${tag_sha1}" = "" ] ; then
+    echo "@@@BUILD_STEP Recent commits in repo $display_name@@@"
+    echo "NOTE: git tag was not found so we have no baseline."
+    echo "Here are some recent commits, but they may not be new for this build."
+    ( cd $1 && git log -n 10 --stat | cat)
+  else
+    echo "@@@BUILD_STEP New commits in repo $display_name@@@"
+    ( cd $1 && git log -n 500 $2..HEAD --stat | cat)
+  fi
+}
+
+# Clean out the tree truth tags in all repos.  For testing.
+tt_clean_all() {
+  for project in $@; do
+    tt_delete_tag $CHROME_SRC/../$project tree_truth
+  done
+}
+
+# Print tree truth for all clank repos.
+tt_print_all() {
+  for project in $@; do
+    local full_path=$CHROME_SRC/../$project
+    tt_list_commits $full_path tree_truth $project
+    tt_tag_head $full_path tree_truth
+  done
+}
+
+# Print a summary of the last 10 commits for each repo.
+tt_brief_summary() {
+  echo "@@@BUILD_STEP Brief summary of recent CLs in every branch@@@"
+  for p in $@; do
+    echo $p
+    (cd $CHROME_SRC/../$p && git log -n 10 --format="   %H %s   %an, %ad" | cat)
+    echo "================================================================="
+  done
+}
+
+CHROME_SRC=$1
+shift
+PROJECT_LIST=$@
+tt_brief_summary $PROJECT_LIST
+tt_print_all $PROJECT_LIST
diff --git a/build/uiautomator_test.gypi b/build/uiautomator_test.gypi
new file mode 100644
index 0000000..0f341ab
--- /dev/null
+++ b/build/uiautomator_test.gypi
@@ -0,0 +1,36 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build uiautomator dexed tests jar.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'test_suite_name',
+#   'type': 'none',
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+
+{
+  'dependencies': [
+    '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+  ],
+  'variables': {
+    'output_dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
+  },
+  'actions': [
+    {
+      'action_name': 'dex_<(_target_name)',
+      'message': 'Dexing <(_target_name) jar',
+      'variables': {
+        'dex_input_paths': [
+          '>@(library_dexed_jars_paths)',
+        ],
+        'output_path': '<(output_dex_path)',
+      },
+      'includes': [ 'android/dex_action.gypi' ],
+    },
+  ],
+}
diff --git a/build/update-linux-sandbox.sh b/build/update-linux-sandbox.sh
new file mode 100755
index 0000000..735733a
--- /dev/null
+++ b/build/update-linux-sandbox.sh
@@ -0,0 +1,75 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
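+#
+# Copies the chrome_sandbox binary from the build output directory to
+# /usr/local/sbin/chrome-devel-sandbox with setuid root, refreshing the
+# installed copy when its --get-api value no longer matches the built one.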
+
+BUILDTYPE="${BUILDTYPE:-Debug}"
+CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}"
+CHROME_OUT_DIR="${CHROME_SRC_DIR}/${CHROMIUM_OUT_DIR:-out}/${BUILDTYPE}"
+CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox"
+CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox"
+CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH")
+
+TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null)
+if [ $? -ne 0 ]; then
+  echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}"
+  exit 1
+fi
+
+# Make sure the path is not on NFS.
+if [ "${TARGET_DIR_TYPE}" = "6969" ]; then
+  echo "Please make sure ${CHROME_SANDBOX_INST_PATH} is not on NFS!"
+  exit 1
+fi
+
+installsandbox() {
+  echo "(using sudo so you may be asked for your password)"
+  sudo -- cp "${CHROME_SANDBOX_BUILD_PATH}" \
+    "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chown root:root "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chmod 4755 "${CHROME_SANDBOX_INST_PATH}"
+  return $?
+}
+
+if [ ! -d "${CHROME_OUT_DIR}" ]; then
+  echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" "
+  echo "If you are building in Release mode"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then
+  echo -n "Could not find ${CHROME_SANDBOX_BUILD_PATH}, "
+  echo "please make sure you build the chrome_sandbox target"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, "
+  echo "installing it now."
+  installsandbox
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+  exit 1
+fi
+
+CURRENT_API=$("${CHROME_SANDBOX_BUILD_PATH}" --get-api)
+INSTALLED_API=$("${CHROME_SANDBOX_INST_PATH}" --get-api)
+
+if [ "${CURRENT_API}" != "${INSTALLED_API}" ]; then
+  echo "Your installed setuid sandbox is too old, installing it now."
+  if ! installsandbox; then
+    echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+    exit 1
+  fi
+else
+  echo "Your setuid sandbox is up to date"
+  if [ "${CHROME_DEVEL_SANDBOX}" != "${CHROME_SANDBOX_INST_PATH}" ]; then
+    echo -n "Make sure you have \"export "
+    echo -n "CHROME_DEVEL_SANDBOX=${CHROME_SANDBOX_INST_PATH}\" "
+    echo "somewhere in your .bashrc"
+    echo "This variable is currently: ${CHROME_DEVEL_SANDBOX:-empty}"
+  fi
+fi
diff --git a/build/util/BUILD.gn b/build/util/BUILD.gn
new file mode 100644
index 0000000..320c5ee
--- /dev/null
+++ b/build/util/BUILD.gn
@@ -0,0 +1,21 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+action("webkit_version") {
+  script = "version.py"
+
+  lastchange_file = "LASTCHANGE.blink"
+  # TODO(brettw) move from content to this directory.
+  template_file = "//content/webkit_version.h.in"
+  inputs = [ lastchange_file, template_file ]
+
+  output_file = "$root_gen_dir/webkit_version.h"
+  outputs = [ output_file ]
+
+  args = [
+    "-f", rebase_path(lastchange_file, root_build_dir),
+    rebase_path(template_file, root_build_dir),
+    rebase_path(output_file, root_build_dir),
+  ]
+}
diff --git a/build/util/lastchange.py b/build/util/lastchange.py
new file mode 100755
index 0000000..28a266d
--- /dev/null
+++ b/build/util/lastchange.py
@@ -0,0 +1,299 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+lastchange.py -- Chromium revision fetching utility.
+"""
+
+import re
+import optparse
+import os
+import subprocess
+import sys
+
+_GIT_SVN_ID_REGEX = re.compile(r'.*git-svn-id:\s*([^@]*)@([0-9]+)', re.DOTALL)
+
+class VersionInfo(object):
+  def __init__(self, url, revision):
+    self.url = url
+    self.revision = revision
+
+
+def FetchSVNRevision(directory, svn_url_regex):
+  """
+  Fetch the Subversion branch and revision for a given directory.
+
+  Errors are swallowed.
+
+  Returns:
+    A VersionInfo object or None on error.
+  """
+  try:
+    proc = subprocess.Popen(['svn', 'info'],
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            cwd=directory,
+                            shell=(sys.platform=='win32'))
+  except OSError:
+    # command is apparently either not installed or not executable.
+    return None
+  if not proc:
+    return None
+
+  attrs = {}
+  for line in proc.stdout:
+    line = line.strip()
+    if not line:
+      continue
+    key, val = line.split(': ', 1)
+    attrs[key] = val
+
+  try:
+    match = svn_url_regex.search(attrs['URL'])
+    if match:
+      url = match.group(2)
+    else:
+      url = ''
+    revision = attrs['Revision']
+  except KeyError:
+    return None
+
+  return VersionInfo(url, revision)
+
+
+def RunGitCommand(directory, command):
+  """
+  Launches git subcommand.
+
+  Errors are swallowed.
+
+  Returns:
+    A process object or None.
+  """
+  command = ['git'] + command
+  # Force shell usage under cygwin. This is a workaround for
+  # mysterious loss of cwd while invoking cygwin's git.
+  # We can't just pass shell=True to Popen, as under win32 this will
+  # cause CMD to be used, while we explicitly want a cygwin shell.
+  if sys.platform == 'cygwin':
+    command = ['sh', '-c', ' '.join(command)]
+  try:
+    proc = subprocess.Popen(command,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            cwd=directory,
+                            shell=(sys.platform=='win32'))
+    return proc
+  except OSError:
+    return None
+
+
+def FetchGitRevision(directory):
+  """
+  Fetch the Git hash for a given directory.
+
+  Errors are swallowed.
+
+  Returns:
+    A VersionInfo object or None on error.
+  """
+  hsh = ''
+  proc = RunGitCommand(directory, ['rev-parse', 'HEAD'])
+  if proc:
+    output = proc.communicate()[0].strip()
+    if proc.returncode == 0 and output:
+      hsh = output
+  if not hsh:
+    return None
+  pos = ''
+  proc = RunGitCommand(directory, ['show', '-s', '--format=%B', 'HEAD'])
+  if proc:
+    output = proc.communicate()[0]
+    if proc.returncode == 0 and output:
+      for line in reversed(output.splitlines()):
+        if line.startswith('Cr-Commit-Position:'):
+          pos = line.rsplit()[-1].strip()
+  if not pos:
+    return VersionInfo('git', hsh)
+  return VersionInfo('git', '%s-%s' % (hsh, pos))
+
+
+def FetchGitSVNURLAndRevision(directory, svn_url_regex):
+  """
+  Fetch the Subversion URL and revision through Git.
+
+  Errors are swallowed.
+
+  Returns:
+    A tuple containing the Subversion URL and revision.
+  """
+  proc = RunGitCommand(directory, ['log', '-1', '--format=%b'])
+  if proc:
+    output = proc.communicate()[0].strip()
+    if proc.returncode == 0 and output:
+      # Extract the latest SVN revision and the SVN URL.
+      # The target line is the last "git-svn-id: ..." line like this:
+      # git-svn-id: svn://svn.chromium.org/chrome/trunk/src@85528 0039d316....
+      match = _GIT_SVN_ID_REGEX.search(output)
+      if match:
+        revision = match.group(2)
+        url_match = svn_url_regex.search(match.group(1))
+        if url_match:
+          url = url_match.group(2)
+        else:
+          url = ''
+        return url, revision
+  return None, None
+
+
+def FetchGitSVNRevision(directory, svn_url_regex):
+  """
+  Fetch the Git-SVN identifier for the local tree.
+
+  Errors are swallowed.
+  """
+  url, revision = FetchGitSVNURLAndRevision(directory, svn_url_regex)
+  if url and revision:
+    return VersionInfo(url, revision)
+  return None
+
+
+def FetchVersionInfo(default_lastchange, directory=None,
+                     directory_regex_prior_to_src_url='chrome|blink|svn'):
+  """
+  Returns the last change (in the form of a branch, revision tuple) from some
+  appropriate revision control system.
+  """
+  svn_url_regex = re.compile(
+      r'.*/(' + directory_regex_prior_to_src_url + r')(/.*)')
+
+  version_info = (FetchSVNRevision(directory, svn_url_regex) or
+                  FetchGitSVNRevision(directory, svn_url_regex) or
+                  FetchGitRevision(directory))
+  if not version_info:
+    if default_lastchange and os.path.exists(default_lastchange):
+      revision = open(default_lastchange, 'r').read().strip()
+      version_info = VersionInfo(None, revision)
+    else:
+      version_info = VersionInfo(None, None)
+  return version_info
+
+def GetHeaderGuard(path):
+  """
+  Returns the header #define guard for the given file path.
+  This treats everything after the last instance of "src/" as being a
+  relevant part of the guard. If there is no "src/", then the entire path
+  is used.
+  """
+  src_index = path.rfind('src/')
+  if src_index != -1:
+    guard = path[src_index + 4:]
+  else:
+    guard = path
+  guard = guard.upper()
+  return guard.replace('/', '_').replace('.', '_').replace('\\', '_') + '_'
+
+def GetHeaderContents(path, define, version):
+  """
+  Returns what the contents of the header file should be to indicate the given
+  revision. Note that the #define is specified as a string, even though it's
+  currently always an SVN revision number, in case we need to move to git hashes.
+  """
+  header_guard = GetHeaderGuard(path)
+
+  header_contents = """/* Generated by lastchange.py, do not edit.*/
+
+#ifndef %(header_guard)s
+#define %(header_guard)s
+
+#define %(define)s "%(version)s"
+
+#endif  // %(header_guard)s
+"""
+  header_contents = header_contents % { 'header_guard': header_guard,
+                                        'define': define,
+                                        'version': version }
+  return header_contents
+
+def WriteIfChanged(file_name, contents):
+  """
+  Writes the specified contents to the specified file_name
+  iff the contents are different than the current contents.
+  """
+  try:
+    old_contents = open(file_name, 'r').read()
+  except EnvironmentError:
+    pass
+  else:
+    if contents == old_contents:
+      return
+    os.unlink(file_name)
+  open(file_name, 'w').write(contents)
+
+
+def main(argv=None):
+  if argv is None:
+    argv = sys.argv
+
+  parser = optparse.OptionParser(usage="lastchange.py [options]")
+  parser.add_option("-d", "--default-lastchange", metavar="FILE",
+                    help="Default last change input FILE.")
+  parser.add_option("-m", "--version-macro",
+                    help="Name of C #define when using --header. Defaults to " +
+                    "LAST_CHANGE.",
+                    default="LAST_CHANGE")
+  parser.add_option("-o", "--output", metavar="FILE",
+                    help="Write last change to FILE. " +
+                    "Can be combined with --header to write both files.")
+  parser.add_option("", "--header", metavar="FILE",
+                    help="Write last change to FILE as a C/C++ header. " +
+                    "Can be combined with --output to write both files.")
+  parser.add_option("--revision-only", action='store_true',
+                    help="Just print the SVN revision number. Overrides any " +
+                    "file-output-related options.")
+  parser.add_option("-s", "--source-dir", metavar="DIR",
+                    help="Use repository in the given directory.")
+  opts, args = parser.parse_args(argv[1:])
+
+  out_file = opts.output
+  header = opts.header
+
+  if args and out_file is None:
+    out_file = args.pop(0)
+  if args:
+    sys.stderr.write('Unexpected arguments: %r\n\n' % args)
+    parser.print_help()
+    sys.exit(2)
+
+  if opts.source_dir:
+    src_dir = opts.source_dir
+  else:
+    src_dir = os.path.dirname(os.path.abspath(__file__))
+
+  version_info = FetchVersionInfo(opts.default_lastchange, src_dir)
+
+  if version_info.revision is None:
+    version_info.revision = '0'
+
+  if opts.revision_only:
+    print version_info.revision
+  else:
+    contents = "LASTCHANGE=%s\n" % version_info.revision
+    if not out_file and not opts.header:
+      sys.stdout.write(contents)
+    else:
+      if out_file:
+        WriteIfChanged(out_file, contents)
+      if header:
+        WriteIfChanged(header,
+                       GetHeaderContents(header, opts.version_macro,
+                                         version_info.revision))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/util/lib/common/__init__.py b/build/util/lib/common/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/util/lib/common/__init__.py
diff --git a/build/util/lib/common/perf_result_data_type.py b/build/util/lib/common/perf_result_data_type.py
new file mode 100644
index 0000000..67b550a
--- /dev/null
+++ b/build/util/lib/common/perf_result_data_type.py
@@ -0,0 +1,20 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+DEFAULT = 'default'
+UNIMPORTANT = 'unimportant'
+HISTOGRAM = 'histogram'
+UNIMPORTANT_HISTOGRAM = 'unimportant-histogram'
+INFORMATIONAL = 'informational'
+
+ALL_TYPES = [DEFAULT, UNIMPORTANT, HISTOGRAM, UNIMPORTANT_HISTOGRAM,
+             INFORMATIONAL]
+
+
+def IsValidType(datatype):
+  return datatype in ALL_TYPES
+
+
+def IsHistogram(datatype):
+  return (datatype == HISTOGRAM or datatype == UNIMPORTANT_HISTOGRAM)
diff --git a/build/util/lib/common/perf_tests_results_helper.py b/build/util/lib/common/perf_tests_results_helper.py
new file mode 100644
index 0000000..6cb058b
--- /dev/null
+++ b/build/util/lib/common/perf_tests_results_helper.py
@@ -0,0 +1,166 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+import sys
+
+import json
+import logging
+import math
+
+import perf_result_data_type
+
+
+# Mapping from result type to test output
+RESULT_TYPES = {perf_result_data_type.UNIMPORTANT: 'RESULT ',
+                perf_result_data_type.DEFAULT: '*RESULT ',
+                perf_result_data_type.INFORMATIONAL: '',
+                perf_result_data_type.UNIMPORTANT_HISTOGRAM: 'HISTOGRAM ',
+                perf_result_data_type.HISTOGRAM: '*HISTOGRAM '}
+
+
+def _EscapePerfResult(s):
+  """Escapes |s| for use in a perf result."""
+  return re.sub('[\:|=/#&,]', '_', s)
+
+
+def FlattenList(values):
+  """Returns a simple list without sub-lists."""
+  ret = []
+  for entry in values:
+    if isinstance(entry, list):
+      ret.extend(FlattenList(entry))
+    else:
+      ret.append(entry)
+  return ret
+
+
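+# For a hypothetical histogram '{"buckets": [{"low": 1, "high": 3, "count": 2}]}'
+# the single bucket mean is 2.0, so the geometric mean is 2.0 and the standard
+# deviation is 0.0.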
+def GeomMeanAndStdDevFromHistogram(histogram_json):
+  histogram = json.loads(histogram_json)
+  # Handle empty histograms gracefully.
+  if not 'buckets' in histogram:
+    return 0.0, 0.0
+  count = 0
+  sum_of_logs = 0
+  for bucket in histogram['buckets']:
+    if 'high' in bucket:
+      bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0
+    else:
+      bucket['mean'] = bucket['low']
+    if bucket['mean'] > 0:
+      sum_of_logs += math.log(bucket['mean']) * bucket['count']
+      count += bucket['count']
+
+  if count == 0:
+    return 0.0, 0.0
+
+  sum_of_squares = 0
+  geom_mean = math.exp(sum_of_logs / count)
+  for bucket in histogram['buckets']:
+    if bucket['mean'] > 0:
+      sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count']
+  return geom_mean, math.sqrt(sum_of_squares / count)
+
+
+def _ValueToString(v):
+  # Special case for floats so we don't print using scientific notation.
+  if isinstance(v, float):
+    return '%f' % v
+  else:
+    return str(v)
+
+
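+# For example, [1, 2, 3] yields value '[1,2,3]', avg 2.0 and sd 1.0 (sample
+# standard deviation).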
+def _MeanAndStdDevFromList(values):
+  avg = None
+  sd = None
+  if len(values) > 1:
+    try:
+      value = '[%s]' % ','.join([_ValueToString(v) for v in values])
+      avg = sum([float(v) for v in values]) / len(values)
+      sqdiffs = [(float(v) - avg) ** 2 for v in values]
+      variance = sum(sqdiffs) / (len(values) - 1)
+      sd = math.sqrt(variance)
+    except ValueError:
+      value = ', '.join(values)
+  else:
+    value = values[0]
+  return value, avg, sd
+
+
+def PrintPages(page_list):
+  """Prints list of pages to stdout in the format required by perf tests."""
+  print 'Pages: [%s]' % ','.join([_EscapePerfResult(p) for p in page_list])
+
+
+def PrintPerfResult(measurement, trace, values, units,
+                    result_type=perf_result_data_type.DEFAULT,
+                    print_to_stdout=True):
+  """Prints numerical data to stdout in the format required by perf tests.
+
+  The string args may be empty but they must not contain any colons (:) or
+  equals signs (=).
+  This is parsed by the buildbot using:
+  http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/process_log_utils.py
+
+  Args:
+    measurement: A description of the quantity being measured, e.g. "vm_peak".
+        On the dashboard, this maps to a particular graph. Mandatory.
+    trace: A description of the particular data point, e.g. "reference".
+        On the dashboard, this maps to a particular "line" in the graph.
+        Mandatory.
+    values: A list of numeric measured values. An N-dimensional list will be
+        flattened and treated as a simple list.
+    units: A description of the units of measure, e.g. "bytes".
+    result_type: Accepts values of perf_result_data_type.ALL_TYPES.
+    print_to_stdout: If True, prints the output in stdout instead of returning
+        the output to caller.
+
+  Returns:
+    String of the formatted perf result.
+  """
+  assert perf_result_data_type.IsValidType(result_type), \
+         'result type: %s is invalid' % result_type
+
+  trace_name = _EscapePerfResult(trace)
+
+  if (result_type == perf_result_data_type.UNIMPORTANT or
+      result_type == perf_result_data_type.DEFAULT or
+      result_type == perf_result_data_type.INFORMATIONAL):
+    assert isinstance(values, list)
+    assert '/' not in measurement
+    flattened_values = FlattenList(values)
+    assert len(flattened_values)
+    value, avg, sd = _MeanAndStdDevFromList(flattened_values)
+    output = '%s%s: %s%s%s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        # Do not show the equal sign if the trace is empty. This usually
+        # happens when the measurement name alone is clear enough to describe
+        # the result.
+        '= ' if trace_name else '',
+        value,
+        units)
+  else:
+    assert perf_result_data_type.IsHistogram(result_type)
+    assert isinstance(values, list)
+    # The histograms can only be printed individually, there's no computation
+    # across different histograms.
+    assert len(values) == 1
+    value = values[0]
+    output = '%s%s: %s= %s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        value,
+        units)
+    avg, sd = GeomMeanAndStdDevFromHistogram(value)
+
+  if avg:
+    output += '\nAvg %s: %f%s' % (measurement, avg, units)
+  if sd:
+    output += '\nSd  %s: %f%s' % (measurement, sd, units)
+  if print_to_stdout:
+    print output
+    sys.stdout.flush()
+  return output
diff --git a/build/util/lib/common/unittest_util.py b/build/util/lib/common/unittest_util.py
new file mode 100644
index 0000000..e586224
--- /dev/null
+++ b/build/util/lib/common/unittest_util.py
@@ -0,0 +1,151 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for dealing with the python unittest module."""
+
+import fnmatch
+import sys
+import unittest
+
+
+class _TextTestResult(unittest._TextTestResult):
+  """A test result class that can print formatted text results to a stream.
+
+  Results printed in conformance with gtest output format, like:
+  [ RUN        ] autofill.AutofillTest.testAutofillInvalid: "test desc."
+  [         OK ] autofill.AutofillTest.testAutofillInvalid
+  [ RUN        ] autofill.AutofillTest.testFillProfile: "test desc."
+  [         OK ] autofill.AutofillTest.testFillProfile
+  [ RUN        ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test."
+  [         OK ] autofill.AutofillTest.testFillProfileCrazyCharacters
+  """
+  def __init__(self, stream, descriptions, verbosity):
+    unittest._TextTestResult.__init__(self, stream, descriptions, verbosity)
+    self._fails = set()
+
+  def _GetTestURI(self, test):
+    return '%s.%s.%s' % (test.__class__.__module__,
+                         test.__class__.__name__,
+                         test._testMethodName)
+
+  def getDescription(self, test):
+    return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())
+
+  def startTest(self, test):
+    unittest.TestResult.startTest(self, test)
+    self.stream.writeln('[ RUN        ] %s' % self.getDescription(test))
+
+  def addSuccess(self, test):
+    unittest.TestResult.addSuccess(self, test)
+    self.stream.writeln('[         OK ] %s' % self._GetTestURI(test))
+
+  def addError(self, test, err):
+    unittest.TestResult.addError(self, test, err)
+    self.stream.writeln('[      ERROR ] %s' % self._GetTestURI(test))
+    self._fails.add(self._GetTestURI(test))
+
+  def addFailure(self, test, err):
+    unittest.TestResult.addFailure(self, test, err)
+    self.stream.writeln('[     FAILED ] %s' % self._GetTestURI(test))
+    self._fails.add(self._GetTestURI(test))
+
+  def getRetestFilter(self):
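+    """Returns the failed tests joined with ':', i.e. a gtest_filter string.
+
+    Sketch of intended use (an assumption, not enforced here): the returned
+    string can be passed to FilterTestSuite to re-run only the failures.
+    """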
+    return ':'.join(self._fails)
+
+
+class TextTestRunner(unittest.TextTestRunner):
+  """Test Runner for displaying test results in textual format.
+
+  Results are displayed in conformance with Google Test output.
+  """
+
+  def __init__(self, verbosity=1):
+    unittest.TextTestRunner.__init__(self, stream=sys.stderr,
+                                     verbosity=verbosity)
+
+  def _makeResult(self):
+    return _TextTestResult(self.stream, self.descriptions, self.verbosity)
+
+
+def GetTestsFromSuite(suite):
+  """Returns all the tests from a given test suite."""
+  tests = []
+  for x in suite:
+    if isinstance(x, unittest.TestSuite):
+      tests += GetTestsFromSuite(x)
+    else:
+      tests += [x]
+  return tests
+
+
+def GetTestNamesFromSuite(suite):
+  """Returns a list of every test name in the given suite."""
+  return [GetTestName(x) for x in GetTestsFromSuite(suite)]
+
+
+def GetTestName(test):
+  """Gets the test name of the given unittest test."""
+  return '.'.join([test.__class__.__module__,
+                   test.__class__.__name__,
+                   test._testMethodName])
+
+
+def FilterTestSuite(suite, gtest_filter):
+  """Returns a new filtered tests suite based on the given gtest filter.
+
+  See http://code.google.com/p/googletest/wiki/AdvancedGuide
+  for gtest_filter specification.
+  """
+  return unittest.TestSuite(FilterTests(GetTestsFromSuite(suite), gtest_filter))
+
+
+def FilterTests(all_tests, gtest_filter):
+  """Filter a list of tests based on the given gtest filter.
+
+  Args:
+    all_tests: List of tests (unittest.TestCase objects).
+    gtest_filter: Filter to apply.
+
+  Returns:
+    Filtered subset of the given list of tests.
+  """
+  test_names = [GetTestName(test) for test in all_tests]
+  filtered_names = FilterTestNames(test_names, gtest_filter)
+  return [test for test in all_tests if GetTestName(test) in filtered_names]
+
+
+def FilterTestNames(all_tests, gtest_filter):
+  """Filter a list of test names based on the given gtest filter.
+
+  See http://code.google.com/p/googletest/wiki/AdvancedGuide
+  for gtest_filter specification.
+
+  Args:
+    all_tests: List of test names.
+    gtest_filter: Filter to apply.
+
+  Returns:
+    Filtered subset of the given list of test names.
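+
+  Example (illustrative):
+    FilterTestNames(['a.B.one', 'a.B.two'], 'a.B.*-a.B.two')
+    returns ['a.B.one'].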
+  """
+  pattern_groups = gtest_filter.split('-')
+  positive_patterns = pattern_groups[0].split(':')
+  negative_patterns = None
+  if len(pattern_groups) > 1:
+    negative_patterns = pattern_groups[1].split(':')
+
+  tests = []
+  for test in all_tests:
+    # Test name must be matched by at least one positive pattern.
+    for pattern in positive_patterns:
+      if fnmatch.fnmatch(test, pattern):
+        break
+    else:
+      continue
+    # Test name must not be matched by any negative patterns.
+    for pattern in negative_patterns or []:
+      if fnmatch.fnmatch(test, pattern):
+        break
+    else:
+      tests += [test]
+  return tests
diff --git a/build/util/lib/common/util.py b/build/util/lib/common/util.py
new file mode 100644
index 0000000..a415b1f
--- /dev/null
+++ b/build/util/lib/common/util.py
@@ -0,0 +1,151 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generic utilities for all python scripts."""
+
+import atexit
+import httplib
+import os
+import signal
+import socket
+import stat
+import subprocess
+import sys
+import tempfile
+import urlparse
+
+
+def GetPlatformName():
+  """Return a string to be used in paths for the platform."""
+  if IsWindows():
+    return 'win'
+  if IsMac():
+    return 'mac'
+  if IsLinux():
+    return 'linux'
+  raise NotImplementedError('Unknown platform "%s".' % sys.platform)
+
+
+def IsWindows():
+  return sys.platform == 'cygwin' or sys.platform.startswith('win')
+
+
+def IsLinux():
+  return sys.platform.startswith('linux')
+
+
+def IsMac():
+  return sys.platform.startswith('darwin')
+
+
+def _DeleteDir(path):
+  """Deletes a directory recursively, which must exist."""
+  # Don't use shutil.rmtree because it can't delete read-only files on Win.
+  for root, dirs, files in os.walk(path, topdown=False):
+    for name in files:
+      filename = os.path.join(root, name)
+      os.chmod(filename, stat.S_IWRITE)
+      os.remove(filename)
+    for name in dirs:
+      os.rmdir(os.path.join(root, name))
+  os.rmdir(path)
+
+
+def Delete(path):
+  """Deletes the given file or directory (recursively), which must exist."""
+  if os.path.isdir(path):
+    _DeleteDir(path)
+  else:
+    os.remove(path)
+
+
+def MaybeDelete(path):
+  """Deletes the given file or directory (recurisvely), if it exists."""
+  if os.path.exists(path):
+    Delete(path)
+
+
+def MakeTempDir(parent_dir=None):
+  """Creates a temporary directory and returns an absolute path to it.
+
+  The temporary directory is automatically deleted when the python interpreter
+  exits normally.
+
+  Args:
+    parent_dir: the directory to create the temp dir in. If None, the system
+                temp dir is used.
+
+  Returns:
+    The absolute path to the temporary directory.
+  """
+  path = tempfile.mkdtemp(dir=parent_dir)
+  atexit.register(MaybeDelete, path)
+  return path
+
+
+def Unzip(zip_path, output_dir):
+  """Unzips the given zip file using a system installed unzip tool.
+
+  Args:
+    zip_path: zip file to unzip.
+    output_dir: directory to unzip the contents of the zip file into. The
+                directory must exist.
+
+  Raises:
+    RuntimeError if the unzip operation fails.
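+
+  Note: on Windows this assumes 7-Zip is installed at its default location;
+  on other platforms the system 'unzip' must be on the PATH.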
+  """
+  if IsWindows():
+    unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y']
+  else:
+    unzip_cmd = ['unzip', '-o']
+  unzip_cmd += [zip_path]
+  if RunCommand(unzip_cmd, output_dir) != 0:
+    raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir))
+
+
+def Kill(pid):
+  """Terminate the given pid."""
+  if IsWindows():
+    subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
+  else:
+    os.kill(pid, signal.SIGTERM)
+
+
+def RunCommand(cmd, cwd=None):
+  """Runs the given command and returns the exit code.
+
+  Args:
+    cmd: list of command arguments.
+    cwd: working directory to execute the command, or None if the current
+         working directory should be used.
+
+  Returns:
+    The exit code of the command.
+  """
+  process = subprocess.Popen(cmd, cwd=cwd)
+  process.wait()
+  return process.returncode
+
+
+def DoesUrlExist(url):
+  """Determines whether a resource exists at the given URL.
+
+  Args:
+    url: URL to be verified.
+
+  Returns:
+    True if url exists, otherwise False.
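+
+  Note: the request is always made over plain HTTP via
+  httplib.HTTPConnection, even if the URL scheme is https.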
+  """
+  parsed = urlparse.urlparse(url)
+  try:
+    conn = httplib.HTTPConnection(parsed.netloc)
+    conn.request('HEAD', parsed.path)
+    response = conn.getresponse()
+  except (socket.gaierror, socket.error):
+    return False
+  finally:
+    conn.close()
+  # Follow both permanent (301) and temporary (302) redirects.
+  if response.status == 302 or response.status == 301:
+    return DoesUrlExist(response.getheader('location'))
+  return response.status == 200
diff --git a/build/util/version.gypi b/build/util/version.gypi
new file mode 100644
index 0000000..9d295b5
--- /dev/null
+++ b/build/util/version.gypi
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    'variables': {
+      'version_py_path': '<(DEPTH)/build/util/version.py',
+      'version_path': '<(DEPTH)/chrome/VERSION',
+    },
+    'version_py_path': '<(version_py_path)',
+    'version_path': '<(version_path)',
+    'version_full':
+        '<!(python <(version_py_path) -f <(version_path) -t "@MAJOR@.@MINOR@.@BUILD@.@PATCH@")',
+    'version_libchrome_short':
+        '<!(python <(version_py_path) -f <(version_path) -t "@BUILD@.@PATCH@")',
+    'version_mac_dylib':
+        '<!(python <(version_py_path) -f <(version_path) -t "@BUILD@.@PATCH_HI@.@PATCH_LO@" -e "PATCH_HI=int(PATCH)/256" -e "PATCH_LO=int(PATCH)%256")',
+  },  # variables
+}
diff --git a/build/util/version.py b/build/util/version.py
new file mode 100755
index 0000000..4d3691a
--- /dev/null
+++ b/build/util/version.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+version.py -- Chromium version string substitution utility.
+"""
+
+import argparse
+import os
+import sys
+
+
+def fetch_values_from_file(values_dict, file_name):
+  """
+  Fetches KEYWORD=VALUE settings from the specified file.
+
+  Everything to the left of the first '=' is the keyword,
+  everything to the right is the value.  No stripping of
+  white space, so beware.
+
+  The file must exist, otherwise you get the Python exception from open().
+  """
+  for line in open(file_name, 'r').readlines():
+    key, val = line.rstrip('\r\n').split('=', 1)
+    values_dict[key] = val
+
+
+def fetch_values(file_list):
+  """
+  Returns a dictionary of values to be used for substitution, populating
+  the dictionary with KEYWORD=VALUE settings from the files in 'file_list'.
+
+  Explicitly adds the following value from internal calculations:
+
+    OFFICIAL_BUILD
+  """
+  CHROME_BUILD_TYPE = os.environ.get('CHROME_BUILD_TYPE')
+  if CHROME_BUILD_TYPE == '_official':
+    official_build = '1'
+  else:
+    official_build = '0'
+
+  values = dict(
+    OFFICIAL_BUILD = official_build,
+  )
+
+  for file_name in file_list:
+    fetch_values_from_file(values, file_name)
+
+  return values
+
+
+def subst_template(contents, values):
+  """
+  Returns the template with substituted values from the specified dictionary.
+
+  Keywords to be substituted are surrounded by '@':  @KEYWORD@.
+
+  No attempt is made to avoid recursive substitution.  The order of
+  evaluation is arbitrary, following the order of the keywords in the
+  Python dictionary.  So do NOT substitute a value that contains any
+  @KEYWORD@ strings expecting them to be recursively substituted, okay?
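+
+  Example (illustrative):
+    subst_template('@MAJOR@.@MINOR@', {'MAJOR': '38', 'MINOR': '0'})
+    returns '38.0'.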
+  """
+  for key, val in values.iteritems():
+    try:
+      contents = contents.replace('@' + key + '@', val)
+    except TypeError:
+      print repr(key), repr(val)
+  return contents
+
+
+def subst_file(file_name, values):
+  """
+  Returns the contents of the specified file_name with substituted
+  values from the specified dictionary.
+
+  This is like subst_template, except it operates on a file.
+  """
+  template = open(file_name, 'r').read()
+  return subst_template(template, values)
+
+
+def write_if_changed(file_name, contents):
+  """
+  Writes the specified contents to the specified file_name
+  iff the contents are different than the current contents.
+  """
+  try:
+    old_contents = open(file_name, 'r').read()
+  except EnvironmentError:
+    pass
+  else:
+    if contents == old_contents:
+      return
+    os.unlink(file_name)
+  open(file_name, 'w').write(contents)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-f', '--file', action='append', default=[],
+                      help='Read variables from FILE.')
+  parser.add_argument('-i', '--input', default=None,
+                      help='Read strings to substitute from FILE.')
+  parser.add_argument('-o', '--output', default=None,
+                      help='Write substituted strings to FILE.')
+  parser.add_argument('-t', '--template', default=None,
+                      help='Use TEMPLATE as the strings to substitute.')
+  parser.add_argument('-e', '--eval', action='append', default=[],
+                      help='Evaluate VAL after reading variables. Can be used '
+                           'to synthesize variables, e.g. '
+                           '-e \'PATCH_HI=int(PATCH)/256\'.')
+  parser.add_argument('args', nargs=argparse.REMAINDER,
+                      help='For compatibility: INPUT and OUTPUT can be '
+                           'passed as positional arguments.')
+  options = parser.parse_args()
+
+  evals = {}
+  for expression in options.eval:
+    try:
+      evals.update(dict([expression.split('=', 1)]))
+    except ValueError:
+      parser.error('-e requires VAR=VAL')
+
+  # Compatibility with old versions that considered the first two positional
+  # arguments shorthands for --input and --output.
+  while len(options.args) and (options.input is None or
+                               options.output is None):
+    if options.input is None:
+      options.input = options.args.pop(0)
+    elif options.output is None:
+      options.output = options.args.pop(0)
+  if options.args:
+    parser.error('Unexpected arguments: %r' % options.args)
+
+  values = fetch_values(options.file)
+  for key, val in evals.iteritems():
+    values[key] = str(eval(val, globals(), values))
+
+  if options.template is not None:
+    contents = subst_template(options.template, values)
+  elif options.input:
+    contents = subst_file(options.input, values)
+  else:
+    # Generate a default set of version information.
+    contents = """MAJOR=%(MAJOR)s
+MINOR=%(MINOR)s
+BUILD=%(BUILD)s
+PATCH=%(PATCH)s
+LASTCHANGE=%(LASTCHANGE)s
+OFFICIAL_BUILD=%(OFFICIAL_BUILD)s
+""" % values
+
+  if options.output is not None:
+    write_if_changed(options.output, contents)
+  else:
+    print contents
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/vs_toolchain.py b/build/vs_toolchain.py
new file mode 100644
index 0000000..bdedd6f
--- /dev/null
+++ b/build/vs_toolchain.py
@@ -0,0 +1,201 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import pipes
+import shutil
+import subprocess
+import sys
+
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.insert(1, os.path.join(chrome_src, 'tools'))
+sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+json_data_file = os.path.join(script_dir, 'win_toolchain.json')
+
+
+import gyp
+
+
+def SetEnvironmentAndGetRuntimeDllDirs():
+  """Sets up os.environ to use the depot_tools VS toolchain with gyp, and
+  returns the location of the VS runtime DLLs so they can be copied into
+  the output directory after gyp generation.
+  """
+  vs2013_runtime_dll_dirs = None
+  depot_tools_win_toolchain = \
+      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+  if sys.platform in ('win32', 'cygwin') and depot_tools_win_toolchain:
+    if not os.path.exists(json_data_file):
+      Update()
+    with open(json_data_file, 'r') as tempf:
+      toolchain_data = json.load(tempf)
+
+    toolchain = toolchain_data['path']
+    version = toolchain_data['version']
+    version_is_pro = version[-1] != 'e'
+    win8sdk = toolchain_data['win8sdk']
+    wdk = toolchain_data['wdk']
+    # TODO(scottmg): The order unfortunately matters in these. They should be
+    # split into separate keys for x86 and x64. (See CopyVsRuntimeDlls call
+    # below). http://crbug.com/345992
+    vs2013_runtime_dll_dirs = toolchain_data['runtime_dirs']
+
+    os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
+    os.environ['GYP_MSVS_VERSION'] = version
+    # We need to make sure windows_sdk_path is set to the automated
+    # toolchain values in GYP_DEFINES, but don't want to override any
+    # other values there.
+    gyp_defines_dict = gyp.NameValueListToDict(gyp.ShlexEnv('GYP_DEFINES'))
+    gyp_defines_dict['windows_sdk_path'] = win8sdk
+    os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v)))
+        for k, v in gyp_defines_dict.iteritems())
+    os.environ['WINDOWSSDKDIR'] = win8sdk
+    os.environ['WDK_DIR'] = wdk
+    # Include the VS runtime in the PATH in case it's not machine-installed.
+    runtime_path = ';'.join(vs2013_runtime_dll_dirs)
+    os.environ['PATH'] = runtime_path + ';' + os.environ['PATH']
+  return vs2013_runtime_dll_dirs
+
+
+def CopyVsRuntimeDlls(output_dir, runtime_dirs):
+  """Copies the VS runtime DLLs from the given |runtime_dirs| to the output
+  directory so that even if not system-installed, built binaries are likely to
+  be able to run.
+
+  This needs to be run after gyp has been run so that the expected target
+  output directories are already created.
+  """
+  assert sys.platform.startswith(('win32', 'cygwin'))
+
+  def copy_runtime_impl(target, source):
+    """Copy |source| to |target| if it doesn't already exist or if it need to be
+    updated.
+    """
+    if (os.path.isdir(os.path.dirname(target)) and
+        (not os.path.isfile(target) or
+          os.stat(target).st_mtime != os.stat(source).st_mtime)):
+      print 'Copying %s to %s...' % (source, target)
+      if os.path.exists(target):
+        os.unlink(target)
+      shutil.copy2(source, target)
+
+  def copy_runtime(target_dir, source_dir, dll_pattern):
+    """Copy both the msvcr and msvcp runtime DLLs, only if the target doesn't
+    exist, but the target directory does exist."""
+    for which in ('p', 'r'):
+      dll = dll_pattern % which
+      target = os.path.join(target_dir, dll)
+      source = os.path.join(source_dir, dll)
+      copy_runtime_impl(target, source)
+
+  x86, x64 = runtime_dirs
+  out_debug = os.path.join(output_dir, 'Debug')
+  out_debug_nacl64 = os.path.join(output_dir, 'Debug', 'x64')
+  out_release = os.path.join(output_dir, 'Release')
+  out_release_nacl64 = os.path.join(output_dir, 'Release', 'x64')
+  out_debug_x64 = os.path.join(output_dir, 'Debug_x64')
+  out_release_x64 = os.path.join(output_dir, 'Release_x64')
+
+  if os.path.exists(out_debug) and not os.path.exists(out_debug_nacl64):
+    os.makedirs(out_debug_nacl64)
+  if os.path.exists(out_release) and not os.path.exists(out_release_nacl64):
+    os.makedirs(out_release_nacl64)
+  copy_runtime(out_debug,          x86, 'msvc%s120d.dll')
+  copy_runtime(out_release,        x86, 'msvc%s120.dll')
+  copy_runtime(out_debug_x64,      x64, 'msvc%s120d.dll')
+  copy_runtime(out_release_x64,    x64, 'msvc%s120.dll')
+  copy_runtime(out_debug_nacl64,   x64, 'msvc%s120d.dll')
+  copy_runtime(out_release_nacl64, x64, 'msvc%s120.dll')
+
+  # Copy the PGO runtime library to the release directories.
+  if os.environ.get('GYP_MSVS_OVERRIDE_PATH'):
+    pgo_x86_runtime_dir = os.path.join(os.environ.get('GYP_MSVS_OVERRIDE_PATH'),
+                                       'VC', 'bin')
+    pgo_x64_runtime_dir = os.path.join(pgo_x86_runtime_dir, 'amd64')
+    pgo_runtime_dll = 'pgort120.dll'
+    source_x86 = os.path.join(pgo_x86_runtime_dir, pgo_runtime_dll)
+    if os.path.exists(source_x86):
+      copy_runtime_impl(os.path.join(out_release, pgo_runtime_dll), source_x86)
+    source_x64 = os.path.join(pgo_x64_runtime_dir, pgo_runtime_dll)
+    if os.path.exists(source_x64):
+      copy_runtime_impl(os.path.join(out_release_x64, pgo_runtime_dll),
+                        source_x64)
+
+
+def _GetDesiredVsToolchainHashes():
+  """Load a list of SHA1s corresponding to the toolchains that we want installed
+  to build with."""
+  sha1path = os.path.join(script_dir,
+                          '..', 'buildtools', 'toolchain_vs2013.hash')
+  with open(sha1path, 'rb') as f:
+    return f.read().strip().splitlines()
+
+
+def Update():
+  """Requests an update of the toolchain to the specific hashes we have at
+  this revision. The update outputs a .json of the various configuration
+  information required to pass to gyp which we use in |GetToolchainDir()|.
+  """
+  depot_tools_win_toolchain = \
+      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+  if sys.platform in ('win32', 'cygwin') and depot_tools_win_toolchain:
+    import find_depot_tools
+    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
+    json_data_file = os.path.join(script_dir, 'win_toolchain.json')
+    get_toolchain_args = [
+        sys.executable,
+        os.path.join(depot_tools_path,
+                    'win_toolchain',
+                    'get_toolchain_if_necessary.py'),
+        '--output-json', json_data_file,
+      ] + _GetDesiredVsToolchainHashes()
+    subprocess.check_call(get_toolchain_args)
+
+  return 0
+
+
+def GetToolchainDir():
+  """Gets location information about the current toolchain (must have been
+  previously updated by 'update'). This is used for the GN build."""
+  SetEnvironmentAndGetRuntimeDllDirs()
+
+  # If WINDOWSSDKDIR is not set, search the default SDK path and set it.
+  if 'WINDOWSSDKDIR' not in os.environ:
+    default_sdk_path = 'C:\\Program Files (x86)\\Windows Kits\\8.0'
+    if os.path.isdir(default_sdk_path):
+      os.environ['WINDOWSSDKDIR'] = default_sdk_path
+
+  print '''vs_path = "%s"
+sdk_path = "%s"
+vs_version = "%s"
+wdk_dir = "%s"
+''' % (
+      os.environ['GYP_MSVS_OVERRIDE_PATH'],
+      os.environ['WINDOWSSDKDIR'],
+      os.environ['GYP_MSVS_VERSION'],
+      os.environ.get('WDK_DIR', ''))
+
+
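+# Usage sketch, matching the command table in main():
+#   python vs_toolchain.py update
+#   python vs_toolchain.py get_toolchain_dir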
+def main():
+  if not sys.platform.startswith(('win32', 'cygwin')):
+    return 0
+  commands = {
+      'update': Update,
+      'get_toolchain_dir': GetToolchainDir,
+      # TODO(scottmg): Add copy_dlls for GN builds (gyp_chromium calls
+      # CopyVsRuntimeDlls via import, currently).
+  }
+  if len(sys.argv) < 2 or sys.argv[1] not in commands:
+    print >>sys.stderr, 'Expected one of: %s' % ', '.join(commands)
+    return 1
+  return commands[sys.argv[1]]()
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/whitespace_file.txt b/build/whitespace_file.txt
new file mode 100644
index 0000000..f11fa16
--- /dev/null
+++ b/build/whitespace_file.txt
@@ -0,0 +1,145 @@
+Copyright 2014 The Chromium Authors. All rights reserved.
+Use of this useless file is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+
+This file is used for making non-code changes to trigger buildbot cycles. Make
+any modification below this line.
+
+=====================================================================
+
+Let's make a story. Add zero+ sentences for every commit:
+
+CHÄPTER 1:
+It was a dark and blinky night; the rain fell in torrents -- except at
+occasional intervals, when it was checked by a violent gust of wind which
+swept up the streets (for it is in London that our scene lies), rattling along
+the housetops, and fiercely agitating the scanty flame of the lamps that
+struggled against the elements. A hooded figure emerged.
+
+It was a Domo-Kun.
+
+"What took you so long?", inquired his wife.
+
+Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the
+waffles you brought him?" "You know him, he's not one to forego a waffle,
+no matter how burnt," he snickered.
+
+The pause was filled with the sound of compile errors.
+
+CHAPTER 2:
+The jelly was as dark as night, and just as runny.
+The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles
+with his fork, watching the runny jelly spread and pool across his plate,
+like the blood of a dying fawn. "It reminds me of that time --" he started, as
+his wife cut in quickly: "-- please. I can't bear to hear it." A flurry of
+images from the past flowed through his mind.
+
+"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly
+overhead, barely disturbing the thick cigarette smoke. No doubt was left about
+when the fan was last cleaned.
+
+There was a poignant pause.
+
+CHAPTER 3:
+Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he
+began feeling sick. He thought out loud to himself, "No, he wouldn't have done
+that to me." He considered that perhaps he shouldn't have pushed so hard.
+Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable
+horror that had occurred just the week before.
+
+Next time, there won't be any sushi. Why sushi with waffles anyway?  It's like
+adorning breakfast cereal with halibut -- shameful.
+
+CHAPTER 4:
+The taste of stale sushi in his mouth the next morning was unbearable. He
+wondered where the sushi came from as he attempted to wash the taste away with
+a bottle of 3000¥ sake. He tries to recall the cook's face.  Purple? Probably.
+
+CHAPTER 5:
+Many tears later, Mr. Usagi would laugh at the memory of the earnest,
+well-intentioned Domo-Kun. Another day in the life. That is when he realized that
+life goes on.
+
+TRUISMS (1978-1983)
+JENNY HOLZER
+A LITTLE KNOWLEDGE CAN GO A LONG WAY
+A LOT OF PROFESSIONALS ARE CRACKPOTS
+A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER
+A NAME MEANS A LOT JUST BY ITSELF
+A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD
+A RELAXED MAN IS NOT NECESSARILY A BETTER MAN
+NO ONE SHOULD EVER USE SVN
+AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS
+IT IS MANS FATE TO OUTSMART HIMSELF
+BEING SURE OF YOURSELF MEANS YOU'RE A FOOL
+AM NOT
+ARE TOO
+IF AT FIRST YOU DON'T SUCCEED: TRY, EXCEPT, FINALLY
+AND THEN, TIME LEAPT BACKWARDS
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAaaaaaaaaaaaaaaaaaaaaaaaaaaaahhhh LOT
+I'm really tempted to change something above the line.
+Reeccciiiipppppeeeeeesssssss!!!!!!!!!
+PEOPLE SAY "FAILURE IS NOT AN OPTION", BUT FAILURE IS ALWAYS AN OPTION.
+WHAT GOES UP MUST HAVE A NON-ZERO VELOCITY
+
+I can feel the heat closing in, feel them out there making their moves...
+What could possibly go wrong? We've already eaten our cake.
+
+Stand Still. Pause Clocks. We can make the World Stop.
+WUBWUBWUBWUBWUB
+
+I want a 1917 build and you will give me what I want.
+
+This sentence is false.
+
+Beauty is in the eyes of a Beholder.
+
+I'm the best at space.
+
+The first time Yossarian saw the chaplain, he fell madly in love with him.
+*
+*
+*
+Give not thyself up, then, to fire, lest it invert thee, deaden thee; as for
+the time it did me. There is a wisdom that is woe; but there is a woe that is
+madness. And there is a Catskill eagle in some souls that can alike dive down
+into the blackest gorges, and soar out of them again and become invisible in
+the sunny spaces. And even if he for ever flies within the gorge, that gorge
+is in the mountains; so that even in his lowest swoop the mountain eagle is
+still higher than other birds upon the plain, even though they soar.
+*
+*
+*
+
+I'm here to commit lines and drop rhymes
+*
+This is a line to test and try uploading a cl.
+
+And lo, in the year 2014, there was verily an attempt to upgrade to GCC 4.8 on
+the Android bots, and it was good. Except on one bot, where it was bad. And
+lo, the change was reverted, and GCC went back to 4.6, where code is slower
+and less optimized. And verily did it break the build, because artifacts had
+been created with 4.8, and alignment was no longer the same, and a great
+sadness descended upon the Android GN buildbot, and it did refuseth to build
+any more. But the sheriffs thought to themselves: Placebo! Let us clobber the
+bot, and perhaps it will rebuild with GCC 4.6, which hath worked for many many
+seasons. And so they modified the whitespace file with these immortal lines,
+and visited it upon the bots, that great destruction might be wrought upon
+their outdated binaries. In clobberus, veritas.
+
+As the git approaches, light begins to shine through the SCM thrice again...
+However, the git, is, after all, quite stupid.
+
+Suddenly Domo-Kun found itself in a room filled with dazzling mirrors.
+
+A herd of wild gits appears!  Time for CQ :D
+And one more for sizes.py...
+
+Sigh.
+
+It was love at first sight.  The moment Yossarian first laid eyes on the chaplain, he fell madly in love with him.
+
+Cool whitespace change for git-cl land
+
+Oh god the bots are red! I'm blind! Mmmm, donuts.
diff --git a/build/win/asan.gyp b/build/win/asan.gyp
new file mode 100644
index 0000000..9107db3
--- /dev/null
+++ b/build/win/asan.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+   'targets': [
+     {
+       'target_name': 'asan_dynamic_runtime',
+       'type': 'none',
+       'variables': {
+         # Every target is going to depend on asan_dynamic_runtime, so allow
+         # this one to depend on itself.
+         'prune_self_dependency': 1,
+       },
+       'conditions': [
+         ['OS=="win"', {
+           'copies': [
+             {
+               'destination': '<(PRODUCT_DIR)',
+               'files': [
+                 # Path is relative to this GYP file.
+                 '<(DEPTH)/<(make_clang_dir)/bin/clang_rt.asan_dynamic-i386.dll',
+               ],
+             },
+           ],
+         }],
+       ],
+     },
+   ],
+}
diff --git a/build/win/chrome_win.croc b/build/win/chrome_win.croc
new file mode 100644
index 0000000..e1e3bb7
--- /dev/null
+++ b/build/win/chrome_win.croc
@@ -0,0 +1,26 @@
+# -*- python -*-
+# Crocodile config file for Chromium windows
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include chromeos, posix, or linux specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|linux|posix)(\\.|_)',
+      'include' : 0,
+    },
+    # Don't include ChromeOS dirs
+    {
+      'regexp' : '.*/chromeos/',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_win\\.',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/win/compatibility.manifest b/build/win/compatibility.manifest
new file mode 100644
index 0000000..f7bc13e
--- /dev/null
+++ b/build/win/compatibility.manifest
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+    <application>
+      <!--The ID below indicates application support for Windows Vista -->
+      <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
+      <!--The ID below indicates application support for Windows 7 -->
+      <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+      <!--The ID below indicates application support for Windows 8 -->
+      <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
+      <!--The ID below indicates application support for Windows 8.1 -->
+      <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
+    </application>
+  </compatibility>
+</assembly>
diff --git a/build/win/importlibs/create_import_lib.gypi b/build/win/importlibs/create_import_lib.gypi
new file mode 100644
index 0000000..9cb0d34
--- /dev/null
+++ b/build/win/importlibs/create_import_lib.gypi
@@ -0,0 +1,53 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to create import libraries from an import description file in a consistent
+# manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'type': 'none',
+#   'sources': [
+#     'foo.imports',
+#     'bar.imports',
+#   ],
+#   'variables': {
+#     # Optional, see below: 'proto_in_dir': '.'
+#     'create_importlib': 'path-to-script',
+#     'lib_dir': 'path-to-output-directory',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# This will generate import libraries named 'foo.lib' and 'bar.lib' in the
+# specified lib directory.
+
+{
+  'variables': {
+    'create_importlib': '<(DEPTH)/build/win/importlibs/create_importlib_win.py',
+    'lib_dir': '<(PRODUCT_DIR)/lib',
+  },
+  'rules': [
+    {
+      'rule_name': 'create_import_lib',
+      'extension': 'imports',
+      'inputs': [
+        '<(create_importlib)',
+      ],
+      'outputs': [
+        '<(lib_dir)/<(RULE_INPUT_ROOT).lib',
+      ],
+      'action': [
+        'python',
+        '<(create_importlib)',
+        '--output-file', '<@(_outputs)',
+        '<(RULE_INPUT_PATH)',
+      ],
+      'message': 'Generating import library from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 0,
+    },
+  ],
+}
diff --git a/build/win/importlibs/create_importlib_win.py b/build/win/importlibs/create_importlib_win.py
new file mode 100755
index 0000000..bb6a2f0
--- /dev/null
+++ b/build/win/importlibs/create_importlib_win.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+"""Creates an import library from an import description file."""
+import ast
+import logging
+import optparse
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+_USAGE = """\
+Usage: %prog [options] [imports-file]
+
+Creates an import library from imports-file.
+
+Note: this script uses the Microsoft assembler (ml.exe) and the library tool
+    (lib.exe), both of which must be on the PATH.
+"""
+
+
+_ASM_STUB_HEADER = """\
+; This file is autogenerated by create_importlib_win.py, do not edit.
+.386
+.MODEL FLAT, C
+.CODE
+
+; Stubs to provide mangled names to lib.exe for the
+; correct generation of import libs.
+"""
+
+
+_DEF_STUB_HEADER = """\
+; This file is autogenerated by create_importlib_win.py, do not edit.
+
+; Export declarations for generating import libs.
+"""
+
+
+_LOGGER = logging.getLogger()
+
+
+
+class _Error(Exception):
+  pass
+
+
+class _ImportLibraryGenerator(object):
+  def __init__(self, temp_dir):
+    self._temp_dir = temp_dir
+
+  def _Shell(self, cmd, **kw):
+    ret = subprocess.call(cmd, **kw)
+    _LOGGER.info('Running "%s" returned %d.', cmd, ret)
+    if ret != 0:
+      raise _Error('Command "%s" returned %d.' % (cmd, ret))
+
+  def _ReadImportsFile(self, imports_file):
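+    """Reads imports_file and evaluates its contents as a Python literal.
+
+    Expected shape, matching the .imports files in this directory, e.g.:
+      {'architecture': 'x86', 'dll_name': 'user32.dll', 'imports': [...]}
+    """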
+    # Slurp the imports file.
+    return ast.literal_eval(open(imports_file).read())
+
+  def _WriteStubsFile(self, import_names, output_file):
+    output_file.write(_ASM_STUB_HEADER)
+
+    for name in import_names:
+      output_file.write('%s PROC\n' % name)
+      output_file.write('%s ENDP\n' % name)
+
+    output_file.write('END\n')
+
+  def _WriteDefFile(self, dll_name, import_names, output_file):
+    output_file.write(_DEF_STUB_HEADER)
+    output_file.write('NAME %s\n' % dll_name)
+    output_file.write('EXPORTS\n')
+    for name in import_names:
+      name = name.split('@')[0]
+      output_file.write('  %s\n' % name)
+
+  def _CreateObj(self, dll_name, imports):
+    """Writes an assembly file containing empty declarations.
+
+    For each imported function of the form:
+
+    AddClipboardFormatListener@4 PROC
+    AddClipboardFormatListener@4 ENDP
+
+    The resulting object file is then supplied to lib.exe with a .def file
+    declaring the corresponding non-adorned exports as they appear on the
+    exporting DLL, e.g.
+
+    EXPORTS
+      AddClipboardFormatListener
+
+    In combination, the .def file and the .obj file cause lib.exe to generate
+    an x86 import lib with public symbols named like
+    "__imp__AddClipboardFormatListener@4", binding to exports named like
+    "AddClipboardFormatListener".
+
+    All of this is perpetrated in a temporary directory, as the intermediate
+    artifacts are quick and easy to produce, and of no interest to anyone
+    after the fact."""
+
+    # Create an .asm file to provide stdcall-like stub names to lib.exe.
+    asm_name = dll_name + '.asm'
+    _LOGGER.info('Writing asm file "%s".', asm_name)
+    with open(os.path.join(self._temp_dir, asm_name), 'wb') as stubs_file:
+      self._WriteStubsFile(imports, stubs_file)
+
+    # Invoke the assembler to compile the stubs to .obj.
+    obj_name = dll_name + '.obj'
+    cmdline = ['ml.exe', '/nologo', '/c', asm_name, '/Fo', obj_name]
+    self._Shell(cmdline, cwd=self._temp_dir, stdout=open(os.devnull, 'w'))
+
+    return obj_name
+
+  def _CreateImportLib(self, dll_name, imports, architecture, output_file):
+    """Creates an import lib binding imports to dll_name for architecture.
+
+    On success, writes the import library to output file.
+    """
+    obj_file = None
+
+    # For x86 architecture we have to provide an object file for correct
+    # name mangling between the import stubs and the exported functions.
+    if architecture == 'x86':
+      obj_file = self._CreateObj(dll_name, imports)
+
+    # Create the corresponding .def file. This file has the names without
+    # stdcall adornment, as exported by the destination DLL.
+    def_name = dll_name + '.def'
+    _LOGGER.info('Writing def file "%s".', def_name)
+    with open(os.path.join(self._temp_dir, def_name), 'wb') as def_file:
+      self._WriteDefFile(dll_name, imports, def_file)
+
+    # Invoke lib.exe to create the import library.
+    # We generate everything into the temporary directory, as the .exp export
+    # files will be generated at the same path as the import library, and we
+    # don't want those files potentially gunking the works.
+    dll_base_name, ext = os.path.splitext(dll_name)
+    lib_name = dll_base_name + '.lib'
+    cmdline = ['lib.exe',
+               '/machine:%s' % architecture,
+               '/def:%s' % def_name,
+               '/out:%s' % lib_name]
+    if obj_file:
+      cmdline.append(obj_file)
+
+    self._Shell(cmdline, cwd=self._temp_dir, stdout=open(os.devnull, 'w'))
+
+    # Copy the .lib file to the output directory.
+    shutil.copyfile(os.path.join(self._temp_dir, lib_name), output_file)
+    _LOGGER.info('Created "%s".', output_file)
+
+  def CreateImportLib(self, imports_file, output_file):
+    # Read the imports file.
+    imports = self._ReadImportsFile(imports_file)
+
+    # Create the requested import library at the output file path.
+    self._CreateImportLib(imports['dll_name'],
+                          imports['imports'],
+                          imports.get('architecture', 'x86'),
+                          output_file)
+
+
+def main():
+  parser = optparse.OptionParser(usage=_USAGE)
+  parser.add_option('-o', '--output-file',
+                    help='Specifies the output file path.')
+  parser.add_option('-k', '--keep-temp-dir',
+                    action='store_true',
+                    help='Keep the temporary directory.')
+  parser.add_option('-v', '--verbose',
+                    action='store_true',
+                    help='Verbose logging.')
+
+  options, args = parser.parse_args()
+
+  if len(args) != 1:
+    parser.error('You must provide an imports file.')
+
+  if not options.output_file:
+    parser.error('You must provide an output file.')
+
+  options.output_file = os.path.abspath(options.output_file)
+
+  if options.verbose:
+    logging.basicConfig(level=logging.INFO)
+  else:
+    logging.basicConfig(level=logging.WARN)
+
+
+  temp_dir = tempfile.mkdtemp()
+  _LOGGER.info('Created temporary directory "%s."', temp_dir)
+  try:
+    # Create a generator and create the import lib.
+    generator = _ImportLibraryGenerator(temp_dir)
+
+    ret = generator.CreateImportLib(args[0], options.output_file)
+  except Exception, e:
+    _LOGGER.exception('Failed to create import lib.')
+    ret = 1
+  finally:
+    if not options.keep_temp_dir:
+      shutil.rmtree(temp_dir)
+      _LOGGER.info('Deleted temporary directory "%s."', temp_dir)
+
+  return ret
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/win/importlibs/filter_export_list.py b/build/win/importlibs/filter_export_list.py
new file mode 100755
index 0000000..c2489a9
--- /dev/null
+++ b/build/win/importlibs/filter_export_list.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+"""Help maintaining DLL import lists."""
+import ast
+import optparse
+import re
+import sys
+
+
+_EXPORT_RE = re.compile(r"""
+  ^\s*(?P<ordinal>[0-9]+)  # The ordinal field.
+  \s+(?P<hint>[0-9A-F]+)   # The hint field.
+  \s(?P<rva>........)      # The RVA field.
+  \s+(?P<name>[^ ]+)       # And finally the name we're really after.
+""", re.VERBOSE)
+
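+# Illustrative `dumpbin /exports` data line that _EXPORT_RE matches (real
+# output wraps such lines in a header and footer):
+#           1    0 0001F2A3 ActivateKeyboardLayout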
+
+_USAGE = r"""\
+Usage: %prog [options] [master-file]
+
+This script filters a list of exports from a DLL, generated from something
+like the following command line:
+
+C:\> dumpbin /exports user32.dll
+
+against a master list of imports built from e.g.
+
+C:\> dumpbin /exports user32.lib
+
+The point of this is to trim non-public exports from the list, and to
+normalize the names to their stdcall-mangled form for the generation of
+import libraries.
+Note that the export names from the latter incantation are stdcall-mangled,
+e.g. they are suffixed with "@" and the number of argument bytes to the
+function.
+"""
+
+def _ReadMasterFile(master_file):
+  # Slurp the master file.
+  with open(master_file) as f:
+    master_exports = ast.literal_eval(f.read())
+
+  master_mapping = {}
+  for export in master_exports:
+    name = export.split('@')[0]
+    master_mapping[name] = export
+
+  return master_mapping
+
+
+def main():
+  parser = optparse.OptionParser(usage=_USAGE)
+  parser.add_option('-r', '--reverse',
+                    action='store_true',
+                    help='Reverse the matching, e.g. return the functions '
+                         'in the master list that aren\'t in the input.')
+
+  options, args = parser.parse_args()
+  if len(args) != 1:
+    parser.error('Must provide a master file.')
+
+  master_mapping = _ReadMasterFile(args[0])
+
+  found_exports = []
+  for line in sys.stdin:
+    match = _EXPORT_RE.match(line)
+    if match:
+      export_name = master_mapping.get(match.group('name'), None)
+      if export_name:
+        found_exports.append(export_name)
+
+  if options.reverse:
+    # Invert the found_exports list.
+    found_exports = set(master_mapping.values()) - set(found_exports)
+
+  # Sort the found exports for tidy output.
+  print '\n'.join(sorted(found_exports))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/win/importlibs/x86/user32.winxp.imports b/build/win/importlibs/x86/user32.winxp.imports
new file mode 100644
index 0000000..24403a8
--- /dev/null
+++ b/build/win/importlibs/x86/user32.winxp.imports
@@ -0,0 +1,670 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This file is used to create a custom import library for Chrome's use of
+# user32.dll exports. The set of exports defined below is deliberately
+# restricted; see the notes above the 'imports' list for the rationale.
+{
+  'architecture': 'x86',
+
+  # The DLL to bind to.
+  'dll_name': 'user32.dll',
+
+  # Name of the generated import library.
+  'importlib_name': 'user32.winxp.lib',
+
+  # This is the set of exports observed on a user32.dll from Windows XP SP2.
+  # The version of the DLL where these were observed is 5.1.2600.2180.
+  # Incidentally this set of exports also coincides with Windows XP SP3, where
+  # the version of the DLL is 5.1.2600.5512.
+  # Don't add new imports here unless and until the minimal supported
+  # Windows version has been bumped past Windows XP SP2+.
+  'imports': [
+    'ActivateKeyboardLayout@8',
+    'AdjustWindowRect@12',
+    'AdjustWindowRectEx@16',
+    'AllowSetForegroundWindow@4',
+    'AnimateWindow@12',
+    'AnyPopup@0',
+    'AppendMenuA@16',
+    'AppendMenuW@16',
+    'ArrangeIconicWindows@4',
+    'AttachThreadInput@12',
+    'BeginDeferWindowPos@4',
+    'BeginPaint@8',
+    'BlockInput@4',
+    'BringWindowToTop@4',
+    'BroadcastSystemMessage@20',
+    'BroadcastSystemMessageA@20',
+    'BroadcastSystemMessageExA@24',
+    'BroadcastSystemMessageExW@24',
+    'BroadcastSystemMessageW@20',
+    'CallMsgFilter@8',
+    'CallMsgFilterA@8',
+    'CallMsgFilterW@8',
+    'CallNextHookEx@16',
+    'CallWindowProcA@20',
+    'CallWindowProcW@20',
+    'CascadeChildWindows@8',
+    'CascadeWindows@20',
+    'ChangeClipboardChain@8',
+    'ChangeDisplaySettingsA@8',
+    'ChangeDisplaySettingsExA@20',
+    'ChangeDisplaySettingsExW@20',
+    'ChangeDisplaySettingsW@8',
+    'ChangeMenuA@20',
+    'ChangeMenuW@20',
+    'CharLowerA@4',
+    'CharLowerBuffA@8',
+    'CharLowerBuffW@8',
+    'CharLowerW@4',
+    'CharNextA@4',
+    'CharNextExA@12',
+    'CharNextW@4',
+    'CharPrevA@8',
+    'CharPrevExA@16',
+    'CharPrevW@8',
+    'CharToOemA@8',
+    'CharToOemBuffA@12',
+    'CharToOemBuffW@12',
+    'CharToOemW@8',
+    'CharUpperA@4',
+    'CharUpperBuffA@8',
+    'CharUpperBuffW@8',
+    'CharUpperW@4',
+    'CheckDlgButton@12',
+    'CheckMenuItem@12',
+    'CheckMenuRadioItem@20',
+    'CheckRadioButton@16',
+    'ChildWindowFromPoint@12',
+    'ChildWindowFromPointEx@16',
+    'ClientToScreen@8',
+    'ClipCursor@4',
+    'CloseClipboard@0',
+    'CloseDesktop@4',
+    'CloseWindow@4',
+    'CloseWindowStation@4',
+    'CopyAcceleratorTableA@12',
+    'CopyAcceleratorTableW@12',
+    'CopyIcon@4',
+    'CopyImage@20',
+    'CopyRect@8',
+    'CountClipboardFormats@0',
+    'CreateAcceleratorTableA@8',
+    'CreateAcceleratorTableW@8',
+    'CreateCaret@16',
+    'CreateCursor@28',
+    'CreateDesktopA@24',
+    'CreateDesktopW@24',
+    'CreateDialogIndirectParamA@20',
+    'CreateDialogIndirectParamW@20',
+    'CreateDialogParamA@20',
+    'CreateDialogParamW@20',
+    'CreateIcon@28',
+    'CreateIconFromResource@16',
+    'CreateIconFromResourceEx@28',
+    'CreateIconIndirect@4',
+    'CreateMDIWindowA@40',
+    'CreateMDIWindowW@40',
+    'CreateMenu@0',
+    'CreatePopupMenu@0',
+    'CreateWindowExA@48',
+    'CreateWindowExW@48',
+    'CreateWindowStationA@16',
+    'CreateWindowStationW@16',
+    'DdeAbandonTransaction@12',
+    'DdeAccessData@8',
+    'DdeAddData@16',
+    'DdeClientTransaction@32',
+    'DdeCmpStringHandles@8',
+    'DdeConnect@16',
+    'DdeConnectList@20',
+    'DdeCreateDataHandle@28',
+    'DdeCreateStringHandleA@12',
+    'DdeCreateStringHandleW@12',
+    'DdeDisconnect@4',
+    'DdeDisconnectList@4',
+    'DdeEnableCallback@12',
+    'DdeFreeDataHandle@4',
+    'DdeFreeStringHandle@8',
+    'DdeGetData@16',
+    'DdeGetLastError@4',
+    'DdeImpersonateClient@4',
+    'DdeInitializeA@16',
+    'DdeInitializeW@16',
+    'DdeKeepStringHandle@8',
+    'DdeNameService@16',
+    'DdePostAdvise@12',
+    'DdeQueryConvInfo@12',
+    'DdeQueryNextServer@8',
+    'DdeQueryStringA@20',
+    'DdeQueryStringW@20',
+    'DdeReconnect@4',
+    'DdeSetQualityOfService@12',
+    'DdeSetUserHandle@12',
+    'DdeUnaccessData@4',
+    'DdeUninitialize@4',
+    'DefDlgProcA@16',
+    'DefDlgProcW@16',
+    'DefFrameProcA@20',
+    'DefFrameProcW@20',
+    'DefMDIChildProcA@16',
+    'DefMDIChildProcW@16',
+    'DefRawInputProc@12',
+    'DefWindowProcA@16',
+    'DefWindowProcW@16',
+    'DeferWindowPos@32',
+    'DeleteMenu@12',
+    'DeregisterShellHookWindow@4',
+    'DestroyAcceleratorTable@4',
+    'DestroyCaret@0',
+    'DestroyCursor@4',
+    'DestroyIcon@4',
+    'DestroyMenu@4',
+    'DestroyWindow@4',
+    'DialogBoxIndirectParamA@20',
+    'DialogBoxIndirectParamW@20',
+    'DialogBoxParamA@20',
+    'DialogBoxParamW@20',
+    'DisableProcessWindowsGhosting@0',
+    'DispatchMessageA@4',
+    'DispatchMessageW@4',
+    'DlgDirListA@20',
+    'DlgDirListComboBoxA@20',
+    'DlgDirListComboBoxW@20',
+    'DlgDirListW@20',
+    'DlgDirSelectComboBoxExA@16',
+    'DlgDirSelectComboBoxExW@16',
+    'DlgDirSelectExA@16',
+    'DlgDirSelectExW@16',
+    'DragDetect@12',
+    'DragObject@20',
+    'DrawAnimatedRects@16',
+    'DrawCaption@16',
+    'DrawEdge@16',
+    'DrawFocusRect@8',
+    'DrawFrame@16',
+    'DrawFrameControl@16',
+    'DrawIcon@16',
+    'DrawIconEx@36',
+    'DrawMenuBar@4',
+    'DrawStateA@40',
+    'DrawStateW@40',
+    'DrawTextA@20',
+    'DrawTextExA@24',
+    'DrawTextExW@24',
+    'DrawTextW@20',
+    'EditWndProc@16',
+    'EmptyClipboard@0',
+    'EnableMenuItem@12',
+    'EnableScrollBar@12',
+    'EnableWindow@8',
+    'EndDeferWindowPos@4',
+    'EndDialog@8',
+    'EndMenu@0',
+    'EndPaint@8',
+    'EndTask@12',
+    'EnumChildWindows@12',
+    'EnumClipboardFormats@4',
+    'EnumDesktopWindows@12',
+    'EnumDesktopsA@12',
+    'EnumDesktopsW@12',
+    'EnumDisplayDevicesA@16',
+    'EnumDisplayDevicesW@16',
+    'EnumDisplayMonitors@16',
+    'EnumDisplaySettingsA@12',
+    'EnumDisplaySettingsExA@16',
+    'EnumDisplaySettingsExW@16',
+    'EnumDisplaySettingsW@12',
+    'EnumPropsA@8',
+    'EnumPropsExA@12',
+    'EnumPropsExW@12',
+    'EnumPropsW@8',
+    'EnumThreadWindows@12',
+    'EnumWindowStationsA@8',
+    'EnumWindowStationsW@8',
+    'EnumWindows@8',
+    'EqualRect@8',
+    'ExcludeUpdateRgn@8',
+    'ExitWindowsEx@8',
+    'FillRect@12',
+    'FindWindowA@8',
+    'FindWindowExA@16',
+    'FindWindowExW@16',
+    'FindWindowW@8',
+    'FlashWindow@8',
+    'FlashWindowEx@4',
+    'FrameRect@12',
+    'FreeDDElParam@8',
+    'GetActiveWindow@0',
+    'GetAltTabInfo@20',
+    'GetAltTabInfoA@20',
+    'GetAltTabInfoW@20',
+    'GetAncestor@8',
+    'GetAsyncKeyState@4',
+    'GetCapture@0',
+    'GetCaretBlinkTime@0',
+    'GetCaretPos@4',
+    'GetClassInfoA@12',
+    'GetClassInfoExA@12',
+    'GetClassInfoExW@12',
+    'GetClassInfoW@12',
+    'GetClassLongA@8',
+    'GetClassLongW@8',
+    'GetClassNameA@12',
+    'GetClassNameW@12',
+    'GetClassWord@8',
+    'GetClientRect@8',
+    'GetClipCursor@4',
+    'GetClipboardData@4',
+    'GetClipboardFormatNameA@12',
+    'GetClipboardFormatNameW@12',
+    'GetClipboardOwner@0',
+    'GetClipboardSequenceNumber@0',
+    'GetClipboardViewer@0',
+    'GetComboBoxInfo@8',
+    'GetCursor@0',
+    'GetCursorInfo@4',
+    'GetCursorPos@4',
+    'GetDC@4',
+    'GetDCEx@12',
+    'GetDesktopWindow@0',
+    'GetDialogBaseUnits@0',
+    'GetDlgCtrlID@4',
+    'GetDlgItem@8',
+    'GetDlgItemInt@16',
+    'GetDlgItemTextA@16',
+    'GetDlgItemTextW@16',
+    'GetDoubleClickTime@0',
+    'GetFocus@0',
+    'GetForegroundWindow@0',
+    'GetGUIThreadInfo@8',
+    'GetGuiResources@8',
+    'GetIconInfo@8',
+    'GetInputDesktop@0',
+    'GetInputState@0',
+    'GetKBCodePage@0',
+    'GetKeyNameTextA@12',
+    'GetKeyNameTextW@12',
+    'GetKeyState@4',
+    'GetKeyboardLayout@4',
+    'GetKeyboardLayoutList@8',
+    'GetKeyboardLayoutNameA@4',
+    'GetKeyboardLayoutNameW@4',
+    'GetKeyboardState@4',
+    'GetKeyboardType@4',
+    'GetLastActivePopup@4',
+    'GetLastInputInfo@4',
+    'GetLayeredWindowAttributes@16',
+    'GetListBoxInfo@4',
+    'GetMenu@4',
+    'GetMenuBarInfo@16',
+    'GetMenuCheckMarkDimensions@0',
+    'GetMenuContextHelpId@4',
+    'GetMenuDefaultItem@12',
+    'GetMenuInfo@8',
+    'GetMenuItemCount@4',
+    'GetMenuItemID@8',
+    'GetMenuItemInfoA@16',
+    'GetMenuItemInfoW@16',
+    'GetMenuItemRect@16',
+    'GetMenuState@12',
+    'GetMenuStringA@20',
+    'GetMenuStringW@20',
+    'GetMessageA@16',
+    'GetMessageExtraInfo@0',
+    'GetMessagePos@0',
+    'GetMessageTime@0',
+    'GetMessageW@16',
+    'GetMonitorInfoA@8',
+    'GetMonitorInfoW@8',
+    'GetMouseMovePointsEx@20',
+    'GetNextDlgGroupItem@12',
+    'GetNextDlgTabItem@12',
+    'GetOpenClipboardWindow@0',
+    'GetParent@4',
+    'GetPriorityClipboardFormat@8',
+    'GetProcessDefaultLayout@4',
+    'GetProcessWindowStation@0',
+    'GetPropA@8',
+    'GetPropW@8',
+    'GetQueueStatus@4',
+    'GetRawInputBuffer@12',
+    'GetRawInputData@20',
+    'GetRawInputDeviceInfoA@16',
+    'GetRawInputDeviceInfoW@16',
+    'GetRawInputDeviceList@12',
+    'GetRegisteredRawInputDevices@12',
+    'GetScrollBarInfo@12',
+    'GetScrollInfo@12',
+    'GetScrollPos@8',
+    'GetScrollRange@16',
+    'GetShellWindow@0',
+    'GetSubMenu@8',
+    'GetSysColor@4',
+    'GetSysColorBrush@4',
+    'GetSystemMenu@8',
+    'GetSystemMetrics@4',
+    'GetTabbedTextExtentA@20',
+    'GetTabbedTextExtentW@20',
+    'GetThreadDesktop@4',
+    'GetTitleBarInfo@8',
+    'GetTopWindow@4',
+    'GetUpdateRect@12',
+    'GetUpdateRgn@12',
+    'GetUserObjectInformationA@20',
+    'GetUserObjectInformationW@20',
+    'GetUserObjectSecurity@20',
+    'GetWindow@8',
+    'GetWindowContextHelpId@4',
+    'GetWindowDC@4',
+    'GetWindowInfo@8',
+    'GetWindowLongA@8',
+    'GetWindowLongW@8',
+    'GetWindowModuleFileName@12',
+    'GetWindowModuleFileNameA@12',
+    'GetWindowModuleFileNameW@12',
+    'GetWindowPlacement@8',
+    'GetWindowRect@8',
+    'GetWindowRgn@8',
+    'GetWindowRgnBox@8',
+    'GetWindowTextA@12',
+    'GetWindowTextLengthA@4',
+    'GetWindowTextLengthW@4',
+    'GetWindowTextW@12',
+    'GetWindowThreadProcessId@8',
+    'GetWindowWord@8',
+    'GrayStringA@36',
+    'GrayStringW@36',
+    'HideCaret@4',
+    'HiliteMenuItem@16',
+    'IMPGetIMEA@8',
+    'IMPGetIMEW@8',
+    'IMPQueryIMEA@4',
+    'IMPQueryIMEW@4',
+    'IMPSetIMEA@8',
+    'IMPSetIMEW@8',
+    'ImpersonateDdeClientWindow@8',
+    'InSendMessage@0',
+    'InSendMessageEx@4',
+    'InflateRect@12',
+    'InsertMenuA@20',
+    'InsertMenuItemA@16',
+    'InsertMenuItemW@16',
+    'InsertMenuW@20',
+    'InternalGetWindowText@12',
+    'IntersectRect@12',
+    'InvalidateRect@12',
+    'InvalidateRgn@12',
+    'InvertRect@8',
+    'IsCharAlphaA@4',
+    'IsCharAlphaNumericA@4',
+    'IsCharAlphaNumericW@4',
+    'IsCharAlphaW@4',
+    'IsCharLowerA@4',
+    'IsCharLowerW@4',
+    'IsCharUpperA@4',
+    'IsCharUpperW@4',
+    'IsChild@8',
+    'IsClipboardFormatAvailable@4',
+    'IsDialogMessage@8',
+    'IsDialogMessageA@8',
+    'IsDialogMessageW@8',
+    'IsDlgButtonChecked@8',
+    'IsGUIThread@4',
+    'IsHungAppWindow@4',
+    'IsIconic@4',
+    'IsMenu@4',
+    'IsRectEmpty@4',
+    'IsWinEventHookInstalled@4',
+    'IsWindow@4',
+    'IsWindowEnabled@4',
+    'IsWindowUnicode@4',
+    'IsWindowVisible@4',
+    'IsZoomed@4',
+    'KillTimer@8',
+    'LoadAcceleratorsA@8',
+    'LoadAcceleratorsW@8',
+    'LoadBitmapA@8',
+    'LoadBitmapW@8',
+    'LoadCursorA@8',
+    'LoadCursorFromFileA@4',
+    'LoadCursorFromFileW@4',
+    'LoadCursorW@8',
+    'LoadIconA@8',
+    'LoadIconW@8',
+    'LoadImageA@24',
+    'LoadImageW@24',
+    'LoadKeyboardLayoutA@8',
+    'LoadKeyboardLayoutW@8',
+    'LoadMenuA@8',
+    'LoadMenuIndirectA@4',
+    'LoadMenuIndirectW@4',
+    'LoadMenuW@8',
+    'LoadStringA@16',
+    'LoadStringW@16',
+    'LockSetForegroundWindow@4',
+    'LockWindowUpdate@4',
+    'LockWorkStation@0',
+    'LookupIconIdFromDirectory@8',
+    'LookupIconIdFromDirectoryEx@20',
+    'MapDialogRect@8',
+    'MapVirtualKeyA@8',
+    'MapVirtualKeyExA@12',
+    'MapVirtualKeyExW@12',
+    'MapVirtualKeyW@8',
+    'MapWindowPoints@16',
+    'MenuItemFromPoint@16',
+    'MessageBeep@4',
+    'MessageBoxA@16',
+    'MessageBoxExA@20',
+    'MessageBoxExW@20',
+    'MessageBoxIndirectA@4',
+    'MessageBoxIndirectW@4',
+    'MessageBoxTimeoutA@24',
+    'MessageBoxTimeoutW@24',
+    'MessageBoxW@16',
+    'ModifyMenuA@20',
+    'ModifyMenuW@20',
+    'MonitorFromPoint@12',
+    'MonitorFromRect@8',
+    'MonitorFromWindow@8',
+    'MoveWindow@24',
+    'MsgWaitForMultipleObjects@20',
+    'MsgWaitForMultipleObjectsEx@20',
+    'NotifyWinEvent@16',
+    'OemKeyScan@4',
+    'OemToCharA@8',
+    'OemToCharBuffA@12',
+    'OemToCharBuffW@12',
+    'OemToCharW@8',
+    'OffsetRect@12',
+    'OpenClipboard@4',
+    'OpenDesktopA@16',
+    'OpenDesktopW@16',
+    'OpenIcon@4',
+    'OpenInputDesktop@12',
+    'OpenWindowStationA@12',
+    'OpenWindowStationW@12',
+    'PackDDElParam@12',
+    'PaintDesktop@4',
+    'PeekMessageA@20',
+    'PeekMessageW@20',
+    'PostMessageA@16',
+    'PostMessageW@16',
+    'PostQuitMessage@4',
+    'PostThreadMessageA@16',
+    'PostThreadMessageW@16',
+    'PrintWindow@12',
+    'PrivateExtractIconsA@32',
+    'PrivateExtractIconsW@32',
+    'PtInRect@12',
+    'RealChildWindowFromPoint@12',
+    'RealGetWindowClass@12',
+    'RealGetWindowClassA@12',
+    'RealGetWindowClassW@12',
+    'RedrawWindow@16',
+    'RegisterClassA@4',
+    'RegisterClassExA@4',
+    'RegisterClassExW@4',
+    'RegisterClassW@4',
+    'RegisterClipboardFormatA@4',
+    'RegisterClipboardFormatW@4',
+    'RegisterDeviceNotificationA@12',
+    'RegisterDeviceNotificationW@12',
+    'RegisterHotKey@16',
+    'RegisterRawInputDevices@12',
+    'RegisterShellHookWindow@4',
+    'RegisterWindowMessageA@4',
+    'RegisterWindowMessageW@4',
+    'ReleaseCapture@0',
+    'ReleaseDC@8',
+    'RemoveMenu@12',
+    'RemovePropA@8',
+    'RemovePropW@8',
+    'ReplyMessage@4',
+    'ReuseDDElParam@20',
+    'ScreenToClient@8',
+    'ScrollDC@28',
+    'ScrollWindow@20',
+    'ScrollWindowEx@32',
+    'SendDlgItemMessageA@20',
+    'SendDlgItemMessageW@20',
+    'SendIMEMessageExA@8',
+    'SendIMEMessageExW@8',
+    'SendInput@12',
+    'SendMessageA@16',
+    'SendMessageCallbackA@24',
+    'SendMessageCallbackW@24',
+    'SendMessageTimeoutA@28',
+    'SendMessageTimeoutW@28',
+    'SendMessageW@16',
+    'SendNotifyMessageA@16',
+    'SendNotifyMessageW@16',
+    'SetActiveWindow@4',
+    'SetCapture@4',
+    'SetCaretBlinkTime@4',
+    'SetCaretPos@8',
+    'SetClassLongA@12',
+    'SetClassLongW@12',
+    'SetClassWord@12',
+    'SetClipboardData@8',
+    'SetClipboardViewer@4',
+    'SetCursor@4',
+    'SetCursorPos@8',
+    'SetDebugErrorLevel@4',
+    'SetDeskWallpaper@4',
+    'SetDlgItemInt@16',
+    'SetDlgItemTextA@12',
+    'SetDlgItemTextW@12',
+    'SetDoubleClickTime@4',
+    'SetFocus@4',
+    'SetForegroundWindow@4',
+    'SetKeyboardState@4',
+    'SetLastErrorEx@8',
+    'SetLayeredWindowAttributes@16',
+    'SetMenu@8',
+    'SetMenuContextHelpId@8',
+    'SetMenuDefaultItem@12',
+    'SetMenuInfo@8',
+    'SetMenuItemBitmaps@20',
+    'SetMenuItemInfoA@16',
+    'SetMenuItemInfoW@16',
+    'SetMessageExtraInfo@4',
+    'SetMessageQueue@4',
+    'SetParent@8',
+    'SetProcessDefaultLayout@4',
+    'SetProcessWindowStation@4',
+    'SetPropA@12',
+    'SetPropW@12',
+    'SetRect@20',
+    'SetRectEmpty@4',
+    'SetScrollInfo@16',
+    'SetScrollPos@16',
+    'SetScrollRange@20',
+    'SetShellWindow@4',
+    'SetSysColors@12',
+    'SetSystemCursor@8',
+    'SetThreadDesktop@4',
+    'SetTimer@16',
+    'SetUserObjectInformationA@16',
+    'SetUserObjectInformationW@16',
+    'SetUserObjectSecurity@12',
+    'SetWinEventHook@28',
+    'SetWindowContextHelpId@8',
+    'SetWindowLongA@12',
+    'SetWindowLongW@12',
+    'SetWindowPlacement@8',
+    'SetWindowPos@28',
+    'SetWindowRgn@12',
+    'SetWindowTextA@8',
+    'SetWindowTextW@8',
+    'SetWindowWord@12',
+    'SetWindowsHookA@8',
+    'SetWindowsHookExA@16',
+    'SetWindowsHookExW@16',
+    'SetWindowsHookW@8',
+    'ShowCaret@4',
+    'ShowCursor@4',
+    'ShowOwnedPopups@8',
+    'ShowScrollBar@12',
+    'ShowWindow@8',
+    'ShowWindowAsync@8',
+    'SubtractRect@12',
+    'SwapMouseButton@4',
+    'SwitchDesktop@4',
+    'SwitchToThisWindow@8',
+    'SystemParametersInfoA@16',
+    'SystemParametersInfoW@16',
+    'TabbedTextOutA@32',
+    'TabbedTextOutW@32',
+    'TileChildWindows@8',
+    'TileWindows@20',
+    'ToAscii@20',
+    'ToAsciiEx@24',
+    'ToUnicode@24',
+    'ToUnicodeEx@28',
+    'TrackMouseEvent@4',
+    'TrackPopupMenu@28',
+    'TrackPopupMenuEx@24',
+    'TranslateAccelerator@12',
+    'TranslateAcceleratorA@12',
+    'TranslateAcceleratorW@12',
+    'TranslateMDISysAccel@8',
+    'TranslateMessage@4',
+    'UnhookWinEvent@4',
+    'UnhookWindowsHook@8',
+    'UnhookWindowsHookEx@4',
+    'UnionRect@12',
+    'UnloadKeyboardLayout@4',
+    'UnpackDDElParam@16',
+    'UnregisterClassA@8',
+    'UnregisterClassW@8',
+    'UnregisterDeviceNotification@4',
+    'UnregisterHotKey@8',
+    'UpdateLayeredWindow@36',
+    'UpdateWindow@4',
+    'UserHandleGrantAccess@12',
+    'ValidateRect@8',
+    'ValidateRgn@8',
+    'VkKeyScanA@4',
+    'VkKeyScanExA@8',
+    'VkKeyScanExW@8',
+    'VkKeyScanW@4',
+    'WINNLSEnableIME@8',
+    'WINNLSGetEnableStatus@4',
+    'WINNLSGetIMEHotkey@4',
+    'WaitForInputIdle@8',
+    'WaitMessage@0',
+    'WinHelpA@16',
+    'WinHelpW@16',
+    'WindowFromDC@4',
+    'WindowFromPoint@8',
+    'keybd_event@16',
+    'mouse_event@20',
+    'wsprintfA',
+    'wsprintfW',
+    'wvsprintfA@12',
+    'wvsprintfW@12',
+  ]
+}
diff --git a/build/win/importlibs/x86/user32.winxp.lib b/build/win/importlibs/x86/user32.winxp.lib
new file mode 100644
index 0000000..deb5577
--- /dev/null
+++ b/build/win/importlibs/x86/user32.winxp.lib
Binary files differ
diff --git a/build/win/install-build-deps.py b/build/win/install-build-deps.py
new file mode 100755
index 0000000..d9e50b6
--- /dev/null
+++ b/build/win/install-build-deps.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+import sys
+
+def patch_msbuild():
+  """VS2010 MSBuild has a ULDI bug that we patch here. See http://goo.gl/Pn8tj.
+  """
+  source_path = os.path.join(os.environ['ProgramFiles(x86)'],
+                             "MSBuild",
+                             "Microsoft.Cpp",
+                             "v4.0",
+                             "Microsoft.CppBuild.targets")
+  backup_path = source_path + ".backup"
+  if not os.path.exists(backup_path):
+    try:
+      print "Backing up %s..." % source_path
+      shutil.copyfile(source_path, backup_path)
+    except IOError:
+      print "Could not back up %s to %s. Run as Administrator?" % (
+          source_path, backup_path)
+      return 1
+
+  source = open(source_path).read()
+  base = ('''<Target Name="GetResolvedLinkObjs" Returns="@(ObjFullPath)" '''
+          '''DependsOnTargets="$(CommonBuildOnlyTargets);ComputeCLOutputs;'''
+          '''ResolvedLinkObjs"''')
+  find = base + '>'
+  replace = base + ''' Condition="'$(ConfigurationType)'=='StaticLibrary'">'''
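+  # Illustration (reconstructed from the strings above, not upstream text):
+  # the replacement turns
+  #   <Target Name="GetResolvedLinkObjs" ... DependsOnTargets="...">
+  # into
+  #   <Target Name="GetResolvedLinkObjs" ... DependsOnTargets="..."
+  #           Condition="'$(ConfigurationType)'=='StaticLibrary'">
+  # so the target only runs for static-library projects.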
+  result = source.replace(find, replace)
+
+  if result != source:
+    open(source_path, "w").write(result)
+    print "Patched %s." % source_path
+  return 0
+
+
+def main():
+  return patch_msbuild()
+
+
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/build/win/reorder-imports.py b/build/win/reorder-imports.py
new file mode 100755
index 0000000..281668f
--- /dev/null
+++ b/build/win/reorder-imports.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+
+def reorder_imports(input_dir, output_dir, architecture):
+  """Run swapimports.exe on the initial chrome.exe, and write to the output
+  directory. Also copy over any related files that might be needed
+  (pdbs, manifests etc.).
+  """
+
+  input_image = os.path.join(input_dir, 'chrome.exe')
+  output_image = os.path.join(output_dir, 'chrome.exe')
+
+  swap_exe = os.path.join(
+    os.path.dirname(os.path.abspath(__file__)),
+    '..\\..\\third_party\\syzygy\\binaries\\exe\\swapimport.exe')
+
+  args = [swap_exe, '--input-image=%s' % input_image,
+      '--output-image=%s' % output_image, '--overwrite', '--no-logo']
+
+  if architecture == 'x64':
+    args.append('--x64')
+
+  args.append('chrome_elf.dll')
+
+  # Propagate swapimport.exe failures instead of silently continuing.
+  ret = subprocess.call(args)
+  if ret != 0:
+    return ret
+
+  for fname in glob.iglob(os.path.join(input_dir, 'chrome.exe.*')):
+    shutil.copy(fname, os.path.join(output_dir, os.path.basename(fname)))
+  return 0
+
+
+def main(argv):
+  usage = 'reorder-imports.py -i <input_dir> -o <output_dir> -a <target_arch>'
+  parser = optparse.OptionParser(usage=usage)
+  parser.add_option('-i', '--input', help='reorder chrome.exe in DIR',
+      metavar='DIR')
+  parser.add_option('-o', '--output', help='write new chrome.exe to DIR',
+      metavar='DIR')
+  parser.add_option('-a', '--arch', help='architecture of build (optional)',
+      default='ia32')
+  opts, args = parser.parse_args(argv)
+
+  if not opts.input or not opts.output:
+    parser.error('Please provide an input and an output directory')
+  return reorder_imports(opts.input, opts.output, opts.arch)
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/win_precompile.gypi b/build/win_precompile.gypi
new file mode 100644
index 0000000..fb86076
--- /dev/null
+++ b/build/win_precompile.gypi
@@ -0,0 +1,20 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Include this file to make targets in your .gyp use the default
+# precompiled header on Windows. It applies to debug builds only, because
+# the official builders run out of memory when precompiled headers are
+# used for release builds.
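+#
+# A minimal sketch of pulling this in from a .gyp file (the path and target
+# name here are hypothetical, not part of this change):
+#   {
+#     'includes': ['../build/win_precompile.gypi'],
+#     'targets': [
+#       { 'target_name': 'my_target', 'type': 'executable', ... },
+#     ],
+#   }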
+
+{
+  'conditions': [
+    ['OS=="win" and chromium_win_pch==1', {
+        'target_defaults': {
+          'msvs_precompiled_header': '<(DEPTH)/build/precompile.h',
+          'msvs_precompiled_source': '<(DEPTH)/build/precompile.cc',
+          'sources': ['<(DEPTH)/build/precompile.cc'],
+        }
+      }],
+  ],
+}